first commit
@@ -0,0 +1,404 @@
|
||||
% Choose pra, prb, prc, prd, pre, prl, prstab, or rmp for journal
|
||||
% Add 'draft' option to mark overfull boxes with black boxes
|
||||
% Add 'showpacs' option to make PACS codes appear
|
||||
% for review and submission
|
||||
%\documentclass[aps,preprint,showpacs,superscriptaddress,groupedaddress]{revtex4} % for double-spaced preprint
|
||||
% needed for figures
|
||||
% needed for some tables
|
||||
% for math
|
||||
% for math
|
||||
% for crossing out text
|
||||
% for coloring text
|
||||
%\input{tcilatex}
|
||||
|
||||
|
||||
\documentclass[aps,prl,twocolumn,showpacs,superscriptaddress,groupedaddress]{revtex4}
|
||||
|
||||
\usepackage{graphicx}
|
||||
\usepackage{dcolumn}
|
||||
\usepackage{bm}
|
||||
\usepackage{amssymb}
|
||||
\usepackage{soul}
|
||||
\usepackage{color}
|
||||
|
||||
%TCIDATA{OutputFilter=LATEX.DLL}
|
||||
%TCIDATA{Version=5.50.0.2960}
|
||||
%TCIDATA{<META NAME="SaveForMode" CONTENT="1">}
|
||||
%TCIDATA{BibliographyScheme=BibTeX}
|
||||
%TCIDATA{LastRevised=Tuesday, May 20, 2014 03:06:00}
|
||||
%TCIDATA{<META NAME="GraphicsSave" CONTENT="32">}
|
||||
|
||||
\hyphenation{ALPGEN}
|
||||
\hyphenation{EVTGEN}
|
||||
\hyphenation{PYTHIA}
|
||||
\def\be{\begin{equation}}
|
||||
\def\ee{\end{equation}}
|
||||
\def\bea{\begin{eqnarray}}
|
||||
\def\eea{\end{eqnarray}}
|
||||
%\input{tcilatex}
|
||||
|
||||
\begin{document}
|
||||
|
||||
\title{Transport measurements of the spin wave gap of Mn}
|
||||
\input author_list.tex
|
||||
\date{\today}
|
||||
|
||||
\begin{abstract}
|
||||
Temperature dependent transport measurements on ultrathin antiferromagnetic
|
||||
Mn films reveal a heretofore unknown non-universal weak localization
|
||||
correction to the conductivity which extends to disorder strengths greater than
|
||||
100~k$\Omega$ per square. The inelastic scattering of electrons off of
|
||||
gapped antiferromagnetic spin waves gives rise to an inelastic scattering
|
||||
length which is short enough to place the system in the 3D regime. The
|
||||
extracted fitting parameters provide estimates of the energy gap ($\Delta
|
||||
\approx$~16~K) and exchange energy ($\bar{J} \approx$~320~K). %\st{which are in
|
||||
%agreement with values obtained with other techniques}.
|
||||
\end{abstract}
|
||||
|
||||
\pacs{75}
|
||||
|
||||
\maketitle
|
||||
|
||||
Hello world
|
||||
|
||||
|
||||
|
||||
Thin-film transition metal ferromagnets (Fe, Co, Ni, Gd) and
|
||||
antiferromagnets (Mn, Cr) and their alloys are not only ubiquitous in
|
||||
present day technologies but are also expected to play an important role in
|
||||
future developments~\cite{thompson_2008}. Understanding magnetism in these
|
||||
materials, especially when the films are thin enough so that disorder plays
|
||||
an important role, is complicated by the long standing controversy about the
|
||||
relative importance of itinerant and local moments~\cite%
|
||||
{slater_1936,van_vleck_1953,aharoni_2000}. For the itinerant transition
|
||||
metal magnets, a related fundamental issue centers on the question of how
|
||||
itinerancy is compromised by disorder. Clearly with sufficient disorder the
|
||||
charge carriers become localized, but questions arise as to what happens to
|
||||
the spins and associated spin waves and whether the outcome depends on the
|
||||
ferro/antiferro alignment of spins in the itinerant parent. Ferromagnets
|
||||
which have magnetization as the order parameter are fundamentally different
|
||||
from antiferromagnets, which have staggered magnetization (i.e., the difference
|
||||
between the magnetization on each sublattice) as the order parameter~\cite%
|
||||
{blundell_2001}. Ferromagnetism thus distinguishes itself by having soft
|
||||
modes at zero wave number whereas antiferromagnets have soft modes at finite
|
||||
wave number~\cite{belitz_2005}. Accordingly, the respective spin wave
|
||||
spectra are radically different. These distinctions are particularly
|
||||
important when comparing quantum corrections to the conductivity near
|
||||
quantum critical points for ferromagnets~\cite{paul_2005} and
|
||||
antiferromagnets~\cite{syzranov_2012}.
|
||||
|
||||
Surprisingly, although there have been systematic studies of the effect of
|
||||
disorder on the longitudinal $\sigma_{xx}$ and transverse $\sigma_{xy}$
|
||||
conductivity of ferromagnetic films~\cite%
|
||||
{bergmann_1978,bergmann_1991,mitra_2007,misra_2009,kurzweil_2009}, there
|
||||
have been few if any such studies on antiferromagnetic films. In this paper
|
||||
we remedy this situation by presenting transport data on systematically
|
||||
disordered Mn films that are sputter deposited in a custom designed vacuum
|
||||
chamber and then transferred without exposure to air into an adjacent
|
||||
cryostat for transport studies to low temperature. The experimental
|
||||
procedures are similar to those reported previously: disorder, characterized
|
||||
by the sheet resistance $R_0$ measured at $T=$~5~K, can be changed either by
|
||||
growing separate samples or by gentle annealing of a given sample through
|
||||
incremental stages of disorder~\cite{misra_2011}. Using these same procedures, our results for
|
||||
antiferromagnets, however, are decidedly different. The data are well
|
||||
described over a large range of disorder strengths by a non-universal three
|
||||
dimensional (3d) quantum correction that applies only to spin wave gapped
|
||||
antiferromagnets. This finding implies the presence of strong inelastic
|
||||
electron scattering off of antiferromagnetic spin waves. The theory is
|
||||
validated not only by good fits to the data but also by extraction from the
|
||||
fitting parameters of a value for the spin wave gap $\Delta$ that is in
|
||||
agreement with the value expected for Mn. On the other hand, the
|
||||
exchange energy $\bar{J}$ could be sensitive to the high disorder in our
|
||||
ultrathin films, and it turns out to be much smaller than the known values.
|
||||
|
||||
In previous work the inelastic scattering of electrons off of spin waves has
|
||||
been an essential ingredient in understanding disordered ferromagnets. For
|
||||
example, to explain the occurrence of weak-localization corrections to the
|
||||
anomalous Hall effect in polycrystalline Fe films~\cite{mitra_2007}, it was
|
||||
necessary to invoke a contribution to the inelastic phase breaking rate $%
|
||||
\tau_{\varphi}^{-1}$ due to spin-conserving inelastic scattering off
|
||||
spin-wave excitations. This phase breaking rate, anticipated by theory~\cite%
|
||||
{tatara_2004} and seen experimentally in spin polarized electron energy loss
|
||||
spectroscopy (SPEELS) measurements of ultrathin Fe films~\cite%
|
||||
{plihal_1999,zhang_2010}, is linear in temperature and significantly larger
|
||||
than the phase breaking rate due to electron-electron interactions, thus
|
||||
allowing a wide temperature range to observe weak localization corrections~%
|
||||
\cite{mitra_2007}. The effect of a high $\tau_{\varphi}^{-1}$ due to
|
||||
inelastic scattering off spin-wave excitations is also seen in Gd films
|
||||
where in addition to a localizing log($T$) quantum correction to the
|
||||
conductance, a localizing linear-in-$T$ quantum correction is present and is
|
||||
interpreted as a spin-wave mediated Altshuler-Aronov type correction to the
|
||||
conductivity~\cite{misra_2009}.
|
||||
|
||||
Interestingly, this high rate of inelastic spin-wave scattering becomes even
|
||||
more important for the thinnest films as shown in theoretical calculations
|
||||
on Fe and Ni which point to extremely short spin-dependent inelastic mean
|
||||
free paths~\cite{hong_2000} and in spin-polarized electron energy-loss
|
||||
spectroscopy (SPEELS) measurements on few monolayer-thick Fe/W(110) films in
|
||||
which a strong nonmonotonic enhancement of localized spin wave energies is
|
||||
found on the thinnest films~\cite{zhang_2010}.
|
||||
|
||||
Inelastic spin wave scattering in highly disordered ferromagnetic films can
|
||||
be strong enough to assure that the associated $T$-dependent dephasing
|
||||
length $L_{\varphi }(T)=\sqrt{D\tau _{\varphi }}$ (with $D$ the diffusion
|
||||
constant)~\cite{lee_1985} is less than the film thickness $t$, thus putting
|
||||
thin films into the 3d limit where a metal-insulator
|
||||
transition is observed~\cite{misra_2011}. Recognizing that similarly high
|
||||
inelastic scattering rates must apply to highly disordered antiferromagnetic
|
||||
films, we first proceed with a theoretical approach that takes into account
|
||||
the scattering of antiferromagnetic spin waves on the phase relaxation rate
|
||||
and find a heretofore unrecognized non-universal 3d weak localization
|
||||
correction to the conductivity that allows an interpretation of our experimental
|
||||
results.
|
||||
|
||||
We mention in passing that the 3d interaction-induced quantum correction
|
||||
found to be dominant in the case of ferromagnetic Gd
|
||||
films which undergo a metal-insulator transition~\cite{misra_2011} is
|
||||
found to be much smaller in the present case and will not be considered further (for an estimate of this contribution see Ref.~\cite{muttalib_unpub}).
|
||||
|
||||
As discussed in detail in Ref.~[\onlinecite{wm10}], the phase relaxation
|
||||
time $\tau _{\varphi }$ limits the phase coherence in a particle-particle
|
||||
diffusion propagator $C(q,\omega )$ (Cooperon) in the form
|
||||
\begin{equation}
|
||||
C(q,\omega _{l})=\frac{1}{2\pi N_{0}\tau ^{2}}\frac{1}{Dq^{2}+|\omega
|
||||
_{l}|+1/\tau _{\varphi }}.
|
||||
\end{equation}
|
||||
where $N_{0}$ is the density of states at the Fermi level, $\tau $ is the
|
||||
elastic scattering time and $\omega _{l}=2\pi lT$ is the Matsubara
|
||||
frequency. Labeling the Cooperon propagator in the absence of interactions
|
||||
as $C_{0}$, we can write
|
||||
\begin{equation}
|
||||
\frac{1}{\tau _{\varphi }}=\frac{1}{2\pi N_{0}\tau ^{2}}[C^{-1}-C_{0}^{-1}].
|
||||
\end{equation}
|
||||
|
||||
In general, $C(q,\omega )$ can be evaluated diagrammatically in the presence
|
||||
of interactions and disorder in a ladder approximation \cite{fa} that can be
|
||||
symbolically written as $C=C_{0}+C_{0}KC$ where the interaction vertex $K$
|
||||
contains self energy as well as vertex corrections due to both interactions
|
||||
and disorder. It then follows that $1/\tau _{\varphi }$ is given by
|
||||
\begin{equation}
|
||||
\frac{1}{\tau _{\varphi }}=-\frac{1}{2\pi N_{0}\tau ^{2}}K.
|
||||
\end{equation}%
|
||||
In Ref.~[\onlinecite{wm10}], the leading temperature and disorder dependence
|
||||
of the inelastic diffusion propagator was evaluated diagrammatically, in the
|
||||
presence of ferromagnetic spin-wave mediated electron-electron interactions.
|
||||
Here we consider the antiferromagnetic case. We consider only a large
|
||||
spin-wave gap where the damping can be ignored. Using the antiferromagnetic
|
||||
dispersion relation $\omega _{q}=\Delta +Aq$, where $A$ is the spin
|
||||
stiffness, the inelastic lifetime is given by
|
||||
\be
|
||||
\frac{\hbar }{\tau _{\varphi }}=\frac{4}{\pi \hbar }nJ^{2}\int_{0}^{1/l}%
|
||||
\frac{q^{d-1}dq}{\sinh \beta \omega _{q}}\frac{Dq^{2}+1/\tau _{\varphi }}{%
|
||||
(Dq^{2}+1/\tau _{\varphi })^{2}+\omega _{q}^{2}}
|
||||
\ee%
|
||||
where $n=k_{F}^{3}/3\pi ^{2}$ is the 3d density, $J$ is the effective
|
||||
spin-exchange interaction and $\beta =1/k_{B}T$. Here we will consider the
|
||||
limit $\hbar /\tau _{\varphi }\ll \Delta $, relevant for our experiment on
|
||||
Mn. In this limit we can neglect the $1/\tau _{\varphi }$ terms inside the
|
||||
integral. The upper limit should be restricted to $\Delta /A$ in the limit $%
|
||||
\Delta /A<1/l$. For large disorder, we expect the parameter $x\equiv
|
||||
\hbar Dk_{F}^{2}\Delta / \bar{J}^{2}\ll 1$, where the spin-exchange energy
|
||||
is given by $\bar{J}=Ak_{F}$. In this limit, $L_{\varphi }$ can be
|
||||
simplified as
|
||||
\be
|
||||
k_{F}L_{\varphi }\approx \left( \frac{\bar{J}}{\Delta }\right) ^{3/2}\left(
|
||||
\frac{5\sinh \frac{\Delta }{T}}{12\pi }\right) ^{1/2},\;\;\;x\ll 1
|
||||
\label{L-phi-3d}
|
||||
\ee%
|
||||
which is independent of $x$, and therefore, independent of disorder.
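As a rough estimate, taking the values $\Delta \approx$~16~K and $\bar{J}
\approx$~320~K obtained from the fits below together with the free-electron
value $k_{F}\approx 1.7\times 10^{10}$~m$^{-1}$ for bulk Mn, Eq.~(\ref{L-phi-3d})
evaluated at $T=\Delta$ gives
\be
k_{F}L_{\varphi }\approx \left( \frac{320}{16}\right) ^{3/2}\left( \frac{%
5\sinh 1}{12\pi }\right) ^{1/2}\approx 35,
\ee
i.e., $L_{\varphi }\approx$~2~nm, comparable to the film thickness used here.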
|
||||
|
||||
Given the inelastic lifetime, the weak localization correction in 3d is
|
||||
usually given by \cite{lee_1985} $\delta \sigma _{3d}=\frac{e^{2}}{\hbar \pi
|
||||
^{3}}\frac{1}{L_{\varphi }},$ where the prefactor to the inverse inelastic
|
||||
length is a universal number, independent of disorder. However, at large
|
||||
enough disorder, we show that there exists a disorder dependent correction,
|
||||
due to the scale dependent diffusion coefficient near the Anderson
|
||||
metal-insulator transition. In fact, the diffusion coefficient obeys the
|
||||
self-consistent equation \cite{WV}
|
||||
\begin{equation}
|
||||
\frac{D_{0}}{D(\omega )}=1+\frac{k_{F}^{2-d}}{\pi m}\int_{0}^{1/l}dQ\frac{%
|
||||
Q^{d-1}}{-i\omega +D(\omega )Q^{2}}
|
||||
\end{equation}%
|
||||
where $D_{0}=v_{F}l/d$ is the diffusion coefficient at weak disorder. While
|
||||
the significance of the prefactor to the integral is not clear, the above
|
||||
equation remains qualitatively accurate over a wide range near the Anderson
|
||||
transition. Setting $\omega =i/\tau _{\varphi }$ and doing the $Q$-integral
|
||||
in 3d,
|
||||
\bea
|
||||
\frac{D_{0}}{D} &\approx & 1+\frac{1}{\pi mk_{F}}\int_{1/L_{\phi }}^{1/l}dQ\frac{%
|
||||
Q^{2}}{DQ^{2}}\cr
|
||||
&=& 1+\frac{D_{0}}{D}\frac{3}{\pi k_{F}^{2}l^{2}}-\delta
|
||||
\left( \frac{D_{0}}{D}\right) ,
|
||||
\label{delta}
|
||||
\eea%
|
||||
where
|
||||
\bea
|
||||
\delta \equiv \frac{D_{0}}{D}\frac{3}{\pi k_{F}^{2}l^{2}}\frac{l}{%
|
||||
L_{\varphi }}
|
||||
\eea
|
||||
is assumed to be a small correction, and Eq.~(\ref{delta})
|
||||
should not be solved self-consistently. This follows from the fact that the
|
||||
diffusion coefficient of electrons at fixed energy entering the Cooperon
|
||||
expression is that of non-interacting electrons, and is given by the limit $%
|
||||
T\rightarrow 0$, $L_{\varphi }\rightarrow \infty $ and therefore $\delta
|
||||
\rightarrow 0$. Then the correction at finite $T$ is given by
|
||||
\bea
|
||||
\frac{D}{D_{0}} &=& \frac{1}{\left( \frac{D_{0}}{D}\right) _{0}-\delta \left(
|
||||
\frac{D_{0}}{D}\right) }\cr
|
||||
&\approx & \left( \frac{D}{D_{0}}\right) _{0}+\left( \frac{D}{D_{0}}\right) _{0}
|
||||
\frac{3}{\pi k_{F}^{2}l^{2}}\frac{l}{L_{\varphi }}%
|
||||
\eea%
|
||||
where
|
||||
\be
|
||||
\lim_{T\rightarrow 0}\frac{D}{D_{0}}\equiv \left( \frac{D}{D_{0}}\right)
|
||||
_{0}.
|
||||
\ee%
|
||||
Using the relation $\sigma _{3d}=(e^{2}/\hbar )nD$ where the longitudinal
|
||||
sheet conductance $\sigma _{\square }=\sigma _{3d}t$, with $t$ being the
|
||||
film thickness, we finally get the temperature dependent weak localization
|
||||
correction term
|
||||
\bea
|
||||
\frac{\delta \sigma _{\square }}{L_{00}} &=& \left( \frac{D}{D_{0}}\right) _{0}%
|
||||
\frac{2}{\pi }\frac{t}{L_{\varphi }}\cr
|
||||
\left( \frac{D}{D_{0}}\right)_{0} &\approx &\frac{2}{1+\sqrt{1+\frac{4R_{0}^{2}}{a^{2}}}}
|
||||
\label{WL}
|
||||
\eea%
|
||||
where $R_{0}=L_{00}/\sigma _{\square }(T$=$0)$, $L_{00}=e^{2}/\pi h$, $%
|
||||
a=3\pi /(2k_{F}tb_{0})$, $b_{0}$ is a number of order unity, and we
|
||||
have solved the self-consistent equation for $D$ in order to express $D_{0}$
|
||||
in terms of $D$ and finally $R_{0}$. Thus in this case, the weak
|
||||
localization correction has a prefactor which is not universal. While this
|
||||
reduces to the well-known universal result at weak disorder $R_{0}\ll a$, it
|
||||
becomes dependent on disorder characterized by the sheet resistance $R_{0}$
|
||||
at strong disorder and at the same time substantially extends the 3d regime
|
||||
near the transition.
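To illustrate the size of this non-universal prefactor, taking the value
$a\approx$~28~k$\Omega$ obtained from the fits below, Eq.~(\ref{WL}) gives
\be
\left( \frac{D}{D_{0}}\right) _{0}\approx 0.77\;\;(R_{0}=17573~\Omega ),\;\;\;\;
\left( \frac{D}{D_{0}}\right) _{0}\approx 0.35\;\;(R_{0}=63903~\Omega ),
\ee
i.e., the prefactor is suppressed well below its weak-disorder value of unity.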
|
||||
|
||||
Inserting the expression for $L_{\varphi }$ (Eq.~(\ref{L-phi-3d})) into Eq.~(\ref%
|
||||
{WL}), we finally obtain the total conductivity, including the quantum
|
||||
correction to the conductivity due to weak localization in 3d arising from
|
||||
scattering of electrons off antiferromagnetic spin waves in Mn,
|
||||
\begin{equation}
|
||||
\frac{\sigma _{\square }}{L_{00}}=A+\frac{B}{\sqrt{\sinh [\Delta /T]}},
|
||||
\label{sigmaWL}
|
||||
\end{equation}%
|
||||
where the parameter $A$ is temperature independent and the parameter
|
||||
\bea
|
||||
B &\equiv & \left( \frac{D}{D_{0}}\right) _{0}\frac{2}{\pi ^{2}}\left( \frac{%
|
||||
12\pi }{5}\right) ^{1/2}\left( \frac{\Delta }{\bar{J}}\right) ^{3/2}tk_{F}\cr%
|
||||
&=&\frac{2c}{1+\sqrt{1+\frac{4R_{0}^{2}}{a^{2}}}},
|
||||
\label{BFit}
|
||||
\eea%
|
||||
where
|
||||
\be
|
||||
c\equiv \left( \frac{\Delta }{\bar{J}}\right) ^{3/2}\left( \frac{%
|
||||
48t^{2}k_{F}^{2}}{5\pi}\right) ^{1/2}.
|
||||
\label{cFit}
|
||||
\ee
|
||||
|
||||
The data presented here are for a single film prepared with an initial $R_0
|
||||
\approx$~6~k$\Omega$. Disorder was subsequently increased in incremental
|
||||
stages up to 180~k$\Omega$ by annealing at approximately 280~K~\cite%
|
||||
{misra_2011}. Additional samples were grown at intermediate disorder and
|
||||
measured to check reproducibility.
|
||||
|
||||
Figure~\ref{fig:cond} shows the conductivity data for two samples with
|
||||
disorder $R_{0}=$~17573~$\Omega $ and 63903~$\Omega $ with corresponding
|
||||
fits to the expression (\ref{sigmaWL}), where $A$ and $B$ are taken as
|
||||
fitting parameters and $\Delta =$~16~K is the spin wave gap. The fits are
|
||||
sensitive to the parameters $A$ and $B$ but relatively insensitive to $%
|
||||
\Delta $. We find that $\Delta =$~16~$\pm $~4~K provides good fits over
|
||||
the whole range of disorder (from 6 to 180~k$\Omega $).
|
||||
|
||||
\begin{figure}[tbp]
|
||||
\begin{center}
|
||||
\includegraphics[width=9cm]{fig_1_16.eps}
|
||||
\end{center}
|
||||
\caption{The temperature-dependent normalized conductivity (open squares)
|
||||
for two samples with the indicated disorder strengths of $R_0 =$~17573~$%
|
||||
\Omega$ and 63903~$\Omega$ show good agreement with theory (solid lines).
|
||||
The fitting parameters $A$ and $B$ are indicated for each curve with the
|
||||
error in the least significant digit indicated in parentheses.}
|
||||
\label{fig:cond}
|
||||
\end{figure}
|
||||
|
||||
Figure~\ref{fig:parb} shows the dependence of the parameter $B$ on the
|
||||
disorder strength $R_0$ (open squares) and a theoretical fit (solid line)
|
||||
using Eq.~(\ref{BFit}), where $c$ and $a$ are fitting parameters. The solid
|
||||
line for this two-parameter fit is drawn for the best-fit values $c=0.67 \pm
|
||||
0.04$ and $a= 28 \pm 3$~k$\Omega$. We note that the fit is of reasonable
|
||||
quality over most of the disorder range except for the film with the least
|
||||
disorder ($R_0 = 6$~k$\Omega$) where $B = 0.77$,
|
||||
somewhat above the saturated value
|
||||
$B = c = 0.67$ evaluated from Eq.~(\ref{BFit}) at $R_0 = 0$. Using higher
|
||||
values of $c$ (e.g., $c=0.8$) and lower values of $a$ (e.g., $a = 22$~k$\Omega$)
|
||||
improves the fit at low disorder strengths but
|
||||
increases the discrepancy at higher disorder strengths.
|
||||
|
||||
%L_phi/t = 2/pi*2/(1+sqrt(1+16))/0.5, 2/pi*2/(1+sqrt(1+1))/0.25
|
||||
|
||||
%http://hyperphysics.phy-astr.gsu.edu/hbase/tables/fermi.html , k_F = sqrt(2*m_e*(10.9 eV))/(hbar) = 1.7E10 1/m
|
||||
|
||||
% (bar(J) / \Delta) ^ 3/2 = (48*(2e-9)^2*(2.7e9)^2/5/pi/(0.65)^2) ^0.5 = 8360 = 20 ^ 3
|
||||
%A = \bar{J} / k_F , \bar{J} = nJ
|
||||
|
||||
Substituting the Fermi energy for bulk Mn~\cite{ashcroft_1976},
|
||||
a thickness $t=2$~nm known to 20\% accuracy, together with the best-fit
|
||||
value for $c$ into Eq.~(\ref{cFit}), we calculate the value $\bar{J} =$~320~$%
|
||||
\pm$~93~K. Gao et al.~\cite{gao_2008} performed inelastic scanning tunneling
|
||||
spectroscopy (ISTS) on thin Mn films and reported $\Delta$ in the range from
|
||||
30 to 60~K and $\bar{J}=vk_F=$~3150~$\pm$~200~K. The agreement of energy gaps is
|
||||
good; however, our significantly lower value of $\bar{J}$ is probably due to the
|
||||
high disorder in our ultrathin films.
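For concreteness, inverting Eq.~(\ref{cFit}) gives
\be
\bar{J}=\Delta \left( \frac{48t^{2}k_{F}^{2}}{5\pi c^{2}}\right) ^{1/3},
\ee
which, with $\Delta =$~16~K, $t=$~2~nm, the free-electron value $k_{F}\approx
1.7\times 10^{10}$~m$^{-1}$ for bulk Mn and the best-fit $c=0.67$, indeed
yields $\bar{J}\approx$~320~K.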
|
||||
|
||||
Since the temperature-dependent correction $B/\sqrt{\sinh (\Delta /T)}$ of
|
||||
Eq.~\ref{sigmaWL} is small compared to the parameter $A$, we can write
|
||||
$\sigma_{\square} \approx 1/R_0$ so that Eq.~\ref{sigmaWL} reduces to the
|
||||
expression $A \approx 1/L_{00}R_0$. The logarithmic plot derived by taking the
|
||||
logarithm of both sides of this approximation is shown in the inset of
|
||||
Fig.~\ref{fig:parb}. The slope of -1 confirms the linear dependence of $A$ on
|
||||
$1/R_0$ and the intercept of 5.01 (10$^{5.01}\approx $~102~k$\Omega$) is
|
||||
within 20\% of the expected theoretical value $L_{00}=$~81~k$\Omega $,
|
||||
for the normalization constant. Accordingly, the conductivity corrections in
|
||||
Eq.~\ref{sigmaWL} are small compared to the zero temperature conductivity and
|
||||
the normalization constant $L_{00}$ for the conductivity is close to the
|
||||
expected theoretical value.
|
||||
|
||||
Using Eq.~(\ref{WL}) and the obtained value for $a\approx $~28~k$\Omega $ we can
|
||||
compare the dephasing length ($L_{\varphi }$) with the thickness ($t\approx $%
|
||||
~2~nm) at 16~K. For the sample with $R_{0}=$~63903~$\Omega $ the ratio $%
|
||||
L_{\varphi }/t\approx $~0.5 and for the sample with $R_{0}=$~17573~$\Omega $
|
||||
$L_{\varphi }/t\approx $~2. The latter estimate assumes no spin
|
||||
polarization, while a full polarization would imply $L_{\varphi }/t\approx $%
|
||||
~1. Thus $L_{\varphi }$ is smaller than or close to the thickness of the
|
||||
film, which keeps the film in the three-dimensional regime for almost all
|
||||
temperatures and disorder strengths considered.
|
||||
|
||||
\begin{figure}[tbp]
|
||||
\begin{center}
|
||||
\includegraphics[width=9cm]{fig_2_16.eps}
|
||||
\end{center}
|
||||
\caption{Dependence of the fitting parameters $B$ and $A$ (inset) on
|
||||
disorder $R_0$ for $\Delta=$~16~K. The fitting parameters are indicated for
|
||||
each curve with the error in the least significant digit indicated in
|
||||
parentheses.}
|
||||
\label{fig:parb}
|
||||
\end{figure}
|
||||
|
||||
In conclusion, we have performed \textit{in situ} transport measurements on
|
||||
ultrathin Mn films, systematically varying the disorder ($R_{0}=R_{xx}$ at $T=$%
|
||||
~5~K). The obtained data were analyzed within a weak localization theory in
|
||||
3d generalized to strong disorder. In the temperature range considered
|
||||
inelastic scattering off spin waves is found to be strong giving rise to a
|
||||
dephasing length shorter than the film thickness, which places these systems
|
||||
into the 3d regime. The obtained value for the spin wave gap was close to
|
||||
the one measured by Gao et al.~\cite{gao_2008} using ISTS, while the
|
||||
exchange energy was much smaller.
|
||||
|
||||
This work has been supported by the NSF under Grant No 1305783 (AFH).
|
||||
PW thanks A.~M.~Finkel'stein for useful discussions and acknowledges
|
||||
partial support through the DFG research unit ``Quantum phase transitions''.
|
||||
|
||||
\bibliographystyle{apsrev}
|
||||
\bibliography{bibl}
|
||||
|
||||
\end{document}
|
@@ -0,0 +1,3 @@
|
||||
Hello world
|
||||
|
||||
One two three
|
@@ -0,0 +1,5 @@
|
||||
Hello world
|
||||
|
||||
One two three
|
||||
|
||||
Four five six
|
@@ -0,0 +1,7 @@
|
||||
Hello world
|
||||
|
||||
One two three
|
||||
|
||||
Four five six
|
||||
|
||||
Seven eight nine
|
@@ -0,0 +1,404 @@
|
||||
% Choose pra, prb, prc, prd, pre, prl, prstab, or rmp for journal
|
||||
% Add 'draft' option to mark overfull boxes with black boxes
|
||||
% Add 'showpacs' option to make PACS codes appear
|
||||
% for review and submission
|
||||
%\documentclass[aps,preprint,showpacs,superscriptaddress,groupedaddress]{revtex4} % for double-spaced preprint
|
||||
% needed for figures
|
||||
% needed for some tables
|
||||
% for math
|
||||
% for math
|
||||
% for crossing out text
|
||||
% for coloring text
|
||||
%\input{tcilatex}
|
||||
|
||||
|
||||
\documentclass[aps,prl,twocolumn,showpacs,superscriptaddress,groupedaddress]{revtex4}
|
||||
|
||||
\usepackage{graphicx}
|
||||
\usepackage{dcolumn}
|
||||
\usepackage{bm}
|
||||
\usepackage{amssymb}
|
||||
\usepackage{soul}
|
||||
\usepackage{color}
|
||||
|
||||
%TCIDATA{OutputFilter=LATEX.DLL}
|
||||
%TCIDATA{Version=5.50.0.2960}
|
||||
%TCIDATA{<META NAME="SaveForMode" CONTENT="1">}
|
||||
%TCIDATA{BibliographyScheme=BibTeX}
|
||||
%TCIDATA{LastRevised=Tuesday, May 20, 2014 03:06:00}
|
||||
%TCIDATA{<META NAME="GraphicsSave" CONTENT="32">}
|
||||
|
||||
\hyphenation{ALPGEN}
|
||||
\hyphenation{EVTGEN}
|
||||
\hyphenation{PYTHIA}
|
||||
\def\be{\begin{equation}}
|
||||
\def\ee{\end{equation}}
|
||||
\def\bea{\begin{eqnarray}}
|
||||
\def\eea{\end{eqnarray}}
|
||||
%\input{tcilatex}
|
||||
|
||||
\begin{document}
|
||||
|
||||
\title{Transport measurements of the spin wave gap of Mn}
|
||||
\input author_list.tex
|
||||
\date{\today}
|
||||
|
||||
\begin{abstract}
|
||||
Temperature dependent transport measurements on ultrathin antiferromagnetic
|
||||
Mn films reveal a heretofore unknown non-universal weak localization
|
||||
correction to the conductivity which extends to disorder strengths greater than
|
||||
100~k$\Omega$ per square. The inelastic scattering of electrons off of
|
||||
gapped antiferromagnetic spin waves gives rise to an inelastic scattering
|
||||
length which is short enough to place the system in the 3D regime. The
|
||||
extracted fitting parameters provide estimates of the energy gap ($\Delta
|
||||
\approx$~16~K) and exchange energy ($\bar{J} \approx$~320~K). %\st{which are in
|
||||
%agreement with values obtained with other techniques}.
|
||||
\end{abstract}
|
||||
|
||||
\pacs{75}
|
||||
|
||||
\maketitle
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Thin-film transition metal ferromagnets (Fe, Co, Ni, Gd) and
|
||||
antiferromagnets (Mn, Cr) and their alloys are not only ubiquitous in
|
||||
present day technologies but are also expected to play an important role in
|
||||
future developments~\cite{thompson_2008}. Understanding magnetism in these
|
||||
materials, especially when the films are thin enough so that disorder plays
|
||||
an important role, is complicated by the long standing controversy about the
|
||||
relative importance of itinerant and local moments~\cite%
|
||||
{slater_1936,van_vleck_1953,aharoni_2000}. For the itinerant transition
|
||||
metal magnets, a related fundamental issue centers on the question of how
|
||||
itinerancy is compromised by disorder. Clearly with sufficient disorder the
|
||||
charge carriers become localized, but questions arise as to what happens to
|
||||
the spins and associated spin waves and whether the outcome depends on the
|
||||
ferro/antiferro alignment of spins in the itinerant parent. Ferromagnets
|
||||
which have magnetization as the order parameter are fundamentally different
|
||||
from antiferromagnets, which have staggered magnetization (i.e., the difference
|
||||
between the magnetization on each sublattice) as the order parameter~\cite%
|
||||
{blundell_2001}. Ferromagnetism thus distinguishes itself by having soft
|
||||
modes at zero wave number whereas antiferromagnets have soft modes at finite
|
||||
wave number~\cite{belitz_2005}. Accordingly, the respective spin wave
|
||||
spectra are radically different. These distinctions are particularly
|
||||
important when comparing quantum corrections to the conductivity near
|
||||
quantum critical points for ferromagnets~\cite{paul_2005} and
|
||||
antiferromagnets~\cite{syzranov_2012}.
|
||||
|
||||
Surprisingly, although there have been systematic studies of the effect of
|
||||
disorder on the longitudinal $\sigma_{xx}$ and transverse $\sigma_{xy}$
|
||||
conductivity of ferromagnetic films~\cite%
|
||||
{bergmann_1978,bergmann_1991,mitra_2007,misra_2009,kurzweil_2009}, there
|
||||
have been few if any such studies on antiferromagnetic films. In this paper
|
||||
we remedy this situation by presenting transport data on systematically
|
||||
disordered Mn films that are sputter deposited in a custom designed vacuum
|
||||
chamber and then transferred without exposure to air into an adjacent
|
||||
cryostat for transport studies to low temperature. The experimental
|
||||
procedures are similar to those reported previously: disorder, characterized
|
||||
by the sheet resistance $R_0$ measured at $T=$~5~K, can be changed either by
|
||||
growing separate samples or by gentle annealing of a given sample through
|
||||
incremental stages of disorder~\cite{misra_2011}. Using these same procedures, our results for
|
||||
antiferromagnets, however, are decidedly different. The data are well
|
||||
described over a large range of disorder strengths by a non-universal three
|
||||
dimensional (3d) quantum correction that applies only to spin wave gapped
|
||||
antiferromagnets. This finding implies the presence of strong inelastic
|
||||
electron scattering off of antiferromagnetic spin waves. The theory is
|
||||
validated not only by good fits to the data but also by extraction from the
|
||||
fitting parameters of a value for the spin wave gap $\Delta$ that is in
|
||||
agreement with the value expected for Mn. On the other hand, the
|
||||
exchange energy $\bar{J}$ could be sensitive to the high disorder in our
|
||||
ultrathin films, and it turns out to be much smaller than the known values.
|
||||
|
||||
In previous work the inelastic scattering of electrons off of spin waves has
|
||||
been an essential ingredient in understanding disordered ferromagnets. For
|
||||
example, to explain the occurrence of weak-localization corrections to the
|
||||
anomalous Hall effect in polycrystalline Fe films~\cite{mitra_2007}, it was
|
||||
necessary to invoke a contribution to the inelastic phase breaking rate $%
|
||||
\tau_{\varphi}^{-1}$ due to spin-conserving inelastic scattering off
|
||||
spin-wave excitations. This phase breaking rate, anticipated by theory~\cite%
|
||||
{tatara_2004} and seen experimentally in spin polarized electron energy loss
|
||||
spectroscopy (SPEELS) measurements of ultrathin Fe films~\cite%
|
||||
{plihal_1999,zhang_2010}, is linear in temperature and significantly larger
|
||||
than the phase breaking rate due to electron-electron interactions, thus
|
||||
allowing a wide temperature range to observe weak localization corrections~%
|
||||
\cite{mitra_2007}. The effect of a high $\tau_{\varphi}^{-1}$ due to
|
||||
inelastic scattering off spin-wave excitations is also seen in Gd films
|
||||
where in addition to a localizing log($T$) quantum correction to the
|
||||
conductance, a localizing linear-in-$T$ quantum correction is present and is
|
||||
interpreted as a spin-wave mediated Altshuler-Aronov type correction to the
|
||||
conductivity~\cite{misra_2009}.
|
||||
|
||||
Interestingly, this high rate of inelastic spin-wave scattering becomes even
|
||||
more important for the thinnest films as shown in theoretical calculations
|
||||
on Fe and Ni which point to extremely short spin-dependent inelastic mean
|
||||
free paths~\cite{hong_2000} and in spin-polarized electron energy-loss
|
||||
spectroscopy (SPEELS) measurements on few monolayer-thick Fe/W(110) films in
|
||||
which a strong nonmonotonic enhancement of localized spin wave energies is
|
||||
found on the thinnest films~\cite{zhang_2010}.
|
||||
|
||||
Inelastic spin wave scattering in highly disordered ferromagnetic films can
|
||||
be strong enough to assure that the associated $T$-dependent dephasing
|
||||
length $L_{\varphi }(T)=\sqrt{D\tau _{\varphi }}$ (with $D$ the diffusion
|
||||
constant)~\cite{lee_1985} is less than the film thickness $t$, thus putting
|
||||
thin films into the 3d limit where a metal-insulator
|
||||
transition is observed~\cite{misra_2011}. Recognizing that similarly high
|
||||
inelastic scattering rates must apply to highly disordered antiferromagnetic
|
||||
films, we first proceed with a theoretical approach that takes into account
|
||||
the scattering of antiferromagnetic spin waves on the phase relaxation rate
|
||||
and find a heretofore unrecognized non-universal 3d weak localization
|
||||
correction to the conductivity that allows an interpretation of our experimental
|
||||
results.
|
||||
|
||||
We mention in passing that the 3d interaction-induced quantum correction
|
||||
found to be dominant in the case of ferromagnetic Gd
|
||||
films which undergo a metal-insulator transition~\cite{misra_2011} is
|
||||
found to be much smaller in the present case and will not be considered further (for an estimate of this contribution see Ref.~\cite{muttalib_unpub}).
|
||||
|
||||
As discussed in detail in Ref.~[\onlinecite{wm10}], the phase relaxation
|
||||
time $\tau _{\varphi }$ limits the phase coherence in a particle-particle
|
||||
diffusion propagator $C(q,\omega )$ (Cooperon) in the form
|
||||
\begin{equation}
|
||||
C(q,\omega _{l})=\frac{1}{2\pi N_{0}\tau ^{2}}\frac{1}{Dq^{2}+|\omega
|
||||
_{l}|+1/\tau _{\varphi }}.
|
||||
\end{equation}
|
||||
where $N_{0}$ is the density of states at the Fermi level, $\tau $ is the
|
||||
elastic scattering time and $\omega _{l}=2\pi lT$ is the Matsubara
|
||||
frequency. Labeling the Cooperon propagator in the absence of interactions
|
||||
as $C_{0}$, we can write
|
||||
\begin{equation}
|
||||
\frac{1}{\tau _{\varphi }}=\frac{1}{2\pi N_{0}\tau ^{2}}[C^{-1}-C_{0}^{-1}].
|
||||
\end{equation}
|
||||
|
||||
In general, $C(q,\omega )$ can be evaluated diagrammatically in the presence
|
||||
of interactions and disorder in a ladder approximation \cite{fa} that can be
|
||||
symbolically written as $C=C_{0}+C_{0}KC$ where the interaction vertex $K$
|
||||
contains self energy as well as vertex corrections due to both interactions
|
||||
and disorder. It then follows that $1/\tau _{\varphi }$ is given by
|
||||
\begin{equation}
|
||||
\frac{1}{\tau _{\varphi }}=-\frac{1}{2\pi N_{0}\tau ^{2}}K.
|
||||
\end{equation}%
|
||||
In Ref.~[\onlinecite{wm10}], the leading temperature and disorder dependence
|
||||
of the inelastic diffusion propagator was evaluated diagrammatically, in the
|
||||
presence of ferromagnetic spin-wave mediated electron-electron interactions.
|
||||
Here we consider the antiferromagnetic case. We consider only a large
|
||||
spin-wave gap where the damping can be ignored. Using the antiferromagnetic
|
||||
dispersion relation $\omega _{q}=\Delta +Aq$, where $A$ is the spin
|
||||
stiffness, the inelastic lifetime is given by
|
||||
\be
|
||||
\frac{\hbar }{\tau _{\varphi }}=\frac{4}{\pi \hbar }nJ^{2}\int_{0}^{1/l}%
|
||||
\frac{q^{d-1}dq}{\sinh \beta \omega _{q}}\frac{Dq^{2}+1/\tau _{\varphi }}{%
|
||||
(Dq^{2}+1/\tau _{\varphi })^{2}+\omega _{q}^{2}}
|
||||
\ee%
|
||||
where $n=k_{F}^{3}/3\pi ^{2}$ is the 3d density, $J$ is the effective
|
||||
spin-exchange interaction and $\beta =1/k_{B}T$. Here we will consider the
|
||||
limit $\hbar /\tau _{\varphi }\ll \Delta $, relevant for our experiment on
|
||||
Mn. In this limit we can neglect the $1/\tau _{\varphi }$ terms inside the
|
||||
integral. The upper limit should be restricted to $\Delta /A$ in the limit $%
|
||||
\Delta /A<1/l$. For large disorder, we expect the parameter $x\equiv
|
||||
\hbar Dk_{F}^{2}\Delta / \bar{J}^{2}\ll 1$, where the spin-exchange energy
|
||||
is given by $\bar{J}=Ak_{F}$. In this limit, $L_{\varphi }$ can be
|
||||
simplified as
|
||||
\be
|
||||
k_{F}L_{\varphi }\approx \left( \frac{\bar{J}}{\Delta }\right) ^{3/2}\left(
|
||||
\frac{5\sinh \frac{\Delta }{T}}{12\pi }\right) ^{1/2},\;\;\;x\ll 1
|
||||
\label{L-phi-3d}
|
||||
\ee%
|
||||
which is independent of $x$, and therefore, independent of disorder.
|
||||
|
||||
Given the inelastic lifetime, the weak localization correction in 3d is
|
||||
usually given by \cite{lee_1985} $\delta \sigma _{3d}=\frac{e^{2}}{\hbar \pi
|
||||
^{3}}\frac{1}{L_{\varphi }},$ where the prefactor to the inverse inelastic
|
||||
length is a universal number, independent of disorder. However, at large
|
||||
enough disorder, we show that there exists a disorder dependent correction,
|
||||
due to the scale dependent diffusion coefficient near the Anderson
|
||||
metal-insulator transition. In fact, the diffusion coefficient obeys the
|
||||
self-consistent equation \cite{WV}
|
||||
\begin{equation}
|
||||
\frac{D_{0}}{D(\omega )}=1+\frac{k_{F}^{2-d}}{\pi m}\int_{0}^{1/l}dQ\frac{%
|
||||
Q^{d-1}}{-i\omega +D(\omega )Q^{2}}
|
||||
\end{equation}%
|
||||
where $D_{0}=v_{F}l/d$ is the diffusion coefficient at weak disorder. While
|
||||
the significance of the prefactor to the integral is not clear, the above
|
||||
equation remains qualitatively accurate over a wide range near the Anderson
|
||||
transition. Setting $\omega =i/\tau _{\varphi }$ and doing the $Q$-integral
|
||||
in 3d,
|
||||
\bea
|
||||
\frac{D_{0}}{D} &\approx & 1+\frac{1}{\pi mk_{F}}\int_{1/L_{\phi }}^{1/l}dQ\frac{%
|
||||
Q^{2}}{DQ^{2}}\cr
|
||||
&=& 1+\frac{D_{0}}{D}\frac{3}{\pi k_{F}^{2}l^{2}}-\delta
|
||||
\left( \frac{D_{0}}{D}\right) ,
|
||||
\label{delta}
|
||||
\eea%
|
||||
where
|
||||
\bea
|
||||
\delta \equiv \frac{D_{0}}{D}\frac{3}{\pi k_{F}^{2}l^{2}}\frac{l}{%
|
||||
L_{\varphi }}
|
||||
\eea
|
||||
is assumed to be a small correction, and Eq.~(\ref{delta})
|
||||
should not be solved self-consistently. This follows from the fact that the
|
||||
diffusion coefficient of electrons at fixed energy entering the Cooperon
|
||||
expression is that of non-interacting electrons, and is given by the limit $%
|
||||
T\rightarrow 0$, $L_{\varphi }\rightarrow \infty $ and therefore $\delta
|
||||
\rightarrow 0$. Then the correction at finite $T$ is given by
|
||||
\bea
|
||||
\frac{D}{D_{0}} &=& \frac{1}{\left( \frac{D_{0}}{D}\right) _{0}-\delta \left(
|
||||
\frac{D_{0}}{D}\right) }\cr
|
||||
&\approx & \left( \frac{D}{D_{0}}\right) _{0}+\left( \frac{D}{D_{0}}\right) _{0}
|
||||
\frac{3}{\pi k_{F}^{2}l^{2}}\frac{l}{L_{\varphi }}%
|
||||
\eea%
|
||||
where
|
||||
\be
|
||||
\lim_{T\rightarrow 0}\frac{D}{D_{0}}\equiv \left( \frac{D}{D_{0}}\right)
|
||||
_{0}.
|
||||
\ee%
|
||||
Using the relation $\sigma _{3d}=(e^{2}/\hbar )nD$ where the longitudinal
|
||||
sheet conductance $\sigma _{\square }=\sigma _{3d}t$, with $t$ being the
|
||||
film thickness, we finally get the temperature dependent weak localization
|
||||
correction term
|
||||
\bea
|
||||
\frac{\delta \sigma _{\square }}{L_{00}} &=& \left( \frac{D}{D_{0}}\right) _{0}%
|
||||
\frac{2}{\pi }\frac{t}{L_{\varphi }}\cr
|
||||
\left( \frac{D}{D_{0}}\right)_{0} &\approx &\frac{2}{1+\sqrt{1+\frac{4R_{0}^{2}}{a^{2}}}}
|
||||
\label{WL}
|
||||
\eea%
|
||||
where $R_{0}=L_{00}/\sigma _{\square }(T$=$0)$, $L_{00}=e^{2}/\pi h$, $%
|
||||
a=3\pi /(2k_{F}tb_{0})$, $b_{0}$ is a number of order unity, and we
|
||||
have solved the self-consistent equation for $D$ in order to express $D_{0}$
|
||||
in terms of $D$ and finally $R_{0}$. Thus in this case, the weak
|
||||
localization correction has a prefactor which is not universal. While this
|
||||
reduces to the well-known universal result at weak disorder $R_{0}\ll a$, it
|
||||
becomes dependent on disorder characterized by the sheet resistance $R_{0}$
|
||||
at strong disorder and at the same time substantially extends the 3d regime
|
||||
near the transition.
|
||||
|
||||
Inserting the expression for $L_{\varphi }$ (Eq.~(\ref{L-phi-3d})) into Eq.~(\ref%
|
||||
{WL}), we finally obtain the total conductivity, including the quantum
|
||||
correction to the conductivity due to weak localization in 3d arising from
|
||||
scattering of electrons off antiferromagnetic spin waves in Mn,
|
||||
\begin{equation}
|
||||
\frac{\sigma _{\square }}{L_{00}}=A+\frac{B}{\sqrt{\sinh [\Delta /T]}},
|
||||
\label{sigmaWL}
|
||||
\end{equation}%
|
||||
where the parameter $A$ is temperature independent and the parameter
|
||||
\bea
|
||||
B &\equiv & \left( \frac{D}{D_{0}}\right) _{0}\frac{2}{\pi ^{2}}\left( \frac{%
|
||||
12\pi }{5}\right) ^{1/2}\left( \frac{\Delta }{\bar{J}}\right) ^{3/2}tk_{F}\cr%
|
||||
&=&\frac{2c}{1+\sqrt{1+\frac{4R_{0}^{2}}{a^{2}}}},
|
||||
\label{BFit}
|
||||
\eea%
|
||||
where
|
||||
\be
|
||||
c\equiv \left( \frac{\Delta }{\bar{J}}\right) ^{3/2}\left( \frac{%
|
||||
48t^{2}k_{F}^{2}}{5\pi}\right) ^{1/2}.
|
||||
\label{cFit}
|
||||
\ee
|
||||
|
||||
The data presented here are for a single film prepared with an initial $R_0
|
||||
\approx$~6~k$\Omega$. Disorder was subsequently increased in incremental
|
||||
stages up to 180~k$\Omega$ by annealing at approximately 280~K~\cite%
|
||||
{misra_2011}. Additional samples were grown at intermediate disorder and
|
||||
measured to check reproducibility.
|
||||
|
||||
Figure~\ref{fig:cond} shows the conductivity data for two samples with
|
||||
disorder $R_{0}=$~17573~$\Omega $ and 63903~$\Omega $ with corresponding
|
||||
fits to the expression (\ref{sigmaWL}), where $A$ and $B$ are taken as
|
||||
fitting parameters and $\Delta =$~16~K is the spin wave gap. The fits are
|
||||
sensitive to the parameters $A$ and $B$ but relatively insensitive to $%
|
||||
\Delta $. We find that $\Delta =$~16~$\pm $~4~K provides good fits over
|
||||
the whole range of disorder (from 6 to 180~k$\Omega $).
|
||||
|
||||
\begin{figure}[tbp]
|
||||
\begin{center}
|
||||
\includegraphics[width=9cm]{fig_1_16.eps}
|
||||
\end{center}
|
||||
\caption{The temperature-dependent normalized conductivity (open squares)
|
||||
for two samples with the indicated disorder strengths of $R_0 =$~17573~$%
|
||||
\Omega$ and 63903~$\Omega$ show good agreement with theory (solid lines).
|
||||
The fitting parameters $A$ and $B$ are indicated for each curve with the
|
||||
error in the least significant digit indicated in parentheses.}
|
||||
\label{fig:cond}
|
||||
\end{figure}
|
||||
|
||||
Figure~\ref{fig:parb} shows the dependence of the parameter $B$ on the
|
||||
disorder strength $R_0$ (open squares) and a theoretical fit (solid line)
|
||||
using Eq.~(\ref{BFit}), where $c$ and $a$ are fitting parameters. The solid
|
||||
line for this two-parameter fit is drawn for the best-fit values $c=0.67 \pm
|
||||
0.04$ and $a= 28 \pm 3$~k$\Omega$. We note that the fit is of reasonable
|
||||
quality over most of the disorder range except for the film with the least
|
||||
disorder ($R_0 = 6$~k$\Omega$) where $B = 0.77$,
|
||||
somewhat above the saturated value
|
||||
$B = c = 0.67$ evaluated from Eq.~(\ref{BFit}) at $R_0 = 0$. Using higher
|
||||
values of $c$ (e.g., $c=0.8$) and lower values of $a$ (e.g., $a = 22$~k$\Omega$)
|
||||
improves the fit at low disorder strengths but
|
||||
increases the discrepancy at higher disorder strengths.
|
||||
|
||||
%L_phi/t = 2/pi*2/(1+sqrt(1+16))/0.5, 2/pi*2/(1+sqrt(1+1))/0.25
|
||||
|
||||
%http://hyperphysics.phy-astr.gsu.edu/hbase/tables/fermi.html , k_F = sqrt(2*m_e*(10.9 eV))/(hbar) = 1.7E10 1/m
|
||||
|
||||
% (bar(J) / \Delta) ^ 3/2 = (48*(2e-9)^2*(2.7e9)^2/5/pi/(0.65)^2) ^0.5 = 8360 = 20 ^ 3
|
||||
%A = \bar{J} / k_F , \bar{J} = nJ
|
||||
|
||||
Substituting the Fermi energy for bulk Mn~\cite{ashcroft_1976},
|
||||
a thickness $t=2$~nm known to 20\% accuracy, together with the best-fit
|
||||
value for $c$ into Eq.~(\ref{cFit}), we calculate the value $\bar{J} =$~320~$%
|
||||
\pm$~93~K. Gao et al.~\cite{gao_2008} performed inelastic scanning tunneling
|
||||
spectroscopy (ISTS) on thin Mn films and reported $\Delta$ in the range from
|
||||
30 to 60~K and $\bar{J}=vk_F=$~3150~$\pm$~200~K. The agreement of energy gaps is
|
||||
good; however, our significantly lower value of $\bar{J}$ is probably due to the
|
||||
high disorder in our ultrathin films.
|
||||
|
||||
Since the temperature-dependent correction $B/\sqrt{\sinh (\Delta /T)}$ of
|
||||
Eq.~\ref{sigmaWL} is small compared to the parameter $A$, we can write
|
||||
$\sigma_{\square} \approx 1/R_0$ so that Eq.~\ref{sigmaWL} reduces to the
|
||||
expression $A \approx 1/L_{00}R_0$. The logarithmic plot derived by taking the
|
||||
logarithm of both sides of this approximation is shown in the inset of
|
||||
Fig.~\ref{fig:parb}. The slope of -1 confirms the linear dependence of $A$ on
|
||||
$1/R_0$ and the intercept of 5.01 (10$^{5.01}\approx $~102~k$\Omega$) is
|
||||
within 20\% of the expected theoretical value $L_{00}=$~81~k$\Omega $,
|
||||
for the normalization constant. Accordingly, the conductivity corrections in
|
||||
Eq.~\ref{sigmaWL} are small compared to the zero temperature conductivity and
|
||||
the normalization constant $L_{00}$ for the conductivity is close to the
|
||||
expected theoretical value.
|
||||
|
||||
Using Eq.~(\ref{WL}) and the obtained value for $a\approx $~28~k$\Omega $ we can
|
||||
compare the dephasing length ($L_{\varphi }$) with the thickness ($t\approx $%
|
||||
~2~nm) at 16~K. For the sample with $R_{0}=$~63903~$\Omega $ the ratio $%
|
||||
L_{\varphi }/t\approx $~0.5 and for the sample with $R_{0}=$~17573~$\Omega $
|
||||
$L_{\varphi }/t\approx $~2. The latter estimate assumes no spin
|
||||
polarization, while a full polarization would imply $L_{\varphi }/t\approx $%
|
||||
~1. Thus $L_{\varphi }$ is smaller than or close to the thickness of the
|
||||
film, which keeps the film in the three-dimensional regime for almost all
|
||||
temperatures and disorder strengths considered.
|
||||
|
||||
\begin{figure}[tbp]
|
||||
\begin{center}
|
||||
\includegraphics[width=9cm]{fig_2_16.eps}
|
||||
\end{center}
|
||||
\caption{Dependence of the fitting parameters $B$ and $A$ (inset) on
|
||||
disorder $R_0$ for $\Delta=$~16~K. The fitting parameters are indicated for
|
||||
each curve with the error in the least significant digit indicated in
|
||||
parentheses.}
|
||||
\label{fig:parb}
|
||||
\end{figure}
|
||||
|
||||
In conclusion, we have performed \textit{in situ} transport measurements on
|
||||
ultrathin Mn films, systematically varying the disorder ($R_{0}=R_{xx}$ at $T=$%
|
||||
~5~K). The obtained data were analyzed within a weak localization theory in
|
||||
3d generalized to strong disorder. In the temperature range considered
|
||||
inelastic scattering off spin waves is found to be strong giving rise to a
|
||||
dephasing length shorter than the film thickness, which places these systems
|
||||
into the 3d regime. The obtained value for the spin wave gap was close to
|
||||
the one measured by Gao et al.~\cite{gao_2008} using ISTS, while the
|
||||
exchange energy was much smaller.
|
||||
|
||||
This work has been supported by the NSF under Grant No 1305783 (AFH).
|
||||
PW thanks A.~M.~Finkel'stein for useful discussions and acknowledges
|
||||
partial support through the DFG research unit ``Quantum phase transitions''.
|
||||
|
||||
\bibliographystyle{apsrev}
|
||||
\bibliography{bibl}
|
||||
|
||||
\end{document}
|
@@ -0,0 +1,74 @@
|
||||
{
|
||||
"chunk": {
|
||||
"history": {
|
||||
"snapshot": {
|
||||
"files": {
|
||||
"bar.tex": {
|
||||
"hash": "4f785a4c192155b240e3042b3a7388b47603f423",
|
||||
"stringLength": 26
|
||||
},
|
||||
"main.tex": {
|
||||
"hash": "f28571f561d198b87c24cc6a98b78e87b665e22d",
|
||||
"stringLength": 20638,
|
||||
"metadata": {
|
||||
"main": true
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"changes": [
|
||||
{
|
||||
"operations": [
|
||||
{
|
||||
"pathname": "main.tex",
|
||||
"textOperation": [
|
||||
1912,
|
||||
"Hello world",
|
||||
18726
|
||||
]
|
||||
}
|
||||
],
|
||||
"timestamp": "2017-12-04T10:23:35.633Z",
|
||||
"authors": [
|
||||
31
|
||||
]
|
||||
},
|
||||
{
|
||||
"operations": [
|
||||
{
|
||||
"pathname": "bar.tex",
|
||||
"newPathname": "foo.tex"
|
||||
}
|
||||
],
|
||||
"timestamp": "2017-12-04T10:27:26.874Z",
|
||||
"authors": [
|
||||
31
|
||||
]
|
||||
},
|
||||
{
|
||||
"operations": [
|
||||
{
|
||||
"pathname": "foo.tex",
|
||||
"textOperation": [
|
||||
26,
|
||||
"\n\nFour five six"
|
||||
]
|
||||
}
|
||||
],
|
||||
"timestamp": "2017-12-04T10:28:33.724Z",
|
||||
"authors": [
|
||||
31
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"startVersion": 0
|
||||
},
|
||||
"authors": [
|
||||
{
|
||||
"id": 31,
|
||||
"email": "james.allen@overleaf.com",
|
||||
"name": "James"
|
||||
}
|
||||
]
|
||||
}
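The textOperation arrays above use the history OT encoding, consistent with the stringLength values recorded in the snapshots: a non-negative number retains that many characters, a string inserts its text, and a negative number deletes that many characters. A minimal sketch of applying such an operation to a document string (the helper name applyTextOperation is illustrative, not part of the service):

// Apply one history text operation to a document string.
// Encoding assumed from the fixtures: number >= 0 retains, string inserts, number < 0 deletes.
function applyTextOperation(doc, op) {
  let result = ''
  let cursor = 0
  for (const part of op) {
    if (typeof part === 'string') {
      result += part // insert
    } else if (part >= 0) {
      result += doc.slice(cursor, cursor + part) // retain
      cursor += part
    } else {
      cursor -= part // delete: skip |part| characters of the input
    }
  }
  return result + doc.slice(cursor)
}

// e.g. the first change above: applyTextOperation(mainTex, [1912, 'Hello world', 18726])
// retains 1912 chars, inserts 'Hello world', then retains the remaining 18726 chars
// (1912 + 18726 = 20638, the recorded length of main.tex before the change).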
|
@@ -0,0 +1,74 @@
|
||||
{
|
||||
"chunk": {
|
||||
"history": {
|
||||
"snapshot": {
|
||||
"files": {
|
||||
"main.tex": {
|
||||
"hash": "35c9bd86574d61dcadbce2fdd3d4a0684272c6ea",
|
||||
"stringLength": 20649,
|
||||
"metadata": {
|
||||
"main": true
|
||||
}
|
||||
},
|
||||
"foo.tex": {
|
||||
"hash": "c6654ea913979e13e22022653d284444f284a172",
|
||||
"stringLength": 41
|
||||
}
|
||||
}
|
||||
},
|
||||
"changes": [
|
||||
{
|
||||
"operations": [
|
||||
{
|
||||
"pathname": "foo.tex",
|
||||
"textOperation": [
|
||||
41,
|
||||
"\n\nSeven eight nince"
|
||||
]
|
||||
}
|
||||
],
|
||||
"timestamp": "2017-12-04T10:29:17.786Z",
|
||||
"authors": [
|
||||
31
|
||||
]
|
||||
},
|
||||
{
|
||||
"operations": [
|
||||
{
|
||||
"pathname": "foo.tex",
|
||||
"textOperation": [
|
||||
58,
|
||||
-1,
|
||||
1
|
||||
]
|
||||
}
|
||||
],
|
||||
"timestamp": "2017-12-04T10:29:22.905Z",
|
||||
"authors": [
|
||||
31
|
||||
]
|
||||
},
|
||||
{
|
||||
"operations": [
|
||||
{
|
||||
"pathname": "foo.tex",
|
||||
"newPathname": "bar.tex"
|
||||
}
|
||||
],
|
||||
"timestamp": "2017-12-04T10:29:26.120Z",
|
||||
"authors": [
|
||||
31
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"startVersion": 3
|
||||
},
|
||||
"authors": [
|
||||
{
|
||||
"id": 31,
|
||||
"email": "james.allen@overleaf.com",
|
||||
"name": "James"
|
||||
}
|
||||
]
|
||||
}
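Chaining the operations from these chunks with the applyTextOperation sketch above (again illustrative only) reproduces the recorded string lengths:

// bar.tex starts as the 26-character 'Hello world\n\nOne two three'.
let doc = 'Hello world\n\nOne two three'
doc = applyTextOperation(doc, [26, '\n\nFour five six']) // 41 chars; the file is then renamed to foo.tex
doc = applyTextOperation(doc, [41, '\n\nSeven eight nince']) // 60 chars
doc = applyTextOperation(doc, [58, -1, 1]) // deletes the stray 'c', leaving '...nine' (59 chars)
// 59 matches the stringLength recorded for bar.tex in the next chunk's snapshot.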
|
@@ -0,0 +1,63 @@
|
||||
{
|
||||
"chunk": {
|
||||
"history": {
|
||||
"snapshot": {
|
||||
"files": {
|
||||
"main.tex": {
|
||||
"hash": "35c9bd86574d61dcadbce2fdd3d4a0684272c6ea",
|
||||
"stringLength": 20649,
|
||||
"metadata": {
|
||||
"main": true
|
||||
}
|
||||
},
|
||||
"bar.tex": {
|
||||
"hash": "e13c315d53aaef3aa34550a86b09cff091ace220",
|
||||
"stringLength": 59
|
||||
}
|
||||
}
|
||||
},
|
||||
"changes": [
|
||||
{
|
||||
"operations": [
|
||||
{
|
||||
"pathname": "main.tex",
|
||||
"textOperation": [
|
||||
1923,
|
||||
" also updated",
|
||||
18726
|
||||
]
|
||||
}
|
||||
],
|
||||
"timestamp": "2017-12-04T10:32:47.277Z",
|
||||
"authors": [
|
||||
31
|
||||
]
|
||||
},
|
||||
{
|
||||
"operations": [
|
||||
{
|
||||
"pathname": "bar.tex",
|
||||
"textOperation": [
|
||||
28,
|
||||
-15,
|
||||
16
|
||||
]
|
||||
}
|
||||
],
|
||||
"timestamp": "2017-12-04T10:32:52.877Z",
|
||||
"v2Authors": [
|
||||
"5a5637efdac84e81b71014c4"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"startVersion": 6
|
||||
},
|
||||
"authors": [
|
||||
{
|
||||
"id": 31,
|
||||
"email": "james.allen@overleaf.com",
|
||||
"name": "James"
|
||||
}
|
||||
]
|
||||
}
|
@@ -0,0 +1,83 @@
|
||||
import { expect } from 'chai'
|
||||
import nock from 'nock'
|
||||
import mongodb from 'mongodb-legacy'
|
||||
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
|
||||
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
|
||||
const { ObjectId } = mongodb
|
||||
|
||||
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
|
||||
const MockWeb = () => nock('http://127.0.0.1:3000')
|
||||
const fixture = path => new URL(`../fixtures/${path}`, import.meta.url)
|
||||
|
||||
describe('Deleting project', function () {
|
||||
beforeEach(function (done) {
|
||||
this.projectId = new ObjectId().toString()
|
||||
this.historyId = new ObjectId().toString()
|
||||
MockWeb()
|
||||
.get(`/project/${this.projectId}/details`)
|
||||
.reply(200, {
|
||||
name: 'Test Project',
|
||||
overleaf: { history: { id: this.historyId } },
|
||||
})
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/latest/history`)
|
||||
.replyWithFile(200, fixture('chunks/0-3.json'))
|
||||
MockHistoryStore().delete(`/api/projects/${this.historyId}`).reply(204)
|
||||
ProjectHistoryApp.ensureRunning(done)
|
||||
})
|
||||
|
||||
describe('when the project has no pending updates', function () {
|
||||
it('successfully deletes the project', function (done) {
|
||||
ProjectHistoryClient.deleteProject(this.projectId, done)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the project has pending updates', function () {
|
||||
beforeEach(function (done) {
|
||||
ProjectHistoryClient.pushRawUpdate(
|
||||
this.projectId,
|
||||
{
|
||||
pathname: '/main.tex',
|
||||
docLines: 'hello',
|
||||
doc: this.docId,
|
||||
meta: { userId: this.userId, ts: new Date() },
|
||||
},
|
||||
err => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
ProjectHistoryClient.setFirstOpTimestamp(
|
||||
this.projectId,
|
||||
Date.now(),
|
||||
err => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
ProjectHistoryClient.deleteProject(this.projectId, done)
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('clears pending updates', function (done) {
|
||||
ProjectHistoryClient.getDump(this.projectId, (err, dump) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
expect(dump.updates).to.deep.equal([])
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
it('clears the first op timestamp', function (done) {
|
||||
ProjectHistoryClient.getFirstOpTimestamp(this.projectId, (err, ts) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
expect(ts).to.be.null
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
415
services/project-history/test/acceptance/js/DiffTests.js
Normal file
@@ -0,0 +1,415 @@
|
||||
import { expect } from 'chai'
|
||||
import request from 'request'
|
||||
import crypto from 'node:crypto'
|
||||
import mongodb from 'mongodb-legacy'
|
||||
import nock from 'nock'
|
||||
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
|
||||
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
|
||||
const { ObjectId } = mongodb
|
||||
|
||||
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
|
||||
const MockWeb = () => nock('http://127.0.0.1:3000')
|
||||
|
||||
function createMockBlob(historyId, content) {
|
||||
const sha = crypto.createHash('sha1').update(content).digest('hex')
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${historyId}/blobs/${sha}`)
|
||||
.reply(200, content)
|
||||
.persist()
|
||||
return sha
|
||||
}
|
||||
|
||||
describe('Diffs', function () {
|
||||
beforeEach(function (done) {
|
||||
ProjectHistoryApp.ensureRunning(error => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
|
||||
this.historyId = new ObjectId().toString()
|
||||
this.projectId = new ObjectId().toString()
|
||||
|
||||
MockHistoryStore().post('/api/projects').reply(200, {
|
||||
projectId: this.historyId,
|
||||
})
|
||||
MockWeb()
|
||||
.get(`/project/${this.projectId}/details`)
|
||||
.reply(200, {
|
||||
name: 'Test Project',
|
||||
overleaf: { history: { id: this.historyId } },
|
||||
})
|
||||
|
||||
ProjectHistoryClient.initializeProject(this.historyId, error => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
nock.cleanAll()
|
||||
})
|
||||
|
||||
it('should return a diff of the updates to a doc from a single chunk', function (done) {
|
||||
this.blob = 'one two three five'
|
||||
this.sha = createMockBlob(this.historyId, this.blob)
|
||||
this.v2AuthorId = '123456789'
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/6/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {
|
||||
'foo.tex': {
|
||||
hash: this.sha,
|
||||
stringLength: this.blob.length,
|
||||
},
|
||||
},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
textOperation: [13, ' four', 5],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
textOperation: [4, -4, 15],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:22.905Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
textOperation: [19, ' six'],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:26.120Z',
|
||||
v2Authors: [this.v2AuthorId],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 3,
|
||||
},
|
||||
authors: [31],
|
||||
})
|
||||
|
||||
ProjectHistoryClient.getDiff(
|
||||
this.projectId,
|
||||
'foo.tex',
|
||||
3,
|
||||
6,
|
||||
(error, diff) => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
expect(diff).to.deep.equal({
|
||||
diff: [
|
||||
{
|
||||
u: 'one ',
|
||||
},
|
||||
{
|
||||
d: 'two ',
|
||||
meta: {
|
||||
users: [31],
|
||||
start_ts: 1512383362905,
|
||||
end_ts: 1512383362905,
|
||||
},
|
||||
},
|
||||
{
|
||||
u: 'three',
|
||||
},
|
||||
{
|
||||
i: ' four',
|
||||
meta: {
|
||||
users: [31],
|
||||
start_ts: 1512383357786,
|
||||
end_ts: 1512383357786,
|
||||
},
|
||||
},
|
||||
{
|
||||
u: ' five',
|
||||
},
|
||||
{
|
||||
i: ' six',
|
||||
meta: {
|
||||
users: [this.v2AuthorId],
|
||||
start_ts: 1512383366120,
|
||||
end_ts: 1512383366120,
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return a diff of the updates to a doc across multiple chunks', function (done) {
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/5/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {
|
||||
'foo.tex': {
|
||||
hash: createMockBlob(this.historyId, 'one two three five'),
|
||||
stringLength: 'one three four five'.length,
|
||||
},
|
||||
},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
textOperation: [13, ' four', 5],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
textOperation: [4, -4, 15],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:22.905Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 3,
|
||||
},
|
||||
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
|
||||
})
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/6/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {
|
||||
'foo.tex': {
|
||||
hash: createMockBlob(this.historyId, 'one three four five'),
|
||||
stringLength: 'one three four five'.length,
|
||||
},
|
||||
},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
textOperation: [19, ' six'],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:26.120Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
textOperation: [23, ' seven'],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:26.120Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 5,
|
||||
},
|
||||
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
|
||||
})
|
||||
|
||||
ProjectHistoryClient.getDiff(
|
||||
this.projectId,
|
||||
'foo.tex',
|
||||
4,
|
||||
6,
|
||||
(error, diff) => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
expect(diff).to.deep.equal({
|
||||
diff: [
|
||||
{
|
||||
u: 'one ',
|
||||
},
|
||||
{
|
||||
d: 'two ',
|
||||
meta: {
|
||||
users: [31],
|
||||
start_ts: 1512383362905,
|
||||
end_ts: 1512383362905,
|
||||
},
|
||||
},
|
||||
{
|
||||
u: 'three four five',
|
||||
},
|
||||
{
|
||||
i: ' six',
|
||||
meta: {
|
||||
users: [31],
|
||||
start_ts: 1512383366120,
|
||||
end_ts: 1512383366120,
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return a 404 when there are no changes for the file in the range', function (done) {
|
||||
this.blob = 'one two three five'
|
||||
this.sha = createMockBlob(this.historyId, this.blob)
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/6/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {
|
||||
'foo.tex': {
|
||||
hash: this.sha,
|
||||
stringLength: this.blob.length,
|
||||
},
|
||||
},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
textOperation: [13, ' four', 5],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 3,
|
||||
},
|
||||
authors: [31],
|
||||
})
|
||||
|
||||
request.get(
|
||||
{
|
||||
url: `http://127.0.0.1:3054/project/${this.projectId}/diff`,
|
||||
qs: {
|
||||
pathname: 'not_here.tex',
|
||||
from: 3,
|
||||
to: 6,
|
||||
},
|
||||
json: true,
|
||||
},
|
||||
(error, res, body) => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
expect(res.statusCode).to.equal(404)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return a binary flag with a diff of a binary file', function (done) {
|
||||
this.blob = 'one two three five'
|
||||
this.sha = createMockBlob(this.historyId, this.blob)
|
||||
this.binaryBlob = Buffer.from([1, 2, 3, 4])
|
||||
this.binarySha = createMockBlob(this.historyId, this.binaryBlob)
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/6/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {
|
||||
'binary.tex': {
|
||||
hash: this.binarySha,
|
||||
byteLength: this.binaryBlob.length, // Indicates binary
|
||||
},
|
||||
'foo.tex': {
|
||||
hash: this.sha,
|
||||
stringLength: this.blob.length, // Indicates a text (non-binary) file
|
||||
},
|
||||
},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
textOperation: [13, ' four', 5],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
textOperation: [4, -4, 15],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:22.905Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
textOperation: [19, ' six'],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:26.120Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 3,
|
||||
},
|
||||
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
|
||||
})
|
||||
|
||||
ProjectHistoryClient.getDiff(
|
||||
this.projectId,
|
||||
'binary.tex',
|
||||
3,
|
||||
6,
|
||||
(error, diff) => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
expect(diff).to.deep.equal({
|
||||
diff: {
|
||||
binary: true,
|
||||
},
|
||||
})
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
@@ -0,0 +1,73 @@
/* eslint-disable
    no-undef,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
import async from 'async'
import sinon from 'sinon'
import { expect } from 'chai'
import Settings from '@overleaf/settings'
import assert from 'node:assert'
import mongodb from 'mongodb-legacy'
import nock from 'nock'
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
const { ObjectId } = mongodb

const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockWeb = () => nock('http://127.0.0.1:3000')

describe('DiscardingUpdates', function () {
  beforeEach(function (done) {
    this.timestamp = new Date()

    return ProjectHistoryApp.ensureRunning(error => {
      if (error != null) {
        throw error
      }
      this.user_id = new ObjectId().toString()
      this.project_id = new ObjectId().toString()
      this.doc_id = new ObjectId().toString()

      MockHistoryStore().post('/api/projects').reply(200, {
        projectId: 0,
      })
      MockWeb()
        .get(`/project/${this.project_id}/details`)
        .reply(200, { name: 'Test Project' })
      return ProjectHistoryClient.initializeProject(this.project_id, done)
    })
  })

  return it('should discard updates', function (done) {
    return async.series(
      [
        cb => {
          const update = {
            pathname: '/main.tex',
            docLines: 'a\nb',
            doc: this.doc_id,
            meta: { user_id: this.user_id, ts: new Date() },
          }
          return ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb)
        },
        cb => {
          return ProjectHistoryClient.flushProject(this.project_id, cb)
        },
      ],
      error => {
        if (error != null) {
          throw error
        }
        return done()
      }
    )
  })
})
880
services/project-history/test/acceptance/js/FileTreeDiffTests.js
Normal file
@@ -0,0 +1,880 @@
|
||||
/* eslint-disable
|
||||
no-undef,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
import sinon from 'sinon'
|
||||
import { expect } from 'chai'
|
||||
import Settings from '@overleaf/settings'
|
||||
import request from 'request'
|
||||
import assert from 'node:assert'
|
||||
import Path from 'node:path'
|
||||
import crypto from 'node:crypto'
|
||||
import mongodb from 'mongodb-legacy'
|
||||
import nock from 'nock'
|
||||
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
|
||||
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
|
||||
import * as HistoryId from './helpers/HistoryId.js'
|
||||
const { ObjectId } = mongodb
|
||||
|
||||
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
|
||||
const MockFileStore = () => nock('http://127.0.0.1:3009')
|
||||
const MockWeb = () => nock('http://127.0.0.1:3000')
|
||||
|
||||
const sha = data => crypto.createHash('sha1').update(data).digest('hex')
|
||||
|
||||
describe('FileTree Diffs', function () {
|
||||
beforeEach(function (done) {
|
||||
return ProjectHistoryApp.ensureRunning(error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
|
||||
this.historyId = new ObjectId().toString()
|
||||
this.projectId = new ObjectId().toString()
|
||||
|
||||
MockHistoryStore().post('/api/projects').reply(200, {
|
||||
projectId: this.historyId,
|
||||
})
|
||||
MockWeb()
|
||||
.get(`/project/${this.projectId}/details`)
|
||||
.reply(200, {
|
||||
name: 'Test Project',
|
||||
overleaf: { history: { id: this.historyId } },
|
||||
})
|
||||
|
||||
return ProjectHistoryClient.initializeProject(
|
||||
this.historyId,
|
||||
(error, olProject) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
return nock.cleanAll()
|
||||
})
|
||||
|
||||
it('should return a diff of the updates to a doc from a single chunk', function (done) {
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/7/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {
|
||||
'foo.tex': {
|
||||
hash: sha('mock-sha-foo'),
|
||||
stringLength: 42,
|
||||
},
|
||||
'renamed.tex': {
|
||||
hash: sha('mock-sha-renamed'),
|
||||
stringLength: 42,
|
||||
},
|
||||
'deleted.tex': {
|
||||
hash: sha('mock-sha-deleted'),
|
||||
stringLength: 42,
|
||||
},
|
||||
},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'renamed.tex',
|
||||
newPathname: 'newName.tex',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
textOperation: ['lorem ipsum'],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
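// Renaming a file to an empty newPathname marks it as deleted.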
pathname: 'deleted.tex',
|
||||
newPathname: '',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:22.905Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
file: {
|
||||
hash: sha('new-sha'),
|
||||
stringLength: 42,
|
||||
},
|
||||
pathname: 'added.tex',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:22.905Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 3,
|
||||
},
|
||||
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
|
||||
})
|
||||
|
||||
return ProjectHistoryClient.getFileTreeDiff(
|
||||
this.projectId,
|
||||
3,
|
||||
7,
|
||||
(error, diff) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(diff).to.deep.equal({
|
||||
diff: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
operation: 'edited',
|
||||
},
|
||||
{
|
||||
pathname: 'deleted.tex',
|
||||
operation: 'removed',
|
||||
deletedAtV: 5,
|
||||
editable: true,
|
||||
},
|
||||
{
|
||||
newPathname: 'newName.tex',
|
||||
pathname: 'renamed.tex',
|
||||
operation: 'renamed',
|
||||
editable: true,
|
||||
},
|
||||
{
|
||||
pathname: 'added.tex',
|
||||
operation: 'added',
|
||||
editable: true,
|
||||
},
|
||||
],
|
||||
})
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return a diff of the updates to a doc across multiple chunks', function (done) {
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/5/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {
|
||||
'foo.tex': {
|
||||
// Updated in this chunk
|
||||
hash: sha('mock-sha-foo'),
|
||||
stringLength: 42,
|
||||
},
|
||||
'bar.tex': {
|
||||
// Updated in the next chunk
|
||||
hash: sha('mock-sha-bar'),
|
||||
stringLength: 42,
|
||||
},
|
||||
'baz.tex': {
|
||||
// Not updated
|
||||
hash: sha('mock-sha-bar'),
|
||||
stringLength: 42,
|
||||
},
|
||||
'renamed.tex': {
|
||||
hash: sha('mock-sha-renamed'),
|
||||
stringLength: 42,
|
||||
},
|
||||
'deleted.tex': {
|
||||
hash: sha('mock-sha-deleted'),
|
||||
stringLength: 42,
|
||||
},
|
||||
},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'renamed.tex',
|
||||
newPathname: 'newName.tex',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
textOperation: ['lorem ipsum'],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:19.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'deleted.tex',
|
||||
newPathname: '',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:22.905Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 2,
|
||||
},
|
||||
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
|
||||
})
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/7/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {
|
||||
'foo.tex': {
|
||||
hash: sha('mock-sha-foo'),
|
||||
stringLength: 42,
|
||||
},
|
||||
'baz.tex': {
|
||||
hash: sha('mock-sha-bar'),
|
||||
stringLength: 42,
|
||||
},
|
||||
'newName.tex': {
|
||||
hash: sha('mock-sha-renamed'),
|
||||
stringLength: 42,
|
||||
},
|
||||
},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
file: {
|
||||
hash: sha('new-sha'),
|
||||
stringLength: 42,
|
||||
},
|
||||
pathname: 'added.tex',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:22.905Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'bar.tex',
|
||||
textOperation: ['lorem ipsum'],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:23.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 5,
|
||||
},
|
||||
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
|
||||
})
|
||||
|
||||
return ProjectHistoryClient.getFileTreeDiff(
|
||||
this.projectId,
|
||||
2,
|
||||
7,
|
||||
(error, diff) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(diff).to.deep.equal({
|
||||
diff: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
operation: 'edited',
|
||||
},
|
||||
{
|
||||
pathname: 'bar.tex',
|
||||
operation: 'edited',
|
||||
},
|
||||
{
|
||||
pathname: 'baz.tex',
|
||||
editable: true,
|
||||
},
|
||||
{
|
||||
pathname: 'deleted.tex',
|
||||
operation: 'removed',
|
||||
deletedAtV: 4,
|
||||
editable: true,
|
||||
},
|
||||
{
|
||||
newPathname: 'newName.tex',
|
||||
pathname: 'renamed.tex',
|
||||
operation: 'renamed',
|
||||
editable: true,
|
||||
},
|
||||
{
|
||||
pathname: 'added.tex',
|
||||
operation: 'added',
|
||||
editable: true,
|
||||
},
|
||||
],
|
||||
})
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return a diff that includes multiple renames', function (done) {
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/5/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {
|
||||
'one.tex': {
|
||||
hash: sha('mock-sha'),
|
||||
stringLength: 42,
|
||||
},
|
||||
},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'one.tex',
|
||||
newPathname: 'two.tex',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'two.tex',
|
||||
newPathname: 'three.tex',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:22.905Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 3,
|
||||
},
|
||||
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
|
||||
})
|
||||
|
||||
return ProjectHistoryClient.getFileTreeDiff(
|
||||
this.projectId,
|
||||
3,
|
||||
5,
|
||||
(error, diff) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(diff).to.deep.equal({
|
||||
diff: [
|
||||
{
|
||||
newPathname: 'three.tex',
|
||||
pathname: 'one.tex',
|
||||
operation: 'renamed',
|
||||
editable: true,
|
||||
},
|
||||
],
|
||||
})
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle deleting then re-adding a file', function (done) {
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/5/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {
|
||||
'one.tex': {
|
||||
hash: sha('mock-sha'),
|
||||
stringLength: 42,
|
||||
},
|
||||
},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'one.tex',
|
||||
newPathname: '',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'one.tex',
|
||||
file: {
|
||||
hash: sha('mock-sha'),
|
||||
},
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:22.905Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 3,
|
||||
},
|
||||
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
|
||||
})
|
||||
|
||||
return ProjectHistoryClient.getFileTreeDiff(
|
||||
this.projectId,
|
||||
3,
|
||||
5,
|
||||
(error, diff) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(diff).to.deep.equal({
|
||||
diff: [
|
||||
{
|
||||
pathname: 'one.tex',
|
||||
operation: 'added',
|
||||
editable: null,
|
||||
},
|
||||
],
|
||||
})
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle deleting then renaming a file to the same place', function (done) {
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/5/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {
|
||||
'one.tex': {
|
||||
hash: sha('mock-sha-one'),
|
||||
stringLength: 42,
|
||||
},
|
||||
'two.tex': {
|
||||
hash: sha('mock-sha-two'),
|
||||
stringLength: 42,
|
||||
},
|
||||
},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'one.tex',
|
||||
newPathname: '',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'two.tex',
|
||||
newPathname: 'one.tex',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:22.905Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 3,
|
||||
},
|
||||
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
|
||||
})
|
||||
|
||||
return ProjectHistoryClient.getFileTreeDiff(
|
||||
this.projectId,
|
||||
3,
|
||||
5,
|
||||
(error, diff) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(diff).to.deep.equal({
|
||||
diff: [
|
||||
{
|
||||
pathname: 'two.tex',
|
||||
newPathname: 'one.tex',
|
||||
operation: 'renamed',
|
||||
editable: true,
|
||||
},
|
||||
],
|
||||
})
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle adding then renaming a file', function (done) {
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/5/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'one.tex',
|
||||
file: {
|
||||
hash: sha('mock-sha'),
|
||||
stringLength: 42,
|
||||
},
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'one.tex',
|
||||
newPathname: 'two.tex',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:22.905Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 3,
|
||||
},
|
||||
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
|
||||
})
|
||||
|
||||
return ProjectHistoryClient.getFileTreeDiff(
|
||||
this.projectId,
|
||||
3,
|
||||
5,
|
||||
(error, diff) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(diff).to.deep.equal({
|
||||
diff: [
|
||||
{
|
||||
pathname: 'two.tex',
|
||||
operation: 'added',
|
||||
editable: true,
|
||||
},
|
||||
],
|
||||
})
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return 422 with a chunk with an invalid rename', function (done) {
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/6/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {
|
||||
'foo.tex': {
|
||||
hash: sha('mock-sha-foo'),
|
||||
stringLength: 42,
|
||||
},
|
||||
'bar.tex': {
|
||||
hash: sha('mock-sha-bar'),
|
||||
stringLength: 42,
|
||||
},
|
||||
},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
newPathname: 'bar.tex',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 5,
|
||||
},
|
||||
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
|
||||
})
|
||||
|
||||
return ProjectHistoryClient.getFileTreeDiff(
|
||||
this.projectId,
|
||||
5,
|
||||
6,
|
||||
(error, diff, statusCode) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(statusCode).to.equal(422)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return 200 with a chunk with an invalid add', function (done) {
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/6/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {
|
||||
'foo.tex': {
|
||||
hash: sha('mock-sha-foo'),
|
||||
stringLength: 42,
|
||||
},
|
||||
},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
file: {
|
||||
hash: sha('new-sha'),
|
||||
},
|
||||
pathname: 'foo.tex',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 5,
|
||||
},
|
||||
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
|
||||
})
|
||||
|
||||
return ProjectHistoryClient.getFileTreeDiff(
|
||||
this.projectId,
|
||||
5,
|
||||
6,
|
||||
(error, diff, statusCode) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(diff).to.deep.equal({
|
||||
diff: [
|
||||
{
|
||||
pathname: 'foo.tex',
|
||||
operation: 'added',
|
||||
editable: null,
|
||||
},
|
||||
],
|
||||
})
|
||||
expect(statusCode).to.equal(200)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle edits of missing/invalid files', function (done) {
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/5/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'new.tex',
|
||||
textOperation: ['lorem ipsum'],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:18.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: '',
|
||||
textOperation: ['lorem ipsum'],
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 3,
|
||||
},
|
||||
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
|
||||
})
|
||||
|
||||
return ProjectHistoryClient.getFileTreeDiff(
|
||||
this.projectId,
|
||||
3,
|
||||
5,
|
||||
(error, diff) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(diff).to.deep.equal({
|
||||
diff: [
|
||||
{
|
||||
operation: 'edited',
|
||||
pathname: 'new.tex',
|
||||
},
|
||||
],
|
||||
})
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle deletions of missing/invalid files', function (done) {
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/5/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'missing.tex',
|
||||
newPathname: '',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: '',
|
||||
newPathname: '',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 3,
|
||||
},
|
||||
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
|
||||
})
|
||||
|
||||
return ProjectHistoryClient.getFileTreeDiff(
|
||||
this.projectId,
|
||||
3,
|
||||
5,
|
||||
(error, diff) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(diff).to.deep.equal({
|
||||
diff: [],
|
||||
})
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
return it('should handle renames of missing/invalid files', function (done) {
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/5/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
history: {
|
||||
snapshot: {
|
||||
files: {},
|
||||
},
|
||||
changes: [
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: 'missing.tex',
|
||||
newPathname: 'missing-renamed.tex',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
{
|
||||
operations: [
|
||||
{
|
||||
pathname: '',
|
||||
newPathname: 'missing-renamed-other.tex',
|
||||
},
|
||||
],
|
||||
timestamp: '2017-12-04T10:29:17.786Z',
|
||||
authors: [31],
|
||||
},
|
||||
],
|
||||
},
|
||||
startVersion: 3,
|
||||
},
|
||||
authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }],
|
||||
})
|
||||
|
||||
return ProjectHistoryClient.getFileTreeDiff(
|
||||
this.projectId,
|
||||
3,
|
||||
5,
|
||||
(error, diff) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(diff).to.deep.equal({
|
||||
diff: [],
|
||||
})
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
242
services/project-history/test/acceptance/js/FlushManagerTests.js
Normal file
@@ -0,0 +1,242 @@
|
||||
import async from 'async'
|
||||
import nock from 'nock'
|
||||
import { expect } from 'chai'
|
||||
import request from 'request'
|
||||
import assert from 'node:assert'
|
||||
import mongodb from 'mongodb-legacy'
|
||||
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
|
||||
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
|
||||
const { ObjectId } = mongodb
|
||||
|
||||
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
|
||||
const MockWeb = () => nock('http://127.0.0.1:3000')
|
||||
|
||||
describe('Flushing old queues', function () {
|
||||
const historyId = new ObjectId().toString()
|
||||
|
||||
beforeEach(function (done) {
|
||||
this.timestamp = new Date()
|
||||
|
||||
ProjectHistoryApp.ensureRunning(error => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
this.projectId = new ObjectId().toString()
|
||||
this.docId = new ObjectId().toString()
|
||||
this.fileId = new ObjectId().toString()
|
||||
|
||||
MockHistoryStore().post('/api/projects').reply(200, {
|
||||
projectId: historyId,
|
||||
})
|
||||
MockWeb()
|
||||
.get(`/project/${this.projectId}/details`)
|
||||
.reply(200, {
|
||||
name: 'Test Project',
|
||||
overleaf: {
|
||||
history: {
|
||||
id: historyId,
|
||||
},
|
||||
},
|
||||
})
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${historyId}/latest/history`)
|
||||
.reply(200, {
|
||||
chunk: {
|
||||
startVersion: 0,
|
||||
history: {
|
||||
changes: [],
|
||||
},
|
||||
},
|
||||
})
|
||||
ProjectHistoryClient.initializeProject(historyId, done)
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
nock.cleanAll()
|
||||
})
|
||||
|
||||
describe('retrying an unflushed project', function () {
|
||||
describe('when the update is older than the cutoff', function () {
|
||||
beforeEach(function (done) {
|
||||
this.flushCall = MockHistoryStore()
|
||||
.put(
|
||||
`/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb`
|
||||
)
|
||||
.reply(201)
|
||||
.post(`/api/projects/${historyId}/legacy_changes?end_version=0`)
|
||||
.reply(200)
|
||||
const update = {
|
||||
pathname: '/main.tex',
|
||||
docLines: 'a\nb',
|
||||
doc: this.docId,
|
||||
meta: { user_id: this.user_id, ts: new Date() },
|
||||
}
|
||||
async.series(
|
||||
[
|
||||
cb =>
|
||||
ProjectHistoryClient.pushRawUpdate(this.projectId, update, cb),
|
||||
cb =>
|
||||
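// Backdate the first op by 24 hours so it falls outside the 3-hour (maxAge=10800s) cutoff.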
ProjectHistoryClient.setFirstOpTimestamp(
|
||||
this.projectId,
|
||||
Date.now() - 24 * 3600 * 1000,
|
||||
cb
|
||||
),
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('flushes the project history queue', function (done) {
|
||||
request.post(
|
||||
{
|
||||
url: 'http://127.0.0.1:3054/flush/old?maxAge=10800',
|
||||
},
|
||||
(error, res, body) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
expect(res.statusCode).to.equal(200)
|
||||
assert(
|
||||
this.flushCall.isDone(),
|
||||
'made calls to history service to store updates'
|
||||
)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('flushes the project history queue in the background when requested', function (done) {
|
||||
request.post(
|
||||
{
|
||||
url: 'http://127.0.0.1:3054/flush/old?maxAge=10800&background=1',
|
||||
},
|
||||
(error, res, body) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
expect(res.statusCode).to.equal(200)
|
||||
expect(body).to.equal('{"message":"running flush in background"}')
|
||||
assert(
|
||||
!this.flushCall.isDone(),
|
||||
'did not make calls to history service to store updates in the foreground'
|
||||
)
|
||||
setTimeout(() => {
|
||||
assert(
|
||||
this.flushCall.isDone(),
|
||||
'made calls to history service to store updates in the background'
|
||||
)
|
||||
done()
|
||||
}, 100)
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the update is newer than the cutoff', function () {
|
||||
beforeEach(function (done) {
|
||||
this.flushCall = MockHistoryStore()
|
||||
.put(
|
||||
`/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb`
|
||||
)
|
||||
.reply(201)
|
||||
.post(`/api/projects/${historyId}/legacy_changes?end_version=0`)
|
||||
.reply(200)
|
||||
const update = {
|
||||
pathname: '/main.tex',
|
||||
docLines: 'a\nb',
|
||||
doc: this.docId,
|
||||
meta: { user_id: this.user_id, ts: new Date() },
|
||||
}
|
||||
async.series(
|
||||
[
|
||||
cb =>
|
||||
ProjectHistoryClient.pushRawUpdate(this.projectId, update, cb),
|
||||
cb =>
|
||||
ProjectHistoryClient.setFirstOpTimestamp(
|
||||
this.projectId,
|
||||
Date.now() - 60 * 1000,
|
||||
cb
|
||||
),
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('does not flush the project history queue', function (done) {
|
||||
request.post(
|
||||
{
|
||||
url: `http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}`,
|
||||
},
|
||||
(error, res, body) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
expect(res.statusCode).to.equal(200)
|
||||
assert(
|
||||
!this.flushCall.isDone(),
|
||||
'did not make calls to history service to store updates'
|
||||
)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the update does not have a timestamp', function () {
|
||||
beforeEach(function (done) {
|
||||
this.flushCall = MockHistoryStore()
|
||||
.put(
|
||||
`/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb`
|
||||
)
|
||||
.reply(201)
|
||||
.post(`/api/projects/${historyId}/legacy_changes?end_version=0`)
|
||||
.reply(200)
|
||||
const update = {
|
||||
pathname: '/main.tex',
|
||||
docLines: 'a\nb',
|
||||
doc: this.docId,
|
||||
meta: { user_id: this.user_id, ts: new Date() },
|
||||
}
|
||||
this.startDate = Date.now()
|
||||
async.series(
|
||||
[
|
||||
cb =>
|
||||
ProjectHistoryClient.pushRawUpdate(this.projectId, update, cb),
|
||||
cb =>
|
||||
ProjectHistoryClient.clearFirstOpTimestamp(this.projectId, cb),
|
||||
],
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('flushes the project history queue anyway', function (done) {
|
||||
request.post(
|
||||
{
|
||||
url: `http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}`,
|
||||
},
|
||||
(error, res, body) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
expect(res.statusCode).to.equal(200)
|
||||
assert(
|
||||
this.flushCall.isDone(),
|
||||
'made calls to history service to store updates'
|
||||
)
|
||||
ProjectHistoryClient.getFirstOpTimestamp(
|
||||
this.projectId,
|
||||
(err, result) => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
expect(result).to.be.null
|
||||
done()
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
@@ -0,0 +1,158 @@
|
||||
import { expect } from 'chai'
|
||||
import mongodb from 'mongodb-legacy'
|
||||
import nock from 'nock'
|
||||
import Core from 'overleaf-editor-core'
|
||||
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
|
||||
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
|
||||
import latestChunk from '../fixtures/chunks/7-8.json' with { type: 'json' }
|
||||
import previousChunk from '../fixtures/chunks/4-6.json' with { type: 'json' }
|
||||
import firstChunk from '../fixtures/chunks/0-3.json' with { type: 'json' }
|
||||
const { ObjectId } = mongodb
|
||||
|
||||
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
|
||||
const MockWeb = () => nock('http://127.0.0.1:3000')
|
||||
|
||||
const fixture = path => new URL(`../fixtures/${path}`, import.meta.url)
|
||||
|
||||
describe('GetChangesInChunkSince', function () {
|
||||
let projectId, historyId
|
||||
beforeEach(function (done) {
|
||||
projectId = new ObjectId().toString()
|
||||
historyId = new ObjectId().toString()
|
||||
ProjectHistoryApp.ensureRunning(error => {
|
||||
if (error) throw error
|
||||
|
||||
MockHistoryStore().post('/api/projects').reply(200, {
|
||||
projectId: historyId,
|
||||
})
|
||||
|
||||
ProjectHistoryClient.initializeProject(historyId, (error, olProject) => {
|
||||
if (error) throw error
|
||||
MockWeb()
|
||||
.get(`/project/${projectId}/details`)
|
||||
.reply(200, {
|
||||
name: 'Test Project',
|
||||
overleaf: { history: { id: olProject.id } },
|
||||
})
|
||||
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${historyId}/latest/history`)
|
||||
.replyWithFile(200, fixture('chunks/7-8.json'))
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${historyId}/versions/7/history`)
|
||||
.replyWithFile(200, fixture('chunks/7-8.json'))
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${historyId}/versions/6/history`)
|
||||
.replyWithFile(200, fixture('chunks/7-8.json'))
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${historyId}/versions/5/history`)
|
||||
.replyWithFile(200, fixture('chunks/4-6.json'))
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${historyId}/versions/4/history`)
|
||||
.replyWithFile(200, fixture('chunks/4-6.json'))
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${historyId}/versions/3/history`)
|
||||
.replyWithFile(200, fixture('chunks/4-6.json'))
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${historyId}/versions/2/history`)
|
||||
.replyWithFile(200, fixture('chunks/0-3.json'))
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${historyId}/versions/1/history`)
|
||||
.replyWithFile(200, fixture('chunks/0-3.json'))
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${historyId}/versions/0/history`)
|
||||
.replyWithFile(200, fixture('chunks/0-3.json'))
|
||||
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
nock.cleanAll()
|
||||
})
|
||||
|
||||
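// Fetches the changes since the given version from the chunk containing it and asserts their count and contents.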
function expectChangesSince(version, n, changes, done) {
|
||||
ProjectHistoryClient.getChangesInChunkSince(
|
||||
projectId,
|
||||
version,
|
||||
{},
|
||||
(error, got) => {
|
||||
if (error) throw error
|
||||
expect(got.latestStartVersion).to.equal(6)
|
||||
expect(got.changes).to.have.length(n)
|
||||
expect(got.changes.map(c => Core.Change.fromRaw(c))).to.deep.equal(
|
||||
changes.map(c => Core.Change.fromRaw(c))
|
||||
)
|
||||
done()
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
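// Each key is a "since" version; the value gives the expected number of changes and their contents.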
const cases = {
|
||||
8: {
|
||||
name: 'when up-to-date, return zero changes',
|
||||
n: 0,
|
||||
changes: [],
|
||||
},
|
||||
7: {
|
||||
name: 'when one version behind, return one change',
|
||||
n: 1,
|
||||
changes: latestChunk.chunk.history.changes.slice(1),
|
||||
},
|
||||
6: {
|
||||
name: 'when at current chunk boundary, return latest chunk in full',
|
||||
n: 2,
|
||||
changes: latestChunk.chunk.history.changes,
|
||||
},
|
||||
5: {
|
||||
name: 'when one version behind last chunk, return one change',
|
||||
n: 1,
|
||||
changes: previousChunk.chunk.history.changes.slice(2),
|
||||
},
|
||||
4: {
|
||||
name: 'when in last chunk, return two changes',
|
||||
n: 2,
|
||||
changes: previousChunk.chunk.history.changes.slice(1),
|
||||
},
|
||||
3: {
|
||||
name: 'when at previous chunk boundary, return just the previous chunk',
|
||||
n: 3,
|
||||
changes: previousChunk.chunk.history.changes,
|
||||
},
|
||||
2: {
|
||||
name: 'when at end of first chunk, return one change',
|
||||
n: 1,
|
||||
changes: firstChunk.chunk.history.changes.slice(2),
|
||||
},
|
||||
1: {
|
||||
name: 'when in first chunk, return two changes',
|
||||
n: 2,
|
||||
changes: firstChunk.chunk.history.changes.slice(1),
|
||||
},
|
||||
0: {
|
||||
name: 'when from zero, return just the first chunk',
|
||||
n: 3,
|
||||
changes: firstChunk.chunk.history.changes,
|
||||
},
|
||||
}
|
||||
|
||||
for (const [since, { name, n, changes }] of Object.entries(cases)) {
|
||||
it(name, function (done) {
|
||||
expectChangesSince(since, n, changes, done)
|
||||
})
|
||||
}
|
||||
|
||||
it('should return an error when past the end version', function (done) {
|
||||
ProjectHistoryClient.getChangesInChunkSince(
|
||||
projectId,
|
||||
9,
|
||||
{ allowErrors: true },
|
||||
(error, _body, statusCode) => {
|
||||
if (error) throw error
|
||||
expect(statusCode).to.equal(400)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
@@ -0,0 +1,76 @@
/* eslint-disable
    no-undef,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
import { expect } from 'chai'
import settings from '@overleaf/settings'
import request from 'request'
import mongodb from 'mongodb-legacy'
import nock from 'nock'
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
const { ObjectId } = mongodb

const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockWeb = () => nock('http://127.0.0.1:3000')

describe('Health Check', function () {
  beforeEach(function (done) {
    const projectId = new ObjectId()
    const historyId = new ObjectId().toString()
    settings.history.healthCheck = { project_id: projectId }
    return ProjectHistoryApp.ensureRunning(error => {
      if (error != null) {
        throw error
      }
      MockHistoryStore().post('/api/projects').reply(200, {
        projectId: historyId,
      })
      MockHistoryStore()
        .get(`/api/projects/${historyId}/latest/history`)
        .reply(200, {
          chunk: {
            startVersion: 0,
            history: {
              snapshot: {},
              changes: [],
            },
          },
        })
      MockWeb()
        .get(`/project/${projectId}/details`)
        .reply(200, {
          name: 'Test Project',
          overleaf: {
            history: {
              id: historyId,
            },
          },
        })

      return ProjectHistoryClient.initializeProject(historyId, done)
    })
  })

  return it('should respond to the health check', function (done) {
    return request.get(
      {
        url: 'http://127.0.0.1:3054/health_check',
      },
      (error, res, body) => {
        if (error != null) {
          return done(error)
        }
        expect(res.statusCode).to.equal(200)
        return done()
      }
    )
  })
})
282
services/project-history/test/acceptance/js/LabelsTests.js
Normal file
@@ -0,0 +1,282 @@
|
||||
import { expect } from 'chai'
|
||||
import mongodb from 'mongodb-legacy'
|
||||
import nock from 'nock'
|
||||
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
|
||||
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
|
||||
const { ObjectId } = mongodb
|
||||
|
||||
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
|
||||
const MockWeb = () => nock('http://127.0.0.1:3000')
|
||||
|
||||
const fixture = path => new URL(`../fixtures/${path}`, import.meta.url)
|
||||
|
||||
describe('Labels', function () {
|
||||
beforeEach(function (done) {
|
||||
ProjectHistoryApp.ensureRunning(error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
|
||||
this.historyId = new ObjectId().toString()
|
||||
MockHistoryStore().post('/api/projects').reply(200, {
|
||||
projectId: this.historyId,
|
||||
})
|
||||
|
||||
ProjectHistoryClient.initializeProject(
|
||||
this.historyId,
|
||||
(error, olProject) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
this.project_id = new ObjectId().toString()
|
||||
MockWeb()
|
||||
.get(`/project/${this.project_id}/details`)
|
||||
.reply(200, {
|
||||
name: 'Test Project',
|
||||
overleaf: { history: { id: olProject.id } },
|
||||
})
|
||||
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/latest/history`)
|
||||
.replyWithFile(200, fixture('chunks/7-8.json'))
|
||||
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/7/history`)
|
||||
.replyWithFile(200, fixture('chunks/7-8.json'))
|
||||
.persist()
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/versions/8/history`)
|
||||
.replyWithFile(200, fixture('chunks/7-8.json'))
|
||||
.persist()
|
||||
|
||||
this.comment = 'a saved version comment'
|
||||
this.comment2 = 'another saved version comment'
|
||||
this.user_id = new ObjectId().toString()
|
||||
this.created_at = new Date(1)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
nock.cleanAll()
|
||||
})
|
||||
|
||||
it('can create and get labels', function (done) {
|
||||
ProjectHistoryClient.createLabel(
|
||||
this.project_id,
|
||||
this.user_id,
|
||||
7,
|
||||
this.comment,
|
||||
this.created_at,
|
||||
(error, label) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
ProjectHistoryClient.getLabels(this.project_id, (error, labels) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(labels).to.deep.equal([label])
|
||||
done()
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('can create and get labels with no user id', function (done) {
|
||||
const userId = undefined
|
||||
ProjectHistoryClient.createLabel(
|
||||
this.project_id,
|
||||
userId,
|
||||
7,
|
||||
this.comment,
|
||||
this.created_at,
|
||||
(error, label) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
ProjectHistoryClient.getLabels(this.project_id, (error, labels) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(labels).to.deep.equal([label])
|
||||
done()
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('can delete labels', function (done) {
|
||||
ProjectHistoryClient.createLabel(
|
||||
this.project_id,
|
||||
this.user_id,
|
||||
7,
|
||||
this.comment,
|
||||
this.created_at,
|
||||
(error, label) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
ProjectHistoryClient.deleteLabel(this.project_id, label.id, error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
ProjectHistoryClient.getLabels(this.project_id, (error, labels) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(labels).to.deep.equal([])
|
||||
done()
|
||||
})
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('can delete labels for the current user', function (done) {
|
||||
ProjectHistoryClient.createLabel(
|
||||
this.project_id,
|
||||
this.user_id,
|
||||
7,
|
||||
this.comment,
|
||||
this.created_at,
|
||||
(error, label) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
ProjectHistoryClient.deleteLabelForUser(
|
||||
this.project_id,
|
||||
this.user_id,
|
||||
label.id,
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
ProjectHistoryClient.getLabels(this.project_id, (error, labels) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(labels).to.deep.equal([])
|
||||
done()
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('can transfer ownership of labels', function (done) {
|
||||
const fromUser = new ObjectId().toString()
|
||||
const toUser = new ObjectId().toString()
|
||||
ProjectHistoryClient.createLabel(
|
||||
this.project_id,
|
||||
fromUser,
|
||||
7,
|
||||
this.comment,
|
||||
this.created_at,
|
||||
(error, label) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
ProjectHistoryClient.createLabel(
|
||||
this.project_id,
|
||||
fromUser,
|
||||
7,
|
||||
this.comment2,
|
||||
this.created_at,
|
||||
(error, label2) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
ProjectHistoryClient.transferLabelOwnership(
|
||||
fromUser,
|
||||
toUser,
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
ProjectHistoryClient.getLabels(
|
||||
this.project_id,
|
||||
(error, labels) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(labels).to.deep.equal([
|
||||
{
|
||||
id: label.id,
|
||||
comment: label.comment,
|
||||
version: label.version,
|
||||
created_at: label.created_at,
|
||||
user_id: toUser,
|
||||
},
|
||||
{
|
||||
id: label2.id,
|
||||
comment: label2.comment,
|
||||
version: label2.version,
|
||||
created_at: label2.created_at,
|
||||
user_id: toUser,
|
||||
},
|
||||
])
|
||||
done()
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return labels with summarized updates', function (done) {
|
||||
ProjectHistoryClient.createLabel(
|
||||
this.project_id,
|
||||
this.user_id,
|
||||
8,
|
||||
this.comment,
|
||||
this.created_at,
|
||||
(error, label) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
ProjectHistoryClient.getSummarizedUpdates(
|
||||
this.project_id,
|
||||
{ min_count: 1 },
|
||||
(error, updates) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
expect(updates).to.deep.equal({
|
||||
nextBeforeTimestamp: 6,
|
||||
updates: [
|
||||
{
|
||||
fromV: 6,
|
||||
toV: 8,
|
||||
meta: {
|
||||
users: ['5a5637efdac84e81b71014c4', 31],
|
||||
start_ts: 1512383567277,
|
||||
end_ts: 1512383572877,
|
||||
},
|
||||
pathnames: ['bar.tex', 'main.tex'],
|
||||
project_ops: [],
|
||||
labels: [
|
||||
{
|
||||
id: label.id.toString(),
|
||||
comment: this.comment,
|
||||
version: 8,
|
||||
user_id: this.user_id,
|
||||
created_at: this.created_at.toISOString(),
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
})
|
||||
done()
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
@@ -0,0 +1,78 @@
|
||||
import { expect } from 'chai'
|
||||
import mongodb from 'mongodb-legacy'
|
||||
import nock from 'nock'
|
||||
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
|
||||
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
|
||||
const { ObjectId } = mongodb
|
||||
|
||||
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
|
||||
const MockWeb = () => nock('http://127.0.0.1:3000')
|
||||
|
||||
const fixture = path => new URL(`../fixtures/${path}`, import.meta.url)
|
||||
|
||||
describe('LatestSnapshot', function () {
|
||||
beforeEach(function (done) {
|
||||
ProjectHistoryApp.ensureRunning(error => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
|
||||
this.historyId = new ObjectId().toString()
|
||||
MockHistoryStore().post('/api/projects').reply(200, {
|
||||
projectId: this.historyId,
|
||||
})
|
||||
|
||||
ProjectHistoryClient.initializeProject(
|
||||
this.historyId,
|
||||
(error, v1Project) => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
this.projectId = new ObjectId().toString()
|
||||
MockWeb()
|
||||
.get(`/project/${this.projectId}/details`)
|
||||
.reply(200, {
|
||||
name: 'Test Project',
|
||||
overleaf: { history: { id: v1Project.id } },
|
||||
})
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
nock.cleanAll()
|
||||
})
|
||||
|
||||
it('should return the snapshot with applied changes, metadata and without full content', function (done) {
|
||||
MockHistoryStore()
|
||||
.get(`/api/projects/${this.historyId}/latest/history`)
|
||||
.replyWithFile(200, fixture('chunks/0-3.json'))
|
||||
|
||||
ProjectHistoryClient.getLatestSnapshot(this.projectId, (error, body) => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
expect(body).to.deep.equal({
|
||||
snapshot: {
|
||||
files: {
|
||||
'main.tex': {
|
||||
hash: 'f28571f561d198b87c24cc6a98b78e87b665e22d',
|
||||
stringLength: 20649,
|
||||
operations: [{ textOperation: [1912, 'Hello world', 18726] }],
|
||||
metadata: { main: true },
|
||||
},
|
||||
'foo.tex': {
|
||||
hash: '4f785a4c192155b240e3042b3a7388b47603f423',
|
||||
stringLength: 41,
|
||||
operations: [{ textOperation: [26, '\n\nFour five six'] }],
|
||||
},
|
||||
},
|
||||
},
|
||||
version: 3,
|
||||
})
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
@@ -0,0 +1,298 @@
|
||||
import { expect } from 'chai'
|
||||
import mongodb from 'mongodb-legacy'
|
||||
import nock from 'nock'
|
||||
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
|
||||
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
|
||||
const { ObjectId } = mongodb
|
||||
|
||||
const MockHistoryStore = () => nock('http://127.0.0.1:3100')
|
||||
const MockWeb = () => nock('http://127.0.0.1:3000')
|
||||
|
||||
const fixture = path => new URL(`../fixtures/${path}`, import.meta.url)
|
||||
|
||||
describe('ReadSnapshot', function () {
|
||||
beforeEach(function (done) {
|
||||
ProjectHistoryApp.ensureRunning(error => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
|
||||
      this.historyId = new ObjectId().toString()
      MockHistoryStore().post('/api/projects').reply(200, {
        projectId: this.historyId,
      })

      ProjectHistoryClient.initializeProject(
        this.historyId,
        (error, v1Project) => {
          if (error) {
            throw error
          }
          this.projectId = new ObjectId().toString()
          MockWeb()
            .get(`/project/${this.projectId}/details`)
            .reply(200, {
              name: 'Test Project',
              overleaf: { history: { id: v1Project.id } },
            })
          done()
        }
      )
    })
  })

  afterEach(function () {
    nock.cleanAll()
  })

  describe('of a text file', function () {
    it('should return the snapshot of a doc at the given version', function (done) {
      MockHistoryStore()
        .get(`/api/projects/${this.historyId}/versions/5/history`)
        .replyWithFile(200, fixture('chunks/4-6.json'))
      MockHistoryStore()
        .get(
          `/api/projects/${this.historyId}/blobs/c6654ea913979e13e22022653d284444f284a172`
        )
        .replyWithFile(
          200,
          fixture('blobs/c6654ea913979e13e22022653d284444f284a172')
        )

      ProjectHistoryClient.getSnapshot(
        this.projectId,
        'foo.tex',
        5,
        (error, body) => {
          if (error) {
            throw error
          }
          expect(body).to.deep.equal(
            `\
Hello world

One two three

Four five six

Seven eight nine\
`.replace(/^\t/g, '')
          )
          done()
        }
      )
    })

    it('should return the snapshot of a doc at a different version', function (done) {
      MockHistoryStore()
        .get(`/api/projects/${this.historyId}/versions/4/history`)
        .replyWithFile(200, fixture('chunks/4-6.json'))
      MockHistoryStore()
        .get(
          `/api/projects/${this.historyId}/blobs/c6654ea913979e13e22022653d284444f284a172`
        )
        .replyWithFile(
          200,
          fixture('blobs/c6654ea913979e13e22022653d284444f284a172')
        )

      ProjectHistoryClient.getSnapshot(
        this.projectId,
        'foo.tex',
        4,
        (error, body) => {
          if (error) {
            throw error
          }
          expect(body).to.deep.equal(
            `\
Hello world

One two three

Four five six

Seven eight nince\
`.replace(/^\t/g, '')
          )
          done()
        }
      )
    })

    it('should return the snapshot of a doc after a rename version', function (done) {
      MockHistoryStore()
        .get(`/api/projects/${this.historyId}/versions/6/history`)
        .replyWithFile(200, fixture('chunks/4-6.json'))
      MockHistoryStore()
        .get(
          `/api/projects/${this.historyId}/blobs/c6654ea913979e13e22022653d284444f284a172`
        )
        .replyWithFile(
          200,
          fixture('blobs/c6654ea913979e13e22022653d284444f284a172')
        )

      ProjectHistoryClient.getSnapshot(
        this.projectId,
        'bar.tex',
        6,
        (error, body) => {
          if (error) {
            throw error
          }
          expect(body).to.deep.equal(
            `\
Hello world

One two three

Four five six

Seven eight nine\
`.replace(/^\t/g, '')
          )
          done()
        }
      )
    })
  })

  describe('of a binary file', function () {
    beforeEach(function () {
      MockHistoryStore()
        .get(`/api/projects/${this.historyId}/versions/4/history`)
        .reply(200, {
          chunk: {
            history: {
              snapshot: {
                files: {
                  binary_file: {
                    hash: 'c6654ea913979e13e22022653d284444f284a172',
                    byteLength: 41,
                  },
                },
              },
              changes: [],
            },
            startVersion: 3,
          },
          authors: [],
        })
    })

    it('should return the snapshot of the file at the given version', function (done) {
      MockHistoryStore()
        .get(
          `/api/projects/${this.historyId}/blobs/c6654ea913979e13e22022653d284444f284a172`
        )
        .replyWithFile(
          200,
          fixture('blobs/c6654ea913979e13e22022653d284444f284a172')
        )

      ProjectHistoryClient.getSnapshot(
        this.projectId,
        'binary_file',
        4,
        (error, body) => {
          if (error) {
            throw error
          }
          expect(body).to.deep.equal(
            `\
Hello world

One two three

Four five six\
`.replace(/^\t/g, '')
          )
          done()
        }
      )
    })

    it("should return an error when the blob doesn't exist", function (done) {
      MockHistoryStore()
        .get(`/api/projects/${this.historyId}/versions/4/history`)
        .reply(200, {
          chunk: {
            history: {
              snapshot: {
                files: {
                  binary_file: {
                    hash: 'c6654ea913979e13e22022653d284444f284a172',
                    byteLength: 41,
                  },
                },
              },
              changes: [],
            },
            startVersion: 3,
          },
          authors: [],
        })
      MockHistoryStore()
        .get(
          `/api/projects/${this.historyId}/blobs/c6654ea913979e13e22022653d284444f284a172`
        )
        .reply(404)

      ProjectHistoryClient.getSnapshot(
        this.projectId,
        'binary_file',
        4,
        { allowErrors: true },
        (error, body, statusCode) => {
          if (error) {
            throw error
          }
          expect(statusCode).to.equal(500)
          done()
        }
      )
    })

    it('should return an error when the blob request errors', function (done) {
      MockHistoryStore()
        .get(`/api/projects/${this.historyId}/versions/4/history`)
        .reply(200, {
          chunk: {
            history: {
              snapshot: {
                files: {
                  binary_file: {
                    hash: 'c6654ea913979e13e22022653d284444f284a172',
                    byteLength: 41,
                  },
                },
              },
              changes: [],
            },
            startVersion: 3,
          },
          authors: [],
        })
      MockHistoryStore()
        .get(
          `/api/projects/${this.historyId}/blobs/c6654ea913979e13e22022653d284444f284a172`
        )
        .replyWithError('oh no!')

      ProjectHistoryClient.getSnapshot(
        this.projectId,
        'binary_file',
        4,
        { allowErrors: true },
        (error, body, statusCode) => {
          if (error) {
            throw error
          }
          expect(statusCode).to.equal(500)
          done()
        }
      )
    })
  })
})
194 services/project-history/test/acceptance/js/RetryTests.js (Normal file)
@@ -0,0 +1,194 @@
import async from 'async'
import nock from 'nock'
import { expect } from 'chai'
import request from 'request'
import assert from 'node:assert'
import mongodb from 'mongodb-legacy'
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
const { ObjectId } = mongodb

const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockWeb = () => nock('http://127.0.0.1:3000')

const MockCallback = () => nock('http://127.0.0.1')

describe('Retrying failed projects', function () {
  const historyId = new ObjectId().toString()

  beforeEach(function (done) {
    this.timestamp = new Date()

    ProjectHistoryApp.ensureRunning(error => {
      if (error) {
        throw error
      }
      this.project_id = new ObjectId().toString()
      this.doc_id = new ObjectId().toString()
      this.file_id = new ObjectId().toString()

      MockHistoryStore().post('/api/projects').reply(200, {
        projectId: historyId,
      })
      MockWeb()
        .get(`/project/${this.project_id}/details`)
        .reply(200, {
          name: 'Test Project',
          overleaf: {
            history: {
              id: historyId,
            },
          },
        })
      MockHistoryStore()
        .get(`/api/projects/${historyId}/latest/history`)
        .reply(200, {
          chunk: {
            startVersion: 0,
            history: {
              changes: [],
            },
          },
        })
      ProjectHistoryClient.initializeProject(historyId, done)
    })
  })

  afterEach(function () {
    nock.cleanAll()
  })

  describe('retrying project history', function () {
    describe('when there is a soft failure', function () {
      beforeEach(function (done) {
        this.flushCall = MockHistoryStore()
          .put(
            `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb`
          )
          .reply(201)
          .post(`/api/projects/${historyId}/legacy_changes?end_version=0`)
          .reply(200)
        const update = {
          pathname: '/main.tex',
          docLines: 'a\nb',
          doc: this.doc_id,
          meta: { user_id: this.user_id, ts: new Date() },
        }
        async.series(
          [
            cb =>
              ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb),
            cb =>
              ProjectHistoryClient.setFailure(
                {
                  project_id: this.project_id,
                  attempts: 1,
                  error: 'soft-error',
                },
                cb
              ),
          ],
          done
        )
      })

      it('flushes the project history queue', function (done) {
        request.post(
          {
            url: 'http://127.0.0.1:3054/retry/failures?failureType=soft&limit=1&timeout=10000',
          },
          (error, res, body) => {
            if (error) {
              return done(error)
            }
            expect(res.statusCode).to.equal(200)
            assert(
              this.flushCall.isDone(),
              'made calls to history service to store updates'
            )
            done()
          }
        )
      })

      it('retries in the background when requested', function (done) {
        this.callback = MockCallback()
          .matchHeader('Authorization', '123')
          .get('/ping')
          .reply(200)
        request.post(
          {
            url: 'http://127.0.0.1:3054/retry/failures?failureType=soft&limit=1&timeout=10000&callbackUrl=http%3A%2F%2F127.0.0.1%2Fping',
            headers: {
              'X-CALLBACK-Authorization': '123',
            },
          },
          (error, res, body) => {
            if (error) {
              return done(error)
            }
            expect(res.statusCode).to.equal(200)
            expect(body).to.equal(
              '{"retryStatus":"running retryFailures in background"}'
            )
            assert(
              !this.flushCall.isDone(),
              'did not make calls to history service to store updates in the foreground'
            )
            setTimeout(() => {
              assert(
                this.flushCall.isDone(),
                'made calls to history service to store updates in the background'
              )
              assert(this.callback.isDone(), 'hit the callback url')
              done()
            }, 100)
          }
        )
      })
    })

    describe('when there is a hard failure', function () {
      beforeEach(function (done) {
        MockWeb()
          .get(`/project/${this.project_id}/details`)
          .reply(200, {
            name: 'Test Project',
            overleaf: {
              history: {
                id: historyId,
              },
            },
          })
        ProjectHistoryClient.setFailure(
          {
            project_id: this.project_id,
            attempts: 100,
            error: 'hard-error',
          },
          done
        )
      })

      it('calls web to resync the project', function (done) {
        const resyncCall = MockWeb()
          .post(`/project/${this.project_id}/history/resync`)
          .reply(200)

        request.post(
          {
            url: 'http://127.0.0.1:3054/retry/failures?failureType=hard&limit=1&timeout=10000',
          },
          (error, res, body) => {
            if (error) {
              return done(error)
            }
            expect(res.statusCode).to.equal(200)
            assert(resyncCall.isDone(), 'made a call to web to resync project')
            done()
          }
        )
      })
    })
  })
})
2197 services/project-history/test/acceptance/js/SendingUpdatesTests.js (Normal file)
File diff suppressed because it is too large
@@ -0,0 +1,249 @@
/* eslint-disable
    no-undef,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
import sinon from 'sinon'
import { expect } from 'chai'
import Settings from '@overleaf/settings'
import request from 'request'
import assert from 'node:assert'
import mongodb from 'mongodb-legacy'
import nock from 'nock'
import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
const { ObjectId } = mongodb

const MockHistoryStore = () => nock('http://127.0.0.1:3100')
const MockFileStore = () => nock('http://127.0.0.1:3009')
const MockWeb = () => nock('http://127.0.0.1:3000')

const fixture = path => new URL(`../fixtures/${path}`, import.meta.url)

describe('Summarized updates', function () {
  beforeEach(function (done) {
    this.projectId = new ObjectId().toString()
    this.historyId = new ObjectId().toString()
    return ProjectHistoryApp.ensureRunning(error => {
      if (error != null) {
        throw error
      }

      MockHistoryStore().post('/api/projects').reply(200, {
        projectId: this.historyId,
      })

      return ProjectHistoryClient.initializeProject(
        this.historyId,
        (error, olProject) => {
          if (error != null) {
            throw error
          }
          MockWeb()
            .get(`/project/${this.projectId}/details`)
            .reply(200, {
              name: 'Test Project',
              overleaf: { history: { id: olProject.id } },
            })

          MockHistoryStore()
            .get(`/api/projects/${this.historyId}/latest/history`)
            .replyWithFile(200, fixture('chunks/7-8.json'))
          MockHistoryStore()
            .get(`/api/projects/${this.historyId}/versions/6/history`)
            .replyWithFile(200, fixture('chunks/4-6.json'))
          MockHistoryStore()
            .get(`/api/projects/${this.historyId}/versions/3/history`)
            .replyWithFile(200, fixture('chunks/0-3.json'))

          return done()
        }
      )
    })
  })

  afterEach(function () {
    return nock.cleanAll()
  })

  it('should return the latest summarized updates from a single chunk', function (done) {
    return ProjectHistoryClient.getSummarizedUpdates(
      this.projectId,
      { min_count: 1 },
      (error, updates) => {
        if (error != null) {
          throw error
        }
        expect(updates).to.deep.equal({
          nextBeforeTimestamp: 6,
          updates: [
            {
              fromV: 6,
              toV: 8,
              meta: {
                users: ['5a5637efdac84e81b71014c4', 31],
                start_ts: 1512383567277,
                end_ts: 1512383572877,
              },
              pathnames: ['bar.tex', 'main.tex'],
              project_ops: [],
              labels: [],
            },
          ],
        })
        return done()
      }
    )
  })

  it('should return the latest summarized updates, with min_count spanning multiple chunks', function (done) {
    return ProjectHistoryClient.getSummarizedUpdates(
      this.projectId,
      { min_count: 5 },
      (error, updates) => {
        if (error != null) {
          throw error
        }
        expect(updates).to.deep.equal({
          updates: [
            {
              fromV: 6,
              toV: 8,
              meta: {
                users: ['5a5637efdac84e81b71014c4', 31],
                start_ts: 1512383567277,
                end_ts: 1512383572877,
              },
              pathnames: ['bar.tex', 'main.tex'],
              project_ops: [],
              labels: [],
            },
            {
              fromV: 5,
              toV: 6,
              meta: {
                users: [31],
                start_ts: 1512383366120,
                end_ts: 1512383366120,
              },
              pathnames: [],
              project_ops: [
                {
                  atV: 5,
                  rename: {
                    pathname: 'foo.tex',
                    newPathname: 'bar.tex',
                  },
                },
              ],
              labels: [],
            },
            {
              fromV: 2,
              toV: 5,
              meta: {
                users: [31],
                start_ts: 1512383313724,
                end_ts: 1512383362905,
              },
              pathnames: ['foo.tex'],
              project_ops: [],
              labels: [],
            },
            {
              fromV: 1,
              toV: 2,
              meta: {
                users: [31],
                start_ts: 1512383246874,
                end_ts: 1512383246874,
              },
              pathnames: [],
              project_ops: [
                {
                  atV: 1,
                  rename: {
                    pathname: 'bar.tex',
                    newPathname: 'foo.tex',
                  },
                },
              ],
              labels: [],
            },
            {
              fromV: 0,
              toV: 1,
              meta: {
                users: [31],
                start_ts: 1512383015633,
                end_ts: 1512383015633,
              },
              pathnames: ['main.tex'],
              project_ops: [],
              labels: [],
            },
          ],
        })
        return done()
      }
    )
  })

  it('should return the summarized updates from a before version at the start of a chunk', function (done) {
    MockHistoryStore()
      .get(`/api/projects/${this.historyId}/versions/4/history`)
      .replyWithFile(200, fixture('chunks/4-6.json'))
    return ProjectHistoryClient.getSummarizedUpdates(
      this.projectId,
      { before: 4 },
      (error, updates) => {
        if (error != null) {
          throw error
        }
        expect(updates.updates[0].toV).to.equal(4)
        return done()
      }
    )
  })

  it('should return the summarized updates from a before version in the middle of a chunk', function (done) {
    MockHistoryStore()
      .get(`/api/projects/${this.historyId}/versions/5/history`)
      .replyWithFile(200, fixture('chunks/4-6.json'))
    return ProjectHistoryClient.getSummarizedUpdates(
      this.projectId,
      { before: 5 },
      (error, updates) => {
        if (error != null) {
          throw error
        }
        expect(updates.updates[0].toV).to.equal(5)
        return done()
      }
    )
  })

  return it('should return the summarized updates from a before version at the end of a chunk', function (done) {
    MockHistoryStore()
      .get(`/api/projects/${this.historyId}/versions/6/history`)
      .replyWithFile(200, fixture('chunks/4-6.json'))
    return ProjectHistoryClient.getSummarizedUpdates(
      this.projectId,
      { before: 6 },
      (error, updates) => {
        if (error != null) {
          throw error
        }
        expect(updates.updates[0].toV).to.equal(6)
        return done()
      }
    )
  })
})
1543 services/project-history/test/acceptance/js/SyncTests.js (Normal file)
File diff suppressed because it is too large
@@ -0,0 +1,7 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
let id = 0

export function nextId() {
  return id++
}
@@ -0,0 +1,41 @@
/* eslint-disable
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
import { expect } from 'chai'
import request from 'request'
import Settings from '@overleaf/settings'

export function getLatestContent(olProjectId, callback) {
  if (callback == null) {
    callback = function () {}
  }
  return request.get(
    {
      url: `${Settings.overleaf.history.host}/projects/${olProjectId}/latest/content`,
      auth: {
        user: Settings.overleaf.history.user,
        pass: Settings.overleaf.history.pass,
        sendImmediately: true,
      },
    },
    (error, res, body) => {
      // Surface transport errors before touching the response.
      if (error) {
        return callback(error)
      }
      if (res.statusCode < 200 || res.statusCode >= 300) {
        // Return early so the callback is not invoked a second time below.
        return callback(
          new Error(
            `history store returned a non-success status code: ${res.statusCode}`
          )
        )
      }

      return callback(error, JSON.parse(body))
    }
  )
}
@@ -0,0 +1,41 @@
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
import { app } from '../../../../app/js/server.js'

let running = false
let initing = false
const callbacks = []

export function ensureRunning(callback) {
  if (callback == null) {
    callback = function () {}
  }
  if (running) {
    return callback()
  } else if (initing) {
    return callbacks.push(callback)
  }
  initing = true
  callbacks.push(callback)
  app.listen(3054, '127.0.0.1', error => {
    if (error != null) {
      throw error
    }
    running = true
    return (() => {
      const result = []
      for (callback of Array.from(callbacks)) {
        result.push(callback())
      }
      return result
    })()
  })
}
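For orientation only, and not part of the committed files: a minimal sketch of how `ensureRunning` is meant to be driven from a Mocha `beforeEach`, mirroring the acceptance tests above. The stubbed history-store URL matches the tests; the `sketch-history-id` value is an illustrative assumption.

import nock from 'nock'
import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'

// Sketch only: boot the service once, then register HTTP stubs per test.
describe('example wiring (sketch only)', function () {
  beforeEach(function (done) {
    ProjectHistoryApp.ensureRunning(error => {
      if (error) {
        throw error
      }
      // Stub the history store the same way the tests above do.
      nock('http://127.0.0.1:3100')
        .post('/api/projects')
        .reply(200, { projectId: 'sketch-history-id' }) // hypothetical id
      done()
    })
  })

  afterEach(function () {
    nock.cleanAll()
  })

  it('starts the service and registers stubs', function () {
    // Intentionally empty: the setup above is the point of the sketch.
  })
})

Because `ensureRunning` queues callbacks while the server is still binding, calling it from every `beforeEach` is safe; only the first call actually starts the app.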
@@ -0,0 +1,354 @@
import { expect } from 'chai'
import request from 'request'
import Settings from '@overleaf/settings'
import RedisWrapper from '@overleaf/redis-wrapper'
import { db } from '../../../../app/js/mongodb.js'

const rclient = RedisWrapper.createClient(Settings.redis.project_history)
const Keys = Settings.redis.project_history.key_schema

export function resetDatabase(callback) {
  rclient.flushdb(callback)
}

export function initializeProject(historyId, callback) {
  request.post(
    {
      url: 'http://127.0.0.1:3054/project',
      json: { historyId },
    },
    (error, res, body) => {
      if (error) {
        return callback(error)
      }
      expect(res.statusCode).to.equal(200)
      callback(null, body.project)
    }
  )
}

export function flushProject(projectId, options, callback) {
  if (typeof options === 'function') {
    callback = options
    options = null
  }
  if (!options) {
    options = { allowErrors: false }
  }
  request.post(
    {
      url: `http://127.0.0.1:3054/project/${projectId}/flush`,
    },
    (error, res, body) => {
      if (error) {
        return callback(error)
      }
      if (!options.allowErrors) {
        expect(res.statusCode).to.equal(204)
      }
      callback(error, res)
    }
  )
}

export function getSummarizedUpdates(projectId, query, callback) {
  request.get(
    {
      url: `http://127.0.0.1:3054/project/${projectId}/updates`,
      qs: query,
      json: true,
    },
    (error, res, body) => {
      if (error) {
        return callback(error)
      }
      expect(res.statusCode).to.equal(200)
      callback(error, body)
    }
  )
}

export function getDiff(projectId, pathname, from, to, callback) {
  request.get(
    {
      url: `http://127.0.0.1:3054/project/${projectId}/diff`,
      qs: {
        pathname,
        from,
        to,
      },
      json: true,
    },
    (error, res, body) => {
      if (error) {
        return callback(error)
      }
      expect(res.statusCode).to.equal(200)
      callback(error, body)
    }
  )
}

export function getFileTreeDiff(projectId, from, to, callback) {
  request.get(
    {
      url: `http://127.0.0.1:3054/project/${projectId}/filetree/diff`,
      qs: {
        from,
        to,
      },
      json: true,
    },
    (error, res, body) => {
      if (error) {
        return callback(error)
      }
      callback(error, body, res.statusCode)
    }
  )
}

export function getChangesInChunkSince(projectId, since, options, callback) {
  request.get(
    {
      url: `http://127.0.0.1:3054/project/${projectId}/changes-in-chunk`,
      qs: {
        since,
      },
      json: true,
    },
    (error, res, body) => {
      if (error) return callback(error)
      if (!options.allowErrors) {
        expect(res.statusCode).to.equal(200)
      }
      callback(null, body, res.statusCode)
    }
  )
}

export function getLatestSnapshot(projectId, callback) {
  request.get(
    {
      url: `http://127.0.0.1:3054/project/${projectId}/snapshot`,
      json: true,
    },
    (error, res, body) => {
      if (error) {
        return callback(error)
      }
      expect(res.statusCode).to.equal(200)
      callback(null, body)
    }
  )
}

export function getSnapshot(projectId, pathname, version, options, callback) {
  if (typeof options === 'function') {
    callback = options
    options = null
  }
  if (!options) {
    options = { allowErrors: false }
  }
  request.get(
    {
      url: `http://127.0.0.1:3054/project/${projectId}/version/${version}/${encodeURIComponent(
        pathname
      )}`,
    },
    (error, res, body) => {
      if (error) {
        return callback(error)
      }
      if (!options.allowErrors) {
        expect(res.statusCode).to.equal(200)
      }
      callback(error, body, res.statusCode)
    }
  )
}

export function pushRawUpdate(projectId, update, callback) {
  rclient.rpush(
    Keys.projectHistoryOps({ project_id: projectId }),
    JSON.stringify(update),
    callback
  )
}

export function setFirstOpTimestamp(projectId, timestamp, callback) {
  rclient.set(
    Keys.projectHistoryFirstOpTimestamp({ project_id: projectId }),
    timestamp,
    callback
  )
}

export function getFirstOpTimestamp(projectId, callback) {
  rclient.get(
    Keys.projectHistoryFirstOpTimestamp({ project_id: projectId }),
    callback
  )
}

export function clearFirstOpTimestamp(projectId, callback) {
  rclient.del(
    Keys.projectHistoryFirstOpTimestamp({ project_id: projectId }),
    callback
  )
}

export function getQueueLength(projectId, callback) {
  rclient.llen(Keys.projectHistoryOps({ project_id: projectId }), callback)
}

export function getQueueCounts(callback) {
  return request.get(
    {
      url: 'http://127.0.0.1:3054/status/queue',
      json: true,
    },
    callback
  )
}

export function resyncHistory(projectId, callback) {
  request.post(
    {
      url: `http://127.0.0.1:3054/project/${projectId}/resync`,
      json: true,
      body: { origin: { kind: 'test-origin' } },
    },
    (error, res, body) => {
      if (error) {
        return callback(error)
      }
      expect(res.statusCode).to.equal(204)
      callback(error)
    }
  )
}

export function createLabel(
  projectId,
  userId,
  version,
  comment,
  createdAt,
  callback
) {
  request.post(
    {
      url: `http://127.0.0.1:3054/project/${projectId}/labels`,
      json: { comment, version, created_at: createdAt, user_id: userId },
    },
    (error, res, body) => {
      if (error) {
        return callback(error)
      }
      expect(res.statusCode).to.equal(200)
      callback(null, body)
    }
  )
}

export function getLabels(projectId, callback) {
  request.get(
    {
      url: `http://127.0.0.1:3054/project/${projectId}/labels`,
      json: true,
    },
    (error, res, body) => {
      if (error) {
        return callback(error)
      }
      expect(res.statusCode).to.equal(200)
      callback(null, body)
    }
  )
}

export function deleteLabelForUser(projectId, userId, labelId, callback) {
  request.delete(
    {
      url: `http://127.0.0.1:3054/project/${projectId}/user/${userId}/labels/${labelId}`,
    },
    (error, res, body) => {
      if (error) {
        return callback(error)
      }
      expect(res.statusCode).to.equal(204)
      callback(null, body)
    }
  )
}

export function deleteLabel(projectId, labelId, callback) {
  request.delete(
    {
      url: `http://127.0.0.1:3054/project/${projectId}/labels/${labelId}`,
    },
    (error, res, body) => {
      if (error) {
        return callback(error)
      }
      expect(res.statusCode).to.equal(204)
      callback(null, body)
    }
  )
}

export function setFailure(failureEntry, callback) {
  db.projectHistoryFailures.deleteOne(
    { project_id: { $exists: true } },
    (err, result) => {
      if (err) {
        return callback(err)
      }
      db.projectHistoryFailures.insertOne(failureEntry, callback)
    }
  )
}

export function getFailure(projectId, callback) {
  db.projectHistoryFailures.findOne({ project_id: projectId }, callback)
}

export function transferLabelOwnership(fromUser, toUser, callback) {
  request.post(
    {
      url: `http://127.0.0.1:3054/user/${fromUser}/labels/transfer/${toUser}`,
    },
    (error, res, body) => {
      if (error) {
        return callback(error)
      }
      expect(res.statusCode).to.equal(204)
      callback(null, body)
    }
  )
}

export function getDump(projectId, callback) {
  request.get(
    `http://127.0.0.1:3054/project/${projectId}/dump`,
    (err, res, body) => {
      if (err) {
        return callback(err)
      }
      expect(res.statusCode).to.equal(200)
      callback(null, JSON.parse(body))
    }
  )
}

export function deleteProject(projectId, callback) {
  request.delete(`http://127.0.0.1:3054/project/${projectId}`, (err, res) => {
    if (err) {
      return callback(err)
    }
    expect(res.statusCode).to.equal(204)
    callback()
  })
}
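As a closing aside, a hedged sketch (not part of this commit) of how these client helpers compose: boot the service, mock the history store at the same base URL the acceptance tests use, then initialise a project, queue a raw update, and flush it. The `sketch-*` ids are illustrative assumptions.

import nock from 'nock'
import * as ProjectHistoryApp from './ProjectHistoryApp.js'
import * as ProjectHistoryClient from './ProjectHistoryClient.js'

// Sketch only: wire the helpers together the way the acceptance tests do.
ProjectHistoryApp.ensureRunning(error => {
  if (error) {
    throw error
  }
  // Same mock endpoint the tests register before initialising a project.
  nock('http://127.0.0.1:3100')
    .post('/api/projects')
    .reply(200, { projectId: 'sketch-history-id' }) // hypothetical id

  ProjectHistoryClient.initializeProject('sketch-history-id', (err, project) => {
    if (err) {
      throw err
    }
    // Queue an update in Redis, then ask the service to flush it.
    ProjectHistoryClient.pushRawUpdate(
      'sketch-project-id', // hypothetical project id
      {
        pathname: '/main.tex',
        docLines: 'a\nb',
        doc: 'sketch-doc-id', // hypothetical doc id
        meta: { ts: new Date() },
      },
      pushErr => {
        if (pushErr) {
          throw pushErr
        }
        ProjectHistoryClient.flushProject('sketch-project-id', () => {})
      }
    )
  })
})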