first commit

services/filestore/test/acceptance/certs/.gitignore (new file, vendored, 2 lines)
@@ -0,0 +1,2 @@
*
!.gitignore

(new Dockerfile, 5 lines; path not shown in this diff)
@@ -0,0 +1,5 @@
FROM fsouza/fake-gcs-server:1.20
RUN apk add --update --no-cache curl
COPY healthcheck.sh /healthcheck.sh
HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://127.0.0.1:9090
CMD ["--port=9090", "--scheme=http"]

(new Dockerfile, 4 lines; path not shown in this diff)
@@ -0,0 +1,4 @@
FROM adobe/s3mock:2.4.14
RUN apk add --update --no-cache curl
COPY healthcheck.sh /healthcheck.sh
HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://127.0.0.1:9090

services/filestore/test/acceptance/deps/healthcheck.sh (new executable file, 9 lines)
@@ -0,0 +1,9 @@
#!/bin/sh

# health check to allow 404 status code as valid
STATUSCODE=$(curl --silent --output /dev/null --write-out "%{http_code}" "$1")
# will be 000 on non-http error (e.g. connection failure)
if test "$STATUSCODE" -ge 500 || test "$STATUSCODE" -lt 200; then
  exit 1
fi
exit 0

services/filestore/test/acceptance/js/FilestoreApp.js (new file, 42 lines)
@@ -0,0 +1,42 @@
const ObjectPersistor = require('@overleaf/object-persistor')
const Settings = require('@overleaf/settings')
const { promisify } = require('node:util')
const App = require('../../../app')
const FileHandler = require('../../../app/js/FileHandler')

class FilestoreApp {
  async runServer() {
    if (!this.server) {
      await new Promise((resolve, reject) => {
        this.server = App.listen(
          Settings.internal.filestore.port,
          '127.0.0.1',
          err => {
            if (err) {
              return reject(err)
            }
            resolve()
          }
        )
      })
    }

    this.persistor = ObjectPersistor({
      ...Settings.filestore,
      paths: Settings.path,
    })
    FileHandler._TESTONLYSwapPersistorManager(this.persistor)
  }

  async stop() {
    if (!this.server) return
    const closeServer = promisify(this.server.close).bind(this.server)
    try {
      await closeServer()
    } finally {
      delete this.server
    }
  }
}

module.exports = FilestoreApp
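
A minimal sketch of how a mocha suite might drive FilestoreApp from before/after hooks; the describe/it structure and the derived base URL are illustrative assumptions, not part of this commit:

// Illustrative only: assumes mocha globals and the same Settings module as above.
const FilestoreApp = require('./FilestoreApp')
const Settings = require('@overleaf/settings')

describe('filestore acceptance (sketch)', function () {
  const app = new FilestoreApp()

  before('start filestore app', async function () {
    await app.runServer()
  })

  after('stop filestore app', async function () {
    await app.stop()
  })

  it('exposes the app on the configured port', function () {
    // Requests would target this base URL, e.g. via node-fetch.
    const filestoreUrl = `http://127.0.0.1:${Settings.internal.filestore.port}`
    console.log('filestore under test at', filestoreUrl)
  })
})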

services/filestore/test/acceptance/js/FilestoreTests.js (new file, 1564 lines)
File diff suppressed because it is too large

services/filestore/test/acceptance/js/TestConfig.js (new file, 192 lines)
@@ -0,0 +1,192 @@
const fs = require('node:fs')
const Path = require('node:path')
const crypto = require('node:crypto')
const {
  RootKeyEncryptionKey,
} = require('@overleaf/object-persistor/src/PerProjectEncryptedS3Persistor')

const AWS_S3_USER_FILES_STORAGE_CLASS =
  process.env.AWS_S3_USER_FILES_STORAGE_CLASS

// use functions to get a fresh copy, not a reference, each time
function s3BaseConfig() {
  return {
    endpoint: process.env.AWS_S3_ENDPOINT,
    pathStyle: true,
    partSize: 100 * 1024 * 1024,
    ca: [fs.readFileSync('/certs/public.crt')],
  }
}

function s3Config() {
  return {
    key: process.env.AWS_ACCESS_KEY_ID,
    secret: process.env.AWS_SECRET_ACCESS_KEY,
    ...s3BaseConfig(),
  }
}

const S3SSECKeys = [
  new RootKeyEncryptionKey(
    crypto.generateKeySync('aes', { length: 256 }).export(),
    Buffer.alloc(32)
  ),
]

function s3SSECConfig() {
  return {
    ...s3Config(),
    ignoreErrorsFromDEKReEncryption: false,
    automaticallyRotateDEKEncryption: true,
    dataEncryptionKeyBucketName: process.env.AWS_S3_USER_FILES_DEK_BUCKET_NAME,
    pathToProjectFolder(_bucketName, path) {
      const match = path.match(/^[a-f0-9]{24}\//)
      if (!match) throw new Error('not a project-folder')
      const [projectFolder] = match
      return projectFolder
    },
    async getRootKeyEncryptionKeys() {
      return S3SSECKeys
    },
    storageClass: {
      [process.env.AWS_S3_USER_FILES_BUCKET_NAME]:
        AWS_S3_USER_FILES_STORAGE_CLASS,
    },
  }
}

function s3ConfigDefaultProviderCredentials() {
  return {
    ...s3BaseConfig(),
  }
}

function s3Stores() {
  return {
    user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME,
    template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME,
  }
}

function gcsConfig() {
  return {
    endpoint: {
      apiEndpoint: process.env.GCS_API_ENDPOINT,
      projectId: 'fake',
    },
    directoryKeyRegex: /^[0-9a-fA-F]{24}\/[0-9a-fA-F]{24}/,
    unlockBeforeDelete: false, // fake-gcs does not support this
    deletedBucketSuffix: '-deleted',
  }
}

function gcsStores() {
  return {
    user_files: process.env.GCS_USER_FILES_BUCKET_NAME,
    template_files: process.env.GCS_TEMPLATE_FILES_BUCKET_NAME,
  }
}

function fsStores() {
  return {
    user_files: Path.resolve(__dirname, '../../../user_files'),
    template_files: Path.resolve(__dirname, '../../../template_files'),
  }
}

function fallbackStores(primaryConfig, fallbackConfig) {
  return {
    [primaryConfig.user_files]: fallbackConfig.user_files,
    [primaryConfig.template_files]: fallbackConfig.template_files,
  }
}

const BackendSettings = {
  SHARD_01_FSPersistor: {
    backend: 'fs',
    stores: fsStores(),
  },
  SHARD_01_S3Persistor: {
    backend: 's3',
    s3: s3Config(),
    stores: s3Stores(),
  },
  SHARD_01_S3PersistorDefaultProviderCredentials: {
    backend: 's3',
    s3: s3ConfigDefaultProviderCredentials(),
    stores: s3Stores(),
  },
  SHARD_01_GcsPersistor: {
    backend: 'gcs',
    gcs: gcsConfig(),
    stores: gcsStores(),
  },
  SHARD_01_PerProjectEncryptedS3Persistor: {
    backend: 's3SSEC',
    s3SSEC: s3SSECConfig(),
    stores: s3Stores(),
  },
  SHARD_02_FallbackS3ToFSPersistor: {
    backend: 's3',
    s3: s3Config(),
    stores: s3Stores(),
    fallback: {
      backend: 'fs',
      buckets: fallbackStores(s3Stores(), fsStores()),
    },
  },
  SHARD_02_FallbackFSToS3Persistor: {
    backend: 'fs',
    s3: s3Config(),
    stores: fsStores(),
    fallback: {
      backend: 's3',
      buckets: fallbackStores(fsStores(), s3Stores()),
    },
  },
  SHARD_03_FallbackGcsToS3Persistor: {
    backend: 'gcs',
    gcs: gcsConfig(),
    stores: gcsStores(),
    s3: s3Config(),
    fallback: {
      backend: 's3',
      buckets: fallbackStores(gcsStores(), s3Stores()),
    },
  },
  SHARD_03_FallbackS3ToGcsPersistor: {
    backend: 's3',
    // can use the same bucket names for gcs and s3 (in tests)
    stores: s3Stores(),
    s3: s3Config(),
    gcs: gcsConfig(),
    fallback: {
      backend: 'gcs',
      buckets: fallbackStores(s3Stores(), gcsStores()),
    },
  },
}

function checkForUnexpectedTestFile() {
  const awareOfSharding = [
    'FilestoreApp.js',
    'FilestoreTests.js',
    'TestConfig.js',
    'TestHelper.js',
  ]
  for (const file of fs.readdirSync(__dirname).sort()) {
    if (!awareOfSharding.includes(file)) {
      throw new Error(
        `Found new test file ${file}: All tests must be aware of the SHARD_ prefix.`
      )
    }
  }
}
checkForUnexpectedTestFile()

module.exports = {
  AWS_S3_USER_FILES_STORAGE_CLASS,
  BackendSettings,
  s3Config,
  s3SSECConfig,
}
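
A minimal sketch of how the SHARD_-prefixed BackendSettings entries might be consumed by a test runner, assuming shard selection via a hypothetical TEST_SHARD environment variable (not something defined in this commit):

// Illustrative only: the TEST_SHARD selection mechanism is an assumption.
const { BackendSettings } = require('./TestConfig')

for (const [name, settings] of Object.entries(BackendSettings)) {
  // e.g. run only the SHARD_01_* entries when TEST_SHARD=SHARD_01
  if (process.env.TEST_SHARD && !name.startsWith(process.env.TEST_SHARD)) {
    continue
  }
  console.log(`would run the shared acceptance suite against ${name}`, {
    backend: settings.backend,
  })
}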

services/filestore/test/acceptance/js/TestHelper.js (new file, 78 lines)
@@ -0,0 +1,78 @@
const streamifier = require('streamifier')
const fetch = require('node-fetch')
const ObjectPersistor = require('@overleaf/object-persistor')

const { expect } = require('chai')

module.exports = {
  uploadStringToPersistor,
  getStringFromPersistor,
  expectPersistorToHaveFile,
  expectPersistorToHaveSomeFile,
  expectPersistorNotToHaveFile,
  streamToString,
  getMetric,
}

async function getMetric(filestoreUrl, metric) {
  const res = await fetch(`${filestoreUrl}/metrics`)
  expect(res.status).to.equal(200)
  const metricRegex = new RegExp(`^${metric}{[^}]+} ([0-9]+)$`, 'gm')
  const body = await res.text()
  let v = 0
  // Sum up size="lt-128KiB" and size="gte-128KiB"
  for (const [, found] of body.matchAll(metricRegex)) {
    v += parseInt(found, 10) || 0
  }
  return v
}

function streamToString(stream) {
  const chunks = []
  return new Promise((resolve, reject) => {
    stream.on('data', chunk => chunks.push(chunk))
    stream.on('error', reject)
    stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')))
    stream.resume()
  })
}

async function uploadStringToPersistor(persistor, bucket, key, content) {
  const fileStream = streamifier.createReadStream(content)
  await persistor.sendStream(bucket, key, fileStream)
}

async function getStringFromPersistor(persistor, bucket, key) {
  const stream = await persistor.getObjectStream(bucket, key, {})
  return await streamToString(stream)
}

async function expectPersistorToHaveFile(persistor, bucket, key, content) {
  const foundContent = await getStringFromPersistor(persistor, bucket, key)
  expect(foundContent).to.equal(content)
}

async function expectPersistorToHaveSomeFile(persistor, bucket, keys, content) {
  let foundContent
  for (const key of keys) {
    try {
      foundContent = await getStringFromPersistor(persistor, bucket, key)
      break
    } catch (err) {
      if (err instanceof ObjectPersistor.Errors.NotFoundError) {
        continue
      }
      throw err
    }
  }
  if (foundContent === undefined) {
    expect.fail(`Could not find any of the specified keys: ${keys}`)
  }
  expect(foundContent).to.equal(content)
}

async function expectPersistorNotToHaveFile(persistor, bucket, key) {
  await expect(
    getStringFromPersistor(persistor, bucket, key)
  ).to.eventually.have.been.rejected.with.property('name', 'NotFoundError')
}
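
A minimal sketch of how these helpers could be used against a persistor built the same way as in FilestoreApp.js; the bucket lookup and key names are assumptions made for illustration only:

// Illustrative only: assumes Settings.filestore carries a `stores` map as in
// TestConfig.js, and that chai-as-promised is loaded for the rejection helper.
const ObjectPersistor = require('@overleaf/object-persistor')
const Settings = require('@overleaf/settings')
const {
  uploadStringToPersistor,
  expectPersistorToHaveFile,
  expectPersistorNotToHaveFile,
} = require('./TestHelper')

async function exampleUsage() {
  const persistor = ObjectPersistor({
    ...Settings.filestore,
    paths: Settings.path,
  })
  const bucket = Settings.filestore.stores.user_files
  await uploadStringToPersistor(persistor, bucket, 'example-key', 'hello world')
  await expectPersistorToHaveFile(persistor, bucket, 'example-key', 'hello world')
  await expectPersistorNotToHaveFile(persistor, bucket, 'missing-key')
}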