diff --git a/.github/workflows/s3-e2e.yml b/.github/workflows/s3-e2e.yml
new file mode 100644
index 000000000..4f4110e18
--- /dev/null
+++ b/.github/workflows/s3-e2e.yml
@@ -0,0 +1,53 @@
+name: S3 E2E Tests
+
+on: push
+
+jobs:
+  s3-e2e:
+    timeout-minutes: 15
+    # TODO should we use the same container as circle & central?
+    runs-on: ubuntu-latest
+    services:
+      # see: https://docs.github.com/en/enterprise-server@3.5/actions/using-containerized-services/creating-postgresql-service-containers
+      postgres:
+        image: postgres:14.10
+        env:
+          POSTGRES_PASSWORD: odktest
+        ports:
+          - 5432:5432
+        # Set health checks to wait until postgres has started
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+      minio:
+        # see: https://github.com/minio/minio/discussions/16099
+        image: minio/minio:edge-cicd
+        env:
+          MINIO_ROOT_USER: odk-central-dev
+          MINIO_ROOT_PASSWORD: topSecret123
+          # Enable encryption - this changes how s3 ETags work
+          # See: https://docs.aws.amazon.com/AmazonS3/latest/API/API_Object.html
+          # See: https://github.com/minio/minio/discussions/19012
+          MINIO_KMS_AUTO_ENCRYPTION: on
+          MINIO_KMS_SECRET_KEY: odk-minio-test-key:QfdUCrn3UQ58W5pqCS5SX4SOlec9sT8yb4rZ4zK24w0=
+        ports:
+          - 9000:9000
+        options: >-
+          --health-cmd "curl -s http://localhost:9000/minio/health/live"
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+    steps:
+      - uses: actions/checkout@v4
+      - name: Use Node.js 20
+        uses: actions/setup-node@v4
+        with:
+          node-version: 20.10.0
+          cache: 'npm'
+      - run: npm ci --legacy-peer-deps
+      - run: node lib/bin/create-docker-databases.js
+      - name: E2E Test
+        timeout-minutes: 10
+        run: ./test/e2e/s3/run-tests.sh
diff --git a/Makefile b/Makefile
index 603c9bbdb..6f4852f2e 100644
--- a/Makefile
+++ b/Makefile
@@ -26,6 +26,32 @@ fake-oidc-server-ci:
 	cd test/e2e/oidc/fake-oidc-server && \
 	node index.mjs
 
+.PHONY: fake-s3-accounts
+fake-s3-accounts: node_version
+	NODE_CONFIG_ENV=s3-dev node lib/bin/s3-create-bucket.js
+
+.PHONY: dev-s3
+dev-s3: fake-s3-accounts base
+	NODE_CONFIG_ENV=s3-dev npx nodemon --watch lib --watch config lib/bin/run-server.js
+
+# default admin credentials: minioadmin:minioadmin
+# See: https://hub.docker.com/r/minio/minio/
+# MINIO_KMS_SECRET_KEY, MINIO_KMS_AUTO_ENCRYPTION enable encryption - this changes how s3 ETags are generated.
+# See: https://docs.aws.amazon.com/AmazonS3/latest/API/API_Object.html
+# See: https://github.com/minio/minio/discussions/19012
+S3_SERVER_ARGS := --network host \
+	-e MINIO_ROOT_USER=odk-central-dev \
+	-e MINIO_ROOT_PASSWORD=topSecret123 \
+	-e MINIO_KMS_AUTO_ENCRYPTION=on \
+	-e MINIO_KMS_SECRET_KEY=odk-minio-test-key:QfdUCrn3UQ58W5pqCS5SX4SOlec9sT8yb4rZ4zK24w0= \
+	minio/minio server /data --console-address ":9001"
+.PHONY: fake-s3-server-ephemeral
+fake-s3-server-ephemeral:
+	docker run --rm $(S3_SERVER_ARGS)
+.PHONY: fake-s3-server-persistent
+fake-s3-server-persistent:
+	docker run --detach $(S3_SERVER_ARGS)
+
 .PHONY: node_version
 node_version: node_modules
 	node lib/bin/enforce-node-version.js
diff --git a/README.md b/README.md
index 8759bd908..7c480145f 100644
--- a/README.md
+++ b/README.md
@@ -53,10 +53,12 @@ CREATE DATABASE jubilant_test WITH OWNER=jubilant ENCODING=UTF8;
 \c jubilant_test;
 CREATE EXTENSION IF NOT EXISTS CITEXT;
 CREATE EXTENSION IF NOT EXISTS pg_trgm;
+CREATE EXTENSION IF NOT EXISTS pgrowlocks;
 
 CREATE DATABASE jubilant WITH OWNER=jubilant ENCODING=UTF8;
 \c jubilant;
 CREATE EXTENSION IF NOT EXISTS CITEXT;
 CREATE EXTENSION IF NOT EXISTS pg_trgm;
+CREATE EXTENSION IF NOT EXISTS pgrowlocks;
 ```
 
 If you are using Docker, you may find it easiest to run the database in Docker by running `make run-docker-postgres`.
diff --git a/config/default.json b/config/default.json
index c37688c73..0dea82c44 100644
--- a/config/default.json
+++ b/config/default.json
@@ -31,7 +31,8 @@
         "url": "https://data.getodk.cloud/v1/key/eOZ7S4bzyUW!g1PF6dIXsnSqktRuewzLTpmc6ipBtRq$LDfIMTUKswCexvE0UwJ9/projects/1/forms/odk-analytics/submissions",
         "formId": "odk-analytics",
         "version": "v2024.1.0_1"
-      }
+      },
+      "s3blobStore": {}
     }
   },
   "test": {
diff --git a/config/s3-dev.json b/config/s3-dev.json
new file mode 100644
index 000000000..65e4db999
--- /dev/null
+++ b/config/s3-dev.json
@@ -0,0 +1,13 @@
+{
+  "default": {
+    "external": {
+      "s3blobStore": {
+        "server": "http://localhost:9000",
+        "accessKey": "odk-central-dev",
+        "secretKey": "topSecret123",
+        "bucketName": "odk-central-bucket",
+        "requestTimeout": 60000
+      }
+    }
+  }
+}
diff --git a/lib/bin/create-docker-databases.js b/lib/bin/create-docker-databases.js
index 76af3b7e5..cdcdaf8ac 100644
--- a/lib/bin/create-docker-databases.js
+++ b/lib/bin/create-docker-databases.js
@@ -31,6 +31,7 @@ const { log } = program.opts();
     const dbj = connect(database);
     await dbj.raw('create extension citext;');
     await dbj.raw('create extension pg_trgm;');
+    await dbj.raw('create extension pgrowlocks;');
     dbj.destroy();
   }));
diff --git a/lib/bin/run-server.js b/lib/bin/run-server.js
index 261cc8b78..0b71da766 100644
--- a/lib/bin/run-server.js
+++ b/lib/bin/run-server.js
@@ -38,13 +38,16 @@ const xlsform = require('../external/xlsform').init(config.get('default.xlsform'
 // get an Enketo client
 const enketo = require('../external/enketo').init(config.get('default.enketo'));
 
+// get an S3 client.
+const s3 = require('../external/s3').init(config.get('default.external.s3blobStore'));
+
 ////////////////////////////////////////////////////////////////////////////////
 // START HTTP SERVICE
 
 // initialize our container, then generate an http service out of it.
 const container = require('../model/container')
-  .withDefaults({ db, mail, env, Sentry, xlsform, enketo });
+  .withDefaults({ db, mail, env, Sentry, xlsform, enketo, s3 });
 const service = require('../http/service')(container);
 
 // insert the graceful exit middleware.
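The whole feature is config-gated: `config/default.json` ships an empty `"s3blobStore": {}`, so out of the box `init()` returns a disabled client and blob content continues to be stored and served from postgres, while `config/s3-dev.json` plus `NODE_CONFIG_ENV=s3-dev` turns it on against the local MinIO. A minimal sketch of that gating follows; the standalone script and its paths are illustrative, not part of the PR:

```js
// check-s3.js — illustrative sketch; run from the repo root with
// NODE_CONFIG_ENV=s3-dev to pick up config/s3-dev.json.
const config = require('config');

// With default.json's empty `"s3blobStore": {}`, init() returns
// { enabled: false } (see lib/external/s3.js below) and nothing else
// about blob handling changes.
const s3 = require('./lib/external/s3').init(config.get('default.external.s3blobStore'));

console.log(s3.enabled
  ? 'S3 blob store enabled: new blobs will be uploaded and served via presigned URLs.'
  : 'S3 blob store disabled: blob content stays in postgres.');
```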
diff --git a/lib/bin/s3-create-bucket.js b/lib/bin/s3-create-bucket.js
new file mode 100644
index 000000000..84f16c449
--- /dev/null
+++ b/lib/bin/s3-create-bucket.js
@@ -0,0 +1,39 @@
+// Copyright 2024 ODK Central Developers
+// See the NOTICE file at the top-level directory of this distribution and at
+// https://github.com/getodk/central-backend/blob/master/NOTICE.
+// This file is part of ODK Central. It is subject to the license terms in
+// the LICENSE file found in the top-level directory of this distribution and at
+// https://www.apache.org/licenses/LICENSE-2.0. No part of ODK Central,
+// including this file, may be copied, modified, propagated, or distributed
+// except according to the terms contained in the LICENSE file.
+
+const Minio = require('minio');
+
+const { server, bucketName, accessKey, secretKey } = require('config').get('default.external.s3blobStore');
+
+const minioClient = (() => {
+  const url = new URL(server);
+  const useSSL = url.protocol === 'https:';
+  const endPoint = (url.hostname + url.pathname).replace(/\/$/, '');
+  const port = parseInt(url.port, 10);
+
+  return new Minio.Client({ endPoint, port, useSSL, accessKey, secretKey });
+})();
+
+const log = (...args) => console.log(__filename, ...args);
+
+minioClient.bucketExists(bucketName)
+  .then(exists => {
+    if (exists) {
+      log('Bucket already exists.');
+      return;
+    }
+
+    log('Creating bucket:', bucketName);
+    return minioClient.makeBucket(bucketName)
+      .then(() => log('Bucket created OK.'));
+  })
+  .catch(err => {
+    log('ERROR CREATING MINIO BUCKET:', err);
+    process.exit(1);
+  });
diff --git a/lib/bin/s3.js b/lib/bin/s3.js
new file mode 100644
index 000000000..af4fb3799
--- /dev/null
+++ b/lib/bin/s3.js
@@ -0,0 +1,19 @@
+// Copyright 2024 ODK Central Developers
+// See the NOTICE file at the top-level directory of this distribution and at
+// https://github.com/getodk/central-backend/blob/master/NOTICE.
+// This file is part of ODK Central. It is subject to the license terms in
+// the LICENSE file found in the top-level directory of this distribution and at
+// https://www.apache.org/licenses/LICENSE-2.0. No part of ODK Central,
+// including this file, may be copied, modified, propagated, or distributed
+// except according to the terms contained in the LICENSE file.
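+
+// Usage sketch (the commands and the status argument choices are the ones
+// registered below):
+//
+//   node lib/bin/s3.js count-blobs <pending|in_progress|uploaded|failed>
+//   node lib/bin/s3.js reset-failed-to-pending
+//   node lib/bin/s3.js upload-pending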
+
+const { program, Argument } = require('commander');
+
+const { getCount, setFailedToPending, uploadPending } = require('../task/s3');
+
+program.command('count-blobs')
+  .addArgument(new Argument('status').choices(['pending', 'in_progress', 'uploaded', 'failed']))
+  .action(getCount);
+program.command('reset-failed-to-pending').action(setFailedToPending);
+program.command('upload-pending').action(uploadPending);
+program.parse();
diff --git a/lib/data/attachments.js b/lib/data/attachments.js
index e46009047..a4c668ca6 100644
--- a/lib/data/attachments.js
+++ b/lib/data/attachments.js
@@ -10,8 +10,7 @@
 const { join } = require('path');
 const { compose, identity } = require('ramda');
-const { Writable, pipeline } = require('stream');
-const { rejectIfError } = require('../util/promise');
+const { Writable } = require('stream');
 const { zipPart } = require('../util/zip');
 const sanitize = require('sanitize-filename');
 
@@ -47,7 +46,7 @@ const streamAttachments = (inStream, decryptor) => {
       done();
     }
   });
-  pipeline(inStream, writable, rejectIfError(archive.error.bind(archive)));
+  inStream.with(writable).pipeline(archive.error.bind(archive));
 
   return archive;
 };
diff --git a/lib/data/client-audits.js b/lib/data/client-audits.js
index 1e740616c..afc49a722 100644
--- a/lib/data/client-audits.js
+++ b/lib/data/client-audits.js
@@ -123,9 +123,10 @@ const streamClientAudits = (inStream, form, decryptor) => {
   });
 
   // only appended (above, in transform()) if data comes in.
-  const outStream = pipeline(inStream, csvifier, csv(), (err) => {
-    if (err != null) archive.error(err);
-  });
+  const outStream = inStream
+    .with(csvifier)
+    .with(csv())
+    .pipeline(archive.error.bind(archive));
 
   return archive;
 };
diff --git a/lib/external/s3.js b/lib/external/s3.js
new file mode 100644
index 000000000..f8f2467ab
--- /dev/null
+++ b/lib/external/s3.js
@@ -0,0 +1,173 @@
+// Copyright 2024 ODK Central Developers
+// See the NOTICE file at the top-level directory of this distribution and at
+// https://github.com/getodk/central-backend/blob/master/NOTICE.
+// This file is part of ODK Central. It is subject to the license terms in
+// the LICENSE file found in the top-level directory of this distribution and at
+// https://www.apache.org/licenses/LICENSE-2.0. No part of ODK Central,
+// including this file, may be copied, modified, propagated, or distributed
+// except according to the terms contained in the LICENSE file.
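+
+// Thin wrapper around minio-js. init() returns { enabled: false } unless
+// server, accessKey, secretKey and bucketName are all configured; when enabled
+// it exposes uploadFromBlob(), getContentFor(), urlForBlob(), deleteObjFor()
+// and destroy(), each guarded so that calls made after destroy() throw.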
+
+const disabled = { enabled: false };
+
+const init = (config) => {
+  if (!config) return disabled;
+
+  const { server, accessKey, secretKey, bucketName, requestTimeout, objectPrefix } = config;
+  if (!(server && accessKey && secretKey && bucketName)) return disabled;
+
+  const http = require('node:http');
+  const https = require('node:https');
+  const { Readable } = require('node:stream');
+  const Minio = require('minio');
+  const { contentDisposition } = require('../util/http');
+  const { pipethroughAndBuffer } = require('../util/stream');
+
+  let destroyed = false;
+
+  const inflight = new Set();
+  function destroy() {
+    destroyed = true;
+    return new Promise(resolve => {
+      // nothing in flight: resolve immediately rather than waiting forever
+      if (inflight.size === 0) return resolve();
+      let remaining = 0;
+      for (const req of inflight) {
+        ++remaining; // eslint-disable-line no-plusplus
+        req.once('close', () => { // eslint-disable-line no-loop-func
+          if (!--remaining) resolve(); // eslint-disable-line no-plusplus
+        });
+        req.destroy(new Error('Aborted by request'));
+      }
+    });
+  }
+
+  const minioClient = (() => {
+    const url = new URL(server);
+    const useSSL = url.protocol === 'https:';
+    const endPoint = (url.hostname + url.pathname).replace(/\/$/, '');
+    const port = parseInt(url.port, 10);
+
+    // eslint-disable-next-line no-restricted-globals
+    const MAX_REQ_TIMEOUT = isNaN(requestTimeout) ? 120000 : requestTimeout; // ms
+    const SMALL_REQ_TIMEOUT = Math.max(1, MAX_REQ_TIMEOUT - 1000);
+
+    // Set a reasonable timeout on upload requests
+    // See: https://github.com/minio/minio-js/issues/722#issuecomment-1594401449
+    const request = (_options, callback) => {
+      // It's unclear exactly what the different types of timeout on a request refer to.
+      // req.setTimeout(): "Milliseconds before a request times out" - https://nodejs.org/api/http.html#requestsettimeouttimeout-callback
+      // options.timeout: "the timeout before the socket is connected" - https://nodejs.org/api/http.html#httprequestoptions-callback
+      // setTimeout(): absolute timeout, without reference to the request implementation
+
+      const options = { ..._options };
+
+      // eslint-disable-next-line no-restricted-globals
+      if (isNaN(options.timeout)) options.timeout = SMALL_REQ_TIMEOUT;
+
+      const req = (useSSL ? https : http).request(options, callback);
+      inflight.add(req);
+
+      // It might be simplest to use the global setTimeout() alone, but maybe we
+      // will find useful information in the different errors.
+      req.setTimeout(SMALL_REQ_TIMEOUT);
+
+      const timeoutEventHandler = () => req.destroy(new Error('Request emitted timeout event.'));
+
+      req.once('timeout', timeoutEventHandler);
+      const globalTimeoutHandler = setTimeout(() => req.destroy(new Error('Request timed out.')), MAX_REQ_TIMEOUT);
+
+      req.once('close', () => {
+        req.off('timeout', timeoutEventHandler);
+        clearTimeout(globalTimeoutHandler);
+        inflight.delete(req);
+      });
+
+      return req;
+    };
+
+    const clientConfig = { endPoint, port, useSSL, accessKey, secretKey, transport: { request } };
+
+    return new Minio.Client(clientConfig);
+  })();
+
+  const objectNameFor = ({ id, sha }) => {
+    // Include blob ID in object name to allow easy correlation with postgres data.
+    // Include blob SHA sum to prevent name collision in case multiple odk-central-
+    // backend instances point to the same bucket. There are a few scenarios where
+    // this could happen, e.g.
+    //
+    // * instance reset after testing/training
+    // * staging & prod instances pointed to the same bucket
+    // * temporary loss of access to postgres data on upgrade error
+    if (typeof id !== 'number') throw new Error('Invalid id: ' + id);
+    if (!sha) throw new Error('Missing sha sum for blob: ' + id);
+    return `${objectPrefix??''}blob-${id}-${sha}`;
+  };
+
+  function deleteObjFor(blob) {
+    return minioClient.removeObject(bucketName, objectNameFor(blob));
+  }
+
+  async function getContentFor(blob) {
+    const stream = await minioClient.getObject(bucketName, objectNameFor(blob));
+    const [ buf ] = await pipethroughAndBuffer(stream);
+    return buf;
+  }
+
+  // respHeaders documentation is not clear, but can be found at:
+  //
+  // * https://min.io/docs/minio/linux/developers/javascript/API.html#presignedgetobject-bucketname-objectname-expiry-respheaders-requestdate
+  // * https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetObject.html#API_GetObject_RequestSyntax
+  const getRespHeaders = (filename, { contentType }) => ({
+    'response-content-disposition': contentDisposition(filename),
+    'response-content-type': contentType,
+  });
+
+  async function urlForBlob(filename, blob) {
+    // URL expires after a certain amount of time. Allow enough time for normal clients to
+    // start the download.
+    //
+    // > Amazon S3 checks the expiration date and time of a signed URL at the time of the
+    // > HTTP request. For example, if a client begins to download a large file immediately
+    // > before the expiration time, the download continues even if the expiration time
+    // > passes during the download. However, if the connection drops and the client tries
+    // > to restart the download after the expiration time passes, the download fails.
+    // > - https://docs.aws.amazon.com/AmazonS3/latest/userguide/using-presigned-url.html#PresignedUrl-Expiration
+    const expiry = 60; // seconds
+
+    const objectName = objectNameFor(blob);
+    const respHeaders = getRespHeaders(filename, blob);
+
+    // See: https://min.io/docs/minio/linux/developers/javascript/API.html#presignedGetObject
+    return minioClient.presignedGetObject(bucketName, objectName, expiry, respHeaders);
+  }
+
+  async function uploadFromBlob(blob) {
+    const objectName = objectNameFor(blob);
+
+    const { md5, sha } = blob;
+    const { length } = blob.content;
+    console.log('Uploading blob:', JSON.stringify({ md5, sha, length })); // eslint-disable-line no-console
+
+    const stream = new Readable();
+    inflight.add(stream);
+    // drop the stream from the in-flight set once it has been fully consumed
+    stream.once('close', () => inflight.delete(stream));
+    stream.push(blob.content);
+    stream.push(null);
+
+    await minioClient.putObject(bucketName, objectName, stream);
+  }
+
+  const guarded = fn => (...args) => {
+    if (destroyed) throw new Error('s3 destroyed');
+    return fn(...args);
+  };
+
+  return {
+    enabled: true,
+    deleteObjFor:   guarded(deleteObjFor),   // eslint-disable-line key-spacing, no-multi-spaces
+    getContentFor:  guarded(getContentFor),  // eslint-disable-line key-spacing
+    uploadFromBlob: guarded(uploadFromBlob),
+    urlForBlob:     guarded(urlForBlob),     // eslint-disable-line key-spacing, no-multi-spaces
+    destroy:        guarded(destroy),        // eslint-disable-line key-spacing, no-multi-spaces
+  };
+};
+
+module.exports = { init };
diff --git a/lib/model/frames/blob.js b/lib/model/frames/blob.js
index c7263cc01..b6a66962e 100644
--- a/lib/model/frames/blob.js
+++ b/lib/model/frames/blob.js
@@ -16,7 +16,7 @@ const { digestWith, md5sum, shasum } = require('../../util/crypto');
 const { pipethroughAndBuffer } = require('../../util/stream');
 
-class Blob extends Frame.define(table('blobs'), 'id', 'sha', 'content', 'contentType', 'md5') {
+class Blob extends Frame.define(table('blobs'), 'id', 'sha', 'content', 'contentType', 'md5', 's3_status') {
   // Given a path to a file on disk (typically written to a temporary location for the
   // duration of the request), will do the work to generate a Blob instance with the
   // appropriate SHA and binary content information. Does _not_ save it to the database;
diff --git a/lib/model/frames/submission.js b/lib/model/frames/submission.js
index bd2752746..2185ba5c2 100644
--- a/lib/model/frames/submission.js
+++ b/lib/model/frames/submission.js
@@ -102,7 +102,7 @@ Submission.Extended = Frame.define('formVersion', readable);
 
 Submission.Xml = Frame.define(table('submission_defs', 'xml'), 'xml');
 
-Submission.Encryption = Frame.define(into('encryption'), 'encHasData', 'encData', 'encIndex', 'encKeyId');
+Submission.Encryption = Frame.define(into('encryption'), 'encHasData', 'blobId', 'encData', 'encSha', 'encIndex', 'encKeyId', 'encS3Status');
 
 Submission.Exports = Frame.define(into('exports'), 'formVersion');
diff --git a/lib/model/migrations/20240913-01-add-blob-s3.js b/lib/model/migrations/20240913-01-add-blob-s3.js
new file mode 100644
index 000000000..1d4be8f21
--- /dev/null
+++ b/lib/model/migrations/20240913-01-add-blob-s3.js
@@ -0,0 +1,24 @@
+// Copyright 2024 ODK Central Developers
+// See the NOTICE file at the top-level directory of this distribution and at
+// https://github.com/getodk/central-backend/blob/master/NOTICE.
+// This file is part of ODK Central. It is subject to the license terms in
+// the LICENSE file found in the top-level directory of this distribution and at
+// https://www.apache.org/licenses/LICENSE-2.0. No part of ODK Central,
+// including this file, may be copied, modified, propagated, or distributed
+// except according to the terms contained in the LICENSE file.
+
+const up = async (db) => {
+  await db.raw('CREATE EXTENSION IF NOT EXISTS pgrowlocks');
+  await db.raw(`CREATE TYPE S3_UPLOAD_STATUS AS ENUM ('pending', 'uploaded', 'failed')`);
+  await db.raw(`
+    ALTER TABLE blobs
+      ADD COLUMN s3_status S3_UPLOAD_STATUS NOT NULL DEFAULT 'pending',
+      ALTER COLUMN content DROP NOT NULL
+  `);
+};
+
+const down = () => {
+  // irreversible.
+};
+
+module.exports = { up, down };
diff --git a/lib/model/query/blobs.js b/lib/model/query/blobs.js
index 75ae0829a..53c4343b1 100644
--- a/lib/model/query/blobs.js
+++ b/lib/model/query/blobs.js
@@ -8,7 +8,7 @@ // except according to the terms contained in the LICENSE file.
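+// s3_status lifecycle: blobs are created as 'pending'; an uploader claims a row
+// (see _getOnePending below) and, on success, marks it 'uploaded' and NULLs the
+// local content column, after which reads go through s3.getContentFor(). A row
+// claimed by an open transaction is reported as the implicit 'in_progress' status.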
 const { sql } = require('slonik');
-const { map } = require('ramda');
+const { isEmpty, map } = require('ramda');
 const { Blob } = require('../frames');
 const { construct } = require('../../util/util');
@@ -31,19 +31,157 @@ const getById = (blobId) => ({ maybeOne }) =>
   maybeOne(sql`select * from blobs where id=${blobId}`)
     .then(map(construct(Blob)));
 
-const purgeUnattached = () => ({ all }) =>
-  all(sql`
-delete from blobs
-  using blobs as b
-    left join client_audits as ca on ca."blobId" = b.id
-    left join submission_attachments as sa on sa."blobId" = b.id
-    left join form_attachments as fa on fa."blobId" = b.id
-    left join form_defs as fd on fd."xlsBlobId" = b.id
-where (blobs.id = b.id and
-  ca."blobId" is null and
-  sa."blobId" is null and
-  fa."blobId" is null and
-  fd."xlsBlobId" is null)`);
-
-module.exports = { ensure, getById, purgeUnattached };
+const s3CountByStatus = (status) => ({ oneFirst }) => {
+  // in_progress is an implicit status
+  if (status === 'in_progress') {
+    return oneFirst(sql`SELECT COUNT(*) FROM PGROWLOCKS('blobs')`);
+  } else if (status === 'pending') {
+    return oneFirst(sql`
+      WITH
+        allpending AS (
+          SELECT COUNT(*) FROM blobs WHERE s3_status='pending'
+        ),
+        locked AS (
+          SELECT COUNT(*) FROM PGROWLOCKS('blobs')
+        )
+      SELECT allpending.count-locked.count FROM allpending, locked
+    `);
+  } else {
+    return oneFirst(sql`SELECT COUNT(*) FROM blobs WHERE s3_status=${status}`);
+  }
+};
+
+const s3SetFailedToPending = () => ({ oneFirst }) => oneFirst(sql`
+  WITH updated AS (
+    UPDATE blobs
+      SET s3_status='pending'
+      WHERE s3_status='failed'
+      RETURNING 1
+  )
+  SELECT COUNT(*) FROM updated
+`);
+
+const _markAsFailed = ({ id }) => ({ run }) => run(sql`
+  UPDATE blobs
+    SET s3_status = 'failed'
+    WHERE id = ${id}
+`);
+
+const _markAsUploaded = ({ id }) => ({ run }) => run(sql`
+  UPDATE blobs
+    SET s3_status = 'uploaded'
+      , content = NULL
+    WHERE id=${id}
+`);
+
+// Set s3_status to failed so that if the inner transaction rolls back,
+// the s3_status is committed as 'failed'.
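+//
+// FOR NO KEY UPDATE ... SKIP LOCKED lets concurrent workers each claim a
+// different pending row without blocking one another; rows claimed this way
+// are what s3CountByStatus() reports as 'in_progress' via PGROWLOCKS().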
+const _getOnePending = () => ({ maybeOne }) => maybeOne(sql`
+  UPDATE blobs
+    SET s3_status='failed'
+    WHERE id IN (
+      SELECT id
+        FROM blobs
+        WHERE s3_status='pending'
+        LIMIT 1
+        FOR NO KEY UPDATE
+        SKIP LOCKED
+    )
+    RETURNING *
+`).then(map(construct(Blob)));
+
+const unattachedClause = sql`
+    LEFT JOIN client_audits AS ca ON ca."blobId" = b.id
+    LEFT JOIN submission_attachments AS sa ON sa."blobId" = b.id
+    LEFT JOIN form_attachments AS fa ON fa."blobId" = b.id
+    LEFT JOIN form_defs AS fd ON fd."xlsBlobId" = b.id
+  WHERE ca."blobId" IS NULL
+    AND sa."blobId" IS NULL
+    AND fa."blobId" IS NULL
+    AND fd."xlsBlobId" IS NULL
+`;
+
+const _purgeAllUnattached = () => ({ all }) => all(sql`
+  DELETE FROM blobs
+    USING blobs AS b
+    ${unattachedClause}
+      AND blobs.id = b.id
+`);
+
+const _purgeUnattachedNotUploaded = () => ({ all }) => all(sql`
+  DELETE FROM blobs
+    USING blobs AS b
+    ${unattachedClause}
+      AND b.s3_status != 'uploaded'
+      AND blobs.id = b.id
+`);
+
+const _purgeOneUnattachedUploaded = () => ({ maybeOne }) => maybeOne(sql`
+  DELETE FROM blobs
+    WHERE id IN (
+      SELECT b.id
+        FROM blobs AS b
+        ${unattachedClause}
+          AND b.s3_status = 'uploaded'
+        LIMIT 1
+    )
+    RETURNING blobs.id, blobs.sha
+`);
+
+const purgeUnattached = () => async ({ s3, Blobs }) => {
+  if (!s3.enabled) return Blobs._purgeAllUnattached();
+
+  await Blobs._purgeUnattachedNotUploaded();
+
+  while (true) { // eslint-disable-line no-constant-condition
+    const maybeBlob = await Blobs._purgeOneUnattachedUploaded(); // eslint-disable-line no-await-in-loop
+    if (isEmpty(maybeBlob)) return;
+
+    // If the delete is interrupted or fails, this may leave an orphaned object in
+    // the S3 bucket. This should be identifiable by comparing object names
+    // with blob IDs available in postgres.
+    await s3.deleteObjFor(maybeBlob.get()); // eslint-disable-line no-await-in-loop
+  }
+};
+
+const uploadBlobIfAvailable = async container => {
+  let innerError;
+
+  const res = await container.transacting(async outerTx => {
+    const maybeBlob = await outerTx.Blobs._getOnePending();
+    if (isEmpty(maybeBlob)) return;
+
+    const blob = maybeBlob.get();
+
+    try {
+      await outerTx.db.transaction(async innerDb => {
+        const innerTx = outerTx.with({ db: innerDb });
+
+        await innerTx.s3.uploadFromBlob(blob);
+        await innerTx.Blobs._markAsUploaded(blob);
+      });
+
+      return true;
+    } catch (err) {
+      // Allow the outer transaction to commit, but bubble the error up.
+      innerError = err;
+    }
+  });
+
+  if (innerError) throw innerError;
+
+  return res;
+};
+
+const s3UploadPending = () => async (container) => {
+  // eslint-disable-next-line no-await-in-loop
+  while (await uploadBlobIfAvailable(container));
+};
+
+module.exports = {
+  ensure, getById, purgeUnattached,
+  _getOnePending, _markAsFailed, _markAsUploaded,
+  s3CountByStatus, s3SetFailedToPending, s3UploadPending,
+  _purgeAllUnattached, _purgeOneUnattachedUploaded, _purgeUnattachedNotUploaded,
+};
diff --git a/lib/model/query/client-audits.js b/lib/model/query/client-audits.js
index c4cce185c..b3b99a089 100644
--- a/lib/model/query/client-audits.js
+++ b/lib/model/query/client-audits.js
@@ -11,6 +11,7 @@ const { sql } = require('slonik');
 const { insertMany, QueryOptions } = require('../../util/db');
 const { odataFilter } = require('../../data/odata-filter');
 const { odataToColumnMap } = require('../../data/submission');
+const { streamBlobs } = require('../../util/blob');
 
 const createMany = (cas) => ({ run }) => run(insertMany(cas));
 
@@ -22,8 +23,8 @@ const existsForBlob = (blobId) => ({ maybeOne }) =>
 const keyIdCondition = (keyIds) =>
   sql.join((((keyIds == null) || (keyIds.length === 0)) ? [ -1 ] : keyIds), sql`,`);
 
-const streamForExport = (formId, draft, keyIds, options = QueryOptions.none) => ({ stream }) => stream(sql`
-select client_audits.*, blobs.content, submissions."instanceId", "localKey", "keyId", index, submissions."instanceId" from submission_defs
+const streamForExport = (formId, draft, keyIds, options = QueryOptions.none) => ({ s3, stream }) => stream(sql`
+select client_audits.*, blobs.id AS "blobId", blobs.s3_status, blobs.content, blobs.sha, submissions."instanceId", "localKey", "keyId", index, submissions."instanceId" from submission_defs
 inner join (select id, "submitterId", "createdAt", "updatedAt", "instanceId", "reviewState" from submissions
   where "formId"=${formId} and draft=${draft} and "deletedAt" is null) as submissions
@@ -38,7 +39,8 @@ select client_audits.*, blobs.content, submissions."instanceId", "localKey", "ke
 where ${odataFilter(options.filter, odataToColumnMap)}
   and current=true
   and (form_defs."keyId" is null or form_defs."keyId" in (${keyIdCondition(keyIds)}))
-  order by submission_defs."createdAt" asc, submission_defs.id asc`);
+  order by submission_defs."createdAt" asc, submission_defs.id asc`)
+  .then(dbStream => streamBlobs(s3, dbStream));
 
 module.exports = { createMany, existsForBlob, streamForExport };
diff --git a/lib/model/query/submission-attachments.js b/lib/model/query/submission-attachments.js
index eecbbbbac..8a2901b8f 100644
--- a/lib/model/query/submission-attachments.js
+++ b/lib/model/query/submission-attachments.js
@@ -20,6 +20,7 @@ const { insertMany, QueryOptions } = require('../../util/db');
 const { resolve } = require('../../util/promise');
 const { isBlank, construct } = require('../../util/util');
 const { traverseXml, findAll, root, node, text } = require('../../util/xml');
+const { streamBlobs } = require('../../util/blob');
 
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -192,8 +193,8 @@ const getBySubmissionDefIdAndName = (subDefId, name) => ({ maybeOne }) =>
 const keyIdCondition = (keyIds) =>
   sql.join((((keyIds == null) || (keyIds.length === 0)) ? [ -1 ] : keyIds), sql`,`);
 
-const streamForExport = (formId, draft, keyIds, options = QueryOptions.none) => ({ stream }) => stream(sql`
-select submission_attachments.name, blobs.content, submission_attachments.index, form_defs."keyId", submissions."instanceId", submission_defs."localKey" from submission_defs
+const streamForExport = (formId, draft, keyIds, options = QueryOptions.none) => ({ s3, stream }) => stream(sql`
+select submission_attachments.name, blobs.id AS "blobId", blobs.content, blobs.s3_status, blobs.sha, submission_attachments.index, form_defs."keyId", submissions."instanceId", submission_defs."localKey" from submission_defs
 inner join (select * from submissions where draft=${draft}) as submissions
   on submissions.id=submission_defs."submissionId"
 inner join form_defs on submission_defs."formDefId"=form_defs.id
@@ -205,7 +206,9 @@ where submission_defs.current=true
   and ${odataFilter(options.filter, odataToColumnMap)}
   and submission_attachments.name is distinct from submission_defs."encDataAttachmentName"
   and submission_attachments."isClientAudit" is not true
-  and (form_defs."keyId" is null or form_defs."keyId" in (${keyIdCondition(keyIds)}))`);
+  and (form_defs."keyId" is null or form_defs."keyId" in (${keyIdCondition(keyIds)}))`
+)
+  .then(dbStream => streamBlobs(s3, dbStream));
 
 module.exports = {
   create, upsert, attach, clear,
diff --git a/lib/model/query/submissions.js b/lib/model/query/submissions.js
index 54a3df07b..588dc0e93 100644
--- a/lib/model/query/submissions.js
+++ b/lib/model/query/submissions.js
@@ -16,6 +16,7 @@ const { odataToColumnMap, odataSubTableToColumnMap } = require('../../data/submi
 const { unjoiner, extender, equals, page, updater, QueryOptions, insertMany } = require('../../util/db');
 const { blankStringToNull, construct } = require('../../util/util');
 const Problem = require('../../util/problem');
+const { streamEncBlobs } = require('../../util/blob');
 
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -358,8 +359,8 @@
 inner join (select id, version as "formVersion" from form_defs) as fds
   on fds.id=roots."formDefId"
 ${encrypted ? sql`
 left outer join (select id, "keyId" as "encKeyId" from form_defs) as form_defs
   on form_defs.id=submission_defs."formDefId"
-left outer join (select id, content as "encData" from blobs) as blobs on blobs.id=submission_attachments."blobId"`
-  : sql`join (select null as "encKeyId", null as "encData") as enc on true`}
+left outer join (select id, content as "encData", sha as "encSha", s3_status as "encS3Status" from blobs) as blobs on blobs.id=submission_attachments."blobId"`
+  : sql`join (select null as "encKeyId", null as "encData", null as "encSha", null as "encS3Status") as enc on true`}
 inner join
   (select "submissionId", (count(id) - 1) as count from submission_defs
     group by "submissionId") as edits
@@ -376,8 +377,9 @@ ${options.orderby ? sql`
 ${page(options)}`;
 };
 
-const streamForExport = (formId, draft, keyIds, options = QueryOptions.none) => ({ stream }) =>
+const streamForExport = (formId, draft, keyIds, options = QueryOptions.none) => ({ s3, stream }) =>
   stream(_export(formId, draft, keyIds, options))
+    .then(dbStream => streamEncBlobs(s3, dbStream))
     .then(stream.map(_exportUnjoiner));
 
 const getForExport = (formId, instanceId, draft, options = QueryOptions.none) => ({ maybeOne }) =>
diff --git a/lib/resources/forms.js b/lib/resources/forms.js
index fc2c0f913..45633ab74 100644
--- a/lib/resources/forms.js
+++ b/lib/resources/forms.js
@@ -12,7 +12,8 @@ const { identity } = require('ramda');
 const { Blob, Form } = require('../model/frames');
 const { ensureDef } = require('../model/frame');
 const { QueryOptions } = require('../util/db');
-const { isTrue, xml, blobResponse, contentDisposition, withEtag } = require('../util/http');
+const { isTrue, xml, contentDisposition, withEtag } = require('../util/http');
+const { blobResponse } = require('../util/blob');
 const Problem = require('../util/problem');
 const { sanitizeFieldsForOdata, setVersion } = require('../data/schema');
 const { getOrNotFound, reject, resolve, rejectIf } = require('../util/promise');
@@ -34,13 +35,13 @@ const canReadForm = (auth, form) => (form.state === 'closed'
   : auth.canOrReject(['open_form.read', 'form.read'], form));
 
 const streamAttachment = async (container, attachment, response) => {
-  const { Blobs, Datasets, Entities } = container;
+  const { s3, Blobs, Datasets, Entities } = container;
 
   if (attachment.blobId == null && attachment.datasetId == null) {
     return reject(Problem.user.notFound());
   } else if (attachment.blobId != null) {
     const blob = await Blobs.getById(attachment.blobId).then(getOrNotFound);
-    return blobResponse(attachment.name, blob);
+    return blobResponse(s3, attachment.name, blob);
   } else {
     const dataset = await Datasets.getById(attachment.datasetId, true).then(getOrNotFound);
     const properties = await Datasets.getProperties(attachment.datasetId);
@@ -263,7 +264,7 @@ module.exports = (service, endpoint) => {
 
   // we could move this up a scope to save a couple instantiations, but really it's
   // not that expensive and it reads more easily here.
-  const getXls = (extension) => endpoint(({ Blobs, Forms }, { params, auth }) =>
+  const getXls = (extension) => endpoint(({ s3, Blobs, Forms }, { params, auth }) =>
     getInstance(Forms, params)
       .then((form) => canReadForm(auth, form))
       .then((form) => ((form.def.xlsBlobId == null)
@@ -271,7 +272,7 @@ module.exports = (service, endpoint) => {
        ? reject(Problem.user.notFound())
        : Blobs.getById(form.def.xlsBlobId)
          .then(getOrNotFound)
          .then(rejectIf(((blob) => blob.contentType !== excelMimeTypes[extension]), noargs(Problem.user.notFound)))
-          .then((blob) => blobResponse(`${form.xmlFormId}.${extension}`, blob)))));
+          .then((blob) => blobResponse(s3, `${form.xmlFormId}.${extension}`, blob)))));
 
   service.get(`${base}.xls`, getXls('xls'));
   service.get(`${base}.xlsx`, getXls('xlsx'));
diff --git a/lib/resources/odata.js b/lib/resources/odata.js
index efdf3b1fc..b1de3567f 100644
--- a/lib/resources/odata.js
+++ b/lib/resources/odata.js
@@ -50,7 +50,7 @@ module.exports = (service, endpoint) => {
       getForm(Forms, auth, params)
         .then((form) => Promise.all([
           Forms.getFields(form.def.id).then(selectFields(query, getTableFromOriginalUrl(originalUrl))),
-          Submissions.getForExport(form.id, getUuid(params.uuid), draft).then(getOrNotFound)
+          Submissions.getForExport(form.id, getUuid(params.uuid), draft).then(getOrNotFound), // may require s3 blob handling
         ])
           .then(([fields, row]) => singleRowToOData(fields, row, env.domain, originalUrl, query))));
diff --git a/lib/resources/submissions.js b/lib/resources/submissions.js
index dca711f25..f3d1eaa7e 100644
--- a/lib/resources/submissions.js
+++ b/lib/resources/submissions.js
@@ -15,7 +15,8 @@ const { ensureDef } = require('../model/frame');
 const { createdMessage } = require('../formats/openrosa');
 const { getOrNotFound, getOrReject, rejectIf, reject } = require('../util/promise');
 const { QueryOptions } = require('../util/db');
-const { success, xml, isFalse, contentDisposition, blobResponse, redirect, url } = require('../util/http');
+const { success, xml, isFalse, contentDisposition, redirect, url } = require('../util/http');
+const { blobResponse } = require('../util/blob');
 const Problem = require('../util/problem');
 const { streamBriefcaseCsvs } = require('../data/briefcase');
 const { streamAttachments } = require('../data/attachments');
@@ -407,11 +408,11 @@ module.exports = (service, endpoint) => {
       getOrRedirect(Forms, Submissions, context)
         .then(([ form, submission ]) => SubmissionAttachments.getCurrentForSubmissionId(form.id, submission.id, draft))));
 
-    service.get(`${base}/:instanceId/attachments/:name`, endpoint(({ Forms, Submissions, SubmissionAttachments }, context) =>
+    service.get(`${base}/:instanceId/attachments/:name`, endpoint(({ s3, Forms, Submissions, SubmissionAttachments }, context) =>
       getOrRedirect(Forms, Submissions, context)
         .then(([ form, submission ]) => SubmissionAttachments.getCurrentBlobByIds(form.id, submission.id, context.params.name, draft))
         .then(getOrNotFound)
-        .then((blob) => blobResponse(context.params.name, blob))));
+        .then((blob) => blobResponse(s3, context.params.name, blob))));
 
     // TODO: wow audit-logging this is expensive.
     service.post(
@@ -490,7 +491,7 @@
           ]))
           .then(([ atts ]) => atts))));
 
-    service.get(`${base}/:rootId/versions/:instanceId/attachments/:name`, endpoint(({ Forms, Submissions, SubmissionAttachments }, { params, auth }) =>
+    service.get(`${base}/:rootId/versions/:instanceId/attachments/:name`, endpoint(({ s3, Forms, Submissions, SubmissionAttachments }, { params, auth }) =>
       getForm(params, Forms)
         .then(auth.canOrReject('submission.read'))
         .then((form) => Promise.all([
@@ -498,7 +499,7 @@
             .then(getOrNotFound),
           Submissions.verifyVersion(form.id, params.rootId, params.instanceId, draft)
         ]))
-        .then(([ blob ]) => blobResponse(params.name, blob))));
+        .then(([ blob ]) => blobResponse(s3, params.name, blob))));
 
     ////////////////////////////////////////////////////////////////////////////////
     // Diffs between all versions of a submission
diff --git a/lib/task/s3.js b/lib/task/s3.js
new file mode 100644
index 000000000..565dd1236
--- /dev/null
+++ b/lib/task/s3.js
@@ -0,0 +1,54 @@
+// Copyright 2024 ODK Central Developers
+// See the NOTICE file at the top-level directory of this distribution and at
+// https://github.com/getodk/central-backend/blob/master/NOTICE.
+// This file is part of ODK Central. It is subject to the license terms in
+// the LICENSE file found in the top-level directory of this distribution and at
+// https://www.apache.org/licenses/LICENSE-2.0. No part of ODK Central,
+// including this file, may be copied, modified, propagated, or distributed
+// except according to the terms contained in the LICENSE file.
+
+const { task: { withContainer } } = require('./task');
+
+/* eslint-disable no-console */
+
+const assertEnabled = s3 => {
+  if (!s3.enabled) {
+    throw new Error('S3 blob support is not enabled.');
+  }
+};
+
+const getCount = withContainer(({ s3, Blobs }) => async status => {
+  assertEnabled(s3);
+  const count = await Blobs.s3CountByStatus(status);
+  console.log(count);
+  return count; // just for testing
+});
+
+const setFailedToPending = withContainer(({ s3, Blobs }) => async () => {
+  assertEnabled(s3);
+  const count = await Blobs.s3SetFailedToPending();
+  console.log(`${count} blobs marked for re-uploading.`);
+});
+
+const uploadPending = withContainer(({ s3, Blobs }) => async () => {
+  assertEnabled(s3);
+  const count = await Blobs.s3CountByStatus('pending');
+
+  const signals = ['SIGINT', 'SIGTERM'];
+
+  const shutdownListener = async signal => {
+    await s3.destroy();
+    process.kill(process.pid, signal);
+  };
+  signals.forEach(s => process.once(s, shutdownListener));
+
+  try {
+    console.log(`Uploading ${count} blobs...`);
+    await Blobs.s3UploadPending();
+    console.log(`[${new Date().toISOString()}]`, 'Upload completed.');
+  } finally {
+    signals.forEach(s => process.removeListener(s, shutdownListener));
+  }
+});
+
+module.exports = { getCount, setFailedToPending, uploadPending };
diff --git a/lib/task/task.js b/lib/task/task.js
index 1065da753..c5a2d1859 100644
--- a/lib/task/task.js
+++ b/lib/task/task.js
@@ -30,6 +30,7 @@ const env = config.get('default.env');
 const { mailer } = require('../external/mail');
 const mail = mailer(mergeRight(config.get('default.email'), { env }));
 const odkAnalytics = require('../external/odk-analytics').init(config.get('default.external.analytics'));
+const s3 = require('../external/s3').init(config.get('default.external.s3blobStore'));
 
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -46,7 +47,7 @@ const task = {
   // not thread-safe! but we don't have threads..
   withContainer: (taskdef) => (...args) => {
     const needsContainer = (task._container == null);
-    if (needsContainer) task._container = container.withDefaults({ db: slonikPool(config.get('default.database')), env, mail, task: true, odkAnalytics });
+    if (needsContainer) task._container = container.withDefaults({ db: slonikPool(config.get('default.database')), env, mail, task: true, odkAnalytics, s3 });
 
     const result = taskdef(task._container)(...args);
diff --git a/lib/util/blob.js b/lib/util/blob.js
new file mode 100644
index 000000000..099f5aa14
--- /dev/null
+++ b/lib/util/blob.js
@@ -0,0 +1,70 @@
+// Copyright 2024 ODK Central Developers
+// See the NOTICE file at the top-level directory of this distribution and at
+// https://github.com/getodk/central-backend/blob/master/NOTICE.
+// This file is part of ODK Central. It is subject to the license terms in
+// the LICENSE file found in the top-level directory of this distribution and at
+// https://www.apache.org/licenses/LICENSE-2.0. No part of ODK Central,
+// including this file, may be copied, modified, propagated, or distributed
+// except according to the terms contained in the LICENSE file.
+
+const { Transform } = require('stream');
+const { PartialPipe } = require('./stream');
+const { contentDisposition, redirect, withEtag } = require('./http');
+
+function streamEncBlobs(s3, inStream) {
+  return PartialPipe.of(inStream, new Transform({
+    objectMode: true,
+    transform({ row: { blobId, encSha, encS3Status, ...row } }, _, done) {
+      if (encS3Status !== 'uploaded') return done(null, { row });
+
+      return s3.getContentFor({ id: blobId, sha: encSha })
+        .then(encData => done(null, { row: { ...row, encData } }))
+        .catch(done);
+    },
+  }));
+}
+
+function streamBlobs(s3, inStream) {
+  return PartialPipe.of(inStream, new Transform({
+    objectMode: true,
+    transform({ row }, _, done) {
+      const { blobId, sha, s3_status } = row;
+
+      if (s3_status !== 'uploaded') return done(null, { row }); // eslint-disable-line camelcase
+
+      return s3.getContentFor({ id: blobId, sha })
+        .then(content => done(null, { row: { ...row, content } }))
+        .catch(done);
+    },
+  }));
+}
+
+function blobContent(s3, blob) {
+  if (blob.s3_status === 'uploaded') return s3.getContentFor(blob);
+  else return Promise.resolve(blob.content);
+}
+
+async function blobResponse(s3, filename, blob) {
+  if (blob.s3_status === 'uploaded') {
+    // Per https://www.ietf.org/rfc/rfc9110.pdf section 13.2.1:
+    //
+    // > A server **MUST** ignore all received preconditions if its response to
+    // > the same request without those conditions, prior to processing the
+    // > request content, would have been a status code other than a 2xx
+    // > (Successful) or 412 (Precondition Failed).
+    //
+    // I.e. don't check the ETag header if the alternative is a 307.
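+    //
+    // Note that the presigned URL returned by urlForBlob() is only valid for
+    // a short window (60 seconds), so clients are expected to follow the 307
+    // redirect promptly.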
+    return redirect(307, await s3.urlForBlob(filename, blob));
+  } else {
+    return withEtag(
+      blob.md5,
+      () => (_, response) => {
+        response.set('Content-Disposition', contentDisposition(filename));
+        response.set('Content-Type', blob.contentType);
+        return blob.content;
+      },
+    );
+  }
+}
+
+module.exports = { blobContent, blobResponse, streamBlobs, streamEncBlobs };
diff --git a/lib/util/db.js b/lib/util/db.js
index 6e096bcd1..ca60a31fd 100644
--- a/lib/util/db.js
+++ b/lib/util/db.js
@@ -255,6 +255,7 @@ const queryFuncs = (db, obj) => {
   obj.maybeOne.map = (f) => (x) => x.map(f);
   obj.oneFirst = (s) => db.oneFirst(s);
   obj.oneFirst.map = (f) => (x) => f(x);
+  obj.maybeOneFirst = (s) => db.maybeOneFirst(s).then(Option.of);
 
   obj.all = (s) => db.any(s);
   obj.all.map = (f) => (xs) => {
diff --git a/lib/util/http.js b/lib/util/http.js
index 994ced6e1..1536284ff 100644
--- a/lib/util/http.js
+++ b/lib/util/http.js
@@ -69,12 +69,6 @@ const contentDisposition = (filename) => {
 
 // content-disposition helpers ////////////////////////////////////////
 
-const binary = (type, name, content) => (_, response) => {
-  response.set('Content-Disposition', contentDisposition(name));
-  response.set('Content-Type', type);
-  return content;
-};
-
 class Redirect { constructor(code, url) { this.code = code; this.url = url; } }
 const redirect = (x, y) => { if (y === undefined) {
@@ -131,14 +125,9 @@ const withEtag = (serverEtag, fn) => (request, response) => {
   return fn();
 };
 
-const blobResponse = (filename, blob) => withEtag(
-  blob.md5,
-  () => binary(blob.contentType, filename, blob.content),
-);
-
 module.exports = {
   isTrue, isFalse, urlPathname,
-  success, contentType, xml, atom, json, contentDisposition, blobResponse, redirect,
+  success, contentType, xml, atom, json, contentDisposition, redirect,
   urlWithQueryParams, url, withEtag
 };
diff --git a/lib/worker/submission.attachment.update.js b/lib/worker/submission.attachment.update.js
index 812dc8145..b10c6d543 100644
--- a/lib/worker/submission.attachment.update.js
+++ b/lib/worker/submission.attachment.update.js
@@ -7,9 +7,10 @@
 // including this file, may be copied, modified, propagated, or distributed
 // except according to the terms contained in the LICENSE file.
 
+const { blobContent } = require('../util/blob');
 const { parseClientAudits } = require('../data/client-audits');
 
-const worker = ({ ClientAudits, Blobs, Submissions, SubmissionAttachments }, event) =>
+const worker = ({ s3, ClientAudits, Blobs, Submissions, SubmissionAttachments }, event) =>
   Promise.all([
     Submissions.getDefById(event.details.submissionDefId),
     SubmissionAttachments.getBySubmissionDefIdAndName(event.details.submissionDefId, event.details.name)
@@ -23,8 +24,9 @@
       .then((exists) => ((exists === true)
         ? null // do nothing
         : Blobs.getById(attachment.blobId)
-          .then((maybeBlob) => maybeBlob.get()) // blobs are immutable
-          .then((blob) => parseClientAudits(blob.content))
+          .then((maybeBlob) => maybeBlob.get())
+          .then(blob => blobContent(s3, blob))
+          .then(parseClientAudits)
           .then((audits) => {
             const withBlobIds = audits.map((audit) => audit.with({ blobId: attachment.blobId }));
             return ClientAudits.createMany(withBlobIds);
diff --git a/package-lock.json b/package-lock.json
index 970502797..88683c88f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -26,6 +26,7 @@
         "htmlparser2": "~3.9",
         "knex": "~0.21",
         "luxon": "~0.3",
+        "minio": "^7.1.3",
         "morgan": "~1.9",
         "multer": "^1.4.5-lts.1",
         "mustache": "~2.3",
@@ -1470,6 +1471,12 @@
         "undici-types": "~5.26.4"
       }
     },
+    "node_modules/@zxing/text-encoding": {
+      "version": "0.9.0",
+      "resolved": "https://registry.npmjs.org/@zxing/text-encoding/-/text-encoding-0.9.0.tgz",
+      "integrity": "sha512-U/4aVJ2mxI0aDNI8Uq0wEhMgY+u4CNtEb0om3+y3+niDAsoTCOB33UF0sxpzqzdqXLqmvc+vZyAt4O8pPdfkwA==",
+      "optional": true
+    },
     "node_modules/abbrev": {
      "version": "1.1.1",
      "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
@@ -1899,6 +1906,20 @@
         "node": ">= 4.5.0"
       }
     },
+    "node_modules/available-typed-arrays": {
+      "version": "1.0.7",
+      "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz",
+      "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==",
+      "dependencies": {
+        "possible-typed-array-names": "^1.0.0"
+      },
+      "engines": {
+        "node": ">= 0.4"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/ljharb"
+      }
+    },
     "node_modules/balanced-match": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
@@ -2012,6 +2033,14 @@
         "node": ">= 0.8.0"
       }
     },
+    "node_modules/block-stream2": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/block-stream2/-/block-stream2-2.1.0.tgz",
+      "integrity": "sha512-suhjmLI57Ewpmq00qaygS8UgEq2ly2PCItenIyhMqVjo4t4pGzqMvfgJuX8iWTeSDdfSSqS6j38fL4ToNL7Pfg==",
+      "dependencies": {
+        "readable-stream": "^3.4.0"
+      }
+    },
     "node_modules/bluebird": {
       "version": "3.7.2",
       "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz",
@@ -2089,6 +2118,11 @@
         "node": ">=8"
       }
     },
+    "node_modules/browser-or-node": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/browser-or-node/-/browser-or-node-2.1.1.tgz",
+      "integrity": "sha512-8CVjaLJGuSKMVTxJ2DpBl5XnlNDiT4cQFeuCJJrvJmts9YrTZDizTX7PjC2s6W4x+MBGZeEY6dGMrF04/6Hgqg=="
+    },
     "node_modules/browser-stdout": {
       "version": "1.3.1",
       "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz",
@@ -2269,12 +2303,18 @@
     "node_modules/call-bind": {
-      "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz",
-      "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==",
+      "version": "1.0.7",
+      "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz",
+      "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==",
       "dependencies": {
-        "function-bind": "^1.1.1",
-        "get-intrinsic": "^1.0.2"
+        "es-define-property": "^1.0.0",
+        "es-errors": "^1.3.0",
+        "function-bind": "^1.1.2",
+        "get-intrinsic": "^1.2.4",
+        "set-function-length": "^1.2.1"
+      },
+      "engines": {
+        "node": ">= 0.4"
       },
       "funding": {
         "url": "https://github.com/sponsors/ljharb"
@@ -2997,6 +3037,22 @@
         "node": ">=10"
       }
     },
+    "node_modules/define-data-property": {
+      "version": "1.1.4",
+      "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
+      "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
+      "dependencies": {
+        "es-define-property": "^1.0.0",
+        "es-errors": "^1.3.0",
+        "gopd": "^1.0.1"
+      },
+      "engines": {
+        "node": ">= 0.4"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/ljharb"
+      }
+    },
     "node_modules/define-properties": {
       "version": "1.1.4",
       "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz",
@@ -3280,6 +3336,25 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
+    "node_modules/es-define-property": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz",
+      "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==",
+      "dependencies": {
+        "get-intrinsic": "^1.2.4"
+      },
+      "engines": {
+        "node": ">= 0.4"
+      }
+    },
+    "node_modules/es-errors": {
+      "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
+      "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
+      "engines": {
+        "node": ">= 0.4"
+      }
+    },
     "node_modules/es-shim-unscopables": {
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz",
@@ -4077,6 +4152,27 @@
       "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==",
       "dev": true
     },
+    "node_modules/fast-xml-parser": {
+      "version": "4.5.0",
+      "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.0.tgz",
+      "integrity": "sha512-/PlTQCI96+fZMAOLMZK4CWG1ItCbfZ/0jx7UIJFChPNrx7tcEgerUgWbeieCM9MfHInUDyK8DWYZ+YrywDJuTg==",
+      "funding": [
+        {
+          "type": "github",
+          "url": "https://github.com/sponsors/NaturalIntelligence"
+        },
+        {
+          "type": "paypal",
+          "url": "https://paypal.me/naturalintelligence"
+        }
+      ],
+      "dependencies": {
+        "strnum": "^1.0.5"
+      },
+      "bin": {
+        "fxparser": "src/cli/cli.js"
+      }
+    },
     "node_modules/fastq": {
       "version": "1.15.0",
       "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz",
@@ -4132,6 +4228,14 @@
         "node": ">=8"
       }
     },
+    "node_modules/filter-obj": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-1.1.0.tgz",
+      "integrity": "sha512-8rXg1ZnX7xzy2NGDVkBVaAy+lSlPNwad13BtgSlLuxfIslyt5Vg64U7tFcCt4WS1R0hvtnQybT/IyCkGZ3DpXQ==",
+      "engines": {
+        "node": ">=0.10.0"
+      }
+    },
     "node_modules/finalhandler": {
       "version": "1.2.0",
       "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz",
@@ -4398,6 +4502,14 @@
        }
      }
    },
+    "node_modules/for-each": {
+      "version": "0.3.3",
+      "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz",
+      "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==",
+      "dependencies": {
+        "is-callable": "^1.1.3"
+      }
+    },
     "node_modules/for-in": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz",
@@ -4574,9 +4686,12 @@
     "node_modules/function-bind": {
-      "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
-      "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
+      "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
+      "funding": {
+        "url": "https://github.com/sponsors/ljharb"
+      }
     },
     "node_modules/function.prototype.name": {
       "version": "1.1.5",
@@ -4643,13 +4758,18 @@
     "node_modules/get-intrinsic": {
-      "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.2.tgz",
-      "integrity": "sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA==",
+      "version": "1.2.4",
+      "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz",
+      "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==",
       "dependencies": {
-        "function-bind": "^1.1.1",
-        "has": "^1.0.3",
-        "has-symbols": "^1.0.3"
+        "es-errors": "^1.3.0",
+        "function-bind": "^1.1.2",
+        "has-proto": "^1.0.1",
+        "has-symbols": "^1.0.3",
+        "hasown": "^2.0.0"
+      },
+      "engines": {
+        "node": ">= 0.4"
       },
       "funding": {
         "url": "https://github.com/sponsors/ljharb"
@@ -4894,6 +5014,17 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
+    "node_modules/gopd": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz",
+      "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==",
+      "dependencies": {
+        "get-intrinsic": "^1.1.3"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/ljharb"
+      }
+    },
     "node_modules/got": {
       "version": "13.0.0",
       "resolved": "https://registry.npmjs.org/got/-/got-13.0.0.tgz",
@@ -4959,11 +5090,22 @@
     "node_modules/has-property-descriptors": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz",
-      "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==",
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
+      "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
       "dependencies": {
-        "get-intrinsic": "^1.1.1"
+        "es-define-property": "^1.0.0"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/ljharb"
+      }
+    },
+    "node_modules/has-proto": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz",
+      "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==",
+      "engines": {
+        "node": ">= 0.4"
      },
      "funding": {
        "url": "https://github.com/sponsors/ljharb"
@@ -4981,12 +5123,11 @@
     "node_modules/has-tostringtag": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz",
-      "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==",
-      "dev": true,
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
+      "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
       "dependencies": {
-        "has-symbols": "^1.0.2"
+        "has-symbols": "^1.0.3"
       },
       "engines": {
         "node": ">= 0.4"
@@ -5083,6 +5224,17 @@
         "node": ">=8"
       }
     },
+    "node_modules/hasown": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
+      "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
+      "dependencies": {
+        "function-bind": "^1.1.2"
+      },
+      "engines": {
+        "node": ">= 0.4"
+      }
+    },
     "node_modules/he": {
       "version": "1.2.0",
       "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz",
@@ -5558,6 +5710,21 @@
         "node": ">=0.10.0"
       }
     },
+    "node_modules/is-arguments": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz",
+      "integrity": "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==",
+      "dependencies": {
+        "call-bind": "^1.0.2",
+        "has-tostringtag": "^1.0.0"
+      },
+      "engines": {
+        "node": ">= 0.4"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/ljharb"
+      }
+    },
     "node_modules/is-bigint": {
       "version": "1.0.4",
       "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz",
@@ -5606,7 +5773,6 @@
       "version": "1.2.4",
       "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz",
       "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==",
-      "dev": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -5700,7 +5866,6 @@
       "version": "1.0.10",
       "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz",
       "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==",
-      "dev": true,
       "dependencies": {
         "has-tostringtag": "^1.0.0"
       },
@@ -5867,6 +6032,20 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
+    "node_modules/is-typed-array": {
+      "version": "1.1.13",
+      "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz",
+      "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==",
+      "dependencies": {
+        "which-typed-array": "^1.1.14"
+      },
+      "engines": {
+        "node": ">= 0.4"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/ljharb"
+      }
+    },
     "node_modules/is-typedarray": {
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
@@ -6145,6 +6324,11 @@
       "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
       "dev": true
     },
+    "node_modules/json-stream": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/json-stream/-/json-stream-1.0.0.tgz",
+      "integrity": "sha512-H/ZGY0nIAg3QcOwE1QN/rK/Fa7gJn7Ii5obwp6zyPO4xiPNwpIMjqy2gwjBEGqzkF/vSWEIBQCBuN19hYiL6Qg=="
+    },
     "node_modules/json-stringify-safe": {
       "version": "5.0.1",
       "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
@@ -6744,6 +6928,38 @@
       "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
       "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q=="
     },
+    "node_modules/minio": {
+      "version": "7.1.3",
+      "resolved": "https://registry.npmjs.org/minio/-/minio-7.1.3.tgz",
+      "integrity": "sha512-xPrLjWkTT5E7H7VnzOjF//xBp9I40jYB4aWhb2xTFopXXfw+Wo82DDWngdUju7Doy3Wk7R8C4LAgwhLHHnf0wA==",
+      "dependencies": {
+        "async": "^3.2.4",
+        "block-stream2": "^2.1.0",
+        "browser-or-node": "^2.1.1",
+        "buffer-crc32": "^0.2.13",
+        "fast-xml-parser": "^4.2.2",
+        "ipaddr.js": "^2.0.1",
+        "json-stream": "^1.0.0",
+        "lodash": "^4.17.21",
+        "mime-types": "^2.1.35",
+        "query-string": "^7.1.3",
+        "through2": "^4.0.2",
+        "web-encoding": "^1.1.5",
+        "xml": "^1.0.1",
+        "xml2js": "^0.5.0"
+      },
+      "engines": {
+        "node": "^16 || ^18 || >=20"
+      }
+    },
+    "node_modules/minio/node_modules/ipaddr.js": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.2.0.tgz",
+      "integrity": "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA==",
+      "engines": {
+        "node": ">= 10"
+      }
+    },
     "node_modules/minipass": {
       "version": "5.0.0",
       "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz",
@@ -8731,6 +8947,14 @@
         "node": ">=0.10.0"
       }
     },
+    "node_modules/possible-typed-array-names": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz",
+      "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==",
+      "engines": {
+        "node": ">= 0.4"
+      }
+    },
     "node_modules/postgres-array": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
@@ -8950,6 +9174,23 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
+    "node_modules/query-string": {
+      "version": "7.1.3",
+      "resolved": "https://registry.npmjs.org/query-string/-/query-string-7.1.3.tgz",
+      "integrity": "sha512-hh2WYhq4fi8+b+/2Kg9CEge4fDPvHS534aOOvOZeQ3+Vf2mCFsaFBYj0i+iXcAq6I9Vzp5fjMFBlONvayDC1qg==",
+      "dependencies": {
+        "decode-uri-component": "^0.2.2",
+        "filter-obj": "^1.1.0",
+        "split-on-first": "^1.0.0",
+        "strict-uri-encode": "^2.0.0"
+      },
+      "engines": {
+        "node": ">=6"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "node_modules/querystringify": {
       "version": "2.2.0",
       "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
@@ -9524,6 +9765,22 @@
       "integrity": "sha512-RVnVQxTXuerk653XfuliOxBP81Sf0+qfQE73LIYKcyMYHG94AuH0kgrQpRDuTZnSmjpysHmzxJXKNfa6PjFhyQ==",
       "dev": true
     },
+    "node_modules/set-function-length": {
+      "version": "1.2.2",
+      "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz",
+      "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==",
+      "dependencies": {
+        "define-data-property": "^1.1.4",
+        "es-errors": "^1.3.0",
+        "function-bind": "^1.1.2",
+        "get-intrinsic": "^1.2.4",
+        "gopd": "^1.0.1",
+        "has-property-descriptors": "^1.0.2"
+      },
+      "engines": {
+        "node": ">= 0.4"
+      }
+    },
     "node_modules/set-value": {
       "version": "2.0.1",
       "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz",
@@ -10093,6 +10350,14 @@
         "node": ">=8"
       }
     },
+    "node_modules/split-on-first": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-1.1.0.tgz",
+      "integrity": "sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw==",
+      "engines": {
+        "node": ">=6"
+      }
+    },
     "node_modules/split-string": {
       "version": "3.1.0",
       "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz",
@@ -10265,6 +10530,14 @@
         "safe-buffer": "~5.1.0"
       }
     },
+    "node_modules/strict-uri-encode": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz",
+      "integrity": "sha512-QwiXZgpRcKkhTj2Scnn++4PKtWsH0kpzZ62L2R6c/LUVYv7hVnZqcg2+sMuT6R7Jusu1vviK/MFsu6kNJfWlEQ==",
+      "engines": {
+        "node": ">=4"
+      }
+    },
     "node_modules/string_decoder": {
       "version": "1.3.0",
       "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
@@ -10370,6 +10643,11 @@
         "url": "https://github.com/sponsors/sindresorhus"
       }
     },
+    "node_modules/strnum": {
+      "version": "1.0.5",
+      "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz",
+      "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA=="
+    },
     "node_modules/superagent": {
       "version": "8.0.9",
       "resolved": "https://registry.npmjs.org/superagent/-/superagent-8.0.9.tgz",
@@ -11015,6 +11293,18 @@
       "resolved": "https://registry.npmjs.org/utf8-byte-length/-/utf8-byte-length-1.0.4.tgz",
       "integrity": "sha512-4+wkEYLBbWxqTahEsWrhxepcoVOJ+1z5PGIjPZxRkytcdSUaNjIjBM7Xn8E+pdSuV7SzvWovBFA54FO0JSoqhA=="
     },
+    "node_modules/util": {
+      "version": "0.12.5",
+      "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz",
+      "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==",
+      "dependencies": {
+        "inherits": "^2.0.3",
+        "is-arguments": "^1.0.4",
+        "is-generator-function": "^1.0.7",
+        "is-typed-array": "^1.1.3",
+        "which-typed-array": "^1.1.2"
+      }
+    },
     "node_modules/util-deprecate": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
@@ -11082,6 +11372,17 @@
         "lodash": "^4.17.14"
       }
     },
+    "node_modules/web-encoding": {
+      "version": "1.1.5",
+      "resolved": "https://registry.npmjs.org/web-encoding/-/web-encoding-1.1.5.tgz",
+      "integrity": "sha512-HYLeVCdJ0+lBYV2FvNZmv3HJ2Nt0QYXqZojk3d9FJOLkwnuhzM9tmamh8d7HPM8QqjKH8DeHkFTx+CFlWpZZDA==",
+      "dependencies": {
+        "util": "^0.12.3"
+      },
+      "optionalDependencies": {
+        "@zxing/text-encoding": "0.9.0"
+      }
+    },
     "node_modules/webidl-conversions": {
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
@@ -11133,6 +11434,24 @@
       "integrity": "sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==",
       "dev": true
     },
+    "node_modules/which-typed-array": {
+      "version": "1.1.15",
+      "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz",
+      "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==",
+      "dependencies": {
+        "available-typed-arrays": "^1.0.7",
+        "call-bind": "^1.0.7",
+        "for-each": "^0.3.3",
+        "gopd": "^1.0.1",
+        "has-tostringtag": "^1.0.2"
+      },
+      "engines": {
+        "node": ">= 0.4"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/ljharb"
+      }
+    },
     "node_modules/wide-align": {
       "version": "1.1.5",
       "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz",
@@ -11220,8 +11539,27 @@
     "node_modules/xml": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/xml/-/xml-1.0.1.tgz",
-      "integrity": "sha512-huCv9IH9Tcf95zuYCsQraZtWnJvBtLVE0QHMOs8bWyZAFZNDcYjsPq1nEx8jKA9y+Beo9v+7OBPRisQTjinQMw==",
-      "dev": true
+      "integrity": "sha512-huCv9IH9Tcf95zuYCsQraZtWnJvBtLVE0QHMOs8bWyZAFZNDcYjsPq1nEx8jKA9y+Beo9v+7OBPRisQTjinQMw=="
+    },
+    "node_modules/xml2js": {
+      "version": "0.5.0",
+      "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz",
+      "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==",
+      "dependencies": {
+        "sax": ">=0.6.0",
+        "xmlbuilder": "~11.0.0"
+      },
+      "engines": {
+        "node": ">=4.0.0"
+      }
+    },
+    "node_modules/xmlbuilder": {
+      "version": "11.0.1",
+      "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz",
+      "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==",
+      "engines": {
+        "node": ">=4.0"
+      }
     },
     "node_modules/xtend": {
       "version": "4.0.2",
@@ -12451,6
+12789,12 @@ "undici-types": "~5.26.4" } }, + "@zxing/text-encoding": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@zxing/text-encoding/-/text-encoding-0.9.0.tgz", + "integrity": "sha512-U/4aVJ2mxI0aDNI8Uq0wEhMgY+u4CNtEb0om3+y3+niDAsoTCOB33UF0sxpzqzdqXLqmvc+vZyAt4O8pPdfkwA==", + "optional": true + }, "abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", @@ -12475,8 +12819,7 @@ "version": "5.3.2", "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "dev": true, - "requires": {} + "dev": true }, "agent-base": { "version": "6.0.2", @@ -12781,6 +13124,14 @@ "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==" }, + "available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "requires": { + "possible-typed-array-names": "^1.0.0" + } + }, "balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -12857,6 +13208,14 @@ "resolved": "https://registry.npmjs.org/blessed/-/blessed-0.1.81.tgz", "integrity": "sha512-LoF5gae+hlmfORcG1M5+5XZi4LBmvlXTzwJWzUlPryN/SJdSflZvROM2TwkT0GMpq7oqT48NRd4GS7BiVBc5OQ==" }, + "block-stream2": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/block-stream2/-/block-stream2-2.1.0.tgz", + "integrity": "sha512-suhjmLI57Ewpmq00qaygS8UgEq2ly2PCItenIyhMqVjo4t4pGzqMvfgJuX8iWTeSDdfSSqS6j38fL4ToNL7Pfg==", + "requires": { + "readable-stream": "^3.4.0" + } + }, "bluebird": { "version": "3.7.2", "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", @@ -12923,6 +13282,11 @@ "fill-range": "^7.0.1" } }, + "browser-or-node": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/browser-or-node/-/browser-or-node-2.1.1.tgz", + "integrity": "sha512-8CVjaLJGuSKMVTxJ2DpBl5XnlNDiT4cQFeuCJJrvJmts9YrTZDizTX7PjC2s6W4x+MBGZeEY6dGMrF04/6Hgqg==" + }, "browser-stdout": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", @@ -13043,12 +13407,15 @@ } }, "call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "requires": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" } }, "callsites": { @@ -13597,6 +13964,16 @@ "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", "dev": true }, + "define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": 
"sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "requires": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + } + }, "define-properties": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", @@ -13818,6 +14195,19 @@ "unbox-primitive": "^1.0.2" } }, + "es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "requires": { + "get-intrinsic": "^1.2.4" + } + }, + "es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==" + }, "es-shim-unscopables": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", @@ -14429,6 +14819,14 @@ "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", "dev": true }, + "fast-xml-parser": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.0.tgz", + "integrity": "sha512-/PlTQCI96+fZMAOLMZK4CWG1ItCbfZ/0jx7UIJFChPNrx7tcEgerUgWbeieCM9MfHInUDyK8DWYZ+YrywDJuTg==", + "requires": { + "strnum": "^1.0.5" + } + }, "fastq": { "version": "1.15.0", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", @@ -14478,6 +14876,11 @@ "to-regex-range": "^5.0.1" } }, + "filter-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-1.1.0.tgz", + "integrity": "sha512-8rXg1ZnX7xzy2NGDVkBVaAy+lSlPNwad13BtgSlLuxfIslyt5Vg64U7tFcCt4WS1R0hvtnQybT/IyCkGZ3DpXQ==" + }, "finalhandler": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", @@ -14680,6 +15083,14 @@ "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==" }, + "for-each": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "requires": { + "is-callable": "^1.1.3" + } + }, "for-in": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", @@ -14801,9 +15212,9 @@ "optional": true }, "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==" }, "function.prototype.name": { "version": "1.1.5", @@ -14852,13 +15263,15 @@ "dev": true }, "get-intrinsic": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.2.tgz", - "integrity": "sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA==", + "version": "1.2.4", + "resolved": 
"https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", "requires": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" } }, "get-package-type": { @@ -15041,6 +15454,14 @@ "define-properties": "^1.1.3" } }, + "gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "requires": { + "get-intrinsic": "^1.1.3" + } + }, "got": { "version": "13.0.0", "resolved": "https://registry.npmjs.org/got/-/got-13.0.0.tgz", @@ -15091,25 +15512,29 @@ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, "has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "requires": { - "get-intrinsic": "^1.1.1" + "es-define-property": "^1.0.0" } }, + "has-proto": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", + "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==" + }, "has-symbols": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" }, "has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", - "dev": true, + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "requires": { - "has-symbols": "^1.0.2" + "has-symbols": "^1.0.3" } }, "has-unicode": { @@ -15182,6 +15607,14 @@ } } }, + "hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "requires": { + "function-bind": "^1.1.2" + } + }, "he": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", @@ -15542,6 +15975,15 @@ "kind-of": "^6.0.0" } }, + "is-arguments": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz", + "integrity": "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==", + "requires": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + } + }, "is-bigint": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", @@ -15577,8 +16019,7 @@ "is-callable": { "version": "1.2.4", 
"resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", - "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", - "dev": true + "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==" }, "is-circular": { "version": "1.0.2", @@ -15642,7 +16083,6 @@ "version": "1.0.10", "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", - "dev": true, "requires": { "has-tostringtag": "^1.0.0" } @@ -15746,6 +16186,14 @@ "has-symbols": "^1.0.2" } }, + "is-typed-array": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", + "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", + "requires": { + "which-typed-array": "^1.1.14" + } + }, "is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", @@ -15965,6 +16413,11 @@ "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", "dev": true }, + "json-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-stream/-/json-stream-1.0.0.tgz", + "integrity": "sha512-H/ZGY0nIAg3QcOwE1QN/rK/Fa7gJn7Ii5obwp6zyPO4xiPNwpIMjqy2gwjBEGqzkF/vSWEIBQCBuN19hYiL6Qg==" + }, "json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", @@ -16411,6 +16864,34 @@ "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" }, + "minio": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/minio/-/minio-7.1.3.tgz", + "integrity": "sha512-xPrLjWkTT5E7H7VnzOjF//xBp9I40jYB4aWhb2xTFopXXfw+Wo82DDWngdUju7Doy3Wk7R8C4LAgwhLHHnf0wA==", + "requires": { + "async": "^3.2.4", + "block-stream2": "^2.1.0", + "browser-or-node": "^2.1.1", + "buffer-crc32": "^0.2.13", + "fast-xml-parser": "^4.2.2", + "ipaddr.js": "^2.0.1", + "json-stream": "^1.0.0", + "lodash": "^4.17.21", + "mime-types": "^2.1.35", + "query-string": "^7.1.3", + "through2": "^4.0.2", + "web-encoding": "^1.1.5", + "xml": "^1.0.1", + "xml2js": "^0.5.0" + }, + "dependencies": { + "ipaddr.js": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.2.0.tgz", + "integrity": "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA==" + } + } + }, "minipass": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", @@ -17568,8 +18049,7 @@ "pg-cursor": { "version": "2.7.4", "resolved": "https://registry.npmjs.org/pg-cursor/-/pg-cursor-2.7.4.tgz", - "integrity": "sha512-CNWwOzTTZ9QvphoOL+Wg/7pmVr9GnAWBjPbuK2FRclrB4A/WRO/ssCJ9BlkzIGmmofK2M/LyokNHgsLSn+fMHA==", - "requires": {} + "integrity": "sha512-CNWwOzTTZ9QvphoOL+Wg/7pmVr9GnAWBjPbuK2FRclrB4A/WRO/ssCJ9BlkzIGmmofK2M/LyokNHgsLSn+fMHA==" }, "pg-int8": { "version": "1.0.1", @@ -17579,8 +18059,7 @@ "pg-pool": { "version": "3.5.2", "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.5.2.tgz", - "integrity": "sha512-His3Fh17Z4eg7oANLob6ZvH8xIVen3phEZh2QuyrIl4dQSDVEabNducv6ysROKpDNPSD+12tONZVWfSgMvDD9w==", - "requires": {} + "integrity": 
"sha512-His3Fh17Z4eg7oANLob6ZvH8xIVen3phEZh2QuyrIl4dQSDVEabNducv6ysROKpDNPSD+12tONZVWfSgMvDD9w==" }, "pg-protocol": { "version": "1.5.0", @@ -17897,6 +18376,11 @@ "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", "integrity": "sha512-xTgYBc3fuo7Yt7JbiuFxSYGToMoz8fLoE6TC9Wx1P/u+LfeThMOAqmuyECnlBaaJb+u1m9hHiXUEtwW4OzfUJg==" }, + "possible-typed-array-names": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", + "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==" + }, "postgres-array": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", @@ -18064,6 +18548,17 @@ "side-channel": "^1.0.4" } }, + "query-string": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/query-string/-/query-string-7.1.3.tgz", + "integrity": "sha512-hh2WYhq4fi8+b+/2Kg9CEge4fDPvHS534aOOvOZeQ3+Vf2mCFsaFBYj0i+iXcAq6I9Vzp5fjMFBlONvayDC1qg==", + "requires": { + "decode-uri-component": "^0.2.2", + "filter-obj": "^1.1.0", + "split-on-first": "^1.0.0", + "strict-uri-encode": "^2.0.0" + } + }, "querystringify": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", @@ -18477,6 +18972,19 @@ "integrity": "sha512-RVnVQxTXuerk653XfuliOxBP81Sf0+qfQE73LIYKcyMYHG94AuH0kgrQpRDuTZnSmjpysHmzxJXKNfa6PjFhyQ==", "dev": true }, + "set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "requires": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + } + }, "set-value": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", @@ -18932,6 +19440,11 @@ "which": "^2.0.1" } }, + "split-on-first": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-1.1.0.tgz", + "integrity": "sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw==" + }, "split-string": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz", @@ -19070,6 +19583,11 @@ } } }, + "strict-uri-encode": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz", + "integrity": "sha512-QwiXZgpRcKkhTj2Scnn++4PKtWsH0kpzZ62L2R6c/LUVYv7hVnZqcg2+sMuT6R7Jusu1vviK/MFsu6kNJfWlEQ==" + }, "string_decoder": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", @@ -19142,6 +19660,11 @@ "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", "dev": true }, + "strnum": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", + "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==" + }, "superagent": { "version": "8.0.9", "resolved": "https://registry.npmjs.org/superagent/-/superagent-8.0.9.tgz", @@ -19625,6 +20148,18 @@ "resolved": "https://registry.npmjs.org/utf8-byte-length/-/utf8-byte-length-1.0.4.tgz", "integrity": 
"sha512-4+wkEYLBbWxqTahEsWrhxepcoVOJ+1z5PGIjPZxRkytcdSUaNjIjBM7Xn8E+pdSuV7SzvWovBFA54FO0JSoqhA==" }, + "util": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", + "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", + "requires": { + "inherits": "^2.0.3", + "is-arguments": "^1.0.4", + "is-generator-function": "^1.0.7", + "is-typed-array": "^1.1.3", + "which-typed-array": "^1.1.2" + } + }, "util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", @@ -19679,6 +20214,15 @@ } } }, + "web-encoding": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/web-encoding/-/web-encoding-1.1.5.tgz", + "integrity": "sha512-HYLeVCdJ0+lBYV2FvNZmv3HJ2Nt0QYXqZojk3d9FJOLkwnuhzM9tmamh8d7HPM8QqjKH8DeHkFTx+CFlWpZZDA==", + "requires": { + "@zxing/text-encoding": "0.9.0", + "util": "^0.12.3" + } + }, "webidl-conversions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", @@ -19721,6 +20265,18 @@ "integrity": "sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==", "dev": true }, + "which-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", + "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", + "requires": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-tostringtag": "^1.0.2" + } + }, "wide-align": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", @@ -19779,14 +20335,26 @@ "ws": { "version": "7.4.6", "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.6.tgz", - "integrity": "sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==", - "requires": {} + "integrity": "sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==" }, "xml": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/xml/-/xml-1.0.1.tgz", - "integrity": "sha512-huCv9IH9Tcf95zuYCsQraZtWnJvBtLVE0QHMOs8bWyZAFZNDcYjsPq1nEx8jKA9y+Beo9v+7OBPRisQTjinQMw==", - "dev": true + "integrity": "sha512-huCv9IH9Tcf95zuYCsQraZtWnJvBtLVE0QHMOs8bWyZAFZNDcYjsPq1nEx8jKA9y+Beo9v+7OBPRisQTjinQMw==" + }, + "xml2js": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", + "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", + "requires": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + } + }, + "xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==" }, "xtend": { "version": "4.0.2", diff --git a/package.json b/package.json index bd8edc899..fb3a7ad32 100644 --- a/package.json +++ b/package.json @@ -27,6 +27,7 @@ "htmlparser2": "~3.9", "knex": "~0.21", "luxon": "~0.3", + "minio": "^7.1.3", "morgan": "~1.9", "multer": "^1.4.5-lts.1", "mustache": "~2.3", diff --git a/test/e2e/soak/.eslintrc.js b/test/e2e/.eslintrc.js similarity index 100% rename from test/e2e/soak/.eslintrc.js rename to test/e2e/.eslintrc.js diff --git a/test/e2e/s3/run-tests.sh b/test/e2e/s3/run-tests.sh new file mode 
100755
index 000000000..54e5a1778
--- /dev/null
+++ b/test/e2e/s3/run-tests.sh
@@ -0,0 +1,71 @@
+#!/bin/bash -eu
+set -o pipefail
+
+serverUrl="http://localhost:8383"
+userEmail="x@example.com"
+userPassword="secret1234"
+
+log() { echo "[test/e2e/s3/run-tests] $*"; }
+
+cleanup() {
+  if [[ -n "${_cleanupStarted-}" ]]; then return; fi
+  _cleanupStarted=1 # track to prevent recursive cleanup
+
+  log "Cleaning up background service(s); ignore subsequent errors."
+  set +eo pipefail
+  # signal our whole process group, so the backgrounded server dies too
+  kill -- -$$
+}
+trap cleanup EXIT SIGINT SIGTERM SIGHUP
+
+if curl -s -o /dev/null $serverUrl; then
+  log "!!! Error: server already running at: $serverUrl"
+  exit 1
+fi
+
+make base
+
+if [[ "${CI-}" = '' ]]; then
+  set +e
+fi
+
+log "Attempting to create user..."
+echo "$userPassword" | node ./lib/bin/cli.js user-create -u "$userEmail" && log "User created."
+log "Attempting to promote user..."
+node ./lib/bin/cli.js user-promote -u "$userEmail" && log "User promoted."
+
+if [[ "${CI-}" = '' ]]; then
+  set -e
+  cat <<EOF
+
+Press <enter> to continue...
+
+EOF
+  read -rp ''
+fi
+
+NODE_CONFIG_ENV=s3-dev node lib/bin/s3-create-bucket.js
+NODE_CONFIG_ENV=s3-dev make run &
+serverPid=$!
+
+log 'Waiting for backend to start...'
+timeout 30 bash -c "while ! curl -s -o /dev/null $serverUrl; do sleep 1; done"
+log 'Backend started!'
+
+cd test/e2e/s3
+npx mocha test.js
+
+if ! curl -s -o /dev/null "$serverUrl"; then
+  log '!!! Backend died.'
+  exit 1
+fi
+
+log "Tests completed OK."
diff --git a/test/e2e/s3/test-forms/.gitignore b/test/e2e/s3/test-forms/.gitignore
new file mode 100644
index 000000000..b71093d7a
--- /dev/null
+++ b/test/e2e/s3/test-forms/.gitignore
@@ -0,0 +1 @@
+/*-attachments/big-*.bin
diff --git a/test/e2e/s3/test-forms/1-attachments/a.jpg b/test/e2e/s3/test-forms/1-attachments/a.jpg
new file mode 100644
index 000000000..625873b9e
Binary files /dev/null and b/test/e2e/s3/test-forms/1-attachments/a.jpg differ
diff --git a/test/e2e/s3/test-forms/1-attachments/angry.png b/test/e2e/s3/test-forms/1-attachments/angry.png
new file mode 100644
index 000000000..aa85398a4
Binary files /dev/null and b/test/e2e/s3/test-forms/1-attachments/angry.png differ
diff --git a/test/e2e/s3/test-forms/1-attachments/b.jpg b/test/e2e/s3/test-forms/1-attachments/b.jpg
new file mode 100644
index 000000000..47ece14e0
Binary files /dev/null and b/test/e2e/s3/test-forms/1-attachments/b.jpg differ
diff --git a/test/e2e/s3/test-forms/1-attachments/body.svg b/test/e2e/s3/test-forms/1-attachments/body.svg
new file mode 100644
index 000000000..77dbe0120
--- /dev/null
+++ b/test/e2e/s3/test-forms/1-attachments/body.svg
@@ -0,0 +1 @@
+
diff --git a/test/e2e/s3/test-forms/1-attachments/c.jpg b/test/e2e/s3/test-forms/1-attachments/c.jpg
new file mode 100644
index 000000000..ec87704cd
Binary files /dev/null and b/test/e2e/s3/test-forms/1-attachments/c.jpg differ
diff --git a/test/e2e/s3/test-forms/1-attachments/d.jpg b/test/e2e/s3/test-forms/1-attachments/d.jpg
new file mode 100644
index 000000000..c029b4052
Binary files /dev/null and b/test/e2e/s3/test-forms/1-attachments/d.jpg differ
diff --git a/test/e2e/s3/test-forms/1-attachments/famous.png b/test/e2e/s3/test-forms/1-attachments/famous.png
new file mode 100644
index 000000000..5f07ad9eb
Binary files /dev/null and b/test/e2e/s3/test-forms/1-attachments/famous.png differ
diff --git a/test/e2e/s3/test-forms/1-attachments/neutral.png b/test/e2e/s3/test-forms/1-attachments/neutral.png
new file mode 100644
index 000000000..f8114e95e
Binary files /dev/null and b/test/e2e/s3/test-forms/1-attachments/neutral.png
differ diff --git a/test/e2e/s3/test-forms/1-attachments/sad.png b/test/e2e/s3/test-forms/1-attachments/sad.png new file mode 100644 index 000000000..afc31dcb0 Binary files /dev/null and b/test/e2e/s3/test-forms/1-attachments/sad.png differ diff --git a/test/e2e/s3/test-forms/1-attachments/smile.png b/test/e2e/s3/test-forms/1-attachments/smile.png new file mode 100644 index 000000000..743682116 Binary files /dev/null and b/test/e2e/s3/test-forms/1-attachments/smile.png differ diff --git a/test/e2e/s3/test-forms/1.xml b/test/e2e/s3/test-forms/1.xml new file mode 100644 index 000000000..73e0e9bbb --- /dev/null +++ b/test/e2e/s3/test-forms/1.xml @@ -0,0 +1,108 @@ + + + + Blob Test 1 + + + + + A + jr://images/a.jpg + + + Angry + jr://images/angry.png + + + B + jr://images/b.jpg + + + Big Bin 1 + jr://images/big-1.bin + + + Body Svg + jr://images/body.svg + + + C + jr://images/c.jpg + + + D + jr://images/d.jpg + + + Famous + jr://images/famous.png + + + Neutral + jr://images/neutral.png + + + Sad + jr://images/sad.png + + + Smile + jr://images/smile.png + + + + + + + + + + + + + + + + + image-a + a + + + image-b + b + + + image-c + c + + + image-d + d + + + + + + + + + + + + + + image type with draw appearance + + + + file type with no appearance <br/> WARNING: any kind of file could be uploaded including files that contain viruses or other malware. Be sure to take proper precautions when downloading files from server. + + + diff --git a/test/e2e/s3/test-forms/2.xml b/test/e2e/s3/test-forms/2.xml new file mode 100644 index 000000000..af0797d1a --- /dev/null +++ b/test/e2e/s3/test-forms/2.xml @@ -0,0 +1,25 @@ + + + + Blob Test 2 + + + + + Big Bin 1 + jr://images/big-1.bin + + + + + + + + + + + + + + + diff --git a/test/e2e/s3/test-forms/3-attachments/a.jpg b/test/e2e/s3/test-forms/3-attachments/a.jpg new file mode 100644 index 000000000..aff54b47b Binary files /dev/null and b/test/e2e/s3/test-forms/3-attachments/a.jpg differ diff --git a/test/e2e/s3/test-forms/3-attachments/angry.png b/test/e2e/s3/test-forms/3-attachments/angry.png new file mode 100644 index 000000000..856408b76 Binary files /dev/null and b/test/e2e/s3/test-forms/3-attachments/angry.png differ diff --git a/test/e2e/s3/test-forms/3-attachments/b.jpg b/test/e2e/s3/test-forms/3-attachments/b.jpg new file mode 100644 index 000000000..94ba0f1ed Binary files /dev/null and b/test/e2e/s3/test-forms/3-attachments/b.jpg differ diff --git a/test/e2e/s3/test-forms/3-attachments/body.svg b/test/e2e/s3/test-forms/3-attachments/body.svg new file mode 100644 index 000000000..67de9a259 --- /dev/null +++ b/test/e2e/s3/test-forms/3-attachments/body.svg @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/test/e2e/s3/test-forms/3-attachments/c.jpg b/test/e2e/s3/test-forms/3-attachments/c.jpg new file mode 100644 index 000000000..88124a67f Binary files /dev/null and b/test/e2e/s3/test-forms/3-attachments/c.jpg differ diff --git a/test/e2e/s3/test-forms/3-attachments/d.jpg b/test/e2e/s3/test-forms/3-attachments/d.jpg new file mode 100644 index 000000000..ac3991746 Binary files /dev/null and b/test/e2e/s3/test-forms/3-attachments/d.jpg differ diff --git a/test/e2e/s3/test-forms/3-attachments/famous.png b/test/e2e/s3/test-forms/3-attachments/famous.png new file mode 100644 index 000000000..43a2b0805 Binary files /dev/null and b/test/e2e/s3/test-forms/3-attachments/famous.png differ diff --git a/test/e2e/s3/test-forms/3-attachments/neutral.png b/test/e2e/s3/test-forms/3-attachments/neutral.png new file mode 100644 index 000000000..d39e13078 
Binary files /dev/null and b/test/e2e/s3/test-forms/3-attachments/neutral.png differ diff --git a/test/e2e/s3/test-forms/3-attachments/sad.png b/test/e2e/s3/test-forms/3-attachments/sad.png new file mode 100644 index 000000000..d22fefbe9 Binary files /dev/null and b/test/e2e/s3/test-forms/3-attachments/sad.png differ diff --git a/test/e2e/s3/test-forms/3-attachments/smile.png b/test/e2e/s3/test-forms/3-attachments/smile.png new file mode 100644 index 000000000..8be6dbe46 Binary files /dev/null and b/test/e2e/s3/test-forms/3-attachments/smile.png differ diff --git a/test/e2e/s3/test-forms/3.xml b/test/e2e/s3/test-forms/3.xml new file mode 100644 index 000000000..97845bcca --- /dev/null +++ b/test/e2e/s3/test-forms/3.xml @@ -0,0 +1,108 @@ + + + + Blob Test 3 + + + + + A + jr://images/a.jpg + + + Angry + jr://images/angry.png + + + B + jr://images/b.jpg + + + Big Bin 1 + jr://images/big-1.bin + + + Body Svg + jr://images/body.svg + + + C + jr://images/c.jpg + + + D + jr://images/d.jpg + + + Famous + jr://images/famous.png + + + Neutral + jr://images/neutral.png + + + Sad + jr://images/sad.png + + + Smile + jr://images/smile.png + + + + + + + + + + + + + + + + + image-a + a + + + image-b + b + + + image-c + c + + + image-d + d + + + + + + + + + + + + + + image type with draw appearance + + + + file type with no appearance <br/> WARNING: any kind of file could be uploaded including files that contain viruses or other malware. Be sure to take proper precautions when downloading files from server. + + + diff --git a/test/e2e/s3/test-forms/4.xml b/test/e2e/s3/test-forms/4.xml new file mode 100644 index 000000000..a065ffe44 --- /dev/null +++ b/test/e2e/s3/test-forms/4.xml @@ -0,0 +1,25 @@ + + + + Blob Test 4 + + + + + Big Bin 1 + jr://images/big-1.bin + + + + + + + + + + + + + + + diff --git a/test/e2e/s3/test-forms/5.xml b/test/e2e/s3/test-forms/5.xml new file mode 100644 index 000000000..f91ff8f17 --- /dev/null +++ b/test/e2e/s3/test-forms/5.xml @@ -0,0 +1,25 @@ + + + + Blob Test 5 + + + + + Big Bin 1 + jr://images/big-1.bin + + + + + + + + + + + + + + + diff --git a/test/e2e/s3/test-forms/6.xml b/test/e2e/s3/test-forms/6.xml new file mode 100644 index 000000000..4cd194eca --- /dev/null +++ b/test/e2e/s3/test-forms/6.xml @@ -0,0 +1,25 @@ + + + + Blob Test 6 + + + + + Big Bin 1 + jr://images/big-1.bin + + + + + + + + + + + + + + + diff --git a/test/e2e/s3/test-forms/7.xml b/test/e2e/s3/test-forms/7.xml new file mode 100644 index 000000000..bdcc7c02f --- /dev/null +++ b/test/e2e/s3/test-forms/7.xml @@ -0,0 +1,29 @@ + + + + Blob Test 7 + + + + + Big Bin 1 + jr://images/big-1.bin + + + Big Bin 2 + jr://images/big-2.bin + + + + + + + + + + + + + + + diff --git a/test/e2e/s3/test-forms/8-attachments/tiny-1.bin b/test/e2e/s3/test-forms/8-attachments/tiny-1.bin new file mode 100644 index 000000000..d00491fd7 --- /dev/null +++ b/test/e2e/s3/test-forms/8-attachments/tiny-1.bin @@ -0,0 +1 @@ +1 diff --git a/test/e2e/s3/test-forms/8-attachments/tiny-2.bin b/test/e2e/s3/test-forms/8-attachments/tiny-2.bin new file mode 100644 index 000000000..0cfbf0888 --- /dev/null +++ b/test/e2e/s3/test-forms/8-attachments/tiny-2.bin @@ -0,0 +1 @@ +2 diff --git a/test/e2e/s3/test-forms/8.xml b/test/e2e/s3/test-forms/8.xml new file mode 100644 index 000000000..21346af2f --- /dev/null +++ b/test/e2e/s3/test-forms/8.xml @@ -0,0 +1,29 @@ + + + + Blob Test 8 + + + + + Tiny Bin 1 + jr://images/tiny-1.bin + + + Tiny Bin 2 + jr://images/tiny-2.bin + + + + + + + + + + + + + + + diff --git a/test/e2e/s3/test.js b/test/e2e/s3/test.js 
new file mode 100644
index 000000000..e79aa8024
--- /dev/null
+++ b/test/e2e/s3/test.js
@@ -0,0 +1,461 @@
+// Copyright 2024 ODK Central Developers
+// See the NOTICE file at the top-level directory of this distribution and at
+// https://github.com/getodk/central-backend/blob/master/NOTICE.
+// This file is part of ODK Central. It is subject to the license terms in
+// the LICENSE file found in the top-level directory of this distribution and at
+// https://www.apache.org/licenses/LICENSE-2.0. No part of ODK Central,
+// including this file, may be copied, modified, propagated, or distributed
+// except according to the terms contained in the LICENSE file.
+
+/* eslint-disable func-names, no-await-in-loop, space-before-function-paren */
+
+// Enough time to upload big-*.bin, and then run each test scenario.
+const TIMEOUT = 240_000; // ms
+
+const { exec, execSync } = require('node:child_process');
+const fs = require('node:fs');
+const { randomBytes } = require('node:crypto');
+const _ = require('lodash');
+const should = require('should');
+
+const SUITE_NAME = 'test/e2e/s3';
+const log = require('../util/logger')(SUITE_NAME);
+const { apiClient, mimetypeFor, Redirect } = require('../util/api');
+
+const serverUrl = 'http://localhost:8383';
+const userEmail = 'x@example.com';
+const userPassword = 'secret1234';
+
+describe('s3 support', () => {
+  // eslint-disable-next-line one-var, one-var-declaration-per-line
+  let api, expectedAttachments, actualAttachments, projectId, xmlFormId, attDir;
+  let _initial, _minioTerminated; // eslint-disable-line one-var, one-var-declaration-per-line
+
+  const minioTerminated = () => {
+    if(_minioTerminated) return;
+
+    // It should be possible to use docker more precisely here, e.g.
+    //   docker stop $(docker ps --quiet --filter "ancestor=minio/minio")
+    // However, the ancestor filter requires specifying the exact tag used.
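+    // (The grep over `docker ps` below trades that precision for tag-independence.)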
+    // See: https://docs.docker.com/reference/cli/docker/container/ls/#ancestor
+    execSync(`docker ps | awk '/minio/ { print $1 }' | xargs docker kill`);
+    _minioTerminated = true;
+  };
+
+  beforeEach(async function() {
+    this.timeout(5000);
+    _initial = await countAllByStatus();
+  });
+
+  afterEach(async function() {
+    if(_minioTerminated) return;
+
+    this.timeout(TIMEOUT);
+    await cli('reset-failed-to-pending');
+    await cli('upload-pending');
+  });
+
+  async function setup(testNumber, opts={ bigFiles: 1 }) {
+    attDir = `./test-forms/${testNumber}-attachments`;
+
+    // given
+    fs.mkdirSync(attDir, { recursive:true });
+    for(let idx=0; idx<opts.bigFiles; ++idx) bigFileExists(attDir, idx+1);
+    expectedAttachments = fs.readdirSync(attDir).filter(f => !f.startsWith('.')).sort();
+    api = await apiClient(SUITE_NAME, { serverUrl, userEmail, userPassword });
+    projectId = await createProject();
+    xmlFormId = await uploadFormWithAttachments(`./test-forms/${testNumber}.xml`, attDir);
+
+    // when
+    actualAttachments = await api.apiGet(`projects/${projectId}/forms/${xmlFormId}/attachments`);
+    should.deepEqual(actualAttachments.map(a => a.name).sort(), expectedAttachments);
+
+    // then
+    await assertNewStatuses({ pending: expectedAttachments.length });
+    // and
+    await assertNoneRedirect(actualAttachments);
+  }
+
+  it('should shift submission attachments to s3', async function() {
+    this.timeout(TIMEOUT);
+
+    // given
+    await setup(1);
+    await assertNewStatuses({ pending: 11 });
+
+    // when
+    await cli('upload-pending');
+
+    // then
+    await assertNewStatuses({ uploaded: 11 });
+    // and
+    await assertAllRedirect(actualAttachments);
+    await assertAllDownloadsMatchOriginal(actualAttachments);
+  });
+
+  it('should continue to serve blobs while upload-pending is running', async function() {
+    this.timeout(TIMEOUT);
+
+    // given
+    await setup(2);
+    await assertNewStatuses({ pending: 1 });
+
+    // when
+    const uploading = cli('upload-pending');
+    await untilUploadInProgress();
+
+    // then
+    const res = await api.apiRawGet(`projects/${projectId}/forms/${xmlFormId}/attachments/big-1.bin`);
+    await assertDownloadMatchesOriginal(res, 'big-1.bin');
+
+    // cleanup
+    await uploading;
+  });
+
+  it('should gracefully handle simultaneous calls to upload-pending', async function() {
+    this.timeout(TIMEOUT);
+
+    const uploadPending = async () => {
+      const start = performance.now();
+      const stdout = await cli('upload-pending');
+      const duration = performance.now() - start;
+      const parsedHashes = hashes(stdout);
+      return { hashes:parsedHashes, duration };
+    };
+
+    // given
+    await setup(3);
+    await assertNewStatuses({ pending: 11 });
+
+    // given
+    const uploading1 = uploadPending();
+    const uploading2 = uploadPending();
+
+    // when
+    const res1 = await uploading1;
+    const res2 = await uploading2;
+
+    // then
+    await assertNewStatuses({ uploaded: 11 });
+    // and
+    (res1.hashes.length + res2.hashes.length).should.equal(11);
+    // and
+    _.intersection(res1.hashes, res2.hashes).length.should.equal(0);
+    // and
+    Math.abs(res1.duration - res2.duration).should.be.above(1_000,
+      'UPLOAD DURATIONS TOO SIMILAR!
' + + 'There is no guarantee of which call to upload-pending got big-1.bin, ' + + `but similar durations for uploading1 (${humanDuration(res1)}) and ` + + `uploading2 (${humanDuration(res2)}) implies that one was blocking the other.`); + }); + + it('should gracefully handle upload-pending dying unexpectedly (SIGKILL)', async function() { + this.timeout(TIMEOUT); + + // given + await setup(4); + await assertNewStatuses({ pending: 1 }); + + // when + const uploading = forSacrifice(cli('upload-pending')); + await untilUploadInProgress(); + // and + await execSync(`kill -9 ${uploading.pid}`); + + // then + await expectRejectionFrom(uploading); + + // then + await assertNewStatuses({ pending: 1 }); // crashed process will roll back to pending + }); + + it('should gracefully handle upload-pending dying unexpectedly (SIGTERM)', async function() { + this.timeout(TIMEOUT); + + // given + await setup(5); + await assertNewStatuses({ pending: 1 }); + + // when + const uploading = forSacrifice(cli('upload-pending')); + await untilUploadInProgress(); + // and + await execSync(`kill ${uploading.pid}`); + + // then + await expectRejectionFrom(uploading); + + // then + await assertNewStatuses({ failed: 1 }); + }); + + it('should gracefully handle upload-pending dying unexpectedly (SIGINT)', async function() { + this.timeout(TIMEOUT); + + // given + await setup(6); + await assertNewStatuses({ pending: 1 }); + + // when + const uploading = forSacrifice(cli('upload-pending')); + await untilUploadInProgress(); + // and + await execSync(`kill -2 ${uploading.pid}`); + + // then + await expectRejectionFrom(uploading); + + // then + await assertNewStatuses({ failed: 1 }); + }); + + // N.B. THIS TEST KILLS THE MINIO SERVER, SO IT WILL NOT BE AVAILABLE TO SUBSEQUENT TESTS + it('should handle s3 connection failing', async function() { + this.timeout(TIMEOUT); + + // This also tests transaction boundaries are correct by adding two attachments, + // and making sure the first uploads successfully before killing the server. + + // given + await setup(7, { bigFiles: 2 }); + await assertNewStatuses({ pending: 2 }); + + // when + const uploading = forSacrifice(cli('upload-pending')); + while(true) { // eslint-disable-line no-constant-condition + const uploaded = await countByNewStatus('uploaded'); + if(uploaded === 0) { + await tick(); + continue; + } + if(uploaded === 1) break; + else should.fail('Too many blobs uploaded already!'); + } + await untilUploadInProgress(); + // and + minioTerminated(); + + // then + // N.B. These errors are as observed, and demonstrate that the root error is shared + // with the user. They are not something to try to retain if implementation changes. + await expectRejectionFrom(uploading, new RegExp( + 'Command failed: exec node lib/bin/s3 upload-pending\n' + + '(AggregateError\n.*)?Error: (connect ECONNREFUSED|read ECONNRESET|socket hang up|write EPIPE)', + 's', + )); + // and + await assertNewStatuses({ uploaded: 1, failed: 1 }); + }); + + it('should handle s3 unavailable', async function() { + this.timeout(TIMEOUT); + + // given + minioTerminated(); + // and + await setup(8, { bigFiles: 0 }); + await assertNewStatuses({ pending: 2 }); + + // when + await expectRejectionFrom(forSacrifice(cli('upload-pending')), /Error: connect ECONNREFUSED/); + + // then + await assertNewStatuses({ pending: 1, failed: 1 }); + }); + + // Guard against a Promise resolving when it was expected to reject. 
This has + // specifically been seen when upload-pending returns immediately, but later + // test code is expecting it to spend time uploading. In those cases, this + // wrapper allows for faster failure - without this short-circuit, the test + // would have to wait for the maximum timeout duration. + function forSacrifice(cliPromise) { + const wrapper = cliPromise.then(res => { + // TODO there may be a more idiomatic way to quickly fail the test from + // within mocha, but this achieves the desired result: + console.log(new Error(`FATAL ERROR: promise should have failed, but it resolved successfully with: <${res}>`)); + process.exit(1); + }); + wrapper.pid = cliPromise.pid; + return wrapper; + } + + async function untilUploadInProgress() { + while(await countByStatus('in_progress') !== 1) await tick(); + } + + // Yield control of the event loop to other functions which are waiting. + function tick() { + return new Promise(resolve => { setImmediate(resolve); }); + } + + async function assertNewStatuses(expected) { + const counts = await countAllByStatus(); + counts.should.deepEqual({ + pending: _initial.pending + (expected.pending ?? 0), + in_progress: _initial.in_progress + (expected.in_progress ?? 0), + uploaded: _initial.uploaded + (expected.uploaded ?? 0), + failed: _initial.failed + (expected.failed ?? 0), + }); + } + + async function countByNewStatus(status) { + const current = await countByStatus(status); + return current - _initial[status]; + } + + async function countByStatus(status) { + return Number(await cli(`count-blobs ${status}`)); + } + + async function countAllByStatus() { + // For easier debugging, define keys up-front. This makes print order more predictable. + const counts = { pending:null, in_progress:null, uploaded:null, failed:null }; + await Promise.all(Object.keys(counts).map(async status => { + counts[status] = await countByStatus(status); + })); + return counts; + } + + async function createProject() { + const project = await api.apiPostJson( + 'projects', + { name:`s3-test-${new Date().toISOString().replace(/\..*/, '')}` }, + ); + return project.id; + } + + async function uploadFormWithAttachments(xmlFilePath, attDir) { + const { xmlFormId } = await api.apiPostFile(`projects/${projectId}/forms`, xmlFilePath); + + await Promise.all( + expectedAttachments + .map(f => api.apiPostFile( + `projects/${projectId}/forms/${xmlFormId}/draft/attachments/${f}`, + `${attDir}/${f}`, + )) + ); + + return xmlFormId; + } + + async function assertNoneRedirect(attachments) { + for(const att of attachments) { + log.debug('assertNoneRedirect()', 'checking attachment:', att.name); + const res = await api.apiRawHead(`projects/${projectId}/forms/${xmlFormId}/attachments/${att.name}`); + should.ok(!(res instanceof Redirect), `${att.name} is a redirect!`); + should.equal(res.status, 200); + log.debug('assertNoneRedirect()', ' Looks OK.'); + } + } + + async function assertAllRedirect(attachments) { + for(const att of attachments) { + log.debug('assertAllRedirect()', 'checking attachment:', att.name); + const res = await api.apiRawHead(`projects/${projectId}/forms/${xmlFormId}/attachments/${att.name}`); + should.ok(res instanceof Redirect, `${att.name} is not a redirect - returned HTTP status: ${res.status}`); + log.debug('assertAllRedirect()', ' Looks OK.'); + } + } + + async function assertAllDownloadsMatchOriginal(attachments) { + for(const att of attachments) { + const res = await api.apiRawHead(`projects/${projectId}/forms/${xmlFormId}/attachments/${att.name}`); + if(!(res instanceof 
Redirect) || res.status !== 307) {
+        throw new Error(`Unexpected response for attachment ${JSON.stringify(att)}: ${res}`);
+      }
+
+      const res2 = await fetch(res.location);
+      await assertDownloadMatchesOriginal(res2, att.name);
+    }
+  }
+
+  async function assertDownloadMatchesOriginal(res, name) {
+    should.ok(res.ok);
+
+    const filepath = `${attDir}/${name}`;
+
+    const expectedContentType = mimetypeFor(name);
+    const actualContentType = res.headers.get('content-type');
+    should.equal(actualContentType, expectedContentType);
+
+    const resContent = new Uint8Array(await res.arrayBuffer());
+    const fileContent = fs.readFileSync(filepath);
+    should.equal(resContent.length, fileContent.length);
+
+    // Comparing streams might be faster; this is acceptably fast at the moment.
+    for(let i=0; i<fileContent.length; ++i) {
+      should.equal(resContent[i], fileContent[i]);
+    }
+  }
+});
+
+function cli(cmd) {
+  cmd = `exec node lib/bin/s3 ${cmd}`;
+
+  const env = { ...process.env, NODE_CONFIG_ENV:'s3-dev' };
+
+  let pid;
+  const promise = new Promise((resolve, reject) => {
+    const child = exec(cmd, { env, cwd:'../../..' }, (err, stdout) => {
+      if (err) return reject(err);
+
+      const res = stdout.toString().trim();
+      log.debug('cli()', 'returned:', res);
+      resolve(res);
+    });
+    pid = child.pid;
+  });
+
+  promise.pid = pid;
+
+  return promise;
+}
+
+function hashes(uploadOutput) {
+  const leader = 'Uploading blob:';
+  const hashes = uploadOutput.trim()
+    .split('\n')
+    .filter(line => line.startsWith(leader))
+    .map(line => JSON.parse(line.substr(leader.length)).sha);
+  return hashes;
+}
+
+async function expectRejectionFrom(promise, expectedMessage) {
+  try {
+    await promise;
+    should.fail('Uploading should have exited with non-zero status.');
+  } catch(err) {
+    if(err.message.startsWith('Command failed: exec node lib/bin/s3 ')) {
+      // expected
+      if(expectedMessage) err.message.should.match(expectedMessage);
+    } else {
+      throw err;
+    }
+  }
+}
+
+function humanDuration({ duration }) {
+  return (duration / 1000).toFixed(3) + 's';
+}
+
+function bigFileExists(attDir, idx) {
+  const bigFile = `${attDir}/big-${idx}.bin`;
+  if(fs.existsSync(bigFile)) {
+    log.debug(`${bigFile} exists; skipping generation`);
+  } else {
+    log.debug(`Generating ${bigFile}...`);
+    // Big bin files need to take long enough to upload that the tests can
+    // intervene with the upload in various ways.
Uploading a file of 100 + // million bytes was timed to take the following: + // + // * on github actions: 1.2-1.6s + // * locally: 300ms-7s + let remaining = 100_000_000; + const batchSize = 100_000; + do { + fs.appendFileSync(bigFile, randomBytes(batchSize)); + } while((remaining-=batchSize) > 0); // eslint-disable-line no-cond-assign + } +} diff --git a/test/e2e/soak/index.js b/test/e2e/soak/index.js index 802d31133..f94347efe 100644 --- a/test/e2e/soak/index.js +++ b/test/e2e/soak/index.js @@ -10,17 +10,11 @@ const fs = require('node:fs'); const _ = require('lodash'); const uuid = require('uuid').v4; -const { basename } = require('node:path'); const { program } = require('commander'); -const { Readable } = require('stream'); -const { finished } = require('stream/promises'); -const _log = (...args) => console.log(`[${new Date().toISOString()}]`, '[test/e2e/soak]', ...args); -const log = (...args) => true && _log('INFO', ...args); -log.debug = (...args) => false && _log('DEBUG', ...args); -log.info = log; -log.error = (...args) => true && _log('ERROR', ...args); -log.report = (...args) => true && _log('REPORT', ...args); +const SUITE_NAME = 'test/e2e/soak'; +const log = require('../util/logger')(SUITE_NAME); +const { apiClient } = require('../util/api'); program .option('-s, --server-url ', 'URL of ODK Central server', 'http://localhost:8989') @@ -39,7 +33,7 @@ log(`Connecting to ${serverUrl} with user ${userEmail}...`); const logPath = logDirectory || `./logs/${new Date().toISOString()}`; -let bearerToken; +let api; soakTest(); @@ -49,18 +43,16 @@ async function soakTest() { log.info('Creating log directory:', logPath, '...'); fs.mkdirSync(logPath, { recursive:true }); - log.info('Creating session...'); - const { token } = await apiPostJson('sessions', { email:userEmail, password:userPassword }, { Authorization:null }); - bearerToken = token; + api = await apiClient(SUITE_NAME, { serverUrl, userEmail, userPassword, logPath }); log.info('Creating project...'); - const { id:projectId } = await apiPostJson('projects', { name:`soak-test-${new Date().toISOString().replace(/\..*/, '')}` }); + const { id:projectId } = await api.apiPostJson('projects', { name:`soak-test-${new Date().toISOString().replace(/\..*/, '')}` }); log.info('Uploading form...'); - const { xmlFormId:formId } = await apiPostFile(`projects/${projectId}/forms`, formPath); + const { xmlFormId:formId } = await api.apiPostFile(`projects/${projectId}/forms`, formPath); log.info('Publishing form...'); - await apiPost(`projects/${projectId}/forms/${formId}/draft/publish`); + await api.apiPost(`projects/${projectId}/forms/${formId}/draft/publish`); log.info('Setup complete. 
Starting soak tests...'); @@ -195,82 +187,6 @@ function reportWarning(message) { log.report('--------------------------'); } -function apiPostFile(path, filePath) { - const mimeType = mimetypeFor(filePath); - const blob = fs.readFileSync(filePath); - return apiPost(path, blob, { 'Content-Type':mimeType }); -} - -function apiPostJson(path, body, headers) { - return apiPost(path, JSON.stringify(body), { 'Content-Type':'application/json', ...headers }); -} - -function apiGetAndDump(prefix, n, path, headers) { - return fetchToFile(prefix, n, 'GET', path, undefined, headers); -} - -function apiPostAndDump(prefix, n, path, body, headers) { - return fetchToFile(prefix, n, 'POST', path, body, headers); -} - -async function fetchToFile(filenamePrefix, n, method, apiPath, body, headers) { - const res = await apiFetch(method, apiPath, body, headers); - - const filePath = `${logPath}/${filenamePrefix}.${n.toString().padStart(9, '0')}.dump`; - const file = fs.createWriteStream(filePath); - - await finished(Readable.fromWeb(res.body).pipe(file)); - - return fs.statSync(filePath).size; -} - -async function apiPost(path, body, headers) { - const res = await apiFetch('POST', path, body, headers); - return res.json(); -} - -async function apiFetch(method, path, body, extraHeaders) { - const url = `${serverUrl}/v1/${path}`; - - const Authorization = bearerToken ? `Bearer ${bearerToken}` : `Basic ${base64(`${userEmail}:${userPassword}`)}`; - - const headers = { Authorization, ...extraHeaders }; - // unset null/undefined Authorization value to prevent fetch() from stringifying it: - if(headers.Authorization == null) delete headers.Authorization; - - const res = await fetch(url, { - method, - body, - headers, - }); - log.debug(method, res.url, '->', res.status); - if(!res.ok) throw new Error(`${res.status}: ${await res.text()}`); - return res; -} - -function base64(s) { - return Buffer.from(s).toString('base64'); -} - -function mimetypeFor(f) { - const extension = fileExtensionFrom(f); - log.debug('fileExtensionFrom()', f, '->', extension); - switch(extension) { - case 'xls' : return 'application/vnd.ms-excel'; - case 'xlsx': return 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'; - case 'xml' : return 'application/xml'; - default: throw new Error(`Unsure what mime type to use for: ${f}`); - } -} - -function fileExtensionFrom(f) { - try { - return basename(f).match(/\.([^.]*)$/)[1]; - } catch(err) { - throw new Error(`Could not get file extension from filename '${f}'!`); - } -} - function randomSubmission(n, projectId, formId) { const headers = { 'Content-Type': 'multipart/form-data; boundary=foo', @@ -288,7 +204,7 @@ ${submissionTemplate \r --foo--`; - return apiPostAndDump('randomSubmission', n, `projects/${projectId}/forms/${formId}/submissions`, body, headers); + return api.apiPostAndDump('randomSubmission', n, `projects/${projectId}/forms/${formId}/submissions`, body, headers); } function randInt() { @@ -296,7 +212,7 @@ function randInt() { } function exportZipWithDataAndMedia(n, projectId, formId) { - return apiGetAndDump('exportZipWithDataAndMedia', n, `projects/${projectId}/forms/${formId}/submissions.csv.zip?splitSelectMultiples=true&groupPaths=true&deletedFields=true`); + return api.apiGetToFile('exportZipWithDataAndMedia', n, `projects/${projectId}/forms/${formId}/submissions.csv.zip?splitSelectMultiples=true&groupPaths=true&deletedFields=true`); } function durationForHumans(ms) { diff --git a/test/e2e/util/api.js b/test/e2e/util/api.js new file mode 100644 index 
000000000..4a79e94ac --- /dev/null +++ b/test/e2e/util/api.js @@ -0,0 +1,138 @@ +const fs = require('node:fs'); +const { extname } = require('node:path'); +const { Readable } = require('stream'); +const { finished } = require('stream/promises'); + +async function apiClient(suiteName, { serverUrl, userEmail, userPassword, logPath }) { + const log = require('./logger')(suiteName); + + let bearerToken; + + log.info('Creating session...'); + const { token } = await apiPostJson('sessions', { email:userEmail, password:userPassword }, { Authorization:null }); + // eslint-disable-next-line prefer-const + bearerToken = token; + + return { + apiGet, + apiRawHead, + apiRawGet, + apiPostFile, + apiPostJson, + apiGetToFile, + apiPostAndDump, + apiPost, + apiFetch, + }; + + async function apiGet(path, headers) { + const res = await apiFetch('GET', path, undefined, headers); + return res.json(); + } + + function apiRawHead(path, headers) { + return apiFetch('HEAD', path, undefined, headers); + } + + function apiRawGet(path, headers) { + return apiFetch('GET', path, undefined, headers); + } + + function apiPostFile(path, filePath) { + const mimeType = mimetypeFor(filePath); + const blob = fs.readFileSync(filePath); + return apiPost(path, blob, { 'Content-Type':mimeType }); + } + + function apiPostJson(path, body, headers) { + return apiPost(path, JSON.stringify(body), { 'Content-Type':'application/json', ...headers }); + } + + function apiGetToFile(prefix, n, path, headers) { + return fetchToFile(prefix, n, 'GET', path, undefined, headers); + } + + function apiPostAndDump(prefix, n, path, body, headers) { + return fetchToFile(prefix, n, 'POST', path, body, headers); + } + + async function fetchToFile(filenamePrefix, n, method, apiPath, body, headers) { + const res = await apiFetch(method, apiPath, body, headers); + + const filePath = `${logPath}/${filenamePrefix}.${n.toString().padStart(9, '0')}.dump`; + const file = fs.createWriteStream(filePath); + + await finished(Readable.fromWeb(res.body).pipe(file)); + + return fs.statSync(filePath).size; + } + + async function apiPost(path, body, headers) { + const res = await apiFetch('POST', path, body, headers); + return res.json(); + } + + async function apiFetch(method, path, body, extraHeaders) { + const url = `${serverUrl}/v1/${path}`; + + const Authorization = bearerToken ? 
`Bearer ${bearerToken}` : `Basic ${base64(`${userEmail}:${userPassword}`)}`; + + const headers = { Authorization, ...extraHeaders }; + // unset null/undefined Authorization value to prevent fetch() from stringifying it: + if(headers.Authorization == null) delete headers.Authorization; + + const res = await fetch(url, { + method, + body, + headers, + redirect: 'manual', + }); + log.debug(method, res.url, '->', res.status); + + // eslint-disable-next-line no-use-before-define + if(isRedirected(res)) return new Redirect(res); + if(!res.ok) throw new Error(`${res.status}: ${await res.text()}`); + return res; + } +} + +function base64(s) { + return Buffer.from(s).toString('base64'); +} + +function mimetypeFor(f) { + // For more, see: https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types + const extension = extname(f); + switch(extension) { + case '.bin' : return 'application/octet-stream'; + case '.jpg' : return 'image/jpeg'; + case '.png' : return 'image/png'; + case '.svg' : return 'image/svg+xml'; + case '.txt' : return 'text/plain'; + case '.xls' : return 'application/vnd.ms-excel'; + case '.xlsx': return 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'; + case '.xml' : return 'application/xml'; + default: throw new Error(`Unsure what mime type to use for: ${f}`); + } +} + +function isRedirected(res) { + // res.redirected should be set on redirects, but may be missing in older node-fetch versions. + // See: https://www.npmjs.com/package/node-fetch#responseredirected + return res.redirected || (res.status >= 300 && res.status < 400); +} + +class Redirect { + constructor(res) { + this.props = Object.freeze({ + status: res.status, + location: res.headers.get('location'), + headers: Object.freeze([...res.headers]), + }); + } + get status() { return this.props.status; } + get location() { return this.props.location; } + get headers() { return this.props.headers; } +} + +module.exports = { apiClient, mimetypeFor, Redirect }; diff --git a/test/e2e/util/logger.js b/test/e2e/util/logger.js new file mode 100644 index 000000000..f137b87b3 --- /dev/null +++ b/test/e2e/util/logger.js @@ -0,0 +1,16 @@ +const LOG_LEVELS = ['DEBUG', 'INFO', 'WARN', 'ERROR', 'REPORT']; +const logLevel = process.env.LOG_LEVEL || 'INFO'; + +module.exports = suiteName => { + const _log = (level, ...args) => { + if (LOG_LEVELS.indexOf(logLevel) > LOG_LEVELS.indexOf(level)) return; + console.log(`[${new Date().toISOString()}]`, level, `[${suiteName}]`, ...args); + }; + const log = (...args) => _log('INFO', ...args); + log.debug = (...args) => _log('DEBUG', ...args); + log.info = log; + log.error = (...args) => _log('ERROR', ...args); + log.report = (...args) => _log('REPORT', ...args); + + return log; +}; diff --git a/test/integration/api/forms/forms.js b/test/integration/api/forms/forms.js index e6e2b952c..3bd56cc8b 100644 --- a/test/integration/api/forms/forms.js +++ b/test/integration/api/forms/forms.js @@ -738,6 +738,35 @@ describe('api: /projects/:id/forms (create, read, update)', () => { .expect(304))); })); + it('should return s3 redirect after xlsx file uploaded to s3', testService((service, { Blobs }) => { + global.s3.enableMock(); + const input = readFileSync(appRoot + '/test/data/simple.xlsx'); + return service.login('alice', (asAlice) => + asAlice.post('/v1/projects/1/forms?publish=true') + .send(input) + .set('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') + .expect(200) + .then(() => asAlice.get('/v1/projects/1/forms/simple2.xlsx') + .set('If-None-Match', 
'"30fdb0e9115ea7ca6702573f521814d1"') + .expect(304)) + .then(() => Blobs.s3UploadPending() + .then(() => { + global.s3.uploads.attempted.should.equal(1); + global.s3.uploads.successful.should.equal(1); + })) + .then(() => asAlice.get('/v1/projects/1/forms/simple2.xlsx') + .expect(307) + .then(({ headers }) => { + headers.location.should.equal('s3://mock/30fdb0e9115ea7ca6702573f521814d1/9ebd53024b8560ffd0b84763481ed24159ca600f/simple2.xlsx?contentType=application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'); + })) + .then(() => asAlice.get('/v1/projects/1/forms/simple2.xlsx') + .set('If-None-Match', '"30fdb0e9115ea7ca6702573f521814d1"') + .expect(307) + .then(({ headers }) => { + headers.location.should.equal('s3://mock/30fdb0e9115ea7ca6702573f521814d1/9ebd53024b8560ffd0b84763481ed24159ca600f/simple2.xlsx?contentType=application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'); + }))); + })); + it('should return the xlsx file originally provided for a draft', testService((service) => { const input = readFileSync(appRoot + '/test/data/simple.xlsx'); return service.login('alice', (asAlice) => @@ -763,6 +792,39 @@ describe('api: /projects/:id/forms (create, read, update)', () => { .expect(304))); })); + it('should return s3 redirect after xlsx file for draft uploaded to s3', testService((service, { Blobs }) => { + global.s3.enableMock(); + const input = readFileSync(appRoot + '/test/data/simple.xlsx'); + return service.login('alice', (asAlice) => + asAlice.post('/v1/projects/1/forms?publish=true') + .send(input) + .set('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') + .expect(200) + .then(() => asAlice.post('/v1/projects/1/forms/simple2/draft') + .send(input) + .set('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') + .expect(200)) + .then(() => asAlice.get('/v1/projects/1/forms/simple2/draft.xlsx') + .set('If-None-Match', '"30fdb0e9115ea7ca6702573f521814d1"') + .expect(304)) + .then(() => Blobs.s3UploadPending() + .then(() => { + global.s3.uploads.attempted.should.equal(1); + global.s3.uploads.successful.should.equal(1); + })) + .then(() => asAlice.get('/v1/projects/1/forms/simple2/draft.xlsx') + .expect(307) + .then(({ headers }) => { + headers.location.should.equal('s3://mock/30fdb0e9115ea7ca6702573f521814d1/9ebd53024b8560ffd0b84763481ed24159ca600f/simple2.xlsx?contentType=application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'); + })) + .then(() => asAlice.get('/v1/projects/1/forms/simple2/draft.xlsx') + .set('If-None-Match', '"30fdb0e9115ea7ca6702573f521814d1"') + .expect(307) + .then(({ headers }) => { + headers.location.should.equal('s3://mock/30fdb0e9115ea7ca6702573f521814d1/9ebd53024b8560ffd0b84763481ed24159ca600f/simple2.xlsx?contentType=application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'); + }))); + })); + it('should continue to offer the xlsx file after a copy-draft', testService((service) => { const input = readFileSync(appRoot + '/test/data/simple.xlsx'); return service.login('alice', (asAlice) => @@ -1300,6 +1362,34 @@ describe('api: /projects/:id/forms (create, read, update)', () => { text.should.equal('test,csv\n1,2'); }))))); + it('should return 307 if file has been moved to s3', testService((service, { Blobs }) => { + global.s3.enableMock(); + return service.login('alice', (asAlice) => + asAlice.post('/v1/projects/1/forms') + .send(testData.forms.withAttachments) + .set('Content-Type', 'application/xml') + .expect(200) + .then(() => 
asAlice.post('/v1/projects/1/forms/withAttachments/draft/attachments/goodone.csv') + .send('test,csv\n1,2') + .set('Content-Type', 'text/csv') + .expect(200)) + .then(() => asAlice.post('/v1/projects/1/forms/withAttachments/draft/publish') + .expect(200)) + .then(() => asAlice.get('/v1/projects/1/forms/withAttachments/attachments/goodone.csv') + .expect(200) + .then(({ headers, text }) => { + headers['content-disposition'].should.equal('attachment; filename="goodone.csv"; filename*=UTF-8\'\'goodone.csv'); + headers['content-type'].should.equal('text/csv; charset=utf-8'); + text.should.equal('test,csv\n1,2'); + })) + .then(() => Blobs.s3UploadPending()) + .then(() => asAlice.get('/v1/projects/1/forms/withAttachments/attachments/goodone.csv') + .expect(307) + .then(({ headers }) => { + headers.location.should.equal('s3://mock/2241de57bbec8144c8ad387e69b3a3ba/61baf7288ad1b373346a2fad6056d640746440be/goodone.csv?contentType=text/csv'); + }))); + })); + it('should return 304 content not changed if ETag matches', testService(async (service) => { const asAlice = await service.login('alice'); @@ -1363,6 +1453,87 @@ describe('api: /projects/:id/forms (create, read, update)', () => { etagV1.should.not.be.eql(etagV2); })); + + it('should return latest content if previous content was uploaded to s3', testService(async (service, { Blobs }) => { + global.s3.enableMock(); + + const asAlice = await service.login('alice'); + + await asAlice.post('/v1/projects/1/forms') + .send(testData.forms.withAttachments) + .set('Content-Type', 'application/xml') + .expect(200); + + await asAlice.post('/v1/projects/1/forms/withAttachments/draft/attachments/goodone.csv') + .send('test,csv\n1,2') + .set('Content-Type', 'text/csv') + .expect(200); + + const attachmentV1 = await asAlice.get('/v1/projects/1/forms/withAttachments/draft/attachments/goodone.csv') + .expect(200); + + const etagV1 = attachmentV1.get('ETag'); + + await asAlice.get('/v1/projects/1/forms/withAttachments/draft/attachments/goodone.csv') + .set('If-None-Match', etagV1) + .expect(304); + + await Blobs.s3UploadPending(); + + await asAlice.get('/v1/projects/1/forms/withAttachments/draft/attachments/goodone.csv') + .expect(307); + + await asAlice.post('/v1/projects/1/forms/withAttachments/draft/attachments/goodone.csv') + .send('test,csv\n1,2\n3,4') + .set('Content-Type', 'text/csv') + .expect(200); + + const attachmentV2 = await asAlice.get('/v1/projects/1/forms/withAttachments/draft/attachments/goodone.csv') + .set('If-None-Match', etagV1) + .expect(200); + + const etagV2 = attachmentV2.get('ETag'); + + await asAlice.get('/v1/projects/1/forms/withAttachments/draft/attachments/goodone.csv') + .set('If-None-Match', etagV2) + .expect(304); + + etagV1.should.not.be.eql(etagV2); + })); + + it('should ignore local etag if content was uploaded to s3', testService(async (service, { Blobs }) => { + global.s3.enableMock(); + + const asAlice = await service.login('alice'); + + await asAlice.post('/v1/projects/1/forms') + .send(testData.forms.withAttachments) + .set('Content-Type', 'application/xml') + .expect(200); + + await asAlice.post('/v1/projects/1/forms/withAttachments/draft/attachments/goodone.csv') + .send('test,csv\n1,2') + .set('Content-Type', 'text/csv') + .expect(200); + + const attachment = await asAlice.get('/v1/projects/1/forms/withAttachments/draft/attachments/goodone.csv') + .expect(200); + + const etag = attachment.get('ETag'); + + await asAlice.get('/v1/projects/1/forms/withAttachments/draft/attachments/goodone.csv') + .set('If-None-Match', etag) 
+ .expect(304); + + await Blobs.s3UploadPending(); + + await asAlice.get('/v1/projects/1/forms/withAttachments/draft/attachments/goodone.csv') + .expect(307); + + await asAlice.get('/v1/projects/1/forms/withAttachments/draft/attachments/goodone.csv') + .set('If-None-Match', etag) + .expect(307); + })); }); }); }); diff --git a/test/integration/api/submissions.js b/test/integration/api/submissions.js index e21a8fc15..07f1c06cc 100644 --- a/test/integration/api/submissions.js +++ b/test/integration/api/submissions.js @@ -485,6 +485,62 @@ describe('api: /submission', () => { .set('If-None-Match', '"25bdb03b7942881c279788575997efba"') .expect(304))))))); + it('should successfully save additionally POSTed attachment binary data with s3 enabled', testService((service, { Blobs }) => { + global.s3.enableMock(); + return service.login('alice', (asAlice) => + asAlice.post('/v1/projects/1/forms?publish=true') + .set('Content-Type', 'application/xml') + .send(testData.forms.binaryType) + .expect(200) + .then(() => asAlice.post('/v1/projects/1/submission') + .set('X-OpenRosa-Version', '1.0') + .attach('my_file1.mp4', Buffer.from('this is test file one'), { filename: 'my_file1.mp4' }) + .attach('xml_submission_file', Buffer.from(testData.instances.binaryType.both), { filename: 'data.xml' }) + .expect(201) + .then(() => asAlice.post('/v1/projects/1/submission') + .set('X-OpenRosa-Version', '1.0') + .attach('xml_submission_file', Buffer.from(testData.instances.binaryType.both), { filename: 'data.xml' }) + .attach('here_is_file2.jpg', Buffer.from('this is test file two'), { filename: 'here_is_file2.jpg' }) + .expect(201) + .then(() => asAlice.get('/v1/projects/1/forms/binaryType/submissions/both/attachments/here_is_file2.jpg') + .expect(200) + .then(({ headers, body }) => { + headers['content-type'].should.equal('image/jpeg'); + headers['content-disposition'].should.equal('attachment; filename="here_is_file2.jpg"; filename*=UTF-8\'\'here_is_file2.jpg'); + headers['etag'].should.equal('"25bdb03b7942881c279788575997efba"'); // eslint-disable-line dot-notation + body.toString('utf8').should.equal('this is test file two'); + })) + .then(() => asAlice.get('/v1/projects/1/forms/binaryType/submissions/both/attachments/here_is_file2.jpg') + .set('If-None-Match', '"25bdb03b7942881c279788575997efba"') + .expect(304)) + .then(() => Blobs.s3UploadPending() + .then(() => asAlice.get('/v1/projects/1/forms/binaryType/submissions/both/attachments/here_is_file2.jpg') + .expect(307) + .then(({ headers, body }) => { + // TODO content-type should not be present at all, but response.removeHeader() does not seem to have an effect + headers['content-type'].should.equal('text/plain; charset=utf-8'); + should(headers['content-disposition']).be.undefined(); + should(headers.etag).be.undefined(); + + const { location } = headers; + location.should.equal('s3://mock/25bdb03b7942881c279788575997efba/eba799d1dc156c0df70f7bad65f815928b98aa7d/here_is_file2.jpg?contentType=image/jpeg'); + body.should.deepEqual({}); // not sure why + })) + .then(() => asAlice.get('/v1/projects/1/forms/binaryType/submissions/both/attachments/here_is_file2.jpg') + .set('If-None-Match', '"25bdb03b7942881c279788575997efba"') + .expect(307) + .then(({ headers, body }) => { + // TODO content-type should not be present at all, but response.removeHeader() does not seem to have an effect + headers['content-type'].should.equal('text/plain; charset=utf-8'); + should(headers['content-disposition']).be.undefined(); + should(headers.etag).be.undefined(); + + const { 
location } = headers; + location.should.equal('s3://mock/25bdb03b7942881c279788575997efba/eba799d1dc156c0df70f7bad65f815928b98aa7d/here_is_file2.jpg?contentType=image/jpeg'); + body.should.deepEqual({}); // not sure why + })))))); + })); + it('should accept encrypted submissions, with attachments', testService((service) => service.login('alice', (asAlice) => asAlice.post('/v1/projects/1/forms?publish=true') @@ -1571,6 +1627,67 @@ describe('api: /forms/:id/submissions', () => { csv.length.should.equal(3); // newline at end })))))); + it('should return a zipfile with the relevant attachments if s3 is enabled', testService((service, { Blobs }) => { + global.s3.enableMock(); + return service.login('alice', (asAlice) => + asAlice.post('/v1/projects/1/forms?publish=true') + .set('Content-Type', 'application/xml') + .send(testData.forms.binaryType) + .expect(200) + .then(() => asAlice.post('/v1/projects/1/submission') + .set('X-OpenRosa-Version', '1.0') + .attach('xml_submission_file', Buffer.from(testData.instances.binaryType.both), { filename: 'data.xml' }) + .attach('my_file1.mp4', Buffer.from('this is test file one'), { filename: 'my_file1.mp4' }) + .expect(201) + .then(() => asAlice.post('/v1/projects/1/submission') + .set('X-OpenRosa-Version', '1.0') + .attach('xml_submission_file', Buffer.from(testData.instances.binaryType.both), { filename: 'data.xml' }) + .attach('here_is_file2.jpg', Buffer.from('this is test file two'), { filename: 'here_is_file2.jpg' }) + .expect(201)) + .then(() => Blobs.s3UploadPending()) + .then(() => pZipStreamToFiles(asAlice.get('/v1/projects/1/forms/binaryType/submissions.csv.zip')) + .then((result) => { + result.filenames.should.containDeep([ + 'binaryType.csv', + 'media/my_file1.mp4', + 'media/here_is_file2.jpg' + ]); + + result['media/my_file1.mp4'].should.equal('this is test file one'); + result['media/here_is_file2.jpg'].should.equal('this is test file two'); + + // we also check the csv for the sake of verifying the attachment counts. 
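+ // (one header row and a single data row for instance 'both', plus a trailing newline; + // the '2,2' near the end of the data row is AttachmentsPresent,AttachmentsExpected)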
+ const csv = result['binaryType.csv'].split('\n'); + csv[0].should.equal('SubmissionDate,meta-instanceID,file1,file2,KEY,SubmitterID,SubmitterName,AttachmentsPresent,AttachmentsExpected,Status,ReviewState,DeviceID,Edits,FormVersion'); + csv[1].should.endWith(',both,my_file1.mp4,here_is_file2.jpg,both,5,Alice,2,2,,,,0,'); + csv.length.should.equal(3); // newline at end + })))); + })); + + it('should handle s3 errors when trying to construct zipfile', testService((service, { Blobs }) => { + global.s3.enableMock(); + return service.login('alice', (asAlice) => + asAlice.post('/v1/projects/1/forms?publish=true') + .set('Content-Type', 'application/xml') + .send(testData.forms.binaryType) + .expect(200) + .then(() => asAlice.post('/v1/projects/1/submission') + .set('X-OpenRosa-Version', '1.0') + .attach('xml_submission_file', Buffer.from(testData.instances.binaryType.both), { filename: 'data.xml' }) + .attach('my_file1.mp4', Buffer.from('this is test file one'), { filename: 'my_file1.mp4' }) + .expect(201) + .then(() => asAlice.post('/v1/projects/1/submission') + .set('X-OpenRosa-Version', '1.0') + .attach('xml_submission_file', Buffer.from(testData.instances.binaryType.both), { filename: 'data.xml' }) + .attach('here_is_file2.jpg', Buffer.from('this is test file two'), { filename: 'here_is_file2.jpg' }) + .expect(201)) + .then(() => Blobs.s3UploadPending()) + .then(() => { global.s3.error.onDownload = true; }) + .then(() => asAlice.get('/v1/projects/1/forms/binaryType/submissions.csv.zip') + .then(() => should.fail('Should have thrown an error.')) + .catch(err => err.message.should.equal('aborted'))))); + })); + it('should filter attachments by the query', testService((service) => service.login('alice', (asAlice) => service.login('bob', (asBob) => @@ -1920,6 +2037,76 @@ two,h,/data/h,2000-01-01T00:06,2000-01-01T00:07,-5,-6,,ee,ff `); }))))); + it('should return adhoc-processed consolidated client audit log attachments if uploaded to s3', testService((service, { Blobs }) => { + global.s3.enableMock(); + return service.login('alice', (asAlice) => + asAlice.post('/v1/projects/1/forms?publish=true') + .set('Content-Type', 'application/xml') + .send(testData.forms.clientAudits) + .expect(200) + .then(() => asAlice.post('/v1/projects/1/submission') + .set('X-OpenRosa-Version', '1.0') + .attach('audit.csv', createReadStream(appRoot + '/test/data/audit.csv'), { filename: 'audit.csv' }) + .attach('xml_submission_file', Buffer.from(testData.instances.clientAudits.one), { filename: 'data.xml' }) + .expect(201)) + .then(() => asAlice.post('/v1/projects/1/submission') + .set('X-OpenRosa-Version', '1.0') + .attach('log.csv', createReadStream(appRoot + '/test/data/audit2.csv'), { filename: 'log.csv' }) + .attach('xml_submission_file', Buffer.from(testData.instances.clientAudits.two), { filename: 'data.xml' }) + .expect(201)) + .then(() => Blobs.s3UploadPending()) + .then(() => { + global.s3.uploads.attempted.should.equal(2); + global.s3.uploads.successful.should.equal(2); + }) + .then(() => pZipStreamToFiles(asAlice.get('/v1/projects/1/forms/audits/submissions.csv.zip')) + .then((result) => { + result.filenames.should.eql([ + 'audits.csv', + 'audits - audit.csv' + ]); + + result['audits - audit.csv'].should.equal(`instance ID,event,node,start,end,latitude,longitude,accuracy,old-value,new-value +one,a,/data/a,2000-01-01T00:01,2000-01-01T00:02,1,2,3,aa,bb +one,b,/data/b,2000-01-01T00:02,2000-01-01T00:03,4,5,6,cc,dd +one,c,/data/c,2000-01-01T00:03,2000-01-01T00:04,7,8,9,ee,ff 
+one,d,/data/d,2000-01-01T00:10,,10,11,12,gg, +one,e,/data/e,2000-01-01T00:11,,,,,hh,ii +two,f,/data/f,2000-01-01T00:04,2000-01-01T00:05,-1,-2,,aa,bb +two,g,/data/g,2000-01-01T00:05,2000-01-01T00:06,-3,-4,,cc,dd +two,h,/data/h,2000-01-01T00:06,2000-01-01T00:07,-5,-6,,ee,ff +`); + })) + .then(() => { + global.s3.downloads.attempted.should.equal(2); + global.s3.downloads.successful.should.equal(2); + })); + })); + + it('should gracefully handle error if client audit s3 download fails', testService((service, { Blobs }) => { + global.s3.enableMock(); + return service.login('alice', (asAlice) => + asAlice.post('/v1/projects/1/forms?publish=true') + .set('Content-Type', 'application/xml') + .send(testData.forms.clientAudits) + .expect(200) + .then(() => asAlice.post('/v1/projects/1/submission') + .set('X-OpenRosa-Version', '1.0') + .attach('audit.csv', createReadStream(appRoot + '/test/data/audit.csv'), { filename: 'audit.csv' }) + .attach('xml_submission_file', Buffer.from(testData.instances.clientAudits.one), { filename: 'data.xml' }) + .expect(201)) + .then(() => asAlice.post('/v1/projects/1/submission') + .set('X-OpenRosa-Version', '1.0') + .attach('log.csv', createReadStream(appRoot + '/test/data/audit2.csv'), { filename: 'log.csv' }) + .attach('xml_submission_file', Buffer.from(testData.instances.clientAudits.two), { filename: 'data.xml' }) + .expect(201)) + .then(() => Blobs.s3UploadPending()) + .then(() => { global.s3.error.onDownload = true; }) + .then(() => asAlice.get('/v1/projects/1/forms/audits/submissions.csv.zip') + .then(() => should.fail('should have thrown')) + .catch(err => err.message.should.equal('aborted')))); + })); + it('should return consolidated client audit log filtered by user', testService((service) => service.login('alice', (asAlice) => service.login('bob', (asBob) => diff --git a/test/integration/other/analytics-queries.js b/test/integration/other/analytics-queries.js index ad1e3c830..38906e552 100644 --- a/test/integration/other/analytics-queries.js +++ b/test/integration/other/analytics-queries.js @@ -357,6 +357,37 @@ describe('analytics task queries', function () { await container.Analytics.countClientAuditRows() .then((res) => res.should.equal(8)); })); + + it('should count rows from client audit attachments uploaded to s3', testService(async (service, container) => { + global.s3.enableMock(); + + const asAlice = await service.login('alice'); + await asAlice.post('/v1/projects/1/forms?publish=true') + .set('Content-Type', 'application/xml') + .send(testData.forms.clientAudits) + .expect(200); + + await asAlice.post('/v1/projects/1/submission') + .set('X-OpenRosa-Version', '1.0') + .attach('audit.csv', createReadStream(appRoot + '/test/data/audit.csv'), { filename: 'audit.csv' }) + .attach('xml_submission_file', Buffer.from(testData.instances.clientAudits.one), { filename: 'data.xml' }) + .expect(201); + + await asAlice.post('/v1/projects/1/submission') + .set('X-OpenRosa-Version', '1.0') + .attach('log.csv', createReadStream(appRoot + '/test/data/audit2.csv'), { filename: 'log.csv' }) + .attach('xml_submission_file', Buffer.from(testData.instances.clientAudits.two), { filename: 'data.xml' }) + .expect(201); + + await container.Analytics.countClientAuditRows() + .then((res) => res.should.equal(0)); + + await container.Blobs.s3UploadPending(); + await exhaust(container); + + await container.Analytics.countClientAuditRows() + .then((res) => res.should.equal(8)); + })); }); it('should count failed audits', testService(async (service, container) => { diff --git 
a/test/integration/other/encryption.js b/test/integration/other/encryption.js index 2835565d6..138bda6a5 100644 --- a/test/integration/other/encryption.js +++ b/test/integration/other/encryption.js @@ -11,6 +11,7 @@ const { Form, Key, Submission, Actor } = require(appRoot + '/lib/model/frames'); const { mapSequential } = require(appRoot + '/test/util/util'); const { exhaust } = require(appRoot + '/lib/worker/worker'); const authenticateUser = require('../../util/authenticate-user'); +const should = require('should'); describe('managed encryption', () => { describe('lock management', () => { @@ -224,6 +225,37 @@ describe('managed encryption', () => { result['simple.csv'].should.be.an.EncryptedSimpleCsv(); }))))); + it('should decrypt to CSV successfully if submissions uploaded to S3', testService((service, { Blobs }) => { + global.s3.enableMock(); + return service.login('alice', (asAlice) => + asAlice.post('/v1/projects/1/key') + .send({ passphrase: 'supersecret', hint: 'it is a secret' }) + .expect(200) + .then(() => asAlice.get('/v1/projects/1/forms/simple.xml') + .expect(200) + .then(({ text }) => sendEncrypted(asAlice, extractVersion(text), extractPubkey(text))) + .then((send) => send(testData.instances.simple.one) + .then(() => send(testData.instances.simple.two)) + .then(() => send(testData.instances.simple.three)))) + .then(() => Blobs.s3UploadPending()) + .then(() => { + global.s3.uploads.attempted.should.equal(3); + global.s3.uploads.successful.should.equal(3); + }) + .then(() => asAlice.get('/v1/projects/1/forms/simple/submissions/keys') + .expect(200) + .then(({ body }) => body[0].id)) + .then((keyId) => pZipStreamToFiles(asAlice.get(`/v1/projects/1/forms/simple/submissions.csv.zip?${keyId}=supersecret`)) + .then((result) => { + result.filenames.should.eql([ 'simple.csv' ]); + result['simple.csv'].should.be.an.EncryptedSimpleCsv(); + })) + .then(() => { + global.s3.downloads.attempted.should.equal(3); + global.s3.downloads.successful.should.equal(3); + })); + })); + it('should decrypt to CSV successfully as a direct root table', testService((service) => service.login('alice', (asAlice) => asAlice.post('/v1/projects/1/key') @@ -335,6 +367,53 @@ describe('managed encryption', () => { result['media/charlie'].should.equal('file charlie is right here'); }))))); + it('should decrypt attached files successfully when s3 enabled', testService((service, { Blobs }) => { + global.s3.enableMock(); + return service.login('alice', (asAlice) => + asAlice.post('/v1/projects/1/key') + .send({ passphrase: 'supersecret', hint: 'it is a secret' }) + .expect(200) + .then(() => asAlice.get('/v1/projects/1/forms/simple.xml') + .expect(200) + .then(({ text }) => sendEncrypted(asAlice, extractVersion(text), extractPubkey(text))) + .then((send) => send(testData.instances.simple.one, { alpha: 'hello this is file alpha', beta: 'and beta' }) + .then(() => send(testData.instances.simple.two, { charlie: 'file charlie is right here' })))) + .then(() => Blobs.s3UploadPending()) + .then(() => asAlice.get('/v1/projects/1/forms/simple/submissions/keys') + .expect(200) + .then(({ body }) => body[0].id)) + .then((keyId) => pZipStreamToFiles(asAlice.get(`/v1/projects/1/forms/simple/submissions.csv.zip?${keyId}=supersecret`)) + .then((result) => { + result.filenames.length.should.equal(4); + result.filenames.should.containDeep([ 'simple.csv', 'media/alpha', 'media/beta', 'media/charlie' ]); + + result['media/alpha'].should.equal('hello this is file alpha'); + result['media/beta'].should.equal('and beta'); + 
result['media/charlie'].should.equal('file charlie is right here'); + }))); + })); + + it('should handle s3 issues gracefully', testService((service, { Blobs }) => { + global.s3.enableMock(); + return service.login('alice', (asAlice) => + asAlice.post('/v1/projects/1/key') + .send({ passphrase: 'supersecret', hint: 'it is a secret' }) + .expect(200) + .then(() => asAlice.get('/v1/projects/1/forms/simple.xml') + .expect(200) + .then(({ text }) => sendEncrypted(asAlice, extractVersion(text), extractPubkey(text))) + .then((send) => send(testData.instances.simple.one, { alpha: 'hello this is file alpha', beta: 'and beta' }) + .then(() => send(testData.instances.simple.two, { charlie: 'file charlie is right here' })))) + .then(() => Blobs.s3UploadPending()) + .then(() => { global.s3.error.onDownload = true; }) + .then(() => asAlice.get('/v1/projects/1/forms/simple/submissions/keys') + .expect(200) + .then(({ body }) => body[0].id)) + .then((keyId) => asAlice.get(`/v1/projects/1/forms/simple/submissions.csv.zip?${keyId}=supersecret`) + .then(() => should.fail('should have thrown')) + .catch((err) => err.message.should.equal('aborted')))); + })); + it('should strip .enc suffix from decrypted attachments', testService((service) => service.login('alice', (asAlice) => asAlice.post('/v1/projects/1/key') @@ -412,6 +491,49 @@ two,h,/data/h,2000-01-01T00:06,2000-01-01T00:07,-5,-6,,ee,ff `); }))))); + it('should decrypt client audit log attachments from s3', testService((service, { Blobs }) => { + global.s3.enableMock(); + return service.login('alice', (asAlice) => + asAlice.post('/v1/projects/1/key') + .send({ passphrase: 'supersecret', hint: 'it is a secret' }) + .expect(200) + .then(() => asAlice.post('/v1/projects/1/forms?publish=true') + .set('Content-Type', 'application/xml') + .send(testData.forms.clientAudits) + .expect(200)) + .then(() => asAlice.get('/v1/projects/1/forms/audits.xml') + .expect(200) + .then(({ text }) => sendEncrypted(asAlice, extractVersion(text), extractPubkey(text))) + .then((send) => send(testData.instances.clientAudits.one, { 'audit.csv.enc': readFileSync(appRoot + '/test/data/audit.csv') }) + .then(() => send(testData.instances.clientAudits.two, { 'audit.csv.enc': readFileSync(appRoot + '/test/data/audit2.csv') })))) + .then(() => Blobs.s3UploadPending()) + .then(() => { + global.s3.uploads.attempted.should.equal(4); + global.s3.uploads.successful.should.equal(4); + }) + .then(() => asAlice.get('/v1/projects/1/forms/audits/submissions/keys') + .expect(200) + .then(({ body }) => body[0].id)) + .then((keyId) => pZipStreamToFiles(asAlice.get(`/v1/projects/1/forms/audits/submissions.csv.zip?${keyId}=supersecret`)) + .then((result) => { + result.filenames.should.eql([ + 'audits.csv', + 'audits - audit.csv' + ]); + + result['audits - audit.csv'].should.equal(`instance ID,event,node,start,end,latitude,longitude,accuracy,old-value,new-value +one,a,/data/a,2000-01-01T00:01,2000-01-01T00:02,1,2,3,aa,bb +one,b,/data/b,2000-01-01T00:02,2000-01-01T00:03,4,5,6,cc,dd +one,c,/data/c,2000-01-01T00:03,2000-01-01T00:04,7,8,9,ee,ff +one,d,/data/d,2000-01-01T00:10,,10,11,12,gg, +one,e,/data/e,2000-01-01T00:11,,,,,hh,ii +two,f,/data/f,2000-01-01T00:04,2000-01-01T00:05,-1,-2,,aa,bb +two,g,/data/g,2000-01-01T00:05,2000-01-01T00:06,-3,-4,,cc,dd +two,h,/data/h,2000-01-01T00:06,2000-01-01T00:07,-5,-6,,ee,ff +`); + }))); + })); + it('should handle mixed [plaintext/encrypted] attachments (not decrypting)', testService((service) => service.login('alice', (asAlice) => 
asAlice.post('/v1/projects/1/forms?publish=true') diff --git a/test/integration/other/form-purging.js b/test/integration/other/form-purging.js index deb956899..db498225b 100644 --- a/test/integration/other/form-purging.js +++ b/test/integration/other/form-purging.js @@ -150,6 +150,63 @@ describe('query module form purge', () => { ])) .then((counts) => counts.should.eql([ 0, 0, 0, 0 ])))))); + it('should purge attachments and blobs of a form, s3-enabled, blobs not uploaded', testService((service, container) => { + global.s3.enableMock(); + return service.login('alice', (asAlice) => + asAlice.post('/v1/projects/1/forms') + .send(testData.forms.withAttachments) + .set('Content-Type', 'application/xml') + .expect(200) + .then(() => asAlice.post('/v1/projects/1/forms/withAttachments/draft/attachments/goodone.csv') + .send('this is goodone.csv') + .expect(200)) + .then(() => asAlice.post('/v1/projects/1/forms/withAttachments/draft/publish') + .expect(200)) + .then(() => container.Forms.getByProjectAndXmlFormId(1, 'withAttachments').then((o) => o.get())) + .then((ghostForm) => asAlice.delete('/v1/projects/1/forms/withAttachments') + .expect(200) + .then(() => container.Forms.purge(true)) + .then(() => Promise.all([ + container.oneFirst(sql`select count(*) from forms where id = ${ghostForm.id}`), + container.oneFirst(sql`select count(*) from form_defs where "formId" = ${ghostForm.id}`), + container.oneFirst(sql`select count(*) from form_attachments where "formId" = ${ghostForm.id}`), + container.oneFirst(sql`select count(*) from blobs`) + ])) + .then((counts) => counts.should.eql([ 0, 0, 0, 0 ])) + .then(() => global.s3.uploads.attempted.should.equal(0)) + .then(() => global.s3.uploads.successful.should.equal(0)) + .then(() => global.s3.uploads.deleted.should.equal(0)))); + })); + + it('should purge attachments and blobs of a form, s3-enabled, blobs uploaded', testService((service, container) => { + global.s3.enableMock(); + return service.login('alice', (asAlice) => + asAlice.post('/v1/projects/1/forms') + .send(testData.forms.withAttachments) + .set('Content-Type', 'application/xml') + .expect(200) + .then(() => asAlice.post('/v1/projects/1/forms/withAttachments/draft/attachments/goodone.csv') + .send('this is goodone.csv') + .expect(200)) + .then(() => asAlice.post('/v1/projects/1/forms/withAttachments/draft/publish') + .expect(200)) + .then(() => container.Forms.getByProjectAndXmlFormId(1, 'withAttachments').then((o) => o.get())) + .then((ghostForm) => asAlice.delete('/v1/projects/1/forms/withAttachments') + .expect(200) + .then(() => container.Blobs.s3UploadPending()) + .then(() => container.Forms.purge(true)) + .then(() => Promise.all([ + container.oneFirst(sql`select count(*) from forms where id = ${ghostForm.id}`), + container.oneFirst(sql`select count(*) from form_defs where "formId" = ${ghostForm.id}`), + container.oneFirst(sql`select count(*) from form_attachments where "formId" = ${ghostForm.id}`), + container.oneFirst(sql`select count(*) from blobs`) + ])) + .then((counts) => counts.should.eql([ 0, 0, 0, 0 ])) + .then(() => global.s3.uploads.attempted.should.equal(1)) + .then(() => global.s3.uploads.successful.should.equal(1)) + .then(() => global.s3.uploads.deleted.should.equal(1)))); + })); + it('should purge the form fields of a form', testService((service, container) => service.login('alice', (asAlice) => asAlice.post('/v1/projects/1/forms/simple/draft') diff --git a/test/integration/setup.js b/test/integration/setup.js index 1d19e3678..08718d3ff 100644 --- 
a/test/integration/setup.js +++ b/test/integration/setup.js @@ -41,6 +41,9 @@ before(resetEnketo); after(resetEnketo); afterEach(resetEnketo); +// set up our s3 mock +const { s3 } = require(appRoot + '/test/util/s3'); + // set up odk analytics mock. const { ODKAnalytics } = require(appRoot + '/test/util/odk-analytics-mock'); const odkAnalytics = new ODKAnalytics(); @@ -81,7 +84,7 @@ const initialize = async () => { await migrator.destroy(); } - return withDefaults({ db, context, enketo, env }).transacting(populate); + return withDefaults({ db, context, enketo, env, s3 }).transacting(populate); }; // eslint-disable-next-line func-names, space-before-function-paren @@ -94,6 +97,7 @@ let mustReinitAfter; beforeEach(() => { // eslint-disable-next-line keyword-spacing if(mustReinitAfter) throw new Error(`Failed to reinitalize after previous test: '${mustReinitAfter}'. You may need to increase your mocha timeout.`); + s3.resetMock(); }); // eslint-disable-next-line func-names, space-before-function-paren afterEach(async function() { @@ -137,7 +141,7 @@ const augment = (service) => { // FINAL TEST WRAPPERS -const baseContainer = withDefaults({ db, mail, env, xlsform, enketo, Sentry, odkAnalytics, context }); +const baseContainer = withDefaults({ db, mail, env, xlsform, enketo, Sentry, odkAnalytics, context, s3 }); // called to get a service context per request. we do some work to hijack the // transaction system so that each test runs in a single transaction that then @@ -188,6 +192,14 @@ const testTask = (test) => () => new Promise((resolve, reject) => { });//.catch(Promise.resolve.bind(Promise)); }); +// See testServiceFullTrx() +// eslint-disable-next-line space-before-function-paren, func-names +const testTaskFullTrx = (test) => function() { + mustReinitAfter = this.test.fullTitle(); + task._container = baseContainer.with({ task: true }); + return test(task._container); +}; + // eslint-disable-next-line no-shadow const withClosedForm = (f) => async (service) => { const asAlice = await service.login('alice'); @@ -213,4 +225,4 @@ const withClosedForm = (f) => async (service) => { return f(service); }; -module.exports = { testService, testServiceFullTrx, testContainer, testContainerFullTrx, testTask, withClosedForm }; +module.exports = { testService, testServiceFullTrx, testContainer, testContainerFullTrx, testTask, testTaskFullTrx, withClosedForm }; diff --git a/test/integration/task/s3.js b/test/integration/task/s3.js new file mode 100644 index 000000000..7fcc0361e --- /dev/null +++ b/test/integration/task/s3.js @@ -0,0 +1,201 @@ +const crypto = require('crypto'); +const should = require('should'); +const appRoot = require('app-root-path'); +const { sql } = require('slonik'); +const { testTask, testTaskFullTrx } = require('../setup'); +const { getCount, setFailedToPending, uploadPending } = require(appRoot + '/lib/task/s3'); +const { Blob } = require(appRoot + '/lib/model/frames'); + +// eslint-disable-next-line camelcase +const aBlobExistsWith = async (container, { status }) => { + const blob = await Blob.fromBuffer(crypto.randomBytes(100)); + await container.run(sql` + INSERT INTO BLOBS (sha, md5, content, "contentType", s3_status) + VALUES (${blob.sha}, ${blob.md5}, ${sql.binary(blob.content)}, ${blob.contentType || null}, ${status}) + `); +}; + +const assertThrowsAsync = async (fn, expected) => { + try { + await fn(); + should.fail('should have thrown'); + } catch (err) { + if (err.message === 'should have thrown') throw err; + if (expected) err.message.should.equal(expected); + } +}; + 
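+// These tasks manage the blobs.s3_status lifecycle exercised below: getCount() +// reports how many blobs are in a given status, uploadPending() pushes blobs +// still marked 'pending' to s3, and setFailedToPending() requeues 'failed' +// blobs for another upload attempt. Each task throws if S3 blob support is +// not enabled.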
+describe('task: s3', () => { + describe('s3 disabled', () => { + it('uploadPending() should fail', async () => { + await assertThrowsAsync(() => uploadPending(), 'S3 blob support is not enabled.'); + }); + + it('setFailedToPending() should fail', async () => { + await assertThrowsAsync(() => setFailedToPending(), 'S3 blob support is not enabled.'); + }); + + it('getCount() should fail', async () => { + await assertThrowsAsync(() => getCount(), 'S3 blob support is not enabled.'); + }); + }); + + describe('s3 enabled', () => { + const assertUploadCount = (expected) => { + global.s3.uploads.successful.should.equal(expected); + }; + + beforeEach(() => { + global.s3.enableMock(); + }); + + describe('getCount()', () => { + [ + ['pending', 1], + ['uploaded', 2], + ['failed', 3], + ].forEach(([ status, expectedCount ]) => { + it(`should return count of ${status} blobs`, testTask(async (container) => { + // given + await aBlobExistsWith(container, { status: 'pending' }); + + await aBlobExistsWith(container, { status: 'uploaded' }); + await aBlobExistsWith(container, { status: 'uploaded' }); + + await aBlobExistsWith(container, { status: 'failed' }); + await aBlobExistsWith(container, { status: 'failed' }); + await aBlobExistsWith(container, { status: 'failed' }); + + // when + const count = await getCount(status); + + // then + count.should.equal(expectedCount); + })); + }); + + it('should reject requests for unknown statuses', testTask(async () => { + await assertThrowsAsync(() => getCount('nonsense'), 'invalid input value for enum s3_upload_status: "nonsense"'); + })); + }); + + describe('setFailedToPending()', () => { + it('should change all failed messages to pending', testTask(async (container) => { + // given + await aBlobExistsWith(container, { status: 'pending' }); + await aBlobExistsWith(container, { status: 'uploaded' }); + await aBlobExistsWith(container, { status: 'uploaded' }); + await aBlobExistsWith(container, { status: 'failed' }); + await aBlobExistsWith(container, { status: 'failed' }); + await aBlobExistsWith(container, { status: 'failed' }); + + // expect + (await getCount('pending')).should.equal(1); + (await getCount('failed')).should.equal(3); + + // when + await setFailedToPending(); + + // then + (await getCount('pending')).should.equal(4); + (await getCount('failed')).should.equal(0); + })); + }); + + describe('uploadPending()', () => { + it('should not do anything if nothing to upload', testTask(async () => { + // when + await uploadPending(); + + // then + assertUploadCount(0); + })); + + it('should upload pending blobs, and ignore others', testTask(async (container) => { + // given + await aBlobExistsWith(container, { status: 'pending' }); + await aBlobExistsWith(container, { status: 'uploaded' }); + await aBlobExistsWith(container, { status: 'failed' }); + await aBlobExistsWith(container, { status: 'pending' }); + await aBlobExistsWith(container, { status: 'uploaded' }); + await aBlobExistsWith(container, { status: 'failed' }); + + // when + await uploadPending(); + + // then + assertUploadCount(2); + })); + + it('should return error if uploading fails', testTask(async (container) => { + // given + global.s3.error.onUpload = true; + await aBlobExistsWith(container, { status: 'pending' }); + + // when + await assertThrowsAsync(() => uploadPending(), 'Mock error when trying to upload blobs.'); + + // and + assertUploadCount(0); + })); + + it('should not allow failure to affect previous or future uploads', testTask(async (container) => { + // given + 
global.s3.error.onUpload = 3; + await aBlobExistsWith(container, { status: 'pending' }); + await aBlobExistsWith(container, { status: 'pending' }); + await aBlobExistsWith(container, { status: 'pending' }); + + // expect + await assertThrowsAsync(() => uploadPending(), 'Mock error when trying to upload #3'); + + // and + assertUploadCount(2); + + + // given + await aBlobExistsWith(container, { status: 'pending' }); + + // when + await uploadPending(); + + // then + assertUploadCount(3); + })); + + it('should not attempt to upload an in-progress blob', testTaskFullTrx(async (container) => { + // given + const original = global.s3.uploadFromBlob; + let resume; + global.s3.uploadFromBlob = async (...args) => { + await new Promise(resolve => { + resume = resolve; + }); + original.apply(global.s3, args); + }; + await aBlobExistsWith(container, { status: 'pending' }); + + // when + const first = uploadPending(); + await new Promise(resolve => { setTimeout(resolve, 200); }); + if (!resume) should.fail('Test did not set up successfully'); + global.s3.uploadFromBlob = original; + // and + const second = uploadPending(); + await second; + + // then + global.s3.uploads.attempted.should.equal(0); + global.s3.uploads.successful.should.equal(0); + + // when + resume(); + await first; + + // then + global.s3.uploads.attempted.should.equal(1); + global.s3.uploads.successful.should.equal(1); + })); + }); + }); +}); diff --git a/test/integration/worker/submission.attachment.update.js b/test/integration/worker/submission.attachment.update.js index b304b20a4..fa419ff07 100644 --- a/test/integration/worker/submission.attachment.update.js +++ b/test/integration/worker/submission.attachment.update.js @@ -46,6 +46,30 @@ describe('worker: submission.attachment.update', () => { .then(() => container.oneFirst(sql`select count(*) from client_audits`)) .then((count) => { Number(count).should.equal(5); })))); + it('should process the given logs if already uploaded to s3', testService((service, container) => { + global.s3.enableMock(); + return service.login('alice', (asAlice) => + asAlice.post('/v1/projects/1/forms?publish=true') + .set('Content-Type', 'application/xml') + .send(testData.forms.clientAudits) + .expect(200) + .then(() => asAlice.post('/v1/projects/1/submission') + .set('X-OpenRosa-Version', '1.0') + .attach('xml_submission_file', Buffer.from(testData.instances.clientAudits.one), { filename: 'data.xml' }) + .attach('audit.csv', createReadStream(appRoot + '/test/data/audit.csv'), { filename: 'audit.csv' }) + .expect(201) + .then(() => asAlice.get('/v1/projects/1/forms/audits/submissions/one/attachments'))) + .then(() => container.oneFirst(sql`select count(*) from client_audits`)) + .then((count) => { Number(count).should.equal(0); }) + .then(() => container.Blobs.s3UploadPending()) + .then(() => container.Audits.getLatestByAction('submission.attachment.update')) + .then((o) => o.get()) + .then((event) => worker(container, event)) + .then((result) => { result.should.equal(true); }) + .then(() => container.oneFirst(sql`select count(*) from client_audits`)) + .then((count) => { Number(count).should.equal(5); })); + })); + it('should not reprocess already-processed logs', testService((service, container) => service.login('alice', (asAlice) => asAlice.post('/v1/projects/1/forms?publish=true') diff --git a/test/unit/data/attachments.js b/test/unit/data/attachments.js index ebe9910c9..56f527dfb 100644 --- a/test/unit/data/attachments.js +++ b/test/unit/data/attachments.js @@ -1,16 +1,17 @@ const appRoot = 
require('app-root-path'); const streamTest = require('streamtest').v2; const { zipStreamToFiles } = require(appRoot + '/test/util/zip'); +const { PartialPipe } = require(appRoot + '/lib/util/stream'); const { streamAttachments } = require(appRoot + '/lib/data/attachments'); const { zipStreamFromParts } = require(appRoot + '/lib/util/zip'); describe('.zip attachments streaming', () => { it('should stream the contents to files at the appropriate paths', (done) => { - const inStream = streamTest.fromObjects([ + const inStream = PartialPipe.of(streamTest.fromObjects([ { row: { instanceId: 'subone', name: 'firstfile.ext', content: 'this is my first file' } }, { row: { instanceId: 'subone', name: 'secondfile.ext', content: 'this is my second file' } }, { row: { instanceId: 'subtwo', name: 'thirdfile.ext', content: 'this is my third file' } } - ]); + ])); zipStreamToFiles(zipStreamFromParts(streamAttachments(inStream)), (err, result) => { // eslint-disable-next-line keyword-spacing if(err) return done(err); @@ -30,11 +31,11 @@ describe('.zip attachments streaming', () => { }); it('should deal with unsafe filenames sanely', (done) => { - const inStream = streamTest.fromObjects([ + const inStream = PartialPipe.of(streamTest.fromObjects([ { row: { instanceId: '../subone', name: 'firstfile.ext', content: 'this is my first file' } }, { row: { instanceId: 'subone', name: '../secondfile.ext', content: 'this is my second file' } }, { row: { instanceId: 'subone', name: './.secondfile.ext', content: 'this is my duplicate second file' } }, - ]); + ])); zipStreamToFiles(zipStreamFromParts(streamAttachments(inStream)), (err, result) => { // eslint-disable-next-line keyword-spacing if(err) return done(err); @@ -50,9 +51,9 @@ describe('.zip attachments streaming', () => { }); it('should not strip .enc unless decryption is happening', (done) => { - const inStream = streamTest.fromObjects([ + const inStream = PartialPipe.of(streamTest.fromObjects([ { row: { instanceId: 'subone', name: 'firstfile.ext.enc', content: 'this is my first file' } } - ]); + ])); zipStreamToFiles(zipStreamFromParts(streamAttachments(inStream)), (err, result) => { // eslint-disable-next-line keyword-spacing if(err) return done(err); @@ -63,9 +64,9 @@ describe('.zip attachments streaming', () => { }); it('should strip .enc if decryption is happening', (done) => { - const inStream = streamTest.fromObjects([ + const inStream = PartialPipe.of(streamTest.fromObjects([ { row: { instanceId: 'subone', name: 'firstfile.ext.enc', content: 'this is my first file' } } - ]); + ])); zipStreamToFiles(zipStreamFromParts(streamAttachments(inStream, () => {})), (err, result) => { // eslint-disable-next-line keyword-spacing if(err) return done(err); diff --git a/test/util/s3.js b/test/util/s3.js new file mode 100644 index 000000000..47ad18272 --- /dev/null +++ b/test/util/s3.js @@ -0,0 +1,89 @@ +const keyFrom = (id, sha) => { + if (!id || !sha) throw new Error('Missing required arg: ' + JSON.stringify({ id, sha })); + return sha+id; +}; + +class S3mock { + resetMock() { + delete this.enabled; + delete this.s3bucket; + delete this.error; + delete this.downloads; + delete this.uploads; + } + + enableMock() { + this.enabled = true; + this.s3bucket = new Map(); + this.error = {}; + this.downloads = { attempted: 0, successful: 0 }; + this.uploads = { attempted: 0, successful: 0, deleted: 0 }; + } + + // MOCKED FUNCTIONS + // ================ + // These functions should be marked `async` to correspond with the function + // in lib/external/s3.js that they are 
mocking. + + async uploadFromBlob({ id, content, sha }) { + if (!this.enabled) throw new Error('S3 mock has not been enabled, so this function should not be called.'); + + if (this.error.onUpload === true) { + throw new Error('Mock error when trying to upload blobs.'); + } + + // eslint-disable-next-line no-plusplus + if (this.error.onUpload === ++this.uploads.attempted) { + throw new Error(`Mock error when trying to upload #${this.uploads.attempted}`); + } + + const key = keyFrom(id, sha); + + if (this.s3bucket.has(key)) { + throw new Error('Should not re-upload existing s3 object.'); + } + + this.s3bucket.set(key, content); + // eslint-disable-next-line no-plusplus + ++this.uploads.successful; + } + + async getContentFor({ id, sha }) { + if (!this.enabled) throw new Error('S3 mock has not been enabled, so this function should not be called.'); + + // eslint-disable-next-line no-plusplus + ++this.downloads.attempted; + + if (this.error.onDownload) { + throw new Error('Mock error when trying to download blob.'); + } + + const content = this.s3bucket.get(keyFrom(id, sha)); + if (content == null) throw new Error('Blob content not found.'); + + // eslint-disable-next-line no-plusplus + ++this.downloads.successful; + + return content; + } + + async urlForBlob(filename, { md5, sha, contentType }) { + if (!this.enabled) throw new Error('S3 mock has not been enabled, so this function should not be called.'); + + return `s3://mock/${md5}/${sha}/${filename}?contentType=${contentType}`; + } + + async deleteObjFor({ id, sha }) { + if (!this.enabled) throw new Error('S3 mock has not been enabled, so this function should not be called.'); + + const key = keyFrom(id, sha); + if (!this.s3bucket.has(key)) throw new Error('Blob not found.'); + this.s3bucket.delete(key); + // eslint-disable-next-line no-plusplus + ++this.uploads.deleted; + } +} + +global.s3 = new S3mock(); + +module.exports = { s3: global.s3 };
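+ +// The mock is exposed via `global` so that setup.js can reset it between tests +// (s3.resetMock()) and individual tests can opt in with global.s3.enableMock(); +// the same instance is also injected into the test container as `s3`. +// +// Typical usage, as in the integration tests above: +// +//   global.s3.enableMock(); +//   // ...make requests that create blobs... +//   await container.Blobs.s3UploadPending(); +//   global.s3.uploads.successful.should.equal(1);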