From ea6caac7576a1afeb8f03148d44a221ba57de35a Mon Sep 17 00:00:00 2001
From: Samir Musali
Date: Fri, 19 Mar 2021 17:27:04 -0400
Subject: [PATCH] refactor(logger): introduce @logdna/logger as the logging client

Replace the current hand-rolled batching, flushing, and retry mechanism
with the @logdna/logger client.

Ref: #2
Semver: minor
---
 .circleci/config.yml           |   2 +-
 LICENSE                        |   2 +-
 doc/env.md                     |  88 +-----
 index.js                       |  54 ++--
 lib/config.js                  |  30 +-
 lib/constants.js               |  16 -
 lib/event-handler.js           |  25 +-
 lib/logger.js                  |  73 ++---
 lib/transformer.js             |  98 +++---
 lib/utils.js                   |  18 +-
 package-lock.json              | 272 +++++++++++-----
 package.json                   |  14 +-
 test/integration/index.js      | 286 +++++++++++++++++
 test/unit/index.js             | 147 +++++++++
 test/unit/lib/event-handler.js |  47 ++-
 test/unit/lib/logger.js        |  50 +++
 test/unit/lib/transformer.js   | 561 ++++++++++++++++++---------------
 test/unit/lib/utils.js         |  24 +-
 18 files changed, 1176 insertions(+), 631 deletions(-)
 delete mode 100644 lib/constants.js
 create mode 100644 test/integration/index.js
 create mode 100644 test/unit/index.js
 create mode 100644 test/unit/lib/logger.js

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 1df323c..2c9de38 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -36,7 +36,7 @@ jobs:
           name: Build S3 Lambda
           command: |
             npm ci --production
-            zip logdna-s3.zip -r node_modules/ index.js package-lock.json lib/*.js
+            zip logdna-s3.zip -r node_modules/ index.js package.json lib/*.js
       - persist_to_workspace:
           root: .
           paths:
diff --git a/LICENSE b/LICENSE
index e8db2e0..bd95903 100755
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2021 LogDNA, Inc.
+Copyright (c) 2019 LogDNA, Inc.
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
diff --git a/doc/env.md b/doc/env.md
index f235cdb..cdb74b7 100644
--- a/doc/env.md
+++ b/doc/env.md
@@ -1,47 +1,33 @@
 ## Environment Variables
 
-### `BATCH_INTERVAL`
+### `FLUSH_INTERVAL`
 
-> The number of milliseconds between sending each batch
+> The number of milliseconds to wait between sending payloads to LogDNA
 
 | Config | Value |
 | --- | --- |
-| Name | `batch-interval` |
-| Environment Variable | `BATCH_INTERVAL` |
+| Name | `flush-interval` |
+| Environment Variable | `FLUSH_INTERVAL` |
 | Type | `number` |
 | Required | no |
-| Default | `50` |
+| Default | `1000` |
 
 ***
 
-### `BATCH_LIMIT`
+### `FLUSH_LIMIT`
 
-> The number of lines within each batch
+> If the length of the send buffer exceeds this limit, send immediately
 
 | Config | Value |
 | --- | --- |
-| Name | `batch-limit` |
-| Environment Variable | `BATCH_LIMIT` |
+| Name | `flush-limit` |
+| Environment Variable | `FLUSH_LIMIT` |
 | Type | `number` |
 | Required | no |
 | Default | `25` |
 
 ***
 
-### `FREE_SOCKET_TIMEOUT`
-
-> The number of milliseconds to wait for inactivity before timing out
-
-| Config | Value |
-| --- | --- |
-| Name | `free-socket-timeout` |
-| Environment Variable | `FREE_SOCKET_TIMEOUT` |
-| Type | `number` |
-| Required | no |
-| Default | `300000` |
-
-***
-
 ### `HOSTNAME`
 
 > Optionally, use alternative host name set through the environment
 
@@ -140,34 +126,6 @@
 
 ***
 
-### `MAX_REQUEST_RETRIES`
-
-> Maximum number of retries for sending each batch
-
-| Config | Value |
-| --- | --- |
-| Name | `max-request-retries` |
-| Environment Variable | `MAX_REQUEST_RETRIES` |
-| Type | `number` |
-| Required | no |
-| Default | `5` |
-
-***
-
-### `MAX_REQUEST_TIMEOUT`
-
-> Maximum request timeout in sending each batch
-
-| Config | Value |
-| --- | --- |
-| Name 
| `max-request-timeout` | -| Environment Variable | `MAX_REQUEST_TIMEOUT` | -| Type | `number` | -| Required | no | -| Default | `300` | - -*** - ### `PROXY` > A full proxy URL (including protocol) to pass through before going to LogDNA @@ -182,20 +140,6 @@ *** -### `REQUEST_RETRY_INTERVAL` - -> The number of milliseconds between each retry - -| Config | Value | -| --- | --- | -| Name | `request-retry-interval` | -| Environment Variable | `REQUEST_RETRY_INTERVAL` | -| Type | `number` | -| Required | no | -| Default | `100` | - -*** - ### `SSL` > Use https:// for log ingestion @@ -224,20 +168,6 @@ *** -### `URL` - -> *Combination of SSL, INGESTION_HOST, INGESTION_PORT, and INGESTION_ENDPOINT* - -| Config | Value | -| --- | --- | -| Name | `url` | -| Environment Variable | `URL` | -| Type | `string` | -| Required | no | -| Default | `https://logs.logdna.com/logs/ingest` | - -*** - ### `USER_AGENT` > user-agent header value to use while sending logs diff --git a/index.js b/index.js index 3ac7b06..595254e 100644 --- a/index.js +++ b/index.js @@ -1,50 +1,54 @@ 'use strict' -const async = require('async') +const {once} = require('events') const config = require('./lib/config.js') const {handleEvent} = require('./lib/event-handler.js') -const {flush} = require('./lib/logger.js') +const {createLoggerClient} = require('./lib/logger.js') const {getLogs, prepareLogs} = require('./lib/transformer.js') -const {batchify, getProperty} = require('./lib/utils.js') +const {getProperty, trimTags} = require('./lib/utils.js') -const DOT_REGEXP = /\./g +const HOSTNAME_REGEX = /[^0-9a-zA-Z\-.]/g module.exports = { handler } -async function handler(event, context, callback) { +async function handler(event, context) { config.validateEnvVars() - const tags = config.get('tags') - if (tags) { - config.set('tags', tags.split(',').map((tag) => { - return tag.trim() - }).join(',')) + const eventData = handleEvent(event) + if (!eventData) { + const error = new Error('Cannot Parse the S3 Event') + error.meta = {event} + throw error } - const eventData = handleEvent(event) const s3params = { Bucket: getProperty(eventData, 'meta.bucket.name') , Key: getProperty(eventData, 'meta.object.key') } - let lines - try { - lines = getLogs(s3params) - } catch (e) { - return callback(e) + const tags = config.get('tags') + if (tags) { + config.set('tags', trimTags(tags)) + } + + const hostname = config.get('hostname') || s3params.Bucket + if (hostname) { + config.set('hostname', hostname.replace(HOSTNAME_REGEX, '')) } - const logArrays = prepareLogs(lines, eventData) - const batches = batchify(logArrays, config.get('batch-limit')) - if (!config.get('hostname')) { - config.set('hostname', s3params.Bucket.replace(DOT_REGEXP, '_')) + const logger = createLoggerClient(config) + const lines = await getLogs(s3params) + logger.on('error', console.error) + logger.on('warn', console.warn) + const logs = prepareLogs(lines, eventData) + for (const log of logs) { + const {line, opts} = log + logger.log(line, opts) } - async.everySeries(batches, (batch, next) => { - setTimeout(() => { - return flush(batch, config, next) - }, config.get('batch-interval')) - }, callback) + // Ensure logs have been flushed to LogDNA before finishing + await once(logger, 'cleared') + return } diff --git a/lib/config.js b/lib/config.js index 811c1da..6bfbeb5 100644 --- a/lib/config.js +++ b/lib/config.js @@ -6,17 +6,13 @@ const pkg = require('../package.json') const config = new Config([ Config - .number('batch-interval') - .default(50) - .desc('The number of 
milliseconds between sending each batch')
+    .number('flush-interval')
+    .default(1000)
+    .desc('The number of milliseconds to wait between sending payloads to LogDNA')
 , Config
-    .number('batch-limit')
+    .number('flush-limit')
     .default(25)
-    .desc('The number of lines within each batch')
-, Config
-    .number('free-socket-timeout')
-    .default(300000)
-    .desc('The number of milliseconds to wait for inactivity before timing out')
+    .desc('If the length of the send buffer exceeds this limit, send immediately')
 , Config
     .string('hostname')
     .desc('Optionally, use alternative host name set through the environment')
@@ -42,21 +38,9 @@ const config = new Config([
     .number('ingestion-port')
     .default(443)
     .desc('The port for log ingestion')
-, Config
-    .number('max-request-retries')
-    .default(5)
-    .desc('Maximum number of retries for sending each batch')
-, Config
-    .number('max-request-timeout')
-    .default(300)
-    .desc('Maximum request timeout in sending each batch')
 , Config
     .string('proxy')
     .desc('A full proxy URL (including protocol) to pass through before going to LogDNA')
-, Config
-    .number('request-retry-interval')
-    .default(100)
-    .desc('The number of milliseconds between each retry')
 , Config
     .boolean('ssl')
     .default(true)
 , Config
     .string('tags')
     .desc('Optionally, use comma-separated tags set through the environment')
-, Config
-    .string('url')
-    .default('https://logs.logdna.com/logs/ingest')
-    .desc('*Combination of SSL, INGESTION_HOST, INGESTION_PORT, and INGESTION_ENDPOINT*')
 , Config
     .string('user-agent')
     .default(`${pkg.name}/${pkg.version}`)
diff --git a/lib/constants.js b/lib/constants.js
deleted file mode 100644
index 2a3128a..0000000
--- a/lib/constants.js
+++ /dev/null
@@ -1,16 +0,0 @@
-'use strict'
-
-module.exports = {
-  DEFAULT_HTTP_ERRORS: [
-    'ECONNRESET'
-  , 'EHOSTUNREACH'
-  , 'ETIMEDOUT'
-  , 'ESOCKETTIMEDOUT'
-  , 'ECONNREFUSED'
-  , 'ENOTFOUND'
-  ]
-, INTERNAL_SERVER_ERROR: {
-    statusCode: 500
-  , code: 'INTERNAL_SERVER_ERROR'
-  }
-}
diff --git a/lib/event-handler.js b/lib/event-handler.js
index 1b0e4f0..2d0915c 100644
--- a/lib/event-handler.js
+++ b/lib/event-handler.js
@@ -1,27 +1,20 @@
 'use strict'
 
-const {
-  getProperty
-, setProperty
-} = require('./utils.js')
+const {getProperty} = require('./utils.js')
 
 module.exports = {
   handleEvent
 }
 
 function handleEvent(event) {
-  let record = getProperty(event, 'Records.0')
+  const record = getProperty(event, 'Records.0')
   if (!record) return undefined
 
-  let key = getProperty(record, 's3.object.key')
-  if (key) {
-    key = key.replace(/\+/g, ' ')
-    record = setProperty(record, 's3.object.key', decodeURIComponent(key))
-  }
-
+  const key = getProperty(record, 's3.object.key')
   let timestamp = Date.now()
   if (record.eventTime) {
-    timestamp = (new Date(record.eventTime)).getTime()
+    const eventTimestamp = (new Date(record.eventTime)).getTime()
+    if (!isNaN(eventTimestamp)) timestamp = eventTimestamp
   }
 
   const object = getProperty(record, 's3.object')
@@ -31,9 +24,13 @@ function handleEvent(event) {
   , arn: getProperty(record, 's3.bucket.arn')
   }
 
-  let file = bucket.name
+  let file = ''
+  if (bucket.name) {
+    file = `${file}${bucket.name}/`
+  }
+
   if (key) {
-    file = `${file}/${key}`
+    file = `${file}${key}`
   }
 
   return {
diff --git a/lib/logger.js b/lib/logger.js
index b6c90fb..e7b1d09 100644
--- a/lib/logger.js
+++ b/lib/logger.js
@@ -1,58 +1,35 @@
 'use strict'
 
-const agent = require('agentkeepalive')
-const async = require('async')
-const request = require('request')
+const {createLogger} = 
require('@logdna/logger') -const constants = require('./constants.js') +let logger module.exports = { - flush + buildLoggerURL +, createLoggerClient } -function flush(payload, config, callback) { - const options = { - url: config.get('url') - , qs: { - tags: config.tags - , hostname: config.hostname - } - , method: 'POST' - , body: JSON.stringify({ - e: 'ls' - , ls: payload - }) - , auth: { - username: config.get('ingestion-key') - } - , headers: { - 'Content-Type': 'application/json; charset=UTF-8' - , 'user-agent': config.get('user-agent') - } - , timeout: config.get('max-request-timeout') - , withCredentials: false - , agent: new agent.HttpsAgent({ - freeSocketTimeout: config.get('free-socket-timeout') - }) - } +function buildLoggerURL(config) { + const ssl = config.get('ssl') + const host = config.get('ingestion-host') + const port = config.get('ingestion-port') + const endpoint = config.get('ingestion-endpoint') + const protocol = ssl ? 'https' : 'http' + const url = `${protocol}://${host}:${port}${endpoint}` + return url +} - async.retry({ - times: config.get('max-request-retries') - , interval: (retryCount) => { - return config.get('request-retry-interval') * Math.pow(2, retryCount) - } - , errorFilter: (errCode) => { - return constants.DEFAULT_HTTP_ERRORS.includes(errCode) - || errCode === constants.INTERNAL_SERVER_ERROR.code - } - }, (reqCallback) => { - return request(options, (error, response, body) => { - if (error) return reqCallback(error.code) - if (response.statusCode >= constants.INTERNAL_SERVER_ERROR.statusCode) { - return reqCallback(constants.INTERNAL_SERVER_ERROR.code) - } +function createLoggerClient(config) { + logger = createLogger(config.get('ingestion-key'), { + flushLimit: config.get('flush-limit') + , flushIntervalMs: config.get('flush-interval') + , hostname: config.get('hostname') + , indexMeta: true + , proxy: config.get('proxy') || config.get('https-proxy') || config.get('http-proxy') + , tags: config.get('tags') + , url: buildLoggerURL(config) + , UserAgent: config.get('user-agent') + }) - return reqCallback(null, body) - }) - }, callback) + return logger } diff --git a/lib/transformer.js b/lib/transformer.js index 697bc6b..bcc530e 100644 --- a/lib/transformer.js +++ b/lib/transformer.js @@ -1,9 +1,11 @@ 'use strict' +const {promisify} = require('util') const zlib = require('zlib') const aws = require('aws-sdk') const { checkFileFormat +, formatObjectKey , hasProperty } = require('./utils.js') @@ -11,11 +13,11 @@ const s3 = new aws.S3({ apiVersion: '2006-03-01' }) -module.exports = { +module.exports = exports = { extractData , getLogs , prepareLogs -, s3 +, getObject: promisify(s3.getObject.bind(s3)) } function extractData(data) { @@ -75,69 +77,83 @@ function extractData(data) { }) } -async function getLogs(params, callback) { +async function getLogs(params) { + let err if (!hasProperty(params, 'Key') || !hasProperty(params, 'Bucket')) { - return callback('Both Bucket and Key params must be provided') + err = new Error('Both Bucket and Key params must be provided') + err.meta = {params} + throw err } - const keyFormat = checkFileFormat(params.Key) let data try { - data = await s3.getObject(params) - } catch (e) { - return callback(`Error in Getting ${params.Bucket}/${params.Key}: ${e}`) + data = await exports.getObject(params) + } catch (error) { + err = new Error('Error in Getting the S3 Object') + err.meta = {error, params} + throw err } if (!hasProperty(data, 'Body')) { - return callback(`Corrupted data returned from ${params.Bucket}/${params.Key}`) + 
err = new Error('Corrupted data returned from the object') + err.meta = {params} + throw err } let content = data.Body - if (keyFormat.gz) { + const type = checkFileFormat(params.Key) + if (type.gz) { try { content = zlib.gunzipSync(content) - } catch (e) { - return callback(`Error in Unzipping ${params.Bucket}/${params.Key}: ${e}`) + } catch (error) { + err = new Error('Error in Unzipping the S3 Object') + err.meta = {error, params, type} + throw err } } content = content.toString('ascii') - if (keyFormat.json) { + if (type.json) { try { content = JSON.parse(content) - } catch (e) { - return callback('Error in Parsing the JSON Data from ' - + `${params.Bucket}/${params.Key}: ${e}`) + } catch (error) { + err = new Error('Error in Parsing the JSON Data from the S3 Object') + err.meta = {error, params, type} + throw err } } - return callback(null, extractData(content)) + content = extractData(content) + return content } function prepareLogs(logs, eventData) { - if (!logs || !Array.isArray(logs)) return undefined - - return logs.filter((log) => { - return log && log.line - }).map((log) => { - const result = { - file: undefined - , line: log.line - , meta: {...log.meta} - , timestamp: Date.now() - } - - if (eventData) { - result.file = eventData.file - result.meta = {...eventData.meta, ...result.meta} - } - - if (log.timestamp) { - result.timestamp = log.timestamp - } else if (eventData && eventData.timestamp) { - result.timestamp = eventData.timestamp - } + let logObjects = [] + if (logs && Array.isArray(logs)) { + logObjects = logs.filter((log) => { + return log && log.line + }).map((log) => { + const line = log.line + const opts = { + app: undefined + , meta: {...log.meta} + , timestamp: Date.now() + } + + if (eventData) { + opts.app = formatObjectKey(eventData.file) + opts.meta = {...eventData.meta, ...opts.meta} + } + + if (log.timestamp) { + opts.timestamp = log.timestamp + } else if (eventData && eventData.timestamp) { + opts.timestamp = eventData.timestamp + } + + return {line, opts} + }) + } - return result - }) + return logObjects } diff --git a/lib/utils.js b/lib/utils.js index e266542..4822b73 100644 --- a/lib/utils.js +++ b/lib/utils.js @@ -1,11 +1,12 @@ 'use strict' module.exports = { - batchify -, checkFileFormat + checkFileFormat +, formatObjectKey , getProperty , hasProperty , setProperty +, trimTags } function getProperty(obj, string = '', sep = '.') { @@ -79,11 +80,12 @@ function checkFileFormat(key) { } } -function batchify(arr, size) { - const batches = [] - for (let i = 0; i < arr.length; i += size) { - batches.push(arr.slice(i, i + size)) - } +function trimTags(tags) { + return tags.split(',').map((tag) => { + return tag.trim() + }).filter(Boolean).join(',') +} - return batches +function formatObjectKey(key) { + return decodeURIComponent(key.replace(/\+/g, ' ')) } diff --git a/package-lock.json b/package-lock.json index 519e70c..2110cd8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -92,9 +92,9 @@ } }, "@babel/parser": { - "version": "7.13.11", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.13.11.tgz", - "integrity": "sha512-PhuoqeHoO9fc4ffMEVk4qb/w/s2iOSWohvbHxLtxui0eBg3Lg5gN1U8wp1V1u61hOWkPQJJyJzGH6Y+grwkq8Q==", + "version": "7.13.12", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.13.12.tgz", + "integrity": "sha512-4T7Pb244rxH24yR116LAuJ+adxXXnHhZaLJjegJVKSdoNCe4x1eDBaud5YIcQFcqzsaD5BHvJw5BQ0AZapdCRw==", "dev": true }, "@babel/template": { @@ -154,9 +154,9 @@ } }, "@babel/types": { - "version": "7.13.0", - "resolved": 
"https://registry.npmjs.org/@babel/types/-/types-7.13.0.tgz", - "integrity": "sha512-hE+HE8rnG1Z6Wzo+MhaKE5lM5eMx71T4EHJgku2E3xIfaULhDcxiiRxUYgwX8qwP1BBSlag+TdGOt6JAidIZTA==", + "version": "7.13.12", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.13.12.tgz", + "integrity": "sha512-K4nY2xFN4QMvQwkQ+zmBDp6ANMbVNw6BbxWmYA4qNjhR9W+Lj/8ky5MEY2Me5r+B2c6/v6F53oMndG+f9s3IiA==", "dev": true, "requires": { "@babel/helper-validator-identifier": "^7.12.11", @@ -197,6 +197,17 @@ "resolved": "https://registry.npmjs.org/@logdna/env-config/-/env-config-1.0.5.tgz", "integrity": "sha512-pK+8J1lSeWKdfiUu4Aeww8sssC0+cCFZmlyPbh8dViA0B4JIfpSnAFJ9KBgaGYljPvNRGrdYsb4xSSe0gQOkKw==" }, + "@logdna/logger": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/@logdna/logger/-/logger-2.3.1.tgz", + "integrity": "sha512-435dk5lU6IVFLKuvIWqXC+gPq0E8dQOyv+ZUPrNKZE4jotjOarlG6fC2iDXRWIS10h8icn493mvLLrWApTc7Hw==", + "requires": { + "agentkeepalive": "^4.1.3", + "axios": "^0.21.1", + "https-proxy-agent": "^5.0.0", + "json-stringify-safe": "^5.0.1" + } + }, "acorn": { "version": "7.4.1", "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", @@ -209,6 +220,14 @@ "integrity": "sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng==", "dev": true }, + "agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "requires": { + "debug": "4" + } + }, "agentkeepalive": { "version": "4.1.4", "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.1.4.tgz", @@ -223,6 +242,7 @@ "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, "requires": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -295,6 +315,7 @@ "version": "0.2.4", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "dev": true, "requires": { "safer-buffer": "~2.1.0" } @@ -302,7 +323,8 @@ "assert-plus": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "dev": true }, "astral-regex": { "version": "2.0.0", @@ -310,14 +332,6 @@ "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", "dev": true }, - "async": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", - "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", - "requires": { - "lodash": "^4.17.14" - } - }, "async-hook-domain": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/async-hook-domain/-/async-hook-domain-1.1.3.tgz", @@ -330,12 +344,13 @@ "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", + "dev": true }, "aws-sdk": { - "version": "2.868.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.868.0.tgz", - "integrity": 
"sha512-ZayPsA/ycaAXqqa2oDyf8iUpl1WOLODZS8ZdvYj77L5owMQm0XC7yqiD+WHj9nToUECF9VAD+AKQMIN6695tVQ==", + "version": "2.872.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.872.0.tgz", + "integrity": "sha512-hI1/iwR1uPbuulvWZmCCmLKN1Oiv+Beutwcn+7ZOsWAEtsgsXiHmuRDS/ZdWiBRNQkfZgUhcCwLz7nOrWKpb8w==", "dev": true, "requires": { "buffer": "4.9.2", @@ -347,25 +362,27 @@ "url": "0.10.3", "uuid": "3.3.2", "xml2js": "0.4.19" - }, - "dependencies": { - "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==", - "dev": true - } } }, "aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", + "dev": true }, "aws4": { "version": "1.11.0", "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", - "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==" + "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==", + "dev": true + }, + "axios": { + "version": "0.21.1", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.1.tgz", + "integrity": "sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==", + "requires": { + "follow-redirects": "^1.10.0" + } }, "balanced-match": { "version": "1.0.0", @@ -383,6 +400,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "dev": true, "requires": { "tweetnacl": "^0.14.3" } @@ -503,7 +521,8 @@ "caseless": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", + "dev": true }, "chalk": { "version": "4.1.0", @@ -654,6 +673,7 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, "requires": { "delayed-stream": "~1.0.0" } @@ -690,7 +710,8 @@ "core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", + "dev": true }, "coveralls": { "version": "3.1.0", @@ -733,6 +754,7 @@ "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "dev": true, "requires": { "assert-plus": "^1.0.0" } @@ -778,7 +800,8 @@ "delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "dev": true }, "depd": { "version": "1.1.2", @@ -819,6 +842,7 @@ "version": "0.1.2", "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "dev": true, "requires": { "jsbn": "~0.1.0", "safer-buffer": "^2.1.0" @@ -955,9 +979,9 @@ } }, "eslint-config-logdna": { - "version": "4.0.2", - "resolved": 
"https://registry.npmjs.org/eslint-config-logdna/-/eslint-config-logdna-4.0.2.tgz", - "integrity": "sha512-4GHqCp0QdNH+zjiPbSA0rmiF7FVdLLS/ZAGgHw0ruoAOZBvPndXJ/sKjB4hgM119WhzIymWfmpHJq0XS9RyIAA==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/eslint-config-logdna/-/eslint-config-logdna-4.2.0.tgz", + "integrity": "sha512-/Tve007ULH44EhNacE9k2yY7x3YUib/XP6o3DpYHzHb4gz0SNaSyTSDvGjGH8EcQrVyJVztFBx3Xkew6Ly9J7w==", "dev": true, "requires": { "eslint-plugin-logdna": "^1.0.0", @@ -1149,22 +1173,26 @@ "extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "dev": true }, "extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", + "dev": true }, "fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true }, "fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true }, "fast-levenshtein": { "version": "2.0.6", @@ -1258,6 +1286,11 @@ "vlq": "^0.2.1" } }, + "follow-redirects": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.13.3.tgz", + "integrity": "sha512-DUgl6+HDzB0iEptNQEXLx/KhTmDb8tZUHSeLqpnjpknR70H0nC2t9N73BK6fN4hOvJ84pKlIQVQ4k5FFlBedKA==" + }, "foreground-child": { "version": "1.5.6", "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-1.5.6.tgz", @@ -1308,12 +1341,14 @@ "forever-agent": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", + "dev": true }, "form-data": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dev": true, "requires": { "asynckit": "^0.4.0", "combined-stream": "^1.0.6", @@ -1378,6 +1413,7 @@ "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "dev": true, "requires": { "assert-plus": "^1.0.0" } @@ -1437,12 +1473,14 @@ "har-schema": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", + "dev": true }, "har-validator": { "version": "5.1.5", "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", "integrity": 
"sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", + "dev": true, "requires": { "ajv": "^6.12.3", "har-schema": "^2.0.0" @@ -1506,12 +1544,22 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "dev": true, "requires": { "assert-plus": "^1.0.0", "jsprim": "^1.2.2", "sshpk": "^1.7.0" } }, + "https-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", + "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", + "requires": { + "agent-base": "6", + "debug": "4" + } + }, "humanize-ms": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", @@ -1709,7 +1757,8 @@ "is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", + "dev": true }, "isarray": { "version": "1.0.0", @@ -1726,7 +1775,8 @@ "isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", + "dev": true }, "istanbul-lib-coverage": { "version": "2.0.5", @@ -2019,7 +2069,8 @@ "jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", + "dev": true }, "jsesc": { "version": "2.5.2", @@ -2036,12 +2087,14 @@ "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" + "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", + "dev": true }, "json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true }, "json-stable-stringify-without-jsonify": { "version": "1.0.1", @@ -2058,6 +2111,7 @@ "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "dev": true, "requires": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", @@ -2114,7 +2168,8 @@ "lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true }, "lodash.flattendeep": { "version": "4.4.0", @@ -2122,6 +2177,12 @@ "integrity": "sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI=", "dev": true }, + "lodash.set": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/lodash.set/-/lodash.set-4.3.2.tgz", + "integrity": "sha1-2HV7HagH3eJIFrDWqEvqGnYjCyM=", + "dev": true + }, "log-driver": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz", @@ -2204,12 +2265,14 @@ "mime-db": { "version": "1.46.0", "resolved": 
"https://registry.npmjs.org/mime-db/-/mime-db-1.46.0.tgz", - "integrity": "sha512-svXaP8UQRZ5K7or+ZmfNhg2xX3yKDMUzqadsSqi4NCH/KomcH75MAMYAGVlvXn4+b/xOPhS3I2uHKRUzvjY7BQ==" + "integrity": "sha512-svXaP8UQRZ5K7or+ZmfNhg2xX3yKDMUzqadsSqi4NCH/KomcH75MAMYAGVlvXn4+b/xOPhS3I2uHKRUzvjY7BQ==", + "dev": true }, "mime-types": { "version": "2.1.29", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.29.tgz", "integrity": "sha512-Y/jMt/S5sR9OaqteJtslsFZKWOIIqMACsJSiHghlCAyhf7jfVYjKBmLiX8OgpWeW+fjJ2b+Az69aPFPkUOY6xQ==", + "dev": true, "requires": { "mime-db": "1.46.0" } @@ -2373,6 +2436,18 @@ "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", "dev": true }, + "nock": { + "version": "13.0.11", + "resolved": "https://registry.npmjs.org/nock/-/nock-13.0.11.tgz", + "integrity": "sha512-sKZltNkkWblkqqPAsjYW0bm3s9DcHRPiMOyKO/PkfJ+ANHZ2+LA2PLe22r4lLrKgXaiSaDQwW3qGsJFtIpQIeQ==", + "dev": true, + "requires": { + "debug": "^4.1.0", + "json-stringify-safe": "^5.0.1", + "lodash.set": "^4.3.2", + "propagate": "^2.0.0" + } + }, "node-environment-flags": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/node-environment-flags/-/node-environment-flags-1.0.6.tgz", @@ -2476,7 +2551,8 @@ "oauth-sign": { "version": "0.9.0", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", - "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", + "dev": true }, "object-assign": { "version": "4.1.1", @@ -2668,7 +2744,8 @@ "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", + "dev": true }, "picomatch": { "version": "2.2.2", @@ -2723,6 +2800,12 @@ "react-is": "^16.8.1" } }, + "propagate": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz", + "integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==", + "dev": true + }, "pseudomap": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", @@ -2732,17 +2815,20 @@ "psl": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", - "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==", + "dev": true }, "punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=", + "dev": true }, "qs": { "version": "6.5.2", "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" + "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", + "dev": true }, "querystring": { "version": "0.2.0", @@ -2816,6 +2902,7 @@ "version": "2.88.2", "resolved": 
"https://registry.npmjs.org/request/-/request-2.88.2.tgz", "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", + "dev": true, "requires": { "aws-sign2": "~0.7.0", "aws4": "^1.8.0", @@ -2891,12 +2978,14 @@ "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true }, "safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true }, "sax": { "version": "1.2.1", @@ -2905,9 +2994,9 @@ "dev": true }, "semver": { - "version": "7.3.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.4.tgz", - "integrity": "sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==", + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", "dev": true, "requires": { "lru-cache": "^6.0.0" @@ -3069,6 +3158,7 @@ "version": "1.16.1", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", + "dev": true, "requires": { "asn1": "~0.2.3", "assert-plus": "^1.0.0", @@ -3172,9 +3262,9 @@ }, "dependencies": { "ajv": { - "version": "7.2.1", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-7.2.1.tgz", - "integrity": "sha512-+nu0HDv7kNSOua9apAVc979qd932rrZeb3WOvoiD31A/p1mIE5/9bN2027pE2rOPYEdS3UHzsvof4hY+lM9/WQ==", + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-7.2.3.tgz", + "integrity": "sha512-idv5WZvKVXDqKralOImQgPM9v6WOdLNa0IY3B3doOjw/YxRGT8I+allIJ6kd7Uaj+SF1xZUSU+nPM5aDNBVtnw==", "dev": true, "requires": { "fast-deep-equal": "^3.1.1", @@ -4457,9 +4547,18 @@ "version": "2.5.0", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "dev": true, "requires": { "psl": "^1.1.28", "punycode": "^2.1.1" + }, + "dependencies": { + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true + } } }, "trivial-deferred": { @@ -4493,6 +4592,7 @@ "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "dev": true, "requires": { "safe-buffer": "^5.0.1" } @@ -4500,7 +4600,8 @@ "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", + "dev": true }, "type-check": { "version": "0.4.0", @@ -4533,15 +4634,15 @@ "dev": true }, "unbox-primitive": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.0.tgz", - "integrity": "sha512-P/51NX+JXyxK/aigg1/ZgyccdAxm5K1+n8+tvqSntjOivPt19gvm1VC49RWYetsiub8WViUchdxl/KWHHB0kzA==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz", + "integrity": "sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==", "dev": true, "requires": { "function-bind": "^1.1.1", - "has-bigints": "^1.0.0", - "has-symbols": "^1.0.0", - "which-boxed-primitive": "^1.0.1" + "has-bigints": "^1.0.1", + "has-symbols": "^1.0.2", + "which-boxed-primitive": "^1.0.2" } }, "unicode-length": { @@ -4560,6 +4661,12 @@ "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", "dev": true }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true + }, "strip-ansi": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", @@ -4575,8 +4682,17 @@ "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, "requires": { "punycode": "^2.1.0" + }, + "dependencies": { + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true + } } }, "url": { @@ -4587,20 +4703,13 @@ "requires": { "punycode": "1.3.2", "querystring": "0.2.0" - }, - "dependencies": { - "punycode": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=", - "dev": true - } } }, "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==", + "dev": true }, "v8-compile-cache": { "version": "2.3.0", @@ -4622,6 +4731,7 @@ "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "dev": true, "requires": { "assert-plus": "^1.0.0", "core-util-is": "1.0.2", diff --git a/package.json b/package.json index 566e8b6..6d11a9e 100644 --- a/package.json +++ b/package.json @@ -44,12 +44,13 @@ "json-summary" ], "files": [ + "test/integration", "test/unit" ], - "statements": 73, - "branches": 88, - "functions": 59, - "lines": 71 + "statements": 100, + "branches": 100, + "functions": 100, + "lines": 100 }, "husky": { "hooks": { @@ -58,14 +59,13 @@ }, "dependencies": { "@logdna/env-config": "^1.0.5", - "agentkeepalive": "^4.0.2", - "async": "^2.6.2", - "request": "^2.88.0" + "@logdna/logger": "^2.3.0" }, "devDependencies": { "aws-sdk": "^2.590.0", "eslint": "^7.4.0", "eslint-config-logdna": "^4.0.2", + "nock": "^13.0.11", "tap": "^14.11.0" }, "keywords": [ diff --git a/test/integration/index.js b/test/integration/index.js new file mode 100644 index 0000000..7338eae --- /dev/null +++ b/test/integration/index.js @@ -0,0 +1,286 @@ 
+'use strict'
+process.env.INGESTION_KEY = 'abc123'
+
+const os = require('os')
+const nock = require('nock')
+const {test, threw} = require('tap')
+
+const {buildLoggerURL} = require('../../lib/logger.js')
+const config = require('../../lib/config.js')
+const {handler} = require('../../index.js')
+const transformer = require('../../lib/transformer.js')
+const {
+  formatObjectKey
+, getProperty
+, setProperty
+, trimTags
+} = require('../../lib/utils.js')
+
+const responseText = 'This is the ingester response'
+const BUCKET_NAME = 'random-name'
+const FILE_NAME = 'log+File.json'
+const HOSTNAME_REGEX = /[^0-9a-zA-Z\-.]/g
+const LOG_LEVEL = 'INFO'
+const LOG_LINE = 'test log'
+const SAMPLE_HOSTNAME = 'sampleHostname/test'
+const SAMPLE_TAGS = ' ,test,sample ,something'
+const TIMESTAMP = Date.now()
+const EVENT_DATA = {
+  Records: [{
+    eventTime: new Date(TIMESTAMP)
+  , s3: {
+      bucket: {
+        name: BUCKET_NAME
+      }
+    , object: {
+        key: FILE_NAME
+      }
+    }
+  }]
+}
+
+// NOTE: We will NOT test the use of ENV vars directly here since they are cumbersome
+// to work with (they can never be updated). For this, we will trust that env-config
+// will do its job and read from the environment properly, but we will set the
+// config's keys manually in these tests.
+config.validateEnvVars()
+nock.disableNetConnect()
+
+test('test getting, parsing, and sending S3 event', async (t) => {
+  t.test('test fully', async (t) => {
+    t.on('end', async () => {
+      nock.cleanAll()
+    })
+
+    config.set('tags', SAMPLE_TAGS)
+    const input = JSON.stringify({
+      log: LOG_LINE
+    })
+
+    const getObject = transformer.getObject
+    transformer.getObject = async function({
+      Bucket: BUCKET_NAME
+    , Key: FILE_NAME
+    }) {
+      return {
+        Body: input
+      }
+    }
+
+    nock(buildLoggerURL(config))
+      .post('', (body) => {
+        const numProps = Object.keys(body).length
+        t.strictEqual(numProps, 2, 'Number of request body properties')
+        t.match(body, {
+          e: 'ls'
+        , ls: [
+            {
+              app: `${BUCKET_NAME}/${formatObjectKey(FILE_NAME)}`
+            , level: LOG_LEVEL
+            , line: input
+            , meta: {
+                bucket: getProperty(EVENT_DATA, 'Records.0.s3.bucket')
+              , object: getProperty(EVENT_DATA, 'Records.0.s3.object')
+              }
+            , timestamp: Number
+            }
+          ]
+        })
+        t.strictEqual(body.ls.length, 1, 'log line count')
+        return true
+      })
+      .query((qs) => {
+        t.match(qs, {
+          hostname: String
+        , tags: trimTags(SAMPLE_TAGS)
+        , now: /^\d+$/
+        }, 'Querystring properties look correct')
+        return true
+      })
+      .reply(200, responseText)
+
+    t.tearDown(() => {
+      transformer.getObject = getObject
+    })
+
+    await handler(EVENT_DATA, null)
+  })
+
+  t.test('test without tags', async (t) => {
+    t.on('end', async () => {
+      nock.cleanAll()
+    })
+
+    config.set('tags', undefined)
+    const input = JSON.stringify({
+      log: LOG_LINE
+    })
+
+    const getObject = transformer.getObject
+    transformer.getObject = async function({
+      Bucket: BUCKET_NAME
+    , Key: FILE_NAME
+    }) {
+      return {
+        Body: input
+      }
+    }
+
+    nock(buildLoggerURL(config))
+      .post('', (body) => {
+        const numProps = Object.keys(body).length
+        t.strictEqual(numProps, 2, 'Number of request body properties')
+        t.match(body, {
+          e: 'ls'
+        , ls: [
+            {
+              app: `${BUCKET_NAME}/${formatObjectKey(FILE_NAME)}`
+            , level: LOG_LEVEL
+            , line: input
+            , meta: {
+                bucket: getProperty(EVENT_DATA, 'Records.0.s3.bucket')
+              , object: getProperty(EVENT_DATA, 'Records.0.s3.object')
+              }
+            , timestamp: Number
+            }
+          ]
+        })
+        t.strictEqual(body.ls.length, 1, 'log line count')
+        return true
+      })
+      .query((qs) => {
+        t.match(qs, {
+          hostname: String
+        , tags: ''
+        , now: /^\d+$/
+        }, 'Querystring properties look correct')
+        return true
+      })
+      .reply(200, responseText)
+
+    t.tearDown(() => {
+      transformer.getObject = getObject
+    })
+
+    await handler(EVENT_DATA, null)
+  })
+
+  t.test('test without tags and no bucket name', async (t) => {
+    t.on('end', async () => {
+      nock.cleanAll()
+    })
+
+    const input = JSON.stringify({
+      log: LOG_LINE
+    })
+
+    setProperty(EVENT_DATA, 'Records.0.s3.bucket.name', undefined)
+    config.set('hostname', undefined)
+    const getObject = transformer.getObject
+    transformer.getObject = async function({
+      Bucket: BUCKET_NAME
+    , Key: FILE_NAME
+    }) {
+      return {
+        Body: input
+      }
+    }
+
+    nock(buildLoggerURL(config))
+      .post('', (body) => {
+        const numProps = Object.keys(body).length
+        t.strictEqual(numProps, 2, 'Number of request body properties')
+        t.match(body, {
+          e: 'ls'
+        , ls: [
+            {
+              app: formatObjectKey(FILE_NAME)
+            , level: LOG_LEVEL
+            , line: input
+            , meta: {
+                bucket: getProperty(EVENT_DATA, 'Records.0.s3.bucket')
+              , object: getProperty(EVENT_DATA, 'Records.0.s3.object')
+              }
+            , timestamp: Number
+            }
+          ]
+        })
+        t.strictEqual(body.ls.length, 1, 'log line count')
+        return true
+      })
+      .query((qs) => {
+        t.match(qs, {
+          hostname: os.hostname()
+        , tags: ''
+        , now: /^\d+$/
+        }, 'Querystring properties look correct')
+        return true
+      })
+      .reply(200, responseText)
+
+    t.tearDown(() => {
+      transformer.getObject = getObject
+    })
+
+    await handler(EVENT_DATA, null)
+  })
+
+  t.test('test without tags or bucket name but with predefined hostname', async (t) => {
+    t.on('end', async () => {
+      nock.cleanAll()
+    })
+
+    const input = JSON.stringify({
+      log: LOG_LINE
+    })
+
+    config.set('hostname', SAMPLE_HOSTNAME)
+    const getObject = transformer.getObject
+    transformer.getObject = async function({
+      Bucket: BUCKET_NAME
+    , Key: FILE_NAME
+    }) {
+      return {
+        Body: input
+      }
+    }
+
+    nock(buildLoggerURL(config))
+      .post('', (body) => {
+        const numProps = Object.keys(body).length
+        t.strictEqual(numProps, 2, 'Number of request body properties')
+        t.match(body, {
+          e: 'ls'
+        , ls: [
+            {
+              app: formatObjectKey(FILE_NAME)
+            , level: LOG_LEVEL
+            , line: input
+            , meta: {
+                bucket: getProperty(EVENT_DATA, 'Records.0.s3.bucket')
+              , object: getProperty(EVENT_DATA, 'Records.0.s3.object')
+              }
+            , timestamp: Number
+            }
+          ]
+        })
+        t.strictEqual(body.ls.length, 1, 'log line count')
+        return true
+      })
+      .query((qs) => {
+        t.match(qs, {
+          hostname: SAMPLE_HOSTNAME.replace(HOSTNAME_REGEX, '')
+        , tags: ''
+        , now: /^\d+$/
+        }, 'Querystring properties look correct')
+        return true
+      })
+      .reply(200, responseText)
+
+    t.tearDown(() => {
+      transformer.getObject = getObject
+    })
+
+    await handler(EVENT_DATA, null)
+  })
+}).catch(threw)
diff --git a/test/unit/index.js b/test/unit/index.js
new file mode 100644
index 0000000..7f1b0c8
--- /dev/null
+++ b/test/unit/index.js
@@ -0,0 +1,147 @@
+'use strict'
+process.env.INGESTION_KEY = 'abc213'
+
+const {test, threw} = require('tap')
+
+const config = require('../../lib/config.js')
+const {handler} = require('../../index.js')
+const {setProperty} = require('../../lib/utils.js')
+const transformer = require('../../lib/transformer.js')
+
+const BUCKET_NAME = 'random_name'
+const EMPTY_EVENT = {}
+const ERROR_MESSAGE = 'A connection-based error occurred that will not be retried.'
+  + ' See meta data for details.'
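+// NOTE: The message and meta shape asserted below come from the 'error' event
+// that @logdna/logger emits for a non-retryable send failure (here, assumed
+// to be a 403 from the ingester for the bogus key above); events.once(logger,
+// 'cleared') in the handler rejects when that 'error' event fires.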
+const ERROR_STATUS_CODE = 403
+const EVENT_ERROR_MESSAGE = 'Cannot Parse the S3 Event'
+const FILE_NAME = 'log+File.json'
+const LOG_LINE = 'test log'
+const TIMESTAMP = Date.now()
+const EVENT_DATA = {
+  Records: [{
+    eventTime: new Date(TIMESTAMP)
+  , s3: {
+      bucket: {
+        name: BUCKET_NAME
+      }
+    , object: {
+        key: FILE_NAME
+      }
+    }
+  }]
+}
+
+// NOTE: We will NOT test the use of ENV vars directly here since they are cumbersome
+// to work with (they can never be updated). For this, we will trust that env-config
+// will do its job and read from the environment properly, but we will set the
+// config's keys manually in these tests.
+config.validateEnvVars()
+
+test('handler is the main method in lambda functions', async (t) => {
+  t.test('test with empty event', async (t) => {
+    await t.rejects(handler(EMPTY_EVENT, null), {
+      message: EVENT_ERROR_MESSAGE
+    , meta: {
+        event: EMPTY_EVENT
+      }
+    }, 'Expected error is thrown')
+  })
+
+  t.test('test without tags', async (t) => {
+    config.set('hostname', undefined)
+    const input = JSON.stringify({
+      log: LOG_LINE
+    })
+
+    const getObject = transformer.getObject
+    transformer.getObject = async function({
+      Bucket: BUCKET_NAME
+    , Key: FILE_NAME
+    }) {
+      return {
+        Body: input
+      }
+    }
+
+    t.tearDown(() => {
+      transformer.getObject = getObject
+    })
+
+    await t.rejects(handler(EVENT_DATA, null), {
+      message: ERROR_MESSAGE
+    , meta: {
+        code: ERROR_STATUS_CODE
+      , firstLine: input
+      , lastLine: null
+      , retrying: false
+      , attempts: 1
+      }
+    }, 'Expected error is thrown')
+  })
+
+  t.test('test with tags', async (t) => {
+    config.set('hostname', undefined)
+    config.set('tags', `${BUCKET_NAME},, ,${FILE_NAME}`)
+    const input = JSON.stringify({
+      log: LOG_LINE
+    })
+
+    const getObject = transformer.getObject
+    transformer.getObject = async function({
+      Bucket: BUCKET_NAME
+    , Key: FILE_NAME
+    }) {
+      return {
+        Body: input
+      }
+    }
+
+    t.tearDown(() => {
+      transformer.getObject = getObject
+    })
+
+    await t.rejects(handler(EVENT_DATA, null), {
+      message: ERROR_MESSAGE
+    , meta: {
+        code: ERROR_STATUS_CODE
+      , firstLine: input
+      , lastLine: null
+      , retrying: false
+      , attempts: 1
+      }
+    }, 'Expected error is thrown')
+  })
+
+  t.test('test without hostname', async (t) => {
+    setProperty(EVENT_DATA, 'Records.0.s3.bucket.name', undefined)
+    config.set('hostname', undefined)
+    const input = JSON.stringify({
+      log: LOG_LINE
+    })
+
+    const getObject = transformer.getObject
+    transformer.getObject = async function({
+      Bucket: BUCKET_NAME
+    , Key: FILE_NAME
+    }) {
+      return {
+        Body: input
+      }
+    }
+
+    t.tearDown(() => {
+      transformer.getObject = getObject
+    })
+
+    await t.rejects(handler(EVENT_DATA, null), {
+      message: ERROR_MESSAGE
+    , meta: {
+        code: ERROR_STATUS_CODE
+      , firstLine: input
+      , lastLine: null
+      , retrying: false
+      , attempts: 1
+      }
+    }, 'Expected error is thrown')
+  })
+}).catch(threw)
diff --git a/test/unit/lib/event-handler.js b/test/unit/lib/event-handler.js
index e91da95..a3407a4 100644
--- a/test/unit/lib/event-handler.js
+++ b/test/unit/lib/event-handler.js
@@ -55,6 +55,49 @@ test('handleEvent', async (t) => {
     }, 'should pass this event')
   })
 
+  t.test('event with invalid data', async (t) => {
+    const event = {
+      Records: [{
+        awsRegion: AWS_REGION
+      , eventSource: EVENT_SOURCE
+      , eventTime: BUCKET_NAME
+      , s3: {
+          bucket: {
+            arn: BUCKET_ARN
+          , name: BUCKET_NAME
+          , ownerIdentity: {
+              principalId: PRINCIPAL_ID
+            }
+          }
+        , object: {
+            key: FILE_NAME
+          }
+        }
+      , userIdentity: {
+          principalId: PRINCIPAL_ID
+        }
+      }]
+    }
+
+    t.match(handleEvent(event), {
+      file: `${BUCKET_NAME}/${FILE_NAME}`
+    , timestamp: /^[0-9]{13}$/
+    , meta: {
+        bucket: {
+          name: BUCKET_NAME
+        , owner: PRINCIPAL_ID
+        , arn: BUCKET_ARN
+        }
+      , object: {
+          key: FILE_NAME
+        }
+      , region: AWS_REGION
+      , source: EVENT_SOURCE
+      , user: PRINCIPAL_ID
+      }
+    }, 'parses event with an invalid eventTime')
+  })
+
   t.test('full perfect data', async (t) => {
     const event = {
       Records: [{
@@ -80,7 +123,7 @@ test('handleEvent', async (t) => {
     }
 
     t.deepEqual(handleEvent(event), {
-      file: `${BUCKET_NAME}/${decodeURIComponent(FILE_NAME.replace(/\+/g, ' '))}`
+      file: `${BUCKET_NAME}/${FILE_NAME}`
     , timestamp: TIMESTAMP
     , meta: {
         bucket: {
@@ -89,7 +132,7 @@ test('handleEvent', async (t) => {
       , arn: BUCKET_ARN
         }
       , object: {
-          key: decodeURIComponent(FILE_NAME.replace(/\+/g, ' '))
+          key: FILE_NAME
         }
       , region: AWS_REGION
       , source: EVENT_SOURCE
diff --git a/test/unit/lib/logger.js b/test/unit/lib/logger.js
new file mode 100644
index 0000000..970bd08
--- /dev/null
+++ b/test/unit/lib/logger.js
@@ -0,0 +1,50 @@
+'use strict'
+
+const {test} = require('tap')
+const config = require('../../../lib/config.js')
+const {buildLoggerURL} = require('../../../lib/logger.js')
+
+const origMap = [...config.entries()]
+
+function cleanupAfter(t) {
+  t.teardown(() => {
+    config.clear()
+    for (const [key, val] of origMap) {
+      config.set(key, val)
+    }
+  })
+}
+
+test('buildLoggerURL builds the correct ingestion URL', async (t) => {
+  t.test('From config defaults', async (tt) => {
+    const url = buildLoggerURL(config)
+    tt.strictEqual(url, 'https://logs.logdna.com:443/logs/ingest', 'URL value is correct')
+  })
+
+  t.test('Using the newer "ingestion-xxx" env vars', async (tt) => {
+    cleanupAfter(tt)
+    config.set('ingestion-host', 'someserver.com')
+    config.set('ingestion-port', '55500')
+    config.set('ingestion-endpoint', '/our/endpoint')
+    const url = buildLoggerURL(config)
+    tt.strictEqual(
+      url
+    , 'https://someserver.com:55500/our/endpoint'
+    , 'URL value is correct'
+    )
+  })
+
+  t.test('Using ssl: false to turn off https', async (tt) => {
+    cleanupAfter(tt)
+    config.set('ssl', false)
+    config.set('ingestion-host', 'someserver.com')
+    config.set('ingestion-port', '55500')
+    config.set('ingestion-endpoint', '/our/endpoint')
+    const url = buildLoggerURL(config)
+    tt.strictEqual(
+      url
+    , 'http://someserver.com:55500/our/endpoint'
+    , 'URL value is correct'
+    )
+  })
+})
diff --git a/test/unit/lib/transformer.js b/test/unit/lib/transformer.js
index 2256cde..4f29034 100644
--- a/test/unit/lib/transformer.js
+++ b/test/unit/lib/transformer.js
@@ -3,23 +3,25 @@
 const zlib = require('zlib')
 const {test, threw} = require('tap')
 
+const transformer = require('../../../lib/transformer.js')
 const {
   extractData
 , getLogs
 , prepareLogs
-, s3
-} = require('../../../lib/transformer.js')
+} = transformer
 
+/* eslint-disable */
 const INITIAL_GETLOGS_ERROR = 'Both Bucket and Key params must be provided'
-const CORRUPTED_DATA_ERROR = 'Corrupted data returned from'
-const JSON_PARSE_ERROR = 'Error in Parsing the JSON Data from'
+const CORRUPTED_DATA_ERROR = 'Corrupted data returned from the object'
+const JSON_PARSE_ERROR = 'Error in Parsing the JSON Data from the S3 Object'
 const META_EVENT_TIME = new Date(Date.now() - 1000)
 const LOG_EVENT_TIME = new Date()
 const LOG_LINE = 'test log'
-const S3_GETOBJECT_ERROR = 's3.getObject failed to return an object'
+const S3_GETOBJECT_ERROR = 'Error in Getting the S3 Object'
 const SAMPLE_BUCKET = 'sampleBucket'
 const SAMPLE_OBJECT_KEY = 'test'
-const 
ZLIB_GUNZIP_ERROR = 'Error in Unzipping' +const ZLIB_GUNZIP_ERROR = 'Error in Unzipping the S3 Object' +/* eslint-enable */ test('extractData', async (t) => { t.test('no data', async (t) => { @@ -371,40 +373,227 @@ test('extractData', async (t) => { }) }).catch(threw) +test('prepareLogs', async (t) => { + t.test('undefined logs', async (t) => { + t.deepEqual(prepareLogs(null, null), [], 'must return an empty array') + }) + + t.test('non-Array logs', async (t) => { + t.deepEqual(prepareLogs({ + logs: ['logs'] + }, null), [], 'must return an empty array') + }) + + t.test('array logs not having line field', async (t) => { + t.deepEqual(prepareLogs([{ + ts: Date.now() + }, undefined], null), [], 'must return an empty array') + }) + + t.test('array of logs without meta, timestamp, and eventData', async (t) => { + const input = Array.from({ + length: 5 + }, function(_, i) { + return { + line: `${LOG_LINE} ${i}` + } + }) + + const output = input.map(function(item) { + const line = item.line + const opts = { + timestamp: /^[0-9]{13}$/ + , meta: {} + } + + return {line, opts} + }) + + t.match(prepareLogs(input, null), output, 'must return an array with no meta') + }) + + t.test('array of logs without timestamp and eventData', async (t) => { + const input = Array.from({ + length: 5 + }, function(_, i) { + return { + line: `${LOG_LINE} ${i}` + , meta: { + sampleField: 'sample' + } + } + }) + + const output = input.map(function(item) { + const line = item.line + const opts = { + timestamp: /^[0-9]{13}$/ + , meta: item.meta + } + + return {line, opts} + }) + + t.match(prepareLogs(input, null), output, 'must return an array with meta') + }) + + t.test('array of logs without eventData', async (t) => { + const input = Array.from({ + length: 5 + }, function(_, i) { + return { + line: `${LOG_LINE} ${i}` + , meta: { + sampleField: 'sample' + } + , timestamp: (new Date(LOG_EVENT_TIME)).getTime() + } + }) + + const output = input.map(function(item) { + const line = item.line + const opts = { + app: undefined + , meta: item.meta + , timestamp: item.timestamp + } + + return {line, opts} + }) + + t.deepEqual(prepareLogs(input, null), output + , 'must return an array with meta and timestamp') + }) + + t.test('array of logs with full eventData', async (t) => { + const input = Array.from({ + length: 5 + }, function(_, i) { + return { + line: `${LOG_LINE} ${i}` + , meta: { + sampleField: 'sample' + } + } + }) + + const eventData = { + file: SAMPLE_OBJECT_KEY + , meta: { + eventField: 'sampleEvent' + } + , timestamp: (new Date(META_EVENT_TIME)).getTime() + } + + const output = input.map(function(item) { + const line = item.line + const opts = { + app: eventData.file + , timestamp: eventData.timestamp + , meta: { + ...item.meta + , ...eventData.meta + } + } + + return {line, opts} + }) + + t.deepEqual(prepareLogs(input, eventData), output + , 'must return an array with full data') + }) + + t.test('array of logs with no timestamp', async (t) => { + const input = Array.from({ + length: 5 + }, function(_, i) { + return { + line: `${LOG_LINE} ${i}` + , meta: { + sampleField: 'sample' + } + } + }) + + const eventData = { + file: SAMPLE_OBJECT_KEY + , meta: { + eventField: 'sampleEvent' + } + } + + const output = input.map(function(item) { + const line = item.line + const opts = { + app: eventData.file + , timestamp: /^[0-9]{13}$/ + , meta: { + ...item.meta + , ...eventData.meta + } + } + + return {line, opts} + }) + + t.match(prepareLogs(input, eventData), output + , 'must return an array with default timestamp') + }) 
+}).catch(threw)
+
 test('getLogs', async (t) => {
   t.test('undefined params', async (t) => {
-    getLogs(undefined, (error, data) => {
-      t.strictEqual(data, undefined, 'no success')
-      t.strictEqual(error, INITIAL_GETLOGS_ERROR, 'initial error')
-    })
+    try {
+      await getLogs(undefined)
+      t.fail('getLogs should have rejected')
+    } catch (error) {
+      t.strictEqual(error.message, INITIAL_GETLOGS_ERROR
+        , 'error message should be strictly equal')
+      t.deepEqual(error.meta, {params: undefined}, 'error meta should be deeply equal')
+    }
   })
 
+  t.test('null params', async (t) => {
+    try {
+      await getLogs(null)
+      t.fail('getLogs should have rejected')
+    } catch (error) {
+      t.strictEqual(error.message, INITIAL_GETLOGS_ERROR
+        , 'error message should be strictly equal')
+      t.deepEqual(error.meta, {params: null}, 'error meta should be deeply equal')
+    }
+  })
+
   t.test('empty params', async (t) => {
-    getLogs({}, (error, data) => {
-      t.strictEqual(data, undefined, 'no success')
-      t.strictEqual(error, INITIAL_GETLOGS_ERROR, 'initial error')
-    })
+    try {
+      await getLogs({})
+      t.fail('getLogs should have rejected')
+    } catch (error) {
+      t.strictEqual(error.message, INITIAL_GETLOGS_ERROR
+        , 'error message should be strictly equal')
+      t.deepEqual(error.meta, {params: {}}, 'error meta should be deeply equal')
+    }
   })
 
   t.test('params having just Key', async (t) => {
-    getLogs({Key: SAMPLE_OBJECT_KEY}, (error, data) => {
-      t.strictEqual(data, undefined, 'no success')
-      t.strictEqual(error, INITIAL_GETLOGS_ERROR, 'initial error')
-    })
+    const params = {Key: SAMPLE_OBJECT_KEY}
+    try {
+      await getLogs(params)
+      t.fail('getLogs should have rejected')
+    } catch (error) {
+      t.strictEqual(error.message, INITIAL_GETLOGS_ERROR
+        , 'error message should be strictly equal')
+      t.deepEqual(error.meta, {params}, 'error meta should be deeply equal')
+    }
   })
 
   t.test('params having just Bucket', async (t) => {
-    getLogs({Bucket: SAMPLE_BUCKET}, (error, data) => {
-      t.strictEqual(data, undefined, 'no success')
-      t.strictEqual(error, INITIAL_GETLOGS_ERROR, 'initial error')
-    })
+    const params = {Bucket: SAMPLE_BUCKET}
+    try {
+      await getLogs(params)
+      t.fail('getLogs should have rejected')
+    } catch (error) {
+      t.strictEqual(error.message, INITIAL_GETLOGS_ERROR
+        , 'error message should be strictly equal')
+      t.deepEqual(error.meta, {params}, 'error meta should be deeply equal')
+    }
   })
 
   t.test('where s3 returns an error', async (t) => {
@@ -413,19 +602,22 @@ test('getLogs', async (t) => {
     const params = {
       Bucket: SAMPLE_BUCKET
     , Key: SAMPLE_OBJECT_KEY
     }
 
-    const getObject = s3.getObject
-    s3.getObject = function(params) {
+    const getObject = transformer.getObject
+    transformer.getObject = async function(params) {
       throw Error(S3_GETOBJECT_ERROR)
     }
 
     t.tearDown(() => {
-      s3.getObject = getObject
+      transformer.getObject = getObject
    })
 
-    getLogs(params, (error, data) => {
-      t.strictEqual(data, undefined, 'no success')
-      t.strictEqual(error.split(': ')[2], S3_GETOBJECT_ERROR, 's3.getObject errors out')
-    })
+    await t.rejects(getLogs(params), {
+      message: S3_GETOBJECT_ERROR
+      , meta: {
+        error: new Error(S3_GETOBJECT_ERROR)
+        , params
+      }
+    }, 'Expected error is thrown')
   })
 
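+  // The getObject stubs in these cases replace what is assumed to be the
+  // seam around the AWS SDK call (roughly `s3.getObject(params).promise()`),
+  // so getLogs() can be exercised without S3. A sketch of the assumed flow
+  // in lib/transformer.js:
+  //
+  //   const data = await transformer.getObject(params)
+  //   let body = data.Body                    // missing -> CORRUPTED_DATA_ERROR
+  //   if (gz) body = zlib.gunzipSync(body)    // throws  -> ZLIB_GUNZIP_ERROR
+  //   if (json) JSON.parse(body.toString())   // throws  -> JSON_PARSE_ERROR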
   t.test('where s3 returns an undefined data', async (t) => {
@@ -434,64 +626,61 @@ test('getLogs', async (t) => {
     const params = {
       Bucket: SAMPLE_BUCKET
     , Key: SAMPLE_OBJECT_KEY
     }
 
-    const getObject = s3.getObject
-    s3.getObject = function(params) {
+    const getObject = transformer.getObject
+    transformer.getObject = async function(params) {
       return undefined
     }
 
     t.tearDown(() => {
-      s3.getObject = getObject
+      transformer.getObject = getObject
     })
 
-    getLogs(params, (error, data) => {
-      t.strictEqual(data, undefined, 'no success')
-      t.strictEqual(error, `${CORRUPTED_DATA_ERROR} ${SAMPLE_BUCKET}/${SAMPLE_OBJECT_KEY}`
-        , 's3.getObject result errors out')
-    })
+    await t.rejects(getLogs(params), {
+      message: CORRUPTED_DATA_ERROR
+      , meta: {params}
+    }, 'Expected error is thrown')
   })
 
-  t.test('where s3 returns a null data', async (t) => {
+  t.test('where s3 returns null data', async (t) => {
     const params = {
       Bucket: SAMPLE_BUCKET
     , Key: SAMPLE_OBJECT_KEY
     }
 
-    const getObject = s3.getObject
-    s3.getObject = function(params) {
+    const getObject = transformer.getObject
+    transformer.getObject = async function(params) {
      return null
    }
 
     t.tearDown(() => {
-      s3.getObject = getObject
+      transformer.getObject = getObject
     })
 
-    getLogs(params, (error, data) => {
-      t.strictEqual(data, undefined, 'no success')
-      t.strictEqual(error, `${CORRUPTED_DATA_ERROR} ${SAMPLE_BUCKET}/${SAMPLE_OBJECT_KEY}`
-        , 's3.getObject result errors out')
-    })
+    await t.rejects(getLogs(params), {
+      message: CORRUPTED_DATA_ERROR
+      , meta: {params}
+    }, 'Expected error is thrown')
   })
 
-  t.test('where s3 returns an empty data', async (t) => {
+  t.test('where s3 returns empty data', async (t) => {
     const params = {
       Bucket: SAMPLE_BUCKET
     , Key: SAMPLE_OBJECT_KEY
     }
 
-    const getObject = s3.getObject
-    s3.getObject = function(params) {
+    const getObject = transformer.getObject
+    transformer.getObject = async function(params) {
       return {}
     }
 
     t.tearDown(() => {
-      s3.getObject = getObject
+      transformer.getObject = getObject
     })
 
-    getLogs(params, (error, data) => {
-      t.strictEqual(data, undefined, 'no success')
-      t.strictEqual(error, `${CORRUPTED_DATA_ERROR} ${SAMPLE_BUCKET}/${SAMPLE_OBJECT_KEY}`
-        , 's3.getObject result errors out')
-    })
+    await t.rejects(getLogs(params), {
+      message: CORRUPTED_DATA_ERROR
+      , meta: {params}
+    }, 'Expected error is thrown')
   })
 
   t.test('where data is unzippable', async (t) => {
@@ -500,52 +689,54 @@ test('getLogs', async (t) => {
     const params = {
      Bucket: SAMPLE_BUCKET
    , Key: `${SAMPLE_OBJECT_KEY}.gz`
     }
 
-    const getObject = s3.getObject
-    s3.getObject = function(params) {
+    const type = {
+      json: false
+    , gz: true
+    }
+
+    const getObject = transformer.getObject
+    transformer.getObject = async function(params) {
       return {
         Body: LOG_LINE
       }
     }
 
     t.tearDown(() => {
-      s3.getObject = getObject
+      transformer.getObject = getObject
     })
 
-    getLogs(params, (error, data) => {
-      t.strictEqual(data, undefined, 'no success')
-      t.strictEqual(error.split(': ')[0]
-        , `${ZLIB_GUNZIP_ERROR} ${SAMPLE_BUCKET}/${params.Key}`
-        , 'zlib.gunzipSync errors out')
-    })
+    await t.rejects(getLogs(params), {
+      message: ZLIB_GUNZIP_ERROR
+      , meta: {params, type}
+    }, 'Expected error is thrown')
   })
 
+  t.test('where data is zippable', async (t) => {
     const params = {
       Bucket: SAMPLE_BUCKET
     , Key: `${SAMPLE_OBJECT_KEY}.gz`
     }
 
-    const getObject = s3.getObject
-    s3.getObject = function(params) {
+    const getObject = transformer.getObject
+    transformer.getObject = async function(params) {
       return {
         Body: zlib.gzipSync(Buffer.from(LOG_LINE))
       }
     }
 
     t.tearDown(() => {
-      s3.getObject = getObject
+      transformer.getObject = getObject
     })
 
-    getLogs(params, (error, data) => {
-      t.match(data, [{
-        line: LOG_LINE
-        , timestamp: /^[0-9]{13}$/
-      }], 'first success')
-      t.strictEqual(error, null, 'zlib.gunzipSync is clear')
-    })
+    const data = await getLogs(params)
+    t.match(data, [{
+      line: LOG_LINE
+      , timestamp: /^[0-9]{13}$/
+    }], 'first success')
   })
 
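+  // Assumption: the `type` fixtures mirror what checkFileFormat() derives
+  // from the object key, e.g. 'test.json.gz' -> {json: true, gz: true}, and
+  // getLogs() attaches that to the error meta on unzip/parse failures (see
+  // the `meta: {params, type}` assertions above and below).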
-  t.test('where data is zippable but corrupted json', async (t) => {
+  t.test('where data is valid zippable json', async (t) => {
     const params = {
       Bucket: SAMPLE_BUCKET
     , Key: `${SAMPLE_OBJECT_KEY}.json.gz`
@@ -555,53 +746,54 @@ test('getLogs', async (t) => {
     log: LOG_LINE
   })
 
-    const getObject = s3.getObject
-    s3.getObject = function(params) {
+    const getObject = transformer.getObject
+    transformer.getObject = async function(params) {
       return {
-        Body: zlib.gzipSync(Buffer.from(input + ' noise'))
+        Body: zlib.gzipSync(Buffer.from(input))
       }
     }
 
     t.tearDown(() => {
-      s3.getObject = getObject
+      transformer.getObject = getObject
     })
 
-    getLogs(params, (error, data) => {
-      t.strictEqual(data, undefined, 'no success')
-      t.strictEqual(error.split(': ')[0]
-        , `${JSON_PARSE_ERROR} ${SAMPLE_BUCKET}/${params.Key}`
-        , 'JSON.parse errors out')
-    })
+    const data = await getLogs(params)
+    t.match(data, [{
+      line: input
+      , timestamp: /^[0-9]{13}$/
+    }], 'Zipped JSON success')
   })
 
-  t.test('where data is valid zippable json', async (t) => {
+  t.test('where data is zippable but corrupted json', async (t) => {
     const params = {
       Bucket: SAMPLE_BUCKET
     , Key: `${SAMPLE_OBJECT_KEY}.json.gz`
     }
 
+    const type = {
+      json: true
+    , gz: true
+    }
+
     const input = JSON.stringify({
       log: LOG_LINE
     })
 
-    const getObject = s3.getObject
-    s3.getObject = function(params) {
+    const getObject = transformer.getObject
+    transformer.getObject = async function(params) {
       return {
-        Body: zlib.gzipSync(Buffer.from(input))
+        Body: zlib.gzipSync(Buffer.from(input + ' noise'))
      }
     }
 
     t.tearDown(() => {
-      s3.getObject = getObject
+      transformer.getObject = getObject
     })
 
-    getLogs(params, (error, data) => {
-      t.match(data, [{
-        line: input
-        , timestamp: /^[0-9]{13}$/
-      }], 'Zipped JSON success')
-      t.strictEqual(error, null, 'JSON.parse is clear')
-    })
+    await t.rejects(getLogs(params), {
+      message: JSON_PARSE_ERROR
+      , meta: {params, type}
+    }, 'Expected error is thrown')
   })
 
   t.test('where data is valid json', async (t) => {
@@ -614,172 +806,21 @@ test('getLogs', async (t) => {
     log: LOG_LINE
   })
 
-    const getObject = s3.getObject
-    s3.getObject = function(params) {
+    const getObject = transformer.getObject
+    transformer.getObject = async function(params) {
       return {
         Body: input
       }
     }
 
     t.tearDown(() => {
-      s3.getObject = getObject
-    })
-
-    getLogs(params, (error, data) => {
-      t.match(data, [{
-        line: input
-        , timestamp: /^[0-9]{13}$/
-      }], 'JSON success')
-      t.strictEqual(error, null, 'JSON.parse is clear')
-    })
-  })
-}).catch(threw)
-
-test('prepareLogs', async (t) => {
-  t.test('undefined logs', async (t) => {
-    t.strictEqual(prepareLogs(null, null), undefined, 'must return undefined')
-  })
-
-  t.test('non-Array logs', async (t) => {
-    t.strictEqual(prepareLogs({
-      logs: ['logs']
-    }, null), undefined, 'must return undefined')
-  })
-
-  t.test('array logs not having line field', async (t) => {
-    t.deepEqual(prepareLogs([{
-      ts: Date.now()
-    }, undefined], null), [], 'must return an empty array')
-  })
-
-  t.test('array of logs without meta, timestamp, and eventData', async (t) => {
-    const input = Array.from({
-      length: 5
-    }, function(_, i) {
-      return {
-        line: `${LOG_LINE} ${i}`
-      }
-    })
-
-    const output = input.map(function(item) {
-      item.timestamp = /^[0-9]{13}$/
-      item.meta = {}
-      return item
-    })
-
-    t.match(prepareLogs(input, null), output, 'must return an array with no meta')
-  })
-
-  t.test('array of logs without timestamp and eventData', async (t) => {
-    const input = Array.from({
-      length: 5
-    }, function(_, i) {
-      return {
-        line: `${LOG_LINE} ${i}`
-        , meta: {
-          sampleField: 'sample'
-        }
-      }
-    })
-
-    const output = input.map(function(item) {
-      item.timestamp = /^[0-9]{13}$/
-      return item
-    })
-
-    t.match(prepareLogs(input, null), output, 'must return an array with meta')
-  })
-
-  t.test('array of logs without eventData', async (t) => {
-    const input = Array.from({
-      length: 5
-    }, function(_, i) {
-      return {
-        line: `${LOG_LINE} ${i}`
-        , meta: {
-          sampleField: 'sample'
-        }
-        , timestamp: (new Date(LOG_EVENT_TIME)).getTime()
-      }
-    })
-
-    const output = input.map(function(item) {
-      item.file = undefined
-      return item
-    })
-
-    t.deepEqual(prepareLogs(input, null), output
-      , 'must return an array with meta and timestamp')
-  })
-
-  t.test('array of logs with full eventData', async (t) => {
-    const input = Array.from({
-      length: 5
-    }, function(_, i) {
-      return {
-        line: `${LOG_LINE} ${i}`
-        , meta: {
-          sampleField: 'sample'
-        }
-      }
-    })
-
-    const eventData = {
-      file: SAMPLE_OBJECT_KEY
-      , meta: {
-        eventField: 'sampleEvent'
-      }
-      , timestamp: (new Date(META_EVENT_TIME)).getTime()
-    }
-
-    const output = input.map(function(item) {
-      const line = {...item}
-      line.timestamp = eventData.timestamp
-      line.file = eventData.file
-      line.meta = {
-        ...item.meta
-        , ...eventData.meta
-      }
-
-      return line
-    })
-
-    t.deepEqual(prepareLogs(input, eventData), output
-      , 'must return an array with full data')
-  })
-
-  t.test('array of logs with no timestamp', async (t) => {
-    const input = Array.from({
-      length: 5
-    }, function(_, i) {
-      return {
-        line: `${LOG_LINE} ${i}`
-        , meta: {
-          sampleField: 'sample'
-        }
-      }
+      transformer.getObject = getObject
     })
 
-    const eventData = {
-      file: SAMPLE_OBJECT_KEY
-      , meta: {
-        eventField: 'sampleEvent'
-      }
-    }
-
-    const output = input.map(function(item) {
-      const line = {...item}
-      line.timestamp = /^[0-9]{13}$/
-      line.file = eventData.file
-      line.meta = {
-        ...item.meta
-        , ...eventData.meta
-      }
-
-      return line
-    })
-
-    t.match(prepareLogs(input, eventData), output
-      , 'must return an array with default timestamp')
+    const data = await getLogs(params)
+    t.match(data, [{
+      line: input
+      , timestamp: /^[0-9]{13}$/
+    }], 'JSON success')
   })
 }).catch(threw)
 
diff --git a/test/unit/lib/utils.js b/test/unit/lib/utils.js
index bedacf5..b132561 100644
--- a/test/unit/lib/utils.js
+++ b/test/unit/lib/utils.js
@@ -2,34 +2,12 @@
 const {test, threw} = require('tap')
 
 const {
-  batchify
-, checkFileFormat
+  checkFileFormat
 , getProperty
 , hasProperty
 , setProperty
 } = require('../../../lib/utils.js')
 
-test('batchify', async (t) => {
-  t.test('chunks empty array', async (t) => {
-    const input = []
-    const result = batchify([], 2)
-    t.deepEqual(input, [], 'empty array')
-    t.notStrictEqual(input, result, 'new array created')
-  })
-
-  t.test('chunks input array', async (t) => {
-    t.deepEqual(batchify([1, 2, 3], 1), [[1], [2], [3]], 'single element chunks')
-    t.deepEqual(
-      batchify([1, 2, 3, 4, 5], 2)
-      , [[1, 2], [3, 4], [5]]
-      , 'remainder in separate chunk')
-    t.deepEqual(
-      batchify([{a: 'b'}, {c: 'd'}, {e: 'f'}], 2)
-      , [[{a: 'b'}, {c: 'd'}], [{e: 'f'}]]
-      , 'chunks an array of objects')
-  })
-}).catch(threw)
-
 test('checkFileFormat', async (t) => {
   t.deepEqual(checkFileFormat('sampleData.json.gz'), {
     json: true