refactor(logger): introduce @logdna/logger as a logger
Replace the current batching and flushing mechanism with @logdna/logger, which buffers lines and flushes them to LogDNA internally.

Ref: #2
Semver: minor
Samir Musali committed Apr 23, 2021
1 parent b86435a commit ea6caac
Showing 18 changed files with 1,176 additions and 631 deletions.
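For context, the new flow this commit adopts (visible in the index.js diff below) hands each prepared line to an @logdna/logger instance and waits for the logger's `cleared` event before the Lambda returns, instead of batching and flushing by hand. Here is a minimal sketch of that pattern using @logdna/logger's `createLogger` directly; the ingestion key variable and the option values are placeholders, and the repository itself wraps this in `createLoggerClient` from `lib/logger.js`:

```js
'use strict'

const {once} = require('events')
const {createLogger} = require('@logdna/logger')

async function shipLines(lines) {
  // Placeholder key and options; the Lambda derives these from its env-var config
  const logger = createLogger(process.env.LOGDNA_INGESTION_KEY, {
    hostname: 'my-s3-bucket'
  , tags: ['s3', 'lambda']
  })

  logger.on('error', console.error) // lines that could not be sent
  logger.on('warn', console.warn)   // recoverable problems, e.g. retried sends

  for (const {line, opts} of lines) {
    logger.log(line, opts) // buffered; the logger batches and flushes internally
  }

  // 'cleared' fires once all buffered lines have been flushed to LogDNA
  await once(logger, 'cleared')
}

module.exports = {shipLines}
```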
2 changes: 1 addition & 1 deletion .circleci/config.yml
@@ -36,7 +36,7 @@ jobs:
name: Build S3 Lambda
command: |
npm ci --production
zip logdna-s3.zip -r node_modules/ index.js package-lock.json lib/*.js
zip logdna-s3.zip -r node_modules/ index.js package.json lib/*.js
- persist_to_workspace:
root: .
paths:
2 changes: 1 addition & 1 deletion LICENSE
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2021 LogDNA, Inc.
Copyright (c) 2019 LogDNA, Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
88 changes: 9 additions & 79 deletions doc/env.md
@@ -1,47 +1,33 @@
## Environment Variables

### `BATCH_INTERVAL`
### `FLUSH_INTERVAL`

> The number of milliseconds between sending each batch
> The number of milliseconds to wait between sending payloads to LogDNA
| Config | Value |
| --- | --- |
| Name | `batch-interval` |
| Environment Variable | `BATCH_INTERVAL` |
| Name | `flush-interval` |
| Environment Variable | `FLUSH_INTERVAL` |
| Type | `number` |
| Required | no |
| Default | `50` |
| Default | `1000` |

***

### `BATCH_LIMIT`
### `FLUSH_LIMIT`

> The number of lines within each batch
> If the length of the send buffer exceeds this length, send immediately
| Config | Value |
| --- | --- |
| Name | `batch-limit` |
| Environment Variable | `BATCH_LIMIT` |
| Name | `flush-limit` |
| Environment Variable | `FLUSH_LIMIT` |
| Type | `number` |
| Required | no |
| Default | `25` |

***

### `FREE_SOCKET_TIMEOUT`

> The number of milliseconds to wait for inactivity before timing out
| Config | Value |
| --- | --- |
| Name | `free-socket-timeout` |
| Environment Variable | `FREE_SOCKET_TIMEOUT` |
| Type | `number` |
| Required | no |
| Default | `300000` |

***

### `HOSTNAME`

> Optionally, use alternative host name set through the environment
@@ -140,34 +126,6 @@

***

### `MAX_REQUEST_RETRIES`

> Maximum number of retries for sending each batch
| Config | Value |
| --- | --- |
| Name | `max-request-retries` |
| Environment Variable | `MAX_REQUEST_RETRIES` |
| Type | `number` |
| Required | no |
| Default | `5` |

***

### `MAX_REQUEST_TIMEOUT`

> Maximum request timeout in sending each batch
| Config | Value |
| --- | --- |
| Name | `max-request-timeout` |
| Environment Variable | `MAX_REQUEST_TIMEOUT` |
| Type | `number` |
| Required | no |
| Default | `300` |

***

### `PROXY`

> A full proxy URL (including protocol) to pass through before going to LogDNA
@@ -182,20 +140,6 @@

***

### `REQUEST_RETRY_INTERVAL`

> The number of milliseconds between each retry
| Config | Value |
| --- | --- |
| Name | `request-retry-interval` |
| Environment Variable | `REQUEST_RETRY_INTERVAL` |
| Type | `number` |
| Required | no |
| Default | `100` |

***

### `SSL`

> Use https:// for log ingestion
@@ -224,20 +168,6 @@

***

### `URL`

> *Combination of SSL, INGESTION_HOST, INGESTION_PORT, and INGESTION_ENDPOINT*
| Config | Value |
| --- | --- |
| Name | `url` |
| Environment Variable | `URL` |
| Type | `string` |
| Required | no |
| Default | `https://logs.logdna.com/logs/ingest` |

***

### `USER_AGENT`

> user-agent header value to use while sending logs
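The renamed `FLUSH_INTERVAL` and `FLUSH_LIMIT` settings presumably feed the @logdna/logger client that `lib/logger.js` now builds. That file is not loaded in this view, so the sketch of `createLoggerClient` below is an assumption; in particular, the `ingestion-key` config name and the mapping onto the client's `flushIntervalMs`/`flushLimit` options should be verified against the real code and the @logdna/logger documentation:

```js
'use strict'

const {createLogger} = require('@logdna/logger')

// Hedged sketch only: the ingestion-key config name and the option names below
// are assumptions, not code taken from this commit
function createLoggerClient(config) {
  const protocol = config.get('ssl') ? 'https' : 'http'
  const host = config.get('ingestion-host')
  const port = config.get('ingestion-port')
  const endpoint = config.get('ingestion-endpoint')

  return createLogger(config.get('ingestion-key'), {
    url: `${protocol}://${host}:${port}${endpoint}`
  , hostname: config.get('hostname')
  , tags: config.get('tags')
  , flushIntervalMs: config.get('flush-interval') // FLUSH_INTERVAL, default 1000
  , flushLimit: config.get('flush-limit')         // FLUSH_LIMIT, default 25
  })
}

module.exports = {createLoggerClient}
```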
54 changes: 29 additions & 25 deletions index.js
@@ -1,50 +1,54 @@
'use strict'

const async = require('async')
const {once} = require('events')

const config = require('./lib/config.js')
const {handleEvent} = require('./lib/event-handler.js')
const {flush} = require('./lib/logger.js')
const {createLoggerClient} = require('./lib/logger.js')
const {getLogs, prepareLogs} = require('./lib/transformer.js')
const {batchify, getProperty} = require('./lib/utils.js')
const {getProperty, trimTags} = require('./lib/utils.js')

const DOT_REGEXP = /\./g
const HOSTNAME_REGEX = /[^0-9a-zA-Z\-.]/g

module.exports = {
handler
}

async function handler(event, context, callback) {
async function handler(event, context) {
config.validateEnvVars()
const tags = config.get('tags')
if (tags) {
config.set('tags', tags.split(',').map((tag) => {
return tag.trim()
}).join(','))
const eventData = handleEvent(event)
if (!eventData) {
const error = new Error('Cannot Parse the S3 Event')
error.meta = {event}
throw error
}

const eventData = handleEvent(event)
const s3params = {
Bucket: getProperty(eventData, 'meta.bucket.name')
, Key: getProperty(eventData, 'meta.object.key')
}

let lines
try {
lines = getLogs(s3params)
} catch (e) {
return callback(e)
const tags = config.get('tags')
if (tags) {
config.set('tags', trimTags(tags))
}

const hostname = config.get('hostname') || s3params.Bucket
if (hostname) {
config.set('hostname', hostname.replace(HOSTNAME_REGEX, ''))
}

const logArrays = prepareLogs(lines, eventData)
const batches = batchify(logArrays, config.get('batch-limit'))
if (!config.get('hostname')) {
config.set('hostname', s3params.Bucket.replace(DOT_REGEXP, '_'))
const logger = createLoggerClient(config)
const lines = await getLogs(s3params)
logger.on('error', console.error)
logger.on('warn', console.warn)
const logs = prepareLogs(lines, eventData)
for (const log of logs) {
const {line, opts} = log
logger.log(line, opts)
}

async.everySeries(batches, (batch, next) => {
setTimeout(() => {
return flush(batch, config, next)
}, config.get('batch-interval'))
}, callback)
// Ensure logs have been flushed to LogDNA before finishing
await once(logger, 'cleared')
return
}
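For reference, a minimal local invocation of the reworked handler. The bucket name, object key, and eventTime are placeholders, and the call assumes the required LogDNA and AWS credentials and env vars are already exported, since the handler reads the object from S3 and ships its lines to LogDNA:

```js
'use strict'

const {handler} = require('./index.js')

// Hypothetical event shaped the way lib/event-handler.js (below) expects
const event = {
  Records: [{
    eventTime: '2021-04-23T00:00:00.000Z'
  , s3: {
      bucket: {name: 'example-bucket', arn: 'arn:aws:s3:::example-bucket'}
    , object: {key: 'logs/app.log', size: 1024}
    }
  }]
}

handler(event, {}) // the Lambda context is not used in the code shown above
  .then(() => console.log('all lines flushed to LogDNA'))
  .catch(console.error)
```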
30 changes: 5 additions & 25 deletions lib/config.js
@@ -6,17 +6,13 @@ const pkg = require('../package.json')

const config = new Config([
Config
.number('batch-interval')
.default(50)
.desc('The number of milliseconds between sending each batch')
.number('flush-interval')
.default(1000)
.desc('The number of milliseconds to wait between sending payloads to LogDNA')
, Config
.number('batch-limit')
.number('flush-limit')
.default(25)
.desc('The number of lines within each batch')
, Config
.number('free-socket-timeout')
.default(300000)
.desc('The number of milliseconds to wait for inactivity before timing out')
.desc('If the length of the send buffer exceeds this length, send immediately')
, Config
.string('hostname')
.desc('Optionally, use alternative host name set through the environment')
@@ -42,32 +38,16 @@ const config = new Config([
.number('ingestion-port')
.default(443)
.desc('The port for log ingestion')
, Config
.number('max-request-retries')
.default(5)
.desc('Maximum number of retries for sending each batch')
, Config
.number('max-request-timeout')
.default(300)
.desc('Maximum request timeout in sending each batch')
, Config
.string('proxy')
.desc('A full proxy URL (including protocol) to pass through before going to LogDNA')
, Config
.number('request-retry-interval')
.default(100)
.desc('The number of milliseconds between each retry')
, Config
.boolean('ssl')
.default(true)
.desc('Use https:// for log ingestion')
, Config
.string('tags')
.desc('Optionally, use comma-separated tags set through the environment')
, Config
.string('url')
.default('https://logs.logdna.com/logs/ingest')
.desc('*Combination of SSL, INGESTION_HOST, INGESTION_PORT, and INGESTION_ENDPOINT*')
, Config
.string('user-agent')
.default(`${pkg.name}/${pkg.version}`)
16 changes: 0 additions & 16 deletions lib/constants.js

This file was deleted.

25 changes: 11 additions & 14 deletions lib/event-handler.js
@@ -1,27 +1,20 @@
'use strict'

const {
getProperty
, setProperty
} = require('./utils.js')
const {getProperty} = require('./utils.js')

module.exports = {
handleEvent
}

function handleEvent(event) {
let record = getProperty(event, 'Records.0')
const record = getProperty(event, 'Records.0')
if (!record) return undefined

let key = getProperty(record, 's3.object.key')
if (key) {
key = key.replace(/\+/g, ' ')
record = setProperty(record, 's3.object.key', decodeURIComponent(key))
}

const key = getProperty(record, 's3.object.key')
let timestamp = Date.now()
if (record.eventTime) {
timestamp = (new Date(record.eventTime)).getTime()
const eventTimestamp = (new Date(record.eventTime)).getTime()
if (!isNaN(eventTimestamp)) timestamp = eventTimestamp
}

const object = getProperty(record, 's3.object')
@@ -31,9 +24,13 @@ function handleEvent(event) {
, arn: getProperty(record, 's3.bucket.arn')
}

let file = bucket.name
let file = ''
if (bucket.name) {
file = `${file}${bucket.name}/`
}

if (key) {
file = `${file}/${key}`
file = `${file}${key}`
}

return {
(Diffs for the remaining changed files are not loaded in this view.)
