Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix(aws-lambda): remove AWS Lambda API latency from kong latency #12835

Merged
merged 5 commits into from
Apr 25, 2024
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions changelog/unreleased/kong/fix-aws-lambda-kong-latency.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
message: "**AWS-Lambda**: fixed an issue where the latency of AWS Lambda API requests was counted as part of the latency in Kong"
type: bugfix
scope: Plugin
29 changes: 20 additions & 9 deletions kong/plugins/aws-lambda/handler.lua
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,9 @@ local AWSLambdaHandler = {


function AWSLambdaHandler:access(conf)
-- TRACING: set KONG_WAITING_TIME start
local kong_wait_time_start = get_now()

if initialize then
initialize()
end
Expand Down Expand Up @@ -164,9 +167,6 @@ function AWSLambdaHandler:access(conf)

local upstream_body_json = build_request_payload(conf)

-- TRACING: set KONG_WAITING_TIME start
local kong_wait_time_start = get_now()

local res, err = lambda_service:invoke({
FunctionName = conf.function_name,
InvocationType = conf.invocation_type,
Expand All @@ -175,6 +175,14 @@ function AWSLambdaHandler:access(conf)
Qualifier = conf.qualifier,
})

-- TRACING: set KONG_WAITING_TIME stop
local ctx = ngx.ctx
local lambda_wait_time_total = get_now() - kong_wait_time_start
-- setting the latency here is a bit tricky, but because we are not
-- actually proxying, it will not be overwritten
ctx.KONG_WAITING_TIME = lambda_wait_time_total
ctx.AWS_LAMBDA_WAIT_TIME = lambda_wait_time_total
windmgc marked this conversation as resolved.
Show resolved Hide resolved

if err then
return error(err)
end
Expand All @@ -184,12 +192,6 @@ function AWSLambdaHandler:access(conf)
return error(content.Message)
end

-- TRACING: set KONG_WAITING_TIME stop
local ctx = ngx.ctx
-- setting the latency here is a bit tricky, but because we are not
-- actually proxying, it will not be overwritten
ctx.KONG_WAITING_TIME = get_now() - kong_wait_time_start

local headers = res.headers

-- Remove Content-Length header returned by Lambda service,
Expand Down Expand Up @@ -242,4 +244,13 @@ function AWSLambdaHandler:access(conf)
end


function AWSLambdaHandler:header_filter(conf)
  -- TRACING: remove the latency of the AWS Lambda API request from
  -- KONG_RESPONSE_LATENCY, so that the "kong" latency reported to logging
  -- plugins does not include the time spent waiting on the Lambda service.
  local ctx = ngx.ctx
  if ctx.KONG_RESPONSE_LATENCY then
    -- AWS_LAMBDA_WAIT_TIME is set in the access phase; fall back to 0 in
    -- case the access handler did not run for this request.
    ctx.KONG_RESPONSE_LATENCY = ctx.KONG_RESPONSE_LATENCY - (ctx.AWS_LAMBDA_WAIT_TIME or 0)
  end
end


return AWSLambdaHandler
90 changes: 88 additions & 2 deletions spec/03-plugins/27-aws-lambda/99-access_spec.lua
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ local helpers = require "spec.helpers"
local meta = require "kong.meta"
local pl_file = require "pl.file"
local fixtures = require "spec.fixtures.aws-lambda"
local http_mock = require "spec.helpers.http_mock"

local TEST_CONF = helpers.test_conf
local server_tokens = meta._SERVER_TOKENS
Expand All @@ -15,6 +16,19 @@ for _, strategy in helpers.each_strategy() do
describe("Plugin: AWS Lambda (access) [#" .. strategy .. "]", function()
local proxy_client
local admin_client
local mock_http_server_port = helpers.get_available_port()

local mock = http_mock.new(mock_http_server_port, [[
ngx.print('hello world')
]], {
prefix = "mockserver",
log_opts = {
req = true,
req_body = true,
req_large_body = true,
},
tls = false,
})

lazy_setup(function()
local bp = helpers.get_db_utils(strategy, {
Expand Down Expand Up @@ -156,6 +170,12 @@ for _, strategy in helpers.each_strategy() do
service = null,
}

local route25 = bp.routes:insert {
hosts = { "lambda25.test" },
protocols = { "http", "https" },
service = null,
}

bp.plugins:insert {
name = "aws-lambda",
route = { id = route1.id },
Expand Down Expand Up @@ -482,6 +502,26 @@ for _, strategy in helpers.each_strategy() do
}
}

bp.plugins:insert {
name = "aws-lambda",
route = { id = route25.id },
config = {
port = 10001,
aws_key = "mock-key",
aws_secret = "mock-secret",
aws_region = "us-east-1",
function_name = "functionWithLatency",
}
}

bp.plugins:insert {
route = { id = route25.id },
name = "http-log",
config = {
http_endpoint = "http://localhost:" .. mock_http_server_port,
}
}

fixtures.dns_mock:A({
name = "custom.lambda.endpoint",
address = "127.0.0.1",
Expand All @@ -504,7 +544,7 @@ for _, strategy in helpers.each_strategy() do
lazy_setup(function()
assert(helpers.start_kong({
database = strategy,
plugins = "aws-lambda",
plugins = "aws-lambda, http-log",
nginx_conf = "spec/fixtures/custom_nginx.template",
-- we don't actually use any stream proxy features in this test suite,
-- but this is needed in order to load our forward-proxy stream_mock fixture
Expand Down Expand Up @@ -1193,7 +1233,7 @@ for _, strategy in helpers.each_strategy() do
helpers.setenv("AWS_REGION", "us-east-1")
assert(helpers.start_kong({
database = strategy,
plugins = "aws-lambda",
plugins = "aws-lambda, http-log",
nginx_conf = "spec/fixtures/custom_nginx.template",
-- we don't actually use any stream proxy features in this test suite,
-- but this is needed in order to load our forward-proxy stream_mock fixture
Expand All @@ -1219,6 +1259,52 @@ for _, strategy in helpers.each_strategy() do
assert.is_array(res.headers["Access-Control-Allow-Origin"])
end)
end)

describe("With latency", function()
lazy_setup(function()
assert(mock:start())

helpers.setenv("AWS_REGION", "us-east-1")
assert(helpers.start_kong({
database = strategy,
plugins = "aws-lambda, http-log",
nginx_conf = "spec/fixtures/custom_nginx.template",
-- we don't actually use any stream proxy features in this test suite,
-- but this is needed in order to load our forward-proxy stream_mock fixture
stream_listen = helpers.get_proxy_ip(false) .. ":19000",
}, nil, nil, fixtures))
end)

lazy_teardown(function()
helpers.stop_kong()
helpers.unsetenv("AWS_REGION")
assert(mock:stop())
end)

it("invokes a Lambda function with GET and latency", function()
local res = assert(proxy_client:send {
method = "GET",
path = "/get",
headers = {
["Host"] = "lambda25.test"
}
})

assert.res_status(200, res)
local http_log_entries
assert.eventually(function ()
http_log_entries = mock:get_all_logs()
return #http_log_entries >= 1
end).with_timeout(10).is_truthy()
assert.is_not_nil(http_log_entries[1])
local log_entry_with_latency = cjson.decode(http_log_entries[1].req.body)
-- Accessing the AWS mock server always takes a measurable amount of time,
-- so if latencies.kong < latencies.proxy we can assume the latency
-- calculation is working. Asserting a precise number here would make the
-- test flaky.
assert.True(log_entry_with_latency.latencies.kong < log_entry_with_latency.latencies.proxy)
windmgc marked this conversation as resolved.
Show resolved Hide resolved
end)
end)
end)

describe("Plugin: AWS Lambda with #vault [#" .. strategy .. "]", function ()
Expand Down
5 changes: 5 additions & 0 deletions spec/fixtures/aws-lambda.lua
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,11 @@ local fixtures = {
elseif string.match(ngx.var.uri, "functionWithTransferEncodingHeader") then
ngx.say("{\"statusCode\": 200, \"headers\": { \"Transfer-Encoding\": \"chunked\", \"transfer-encoding\": \"chunked\"}}")

elseif string.match(ngx.var.uri, "functionWithLatency") then
-- additional latency
ngx.sleep(2)
ngx.say("{\"statusCode\": 200, \"body\": \"dGVzdA=\", \"isBase64Encoded\": false}")

elseif type(res) == 'string' then
ngx.header["Content-Length"] = #res + 1
ngx.say(res)
Expand Down
Loading