Commit 596d082c authored by Edouard DE BRYE

refactor(apigw) analyse logs from log group subscription with lambda and put 400 errors in cloudwatch metrics
parent 47f69dde
@@ -83,15 +83,37 @@ resource "aws_api_gateway_deployment" "main" {
create_before_destroy = true
}
}
locals {
stage_name = terraform.workspace
}
resource "aws_cloudwatch_log_group" "apigw_logs" {
name = "apigw_${aws_api_gateway_rest_api.main.id}/${local.stage_name}"
retention_in_days = 7
tags = local.common_tags
}
resource "aws_api_gateway_stage" "main" {
deployment_id = aws_api_gateway_deployment.main.id
rest_api_id = aws_api_gateway_rest_api.main.id
stage_name = terraform.workspace == "production" ? "api" : terraform.workspace
stage_name = local.stage_name
xray_tracing_enabled = true
tags = local.common_tags
access_log_settings {
format = jsonencode({
"stage" = "$context.stage",
"apiId" = "$context.apiId",
"requestId" = "$context.requestId",
"ip" = "$context.identity.sourceIp",
"requestTime" = "$context.requestTime",
"httpMethod" = "$context.httpMethod",
"resourcePath" = "$context.resourcePath",
"status" = "$context.status",
"protocol" = "$context.protocol",
"responseLength" = "$context.responseLength"
})
destination_arn = aws_cloudwatch_log_group.apigw_logs.arn
}
}
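For reference, a single access-log entry produced by the jsonencode format above comes out roughly like the object below (all values are illustrative); this is the JSON string that each log event's message field carries when the subscriber Lambda further down parses it.

// Illustrative access-log record matching the access_log_settings format above (values are made up).
const sampleAccessLog = {
  stage: "dev",
  apiId: "a1b2c3d4e5",
  requestId: "6f0f2f2e-1111-2222-3333-444444444444",
  ip: "203.0.113.10",
  requestTime: "25/Mar/2021:10:15:32 +0000",
  httpMethod: "GET",
  resourcePath: "/items",
  status: "400",
  protocol: "HTTP/1.1",
  responseLength: "87"
};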
output "api_endpoint" {
value = aws_api_gateway_stage.main.invoke_url
}
......
locals {
API_GATEWAY_LOG_GROUP = "API-Gateway-Execution-Logs_${split("-", aws_api_gateway_stage.main.id)[1]}/${aws_api_gateway_stage.main.stage_name}"
}
output "log_groupe_name" {
value = local.API_GATEWAY_LOG_GROUP
}
resource "aws_lambda_function" "logs" {
filename = data.archive_file.lambda_logs_file.output_path
function_name = "${local.prefix}-logs-apigw"
resource "aws_lambda_function" "apigw_logs" {
filename = data.archive_file.lambda_logs.output_path
function_name = "${local.prefix}-analyse-logs-apigw"
role = aws_iam_role.iam_for_lambda_logs.arn
handler = "apigw_metrics.handler"
timeout = 10
source_code_hash = data.archive_file.lambda_logs_file.output_base64sha256
source_code_hash = data.archive_file.lambda_logs.output_base64sha256
runtime = "nodejs12.x"
reserved_concurrent_executions = 2
reserved_concurrent_executions = 5
layers = [
"arn:aws:lambda:eu-west-1:580247275435:layer:LambdaInsightsExtension:14"
]
environment {
variables = {
API_GATEWAY_LOG_GROUP = local.API_GATEWAY_LOG_GROUP
API_GATEWAY_LOG_GROUP = aws_cloudwatch_log_group.apigw_logs.name
}
}
tags = local.common_tags
}
resource "aws_lambda_function" "logs_events" {
filename = data.archive_file.lambda_logs_event_file.output_path
function_name = "${local.prefix}-logs-events-apigw"
role = aws_iam_role.iam_for_lambda_logs.arn
handler = "apigw_metrics_live.handler"
timeout = 10
source_code_hash = data.archive_file.lambda_logs_event_file.output_base64sha256
runtime = "nodejs12.x"
reserved_concurrent_executions = 200
layers = [
"arn:aws:lambda:eu-west-1:580247275435:layer:LambdaInsightsExtension:14"
]
environment {
variables = {
API_GATEWAY_LOG_GROUP = local.API_GATEWAY_LOG_GROUP
}
resource "aws_cloudwatch_log_subscription_filter" "lambdafunction_logfilter" {
name = "${local.prefix}-lambdafunction-subscription"
log_group_name = aws_cloudwatch_log_group.apigw_logs.name
filter_pattern = " "
destination_arn = aws_lambda_function.apigw_logs.arn
lifecycle {
ignore_changes = [
filter_pattern
]
}
}
tags = local.common_tags
resource "aws_lambda_permission" "log_subscription" {
statement_id = "AllowCloudWatchInvoke"
action = "lambda:InvokeFunction"
function_name = aws_lambda_function.apigw_logs.function_name
principal = "logs.${var.aws_region}.amazonaws.com"
source_arn = "${aws_cloudwatch_log_group.apigw_logs.arn}:*"
}
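With the subscription filter and Lambda permission above, CloudWatch Logs invokes the function with an envelope of the shape sketched below; awslogs.data is base64-encoded, gzip-compressed JSON that the handler decodes before reading logEvents (values here are illustrative).

// Shape of a CloudWatch Logs subscription event as delivered to Lambda (illustrative values).
const sampleSubscriptionEvent = {
  awslogs: {
    // base64(gzip(JSON)) of { owner, logGroup, logStream, subscriptionFilters,
    //                         messageType, logEvents: [{ id, timestamp, message }] }
    data: "H4sIAAAAAAAA..."
  }
};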
data "archive_file" "lambda_logs_file" {
data "archive_file" "lambda_logs" {
type = "zip"
output_path = "${local.lambda_loc}/zip/apigw_metrics.zip"
source_file = "${local.lambda_loc}/apigw_metrics/apigw_metrics.js"
}
data "archive_file" "lambda_logs_event_file" {
type = "zip"
output_path = "${local.lambda_loc}/zip/apigw_metrics_events.zip"
source_file = "${local.lambda_loc}/apigw_metrics/apigw_metrics_live.js"
}
resource "aws_iam_role" "iam_for_lambda_logs" {
name = "${local.prefix}-lambda-admin-cloudwatch-logs"
assume_role_policy = file("./templates/lambda/assume-role-policy.json")
}
resource "aws_iam_role_policy_attachment" "lambda_logs_FA" {
resource "aws_iam_role_policy_attachment" "lambda_cloudwatch_FA" {
role = aws_iam_role.iam_for_lambda_logs.name
policy_arn = "arn:aws:iam::aws:policy/CloudWatchLogsFullAccess"
policy_arn = "arn:aws:iam::aws:policy/CloudWatchFullAccess"
}
resource "aws_iam_role_policy_attachment" "lambda_cloudwatch_FA" {
resource "aws_iam_role_policy_attachment" "lambda_apigw" {
role = aws_iam_role.iam_for_lambda_logs.name
policy_arn = "arn:aws:iam::aws:policy/CloudWatchFullAccess"
policy_arn = aws_iam_policy.apigw_read_only.arn
}
resource "aws_iam_policy" "apigw_read_only"{
name = "ApiGatewayReadOnly"
path = "/"
description = "Read only for describing APIGateway services"
# Terraform's "jsonencode" function converts a
# Terraform expression result to valid JSON syntax.
policy = jsonencode({
Version = "2012-10-17"
Statement = [
{
Action = [
"apigateway:GET",
]
Effect = "Allow"
Resource = "*"
}
]
})
}
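The apigateway:GET action granted above is what allows the log-processing Lambda to call getRestApi and turn an apiId from the access logs into a human-readable API name; a minimal sketch of that lookup (the helper name is illustrative):

// Minimal sketch: resolve a REST API's display name from its id (needs apigateway:GET).
const AWS = require('aws-sdk');
const apigw = new AWS.APIGateway();

async function apiNameFor(restApiId) {
  const api = await apigw.getRestApi({ restApiId }).promise();
  return api.name;
}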
@@ -18,5 +18,57 @@ resource "aws_dynamodb_table" "main" {
lifecycle {
prevent_destroy = true
ignore_changes = [
write_capacity,
read_capacity
]
}
}
resource "aws_appautoscaling_target" "dynamodb_table_read_target" {
max_capacity = 10
min_capacity = 1
resource_id = "table/${aws_dynamodb_table.main.name}"
scalable_dimension = "dynamodb:table:ReadCapacityUnits"
service_namespace = "dynamodb"
}
resource "aws_appautoscaling_policy" "dynamodb_table_read_policy" {
name = "DynamoDBReadCapacityUtilization:${aws_appautoscaling_target.dynamodb_table_read_target.resource_id}"
policy_type = "TargetTrackingScaling"
resource_id = aws_appautoscaling_target.dynamodb_table_read_target.resource_id
scalable_dimension = aws_appautoscaling_target.dynamodb_table_read_target.scalable_dimension
service_namespace = aws_appautoscaling_target.dynamodb_table_read_target.service_namespace
target_tracking_scaling_policy_configuration {
predefined_metric_specification {
predefined_metric_type = "DynamoDBReadCapacityUtilization"
}
target_value = 50
}
}
resource "aws_appautoscaling_target" "dynamodb_table_write_target" {
max_capacity = 10
min_capacity = 1
resource_id = "table/${aws_dynamodb_table.main.name}"
scalable_dimension = "dynamodb:table:WriteCapacityUnits"
service_namespace = "dynamodb"
}
resource "aws_appautoscaling_policy" "dynamodb_table_write_policy" {
name = "DynamoDBWriteCapacityUtilization:${aws_appautoscaling_target.dynamodb_table_write_target.resource_id}"
policy_type = "TargetTrackingScaling"
resource_id = aws_appautoscaling_target.dynamodb_table_write_target.resource_id
scalable_dimension = aws_appautoscaling_target.dynamodb_table_write_target.scalable_dimension
service_namespace = aws_appautoscaling_target.dynamodb_table_write_target.service_namespace
target_tracking_scaling_policy_configuration {
predefined_metric_specification {
predefined_metric_type = "DynamoDBWriteCapacityUtilization"
}
target_value = 50
}
}
\ No newline at end of file
const AWS = require('aws-sdk')
const zlib = require('zlib');
const CW = new AWS.CloudWatch()
const CWL = new AWS.CloudWatchLogs()
const API_GATEWAY_LOG_GROUP = process.env.API_GATEWAY_LOG_GROUP
const now = Date.now()/1000
const period = 30 * 60 // seconds
exports.handler = async function(event){
const APIGW = new AWS.APIGateway()
const params = {
logGroupName: API_GATEWAY_LOG_GROUP,
startTime: now - period,
endTime: now,
queryString: `fields @timestamp, @message
| parse @message \"(*) *\" as reqId, lMessage
| filter lMessage like /Method completed with status:/
| parse lMessage \"Method completed with status: *\" as status
| display @timestamp, reqId, status`
};
// 1. Start the query. When we start a query, this returns a queryId for us to use on our next step.
const data = await CWL.startQuery(params).promise();
const { queryId } = data;
console.debug('query id', queryId);
exports.handler = async function(event,context){
return new Promise((resolve,reject)=>{
while (true) {
// 2. Send Insight query to CloudwatchLogs
const insightData = await CWL.getQueryResults({ queryId }).promise();
console.log(JSON.stringify(insightData,null,4))
// 3. Check if it is available
if (Array.isArray(insightData.results) && insightData.status === 'Complete') {
const insightResult = insightData.results;
// Change this line to publish to SNS or send to Slack
console.log(JSON.stringify(insightResult, null, 4))
break;
}
// 4. Otherwise, Wait for 100 ms for insight api result
await new Promise((resolve, reject) => setTimeout(resolve, 100));
}
console.log("Event: "+JSON.stringify(event))
var payload = Buffer.from(event.awslogs.data, 'base64');
zlib.gunzip(payload, function(e, result) {
if (e) {
console.error(JSON.stringify(e, null, 2))
reject(e)
} else {
// start processing the log stream delivered in the event
result = JSON.parse(result.toString('ascii'));
console.log("Event Data after decoding: ", JSON.stringify(result, null, 2));
var promises = []
return 'ok';
}
\ No newline at end of file
result.logEvents.forEach(log => {
//for each log, get api name and push a metric data object associated in the payload for PutMetricData
var message = JSON.parse(log.message)
var getRestApiPromise = APIGW.getRestApi({restApiId:message.apiId}).promise()
promises.push(new Promise((res,rej)=>{
getRestApiPromise.then(data=>{
console.log("getRestApi result: "+JSON.stringify(data))
res(
{
MetricName: '400 Errors',
Dimensions: [
{
Name: 'API/REST',
Value: data.name
},
{
Name: 'Stage',
Value: message.stage
},
{
Name: 'Method',
Value: message.httpMethod
},
{
Name: 'Resource',
Value: message.resourcePath
},
],
Timestamp: new Date(log.timestamp), // log.timestamp is in milliseconds
Unit: "Count",
Value: message.status === "400" ? 1 : 0, // PutMetricData expects a numeric Value
}
)
}).catch(err=>{
rej(err)
})
}))
});
//when all logs are processed, send the PutMetricData request
Promise.all(promises).then(metrics=>{ // if all the promises (cf line 26) succeed, i.e. every getRestApiPromise resolved
const params = {
MetricData:metrics,
Namespace: "APIGW/custom"
};
console.log("PutMetricData parameters : "+JSON.stringify(params))
CW.putMetricData(params).promise()
.then(data=>{
console.log("PutMetricData succeeded :"+JSON.stringify(data))
resolve(data)
})
.catch(err=>{
console.error(err)
reject(err)
});
}).catch( err => { // if one of the promises (cf line 26) fails
console.error(err)
reject(err)
});
}
});
})
};
\ No newline at end of file
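To exercise this handler locally, a fake log batch can be gzipped and base64-encoded into the same envelope CloudWatch Logs produces. A rough test harness follows; it assumes the handler file is apigw_metrics.js, that AWS credentials are available for the getRestApi and putMetricData calls, and that the apiId refers to a real REST API.

// Rough local test harness for apigw_metrics.js (file path and sample values are illustrative).
const zlib = require('zlib');
const { handler } = require('./apigw_metrics');

const fakeBatch = {
  logEvents: [
    {
      timestamp: Date.now(),
      message: JSON.stringify({ apiId: 'a1b2c3d4e5', stage: 'dev', httpMethod: 'GET', resourcePath: '/items', status: '400' })
    }
  ]
};

// CloudWatch Logs delivers the batch as base64-encoded gzip under event.awslogs.data.
const event = {
  awslogs: { data: zlib.gzipSync(Buffer.from(JSON.stringify(fakeBatch))).toString('base64') }
};

handler(event).then(console.log).catch(console.error);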
const AWS = require('aws-sdk')
const CW = new AWS.CloudWatch()
const CWL = new AWS.CloudWatchLogs()
const API_GATEWAY_LOG_GROUP = process.env.API_GATEWAY_LOG_GROUP
exports.handler = async function(event,context){
console.log(JSON.stringify(event))
console.log(JSON.stringify(context))
}
\ No newline at end of file
@@ -5,7 +5,8 @@
"packages": {
"": {
"dependencies": {
"aws-sdk": "^2.864.0"
"aws-sdk": "^2.864.0",
"zlib": "^1.0.5"
}
},
"node_modules/aws-sdk": {
@@ -133,6 +134,15 @@
"engines": {
"node": ">=4.0"
}
},
"node_modules/zlib": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/zlib/-/zlib-1.0.5.tgz",
"integrity": "sha1-bnyXL8NxxkWmr7A6sUdp3vEU/MA=",
"hasInstallScript": true,
"engines": {
"node": ">=0.2.0"
}
}
},
"dependencies": {
@@ -229,6 +239,11 @@
"version": "9.0.7",
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz",
"integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0="
},
"zlib": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/zlib/-/zlib-1.0.5.tgz",
"integrity": "sha1-bnyXL8NxxkWmr7A6sUdp3vEU/MA="
}
}
}
{
"dependencies": {
"aws-sdk": "^2.864.0"
"aws-sdk": "^2.864.0",
"zlib": "^1.0.5"
}
}
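A side note on the new dependency: require('zlib') in the Lambda above resolves to Node's built-in zlib module, which always takes precedence over a package of the same name in node_modules, so the npm zlib entry added here is most likely unnecessary. A minimal check using only the core module:

// Uses only Node's built-in zlib module; no npm package is required (assumes Node.js >= 12).
const zlib = require('zlib');
const compressed = zlib.gzipSync(Buffer.from('{"hello":"world"}'));
console.log(zlib.gunzipSync(compressed).toString('utf8')); // prints {"hello":"world"}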
@@ -8,8 +8,9 @@ const RESERVED_RESPONSE = `Error: You're using AWS reserved keywords as attribut
exports.handler = async (event = {}) => {
console.log(event)
if (!event.body) {
console.error("400 invalid request, you are missing the parameter body")
return { statusCode: 400, body: 'invalid request, you are missing the parameter body' };
var err = { statusCode: 400, body: JSON.stringify({status:32001,msg:'invalid request, you are missing the parameter body'})};
console.error(err)
return err;
}
const item = typeof event.body == 'object' ? event.body : JSON.parse(event.body);
item[PRIMARY_KEY] = uuidv4();
......
@@ -4,15 +4,12 @@ const db = new AWS.DynamoDB.DocumentClient();
const TABLE_NAME = process.env.TABLE_NAME || '';
const PRIMARY_KEY = process.env.TABLE_KEY || '';
exports.handler = async (event = {}) => {
if (!event.queryStringParameters) {
console.error("400 invalid request, you are missing the path parameter id")
return { statusCode: 400, body: `Error: You are missing the path parameter id` };
if (!event.queryStringParameters || !event.queryStringParameters.id) {
var err = { statusCode: 400, body: JSON.stringify({status:32002,msg:'invalid request, you are missing the path parameter id'})};
console.error(err)
return err;
}
const requestedItemId = event.queryStringParameters.id;
if (!requestedItemId) {
console.error("400 invalid request, you are missing the path parameter id")
return { statusCode: 400, body: `Error: You are missing the path parameter id` };
}
const params = {
TableName: TABLE_NAME,
Key: {
......
@@ -4,15 +4,12 @@ const db = new AWS.DynamoDB.DocumentClient();
const TABLE_NAME = process.env.TABLE_NAME || '';
const PRIMARY_KEY = process.env.TABLE_KEY || '';
exports.handler = async (event = {}) => {
if (!event.queryStringParameters) {
console.error("400 invalid request, you are missing the path parameter id")
return { statusCode: 400, body: `Error: You are missing the path parameter id` };
if (!event.queryStringParameters || !event.queryStringParameters.id) {
var err = { statusCode: 400, body: JSON.stringify({status:32002,msg:'invalid request, you are missing the path parameter id'})};
console.error(err)
return err;
}
const requestedItemId = event.queryStringParameters.id;
if (!requestedItemId) {
console.error("400 invalid request, you are missing the path parameter id")
return { statusCode: 400, body: `Error: You are missing the path parameter id` };
}
const params = {
TableName: TABLE_NAME,
Key: {
......
@@ -6,18 +6,16 @@ const PRIMARY_KEY = process.env.TABLE_KEY || '';
const RESERVED_RESPONSE = `Error: You're using AWS reserved keywords as attributes`, DYNAMODB_EXECUTION_ERROR = `Error: Execution update, caused a Dynamodb error, please take a look at your CloudWatch Logs.`;
exports.handler = async (event = {}) => {
if (!event.body) {
console.error("400 invalid request, you are missing the parameter body")
return { statusCode: 400, body: 'invalid request, you are missing the parameter body' };
var err = { statusCode: 400, body: JSON.stringify({status:32001,msg:'invalid request, you are missing the parameter body'})};
console.error(err)
return err;
}
if (!event.queryStringParameters) {
console.error("400 invalid request, you are missing the path parameter id")
return { statusCode: 400, body: `Error: You are missing the path parameter id` };
if (!event.queryStringParameters || !event.queryStringParameters.id) {
var err = { statusCode: 400, body: JSON.stringify({status:32002,msg:'invalid request, you are missing the path parameter id'})};
console.error(err)
return err;
}
const editedItemId = event.queryStringParameters.id;
if (!editedItemId) {
console.error("400 invalid request, you are missing the path parameter id")
return { statusCode: 400, body: 'invalid request, you are missing the path parameter id' };
}
const editedItem = typeof event.body == 'object' ? event.body : JSON.parse(event.body);
const editedItemProperties = Object.keys(editedItem);
if (!editedItem || editedItemProperties.length < 1) {
......
dynamodb_read_capacity = 1
dynamodb_write_capacity = 1
dynamodb_read_capacity = 2
dynamodb_write_capacity = 2
dynamodb_enable_pitr = false