adding sns-to-cloudwatch-logs module

branch ctalarms-whitelist
lalanza808 4 years ago
parent f2f7f4e3d7
commit 95d3b23186

@@ -0,0 +1,25 @@
# SNS to CloudWatch Logs
This module creates an SNS topic with a Lambda subscription and CloudWatch Logs groups. When a message is published to the SNS topic, the Lambda function transforms the notification into a JSON object (with `subject` and `message` keys) and writes it to a CloudWatch Logs stream.
This is useful when an AWS service only offers an SNS notification option (like GuardDuty) but you want to export its events as logs to other services (like Datadog).
The Python script under `./lambda` is zipped via the `archive_file` Terraform data source and uploaded to S3 via the `aws_s3_bucket_object` resource.
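For example, a notification would land in the day's log stream as a single JSON event along these lines (field values illustrative):
```
{"subject": "GuardDuty Finding", "message": "{...original notification body...}"}
```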
## Usage
```
module "sns" {
  source    = "github.com/lalanza808/tf-modules.git//monitoring/sns-to-cloudwatch-logs"
  namespace = "sandbox-guardduty"
}
```
## Inputs
`namespace` is the only required input; all resources are named with it. The optional `log_group_retention_days` and `lambda_log_retention_days` inputs control log retention and both default to 90 days.
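For example, to shorten retention (values here are illustrative):
```
module "sns" {
  source                    = "github.com/lalanza808/tf-modules.git//monitoring/sns-to-cloudwatch-logs"
  namespace                 = "sandbox-guardduty"
  log_group_retention_days  = 30
  lambda_log_retention_days = 14
}
```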
## Outputs
Reference the module outputs to retrieve the SNS topic ARN; you will typically pass it as an input to other modules or resources. See [output.tf](./output.tf) for full details.
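As a sketch of that composition (the rule below is illustrative and not part of this module), a CloudWatch Events rule can route GuardDuty findings to the module's topic:
```
resource "aws_cloudwatch_event_rule" "guardduty" {
  name = "sandbox-guardduty-findings"
  event_pattern = jsonencode({
    source        = ["aws.guardduty"]
    "detail-type" = ["GuardDuty Finding"]
  })
}

resource "aws_cloudwatch_event_target" "sns" {
  rule = aws_cloudwatch_event_rule.guardduty.name
  arn  = module.sns.sns_topic_arn
}
```
Note that the module's topic policy would also need a statement allowing `events.amazonaws.com` to publish before this delivery works.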

@@ -0,0 +1,9 @@
resource "aws_cloudwatch_log_group" "logs" {
  name              = var.namespace
  retention_in_days = var.log_group_retention_days
}

resource "aws_cloudwatch_log_group" "lambda" {
  name              = "/aws/lambda/${var.namespace}"
  retention_in_days = var.lambda_log_retention_days
}

@@ -0,0 +1,28 @@
resource "aws_lambda_function" "lambda" {
  function_name     = var.namespace
  description       = "This lambda function takes incoming events from SNS and publishes them to a Cloudwatch Logs group. Useful for ingesting log streams into other platforms."
  s3_bucket         = aws_s3_bucket.lambda.id
  s3_key            = aws_s3_bucket_object.function.id
  s3_object_version = aws_s3_bucket_object.function.version_id
  role              = aws_iam_role.lambda.arn
  handler           = "send_event.handler"
  runtime           = "python3.7"
  timeout           = 5
  memory_size       = 128

  environment {
    variables = {
      log_group = var.namespace
    }
  }

  depends_on = [aws_cloudwatch_log_group.lambda]
}

resource "aws_lambda_permission" "lambda" {
  statement_id  = var.namespace
  action        = "lambda:InvokeFunction"
  function_name = aws_lambda_function.lambda.function_name
  principal     = "sns.amazonaws.com"
  source_arn    = aws_sns_topic.this.arn
}

@@ -0,0 +1,85 @@
from os import environ
from datetime import datetime
import time
import json

import boto3

log_group = environ['log_group']
cloudwatch_logs = boto3.client('logs')


def current_milli_time() -> int:
    # CloudWatch Logs expects event timestamps in milliseconds since epoch
    return int(round(time.time() * 1000))


def put_log_event(client, group, stream, message, seq_token='', create_stream=False) -> dict:
    if create_stream:
        print(f'[+] Creating new log stream "{stream}"')
        client.create_log_stream(
            logGroupName=group,
            logStreamName=stream
        )
    print('[+] Putting log event to Cloudwatch Logs')
    kwargs = {
        'logGroupName': group,
        'logStreamName': stream,
        'logEvents': [{
            'timestamp': current_milli_time(),
            'message': message
        }]
    }
    # put_log_events rejects an empty sequenceToken, so only pass one when
    # the target stream already has events
    if seq_token:
        kwargs['sequenceToken'] = seq_token
    return client.put_log_events(**kwargs)


def get_log_streams(client, group) -> dict:
    return client.describe_log_streams(
        logGroupName=group,
        orderBy='LastEventTime',
        descending=True
    )


def handler(event, context) -> None:
    try:
        message_source = event['Records'][0]['EventSource']
    except KeyError:
        return
    if message_source != 'aws:sns':
        return

    # Compute the stream name per invocation so warm Lambda containers roll
    # over to a new stream at midnight UTC; zero-padded so names sort cleanly
    log_stream = datetime.utcnow().strftime('%Y-%m-%d')

    log_streams = get_log_streams(cloudwatch_logs, log_group)
    log_content = json.dumps({
        "subject": event['Records'][0]['Sns']['Subject'],
        "message": event['Records'][0]['Sns']['Message']
    })

    # If the current date's log stream exists, reuse its sequence token
    # (a brand new stream has none) and put the event to it
    for stream in log_streams['logStreams']:
        if stream['logStreamName'] == log_stream:
            put_log_event(
                cloudwatch_logs, log_group, log_stream,
                log_content, stream.get('uploadSequenceToken', ''), False
            )
            return

    # No stream exists for the current date; create it and put the event
    put_log_event(
        cloudwatch_logs, log_group, log_stream,
        log_content, '', True
    )

@@ -0,0 +1,43 @@
resource "aws_iam_role" "lambda" {
  name_prefix        = "${var.namespace}-lambda-"
  assume_role_policy = <<POLICY
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Principal": {
        "Service": "lambda.amazonaws.com"
      },
      "Action": "sts:AssumeRole"
    }
  ]
}
POLICY
}

resource "aws_iam_role_policy" "lambda" {
  name   = aws_iam_role.lambda.name
  role   = aws_iam_role.lambda.name
  policy = <<POLICY
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Action": [
        "logs:CreateLogStream",
        "logs:PutLogEvents",
        "logs:DescribeLogStreams"
      ],
      "Effect": "Allow",
      "Resource": [
        "${aws_cloudwatch_log_group.lambda.arn}",
        "${aws_cloudwatch_log_group.logs.arn}"
      ]
    }
  ]
}
POLICY
}

@@ -0,0 +1,16 @@
data "aws_caller_identity" "current" {}

data "aws_region" "current" {}

data "archive_file" "functions" {
  type        = "zip"
  source_dir  = local.source_funcs
  output_path = local.archive_funcs
}

locals {
  source_funcs = "${path.module}/lambda/"
  // Write the zip outside of source_dir so the archive never includes itself
  archive_funcs = "${path.module}/functions.zip"
  region        = data.aws_region.current.name
  account_id    = data.aws_caller_identity.current.account_id
}

@@ -0,0 +1,11 @@
output "lambda_function_arn" {
  value = aws_lambda_function.lambda.arn
}

output "log_group_arn" {
  value = aws_cloudwatch_log_group.logs.arn
}

output "sns_topic_arn" {
  value = aws_sns_topic.this.arn
}

@@ -0,0 +1,25 @@
// Storage for versioned Lambda functions
resource "aws_s3_bucket" "lambda" {
  bucket_prefix = "${var.namespace}-lambda-"
  acl           = "private"
  force_destroy = true

  versioning {
    enabled = true
  }

  server_side_encryption_configuration {
    rule {
      apply_server_side_encryption_by_default {
        sse_algorithm = "AES256"
      }
    }
  }
}

resource "aws_s3_bucket_object" "function" {
  bucket = aws_s3_bucket.lambda.id
  key    = "lambda_functions.zip"
  source = data.archive_file.functions.output_path
  etag   = data.archive_file.functions.output_md5
}

@@ -0,0 +1,54 @@
resource "aws_sns_topic" "this" {
  name = var.namespace
}

resource "aws_sns_topic_subscription" "this" {
  topic_arn            = aws_sns_topic.this.arn
  protocol             = "lambda"
  endpoint             = aws_lambda_function.lambda.arn
  raw_message_delivery = false
}

resource "aws_sns_topic_policy" "this" {
  arn    = aws_sns_topic.this.arn
  policy = <<EOF
{
  "Version": "2008-10-17",
  "Statement": [
    {
      "Sid": "AllowAccountNotifications",
      "Effect": "Allow",
      "Principal": {
        "AWS": "*"
      },
      "Action": [
        "SNS:Publish",
        "SNS:RemovePermission",
        "SNS:SetTopicAttributes",
        "SNS:DeleteTopic",
        "SNS:ListSubscriptionsByTopic",
        "SNS:GetTopicAttributes",
        "SNS:Receive",
        "SNS:AddPermission",
        "SNS:Subscribe"
      ],
      "Resource": "${aws_sns_topic.this.arn}",
      "Condition": {
        "StringEquals": {
          "AWS:SourceOwner": "${local.account_id}"
        }
      }
    },
    {
      "Sid": "AllowBackupNotifications",
      "Effect": "Allow",
      "Principal": {
        "Service": "backup.amazonaws.com"
      },
      "Action": "SNS:Publish",
      "Resource": "${aws_sns_topic.this.arn}"
    }
  ]
}
EOF
}

@@ -0,0 +1,11 @@
variable "namespace" {
  description = "Friendly name to refer to all module-related resources"
}

variable "log_group_retention_days" {
  description = "Days to retain events in the SNS events log group"
  default     = 90
}

variable "lambda_log_retention_days" {
  description = "Days to retain the Lambda function's own execution logs"
  default     = 90
}