Skip to content

Commit

Permalink
add module files
Browse files Browse the repository at this point in the history
  • Loading branch information
lindeskar committed Apr 29, 2021
1 parent 2ca19aa commit e6cd596
Show file tree
Hide file tree
Showing 6 changed files with 187 additions and 0 deletions.
1 change: 1 addition & 0 deletions dataflow-sink-filter-catchall.tpl
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
logName:"organizations/" OR logName:"folders/" OR logName:"projects/" NOT (resource.type = "dataflow_step" AND resource.labels.job_name = "${dataflow_name}")
1 change: 1 addition & 0 deletions dataflow-sink-filter.tpl
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
logName:"organizations/${org_id}/logs/" OR logName:"projects/${log_project}/logs/" %{ for e_proj in extra_projects ~} OR logName:"projects/${e_proj}/logs/" %{ endfor ~} NOT (resource.type = "dataflow_step" AND resource.labels.job_name = "${dataflow_name}")
8 changes: 8 additions & 0 deletions dataflow-udf-js.tpl
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
/**
 * Dataflow UDF for the Cloud_PubSub_to_Splunk template.
 * Wraps one Pub/Sub message (a JSON string) in the envelope Splunk HEC
 * expects: the original entry under "data" plus routing metadata.
 *
 * @param {string} inJson - raw JSON payload of a single Pub/Sub message
 * @returns {string} JSON string: {"data": ..., "_metadata": {...}}
 */
function transform(inJson) {
  // Parse the incoming log entry.
  var entry = JSON.parse(inJson);
  // Route by originating logName; fall back to "default" when absent.
  var metadata = {
    source: entry.logName || "default",
    sourcetype: "google:gcp:pubsub:message"
  };
  // Key order (data, then _metadata) matches the original envelope.
  return JSON.stringify({ "data": entry, "_metadata": metadata });
}
119 changes: 119 additions & 0 deletions main.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@

# Pub/Sub topics

resource "google_pubsub_topic" "log-topic" {
name = "${var.dataflow_base_name}-log-topic"
project = var.gcp_log_project
}

resource "google_pubsub_topic" "log-dl-topic" {
name = "${var.dataflow_base_name}-log-dl-topic"
project = var.gcp_log_project
}


# Pub/Sub subscriptions

resource "google_pubsub_subscription" "log-sub" {
name = "${var.dataflow_base_name}-log-sub"
project = var.gcp_log_project
topic = google_pubsub_topic.log-topic.name

message_retention_duration = "1200s"
}

resource "google_pubsub_subscription" "log-dl-sub" {
name = "${var.dataflow_base_name}-log-dl-sub"
project = var.gcp_log_project
topic = google_pubsub_topic.log-dl-topic.name

message_retention_duration = "1200s"
}


# Log sinks

resource "google_logging_organization_sink" "log-org-sink" {
name = "${var.dataflow_base_name}-log-org-sink"
count = var.log_sink_org_id == "" ? 0 : 1

org_id = var.log_sink_org_id
include_children = true
filter = templatefile("${path.module}/dataflow-sink-filter-catchall.tpl", {
org_id = var.log_sink_org_id,
log_project = var.gcp_log_project,
extra_projects = var.log_sink_org_filter_projects,
dataflow_name = var.dataflow_enable == 1 ? google_dataflow_job.splunk-job[0].name : ""
})

destination = "pubsub.googleapis.com/${google_pubsub_topic.log-topic.id}"
}

## Add project log sink ?


# Pub/Sub topic IAM policy

resource "google_pubsub_topic_iam_member" "log-topic-sink-member" {
project = google_pubsub_topic.log-topic.project
topic = google_pubsub_topic.log-topic.name
role = "roles/pubsub.publisher"
member = google_logging_organization_sink.log-org-sink.0.writer_identity
}


# Bucket for temp storage

resource "google_storage_bucket" "log-bucket" {
name = "${var.dataflow_base_name}-log-bucket"
project = var.gcp_log_project
location = var.gcp_region
force_destroy = true
uniform_bucket_level_access = true
}

resource "google_storage_bucket_object" "temp" {
name = "${var.dataflow_base_name}/temp/.ignore"
content = "IGNORE"
bucket = google_storage_bucket.log-bucket.name
depends_on = [google_storage_bucket.log-bucket]
}

resource "google_storage_bucket_object" "splunk-udf" {
name = "${var.dataflow_base_name}/js/splunk-udf.js"
content = templatefile("${path.module}/dataflow-udf-js.tpl", {
input_sub = google_pubsub_subscription.log-sub.name
})
bucket = google_storage_bucket.log-bucket.name
depends_on = [google_storage_bucket.log-bucket]
}


# Dataflow: streaming job built from Google's hosted
# Cloud_PubSub_to_Splunk template. Created only when
# var.dataflow_enable == 1 (references elsewhere guard on the same flag).
resource "google_dataflow_job" "splunk-job" {
  name         = "${var.dataflow_base_name}-splunk-job"
  count        = var.dataflow_enable == 1 ? 1 : 0
  project      = var.gcp_log_project
  region       = var.gcp_region
  zone         = var.gcp_zone
  machine_type = var.dataflow_worker_machine_type
  max_workers  = var.dataflow_max_workers

  # Google-hosted template bucket for the selected region.
  template_gcs_path = "gs://dataflow-templates-${var.gcp_region}/latest/Cloud_PubSub_to_Splunk"

  # Directory portion of the ".ignore" placeholder object,
  # i.e. "<bucket-url>/<base>/temp".
  temp_gcs_location = "${google_storage_bucket.log-bucket.url}/${dirname(google_storage_bucket_object.temp.name)}"

  # Parameters defined by the Cloud_PubSub_to_Splunk template.
  parameters = {
    inputSubscription = google_pubsub_subscription.log-sub.id
    # Messages that fail delivery to Splunk HEC land here.
    outputDeadletterTopic = google_pubsub_topic.log-dl-topic.id
    url = var.splunk_hec_url
    token = var.splunk_hec_token
    # UDF object uploaded by google_storage_bucket_object.splunk-udf;
    # "transform" is the function name defined in that file.
    javascriptTextTransformGcsPath = "${google_storage_bucket.log-bucket.url}/${google_storage_bucket_object.splunk-udf.name}"
    javascriptTextTransformFunctionName = "transform"
    disableCertificateValidation = var.splunk_hec_disable_cert_validation
    batchCount = var.dataflow_batchCount,
    parallelism = var.dataflow_parallelism
  }

  # Cancel (rather than drain) the running job on destroy.
  on_delete = "cancel"
}
Empty file added outputs.tf
Empty file.
58 changes: 58 additions & 0 deletions variables.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
variable "gcp_region" {
description = ""
}
variable "gcp_zone" {
description = ""
}
variable "gcp_log_project" {
description = ""
}
variable "log_sink_org_id" {
description = ""
type = string
default = ""
}
variable "log_sink_org_filter_projects" {
description = ""
type = list(string)
default = []
}
variable "dataflow_base_name" {
description = ""
}
variable "dataflow_max_workers" {
description = ""
type = number
default = null
}
variable "dataflow_enable" {
description = ""
type = number
default = 1
}
variable "dataflow_worker_machine_type" {
description = ""
type = string
default = null
}
variable "splunk_hec_url" {
description = ""
}
variable "splunk_hec_token" {
description = ""
}
variable "splunk_hec_disable_cert_validation" {
description = ""
type = string
default = "false"
}
variable "dataflow_batchCount" {
description = ""
type = number
default = 10
}
variable "dataflow_parallelism" {
description = ""
type = number
default = 4
}

0 comments on commit e6cd596

Please sign in to comment.