🚨 Google Cloud – Public Bucket Alert (Terraform + Cloud Function)
main.tf
# Default provider configuration; project and region come from variables.tf.
provider "google" {
project = var.project_id
region = var.region
}
# Enable required services
# Enable required services.
# NOTE: cloudbuild.googleapis.com is required to build/deploy Cloud Functions
# source code, even though the function itself only uses Pub/Sub and Logging.
resource "google_project_service" "services" {
  for_each = toset([
    "pubsub.googleapis.com",
    "logging.googleapis.com",
    "cloudfunctions.googleapis.com",
    "cloudbuild.googleapis.com"
  ])
  service = each.key
}
# Pub/Sub topic that receives log events
# Pub/Sub topic that receives log events exported by the logging sink;
# it is also the trigger source for the Cloud Function.
resource "google_pubsub_topic" "log_topic" {
name = "storage-policy-violations"
}
# Pub/Sub topic for SOC alerts
# Pub/Sub topic for SOC alerts; the Cloud Function publishes the final
# alert payload here for downstream consumers.
resource "google_pubsub_topic" "soc_alerts" {
name = "soc-alerts"
}
# Log sink to capture public bucket IAM changes
# Log sink to capture public bucket IAM changes.
# (This block was truncated; reconstructed to match the complete definition.)
resource "google_logging_project_sink" "storage_sink" {
  name        = "storage-public-bucket-sink"
  # Pub/Sub destinations use the "pubsub.googleapis.com/" URI prefix.
  destination = "pubsub.googleapis.com/${google_pubsub_topic.log_topic.id}"

  # Filter: only when public access is granted. Lines in the Logging query
  # language are implicitly ANDed.
  filter = <<EOT
resource.type="gcs_bucket"
protoPayload.methodName="storage.setIamPermissions"
(protoPayload.serviceData.policyDelta.bindingDeltas.member="allUsers"
OR protoPayload.serviceData.policyDelta.bindingDeltas.member="allAuthenticatedUsers")
EOT

  # Dedicated writer identity so the sink can be granted publish rights
  # on the topic (see google_pubsub_topic_iam_member.sink_pub).
  unique_writer_identity = true
}
variables.tf
variable "project_id" {
  description = "Your GCP Project ID"
  type        = string
}

variable "region" {
  description = "Region used for the provider, source bucket and Cloud Function"
  type        = string
  default     = "us-central1"
}
🔹 Cloud Function Code (main.py)
import base64
import json
import os

from google.cloud import pubsub_v1
SOC_TOPIC = "soc-alerts"
def process_pubsub(event, context):
"""Triggered when a bucket is made public"""
if "data" not in event:
print("No data found in event")
return
# Decode log entry
payload = base64.b64decode(event["data"]).decode("utf-8")
try:
log_entry = json.loads(payload)
except Exception as e:
print(f"Could not parse log entry: {e}")
return
bucket_name = log_entry.get("resource", {}).get("labels", {}).get("bucket_name", "unknown")
# Create alert message
message = {
"alert": "PUBLIC_BUCKET_DETECTED",
"bucket": bucket_name,
"log": log_entry
}
# Publish to SOC topic
publisher = pubsub_v1.PublisherClient()
project_id = log_entry.get("resource", {}).get("labels", {}).get("project_id", "")
topic_path = publisher.topic_path(project_id, SOC_TOPIC)
publisher.publish(topic_path, json.dumps(message).encode("utf-8"))
print(f"⚠️ SOC ALERT: Public bucket detected -> {bucket_name}")
requirements.txt
google-cloud-pubsub
🔹 Windows Packaging
Compress-Archive -Path main.py, requirements.txt -DestinationPath function-source.zip -Force
🔹 Deployment Steps
- Enable APIs:
gcloud services enable pubsub.googleapis.com logging.googleapis.com cloudfunctions.googleapis.com
- Deploy Terraform:
terraform init
terraform apply
- Test by making a bucket public:
gsutil iam ch allUsers:objectViewer gs://<your-bucket>
→ This will trigger Cloud Logging → Pub/Sub → Cloud Function → SOC Pub/Sub topic.
✅ Result
This setup works exactly like AWS S3 Public Bucket Alerts, but implemented in Google Cloud.
main.tf
# Default provider configuration; project and region come from variables.tf.
provider "google" {
project = var.project_id
region = var.region
}
# Enable required services
# Enable required services.
# NOTE: cloudbuild.googleapis.com is required to build/deploy Cloud Functions
# source code, even though the function itself only uses Pub/Sub and Logging.
resource "google_project_service" "services" {
  for_each = toset([
    "pubsub.googleapis.com",
    "logging.googleapis.com",
    "cloudfunctions.googleapis.com",
    "cloudbuild.googleapis.com"
  ])
  service = each.key
}
# Pub/Sub topic that receives log events
# Pub/Sub topic that receives log events exported by the logging sink;
# it is also the trigger source for the Cloud Function.
resource "google_pubsub_topic" "log_topic" {
name = "storage-policy-violations"
}
# Pub/Sub topic for SOC alerts
# Pub/Sub topic for SOC alerts; the Cloud Function publishes the final
# alert payload here for downstream consumers.
resource "google_pubsub_topic" "soc_alerts" {
name = "soc-alerts"
}
# Log sink to capture public bucket IAM changes
resource "google_logging_project_sink" "storage_sink" {
name = "storage-public-bucket-sink"
# Pub/Sub destinations use the "pubsub.googleapis.com/" URI prefix.
destination = "pubsub.googleapis.com/${google_pubsub_topic.log_topic.id}"
# Filter: only when public access is granted
# Lines in the Logging query language are implicitly ANDed: GCS bucket
# resources, the setIamPermissions method, and a binding delta granting
# access to allUsers or allAuthenticatedUsers.
# NOTE(review): confirm the protoPayload.serviceData.policyDelta field path
# against the current GCS audit-log schema before relying on this filter.
filter = <<EOT
resource.type="gcs_bucket"
protoPayload.methodName="storage.setIamPermissions"
(protoPayload.serviceData.policyDelta.bindingDeltas.member="allUsers"
OR protoPayload.serviceData.policyDelta.bindingDeltas.member="allAuthenticatedUsers")
EOT
# Dedicated writer identity so the sink can be granted publish rights on
# the topic (see google_pubsub_topic_iam_member.sink_pub below).
unique_writer_identity = true
}
# Give sink permission to publish
# Give sink permission to publish: the sink's auto-created writer identity
# (unique_writer_identity = true) needs pubsub.publisher on the log topic.
resource "google_pubsub_topic_iam_member" "sink_pub" {
topic = google_pubsub_topic.log_topic.name
role = "roles/pubsub.publisher"
member = google_logging_project_sink.storage_sink.writer_identity
}
# Storage bucket for function code
# Storage bucket holding the Cloud Function source archive.
resource "google_storage_bucket" "function_bucket" {
  name          = "${var.project_id}-function-src"
  location      = var.region
  # Allow `terraform destroy` to remove the bucket even if objects remain.
  force_destroy = true
  # Enforce IAM-only access; per-object ACLs are unnecessary for source code
  # and disabling them is the recommended hardening for new buckets.
  uniform_bucket_level_access = true
}
# Upload function zip
# Upload function zip.
# Embed the archive's content hash in the object name: with a static name,
# re-zipping the source would never change this resource, so the Cloud
# Function (which references the object name) would never redeploy.
resource "google_storage_bucket_object" "function_source" {
  name   = "function-source-${filemd5("function-source.zip")}.zip"
  bucket = google_storage_bucket.function_bucket.name
  source = "function-source.zip"
}
# Cloud Function
# Cloud Function (1st gen) that forwards public-bucket log events to SOC.
resource "google_cloudfunctions_function" "notify_soc" {
name = "storage-public-alert"
# NOTE(review): python39 is an older runtime — confirm it is still accepted
# for new deployments in your project before applying.
runtime = "python39"
region = var.region
# Must match the handler function name in main.py.
entry_point = "process_pubsub"
source_archive_bucket = google_storage_bucket.function_bucket.name
source_archive_object = google_storage_bucket_object.function_source.name
# Fire on every message published to the log topic by the sink.
event_trigger {
event_type = "google.pubsub.topic.publish"
resource = google_pubsub_topic.log_topic.name
}
available_memory_mb = 256
description = "Notifies SOC when a bucket is made public"
}
# Allow function to publish to SOC topic
# Allow function to publish to SOC topic: grant the function's runtime
# service account pubsub.publisher on soc-alerts.
resource "google_pubsub_topic_iam_member" "function_pub" {
topic = google_pubsub_topic.soc_alerts.name
role = "roles/pubsub.publisher"
member = "serviceAccount:${google_cloudfunctions_function.notify_soc.service_account_email}"
}
No comments:
Post a Comment