Creating alert rule via Terraform

I’m having an issue where I’m trying to set up an alert rule group via Terraform, but I continuously receive the following error: status: 404, body: {"message":"Not found"}. I’ve been at it for about a week and can’t figure out why this is happening. Here is my code.

resource "grafana_folder" "rule_folder" {
  title = "Confluent TF"
}
resource "grafana_rule_group" "rule_group" {
  name            = "Overall KB/s received WARNING TF"
  folder_uid      = grafana_folder.rule_folder.uid
  interval_seconds = 240
  org_id          = 1
  rule {
    name      = "Overall KB/s received WARNING TF"
    for       = "2m"
    condition = "B"
    no_data_state  = "NoData"
    exec_err_state = "Alerting"
    annotations = {
      "description" = "One or more Kafka client produces too much data, check topics list and stream lineage on confluent.cloud to find affected topic(s) and clients",
      "summary" = "Too much data going into Kafka cluster"
    }
    labels = {
      "yellow" = "WARNING"
    }
    is_paused = false
    data {
      ref_id = "A"
      query_type = ""
      relative_time_range {
        from = 21600
        to   = 0
      }
      datasource_uid = "xxxxxxxxxxxxxxx1"
      model = jsonencode({
        datasource     = {
            type       = "prometheus"
            uid        = "xxxxxxxxxxxxx"
        }
        editorMode     = "code"
        expr           = "sum(confluent_kafka_server_received_bytes) / 1024 / 60"
        instant        = false
        interval       = ""
        intervalMs     = 15000
        maxDataPoints  = 43200
        legendFormat   = "__auto"
        refId          = "A"
        range          = true
      })
    }
    data {
      ref_id     = "C"
      query_type = ""
      relative_time_range {
        from = 0
        to   = 0
      }
      datasource_uid = "-100"
      model = <<EOT
    {
        "conditions": [
            {
                "evaluator": {
                    "params": [
                        0,
                        0
                    ],
                    "type": "gt"
                },
                "operator": {
                    "type": "and"
                },
                "query": {
                    "params": []
                },
                "reducer": {
                    "params": [],
                    "type": "avg"
                },
                "type": "query"
            }
        ],
        "datasource": {
            "name": "Expression",
            "type": "__expr__",
            "uid": "-100"
        },
        "expression": "A",
        "intervalMs": 1000,
        "maxDataPoints": 43200,
        "reducer": "last",
        "refId": "C",
        "type": "reduce"
    }
    EOT
   }
   data {
      ref_id     = "B"
      query_type = ""
      relative_time_range {
        from = 0
        to   = 0
      }
      datasource_uid = "-100"
      model = <<EOT
    {
        "conditions": [
            {
                "evaluator": {
                    "params": [
                        10000,
                        0
                    ],
                    "type": "gt"
                },
                "operator": {
                    "type": "and"
                },
                "query": {
                    "params": []
                },
                "reducer": {
                    "params": [],
                    "type": "avg"
                },
                "type": "query"
            }
        ],
        "datasource": {
            "name": "Expression",
            "type": "__expr__",
            "uid": "-100"
        },
        "expression": "C",
        "intervalMs": 1000,
        "maxDataPoints": 43200,
        "refId": "B",
        "type": "threshold"
    }                         
    EOT  
   }
  }
}

I would create the rule manually in the UI first and then export the rule group in HCL format:


That way you will have valid HCL matching your exact Grafana version, and you can use that snippet in your TF.

This was a tremendous point. I didn’t notice that they have the HCL already laid out. Thanks a bunch!