diff --git a/observe/resource_monitor.go b/observe/resource_monitor.go
index 336161bc..51b2eaf8 100644
--- a/observe/resource_monitor.go
+++ b/observe/resource_monitor.go
@@ -380,7 +380,6 @@ func resourceMonitor() *schema.Resource {
 			"log_stage_index": {
 				Type:     schema.TypeInt,
 				Optional: true,
-				Default:  0,
 			},
 			"source_log_dataset_id": {
 				Type:     schema.TypeString,
@@ -630,7 +629,11 @@ func newMonitorRuleConfig(data *schema.ResourceData) (ruleInput *gql.MonitorRule
 		idx := v.(int)
 		stageId := fmt.Sprintf("stage-%d", idx)
 		ruleInput.LogRule.LogStageId = &stageId
+	} else {
+		stageId := "stage-0"
+		ruleInput.LogRule.LogStageId = &stageId
 	}
+
 	if v, ok := data.GetOk("rule.0.log.0.source_log_dataset_id"); ok {
 		is, _ := oid.NewOID(v.(string))
 		ruleInput.LogRule.SourceLogDatasetId = &is.Id
@@ -909,18 +912,6 @@ func flattenRule(data *schema.ResourceData, input gql.MonitorRule, stageIds []st
 	}
 
 	if logRule, ok := input.(*gql.MonitorRuleMonitorRuleLog); ok {
-		id := oid.OID{
-			Type: oid.TypeDataset,
-			Id:   *logRule.SourceLogDatasetId,
-		}
-		// check for existing version timestamp we can maintain
-		// same approach as in flattenAndSetQuery() for input datasets
-		if v, ok := data.GetOk("rule.0.log.0.source_log_dataset_id"); ok {
-			prv, err := oid.NewOID(v.(string))
-			if err == nil && id.Id == prv.Id {
-				id.Version = prv.Version
-			}
-		}
 		stageIndex := 0
 		for i, sId := range stageIds {
 			if sId == logRule.LogStageId {
@@ -930,12 +921,27 @@ func flattenRule(data *schema.ResourceData, input gql.MonitorRule, stageIds []st
 		}
 
 		log := map[string]interface{}{
-			"compare_function":      toSnake(string(logRule.CompareFunction)),
-			"compare_values":        logRule.CompareValues,
-			"lookback_time":         logRule.LookbackTime.String(),
-			"expression_summary":    logRule.ExpressionSummary,
-			"log_stage_index":       stageIndex,
-			"source_log_dataset_id": id.String(),
+			"compare_function":   toSnake(string(logRule.CompareFunction)),
+			"compare_values":     logRule.CompareValues,
+			"lookback_time":      logRule.LookbackTime.String(),
+			"expression_summary": logRule.ExpressionSummary,
+			"log_stage_index":    stageIndex,
+		}
+		if logRule.SourceLogDatasetId != nil {
+			id := oid.OID{
+				Type: oid.TypeDataset,
+				Id:   *logRule.SourceLogDatasetId,
+			}
+			// check for existing version timestamp we can maintain
+			// same approach as in flattenAndSetQuery() for input datasets
+			if v, ok := data.GetOk("rule.0.log.0.source_log_dataset_id"); ok {
+				prv, err := oid.NewOID(v.(string))
+				if err == nil && id.Id == prv.Id {
+					id.Version = prv.Version
+				}
+			}
+
+			log["source_log_dataset_id"] = id.String()
 		}
 
 		rule["log"] = []interface{}{log}
diff --git a/observe/resource_monitor_test.go b/observe/resource_monitor_test.go
index 5f593026..b4ca1577 100644
--- a/observe/resource_monitor_test.go
+++ b/observe/resource_monitor_test.go
@@ -562,7 +562,6 @@ func TestAccObserveMonitorLog(t *testing.T) {
 					name      = "%[1]s"
 
 					inputs = {
-						// "test" = observe_datastream.test.dataset
 						"test" = observe_dataset.first.oid
 					}
 
@@ -570,6 +569,12 @@ func TestAccObserveMonitorLog(t *testing.T) {
 						pipeline = <<-EOF
 							colmake kind:"test", description:"test"
 						EOF
+						output_stage = true
+					}
+					stage {
+						pipeline = <<-EOF
+							filter kind ~ "test"
+						EOF
 					}
 
 					rule {
@@ -580,7 +585,66 @@ func TestAccObserveMonitorLog(t *testing.T) {
 							compare_values     = [1]
 							lookback_time      = "1m"
 							expression_summary = "Some text"
-							log_stage_index    = 0
+							log_stage_index    = 1
+						}
+					}
+
+					notification_spec {
+						merge = "separate"
+					}
+				}`, randomPrefix),
+				Check: resource.ComposeTestCheckFunc(
+					resource.TestCheckResourceAttr("observe_monitor.first", "name", randomPrefix),
+					resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.compare_function", "greater"),
+					resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.compare_values.0", "1"),
+					resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.lookback_time", "1m0s"),
+					resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.expression_summary", "Some text"),
+					resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.log_stage_index", "1"),
+				),
+			},
+			{
+				Config: fmt.Sprintf(monitorConfigPreamble+`
+				resource "observe_dataset" "first" {
+					workspace = data.observe_workspace.default.oid
+					name      = "%[1]s-first"
+
+					inputs = {
+						"test" = observe_datastream.test.dataset
+					}
+
+					stage {
+						pipeline = <<-EOF
+							make_col vt:BUNDLE_TIMESTAMP
+							make_interval vt
+						EOF
+					}
+				}
+
+				resource "observe_monitor" "first" {
+					workspace = data.observe_workspace.default.oid
+					name      = "%[1]s"
+
+					inputs = {
+						"test" = observe_dataset.first.oid
+					}
+
+					stage {
+						pipeline = <<-EOF
+							filter OBSERVATION_INDEX != 0
+						EOF
+					}
+					stage {
+						pipeline = "timechart 1m, frame(back:10m), A_ContainerLogsClean_count:count(), group_by()"
+					}
+
+					rule {
+						source_column = "A_ContainerLogsClean_count"
+
+						log {
+							compare_function   = "greater"
+							compare_values     = [1]
+							lookback_time      = "1m"
+							expression_summary = "Some text"
 							source_log_dataset_id = observe_dataset.first.oid
 						}
 					}
@@ -591,7 +655,12 @@ func TestAccObserveMonitorLog(t *testing.T) {
 				}`, randomPrefix),
 				Check: resource.ComposeTestCheckFunc(
 					resource.TestCheckResourceAttr("observe_monitor.first", "name", randomPrefix),
-					//TODO
+					resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.compare_function", "greater"),
+					resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.compare_values.0", "1"),
+					resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.lookback_time", "1m0s"),
+					resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.expression_summary", "Some text"),
+					resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.log_stage_index", "0"),
+					resource.TestCheckResourceAttrPair("observe_monitor.first", "rule.0.log.0.source_log_dataset_id", "observe_dataset.first", "oid"),
 				),
 			},
 		},