Added tests for log rule
obs-gh-konstantintikhonov committed Dec 11, 2023
1 parent cb1fbe8 · commit e9dde4a
Showing 2 changed files with 97 additions and 22 deletions.
observe/resource_monitor.go (44 changes: 25 additions & 19 deletions)
@@ -380,7 +380,6 @@ func resourceMonitor() *schema.Resource {
 	"log_stage_index": {
 		Type:     schema.TypeInt,
 		Optional: true,
-		Default:  0,
 	},
 	"source_log_dataset_id": {
 		Type: schema.TypeString,
@@ -630,7 +629,11 @@ func newMonitorRuleConfig(data *schema.ResourceData) (ruleInput *gql.MonitorRule
 		idx := v.(int)
 		stageId := fmt.Sprintf("stage-%d", idx)
 		ruleInput.LogRule.LogStageId = &stageId
+	} else {
+		stageId := "stage-0"
+		ruleInput.LogRule.LogStageId = &stageId
 	}
+
 	if v, ok := data.GetOk("rule.0.log.0.source_log_dataset_id"); ok {
 		is, _ := oid.NewOID(v.(string))
 		ruleInput.LogRule.SourceLogDatasetId = &is.Id
@@ -909,18 +912,6 @@ func flattenRule(data *schema.ResourceData, input gql.MonitorRule, stageIds []st
 	}
 
 	if logRule, ok := input.(*gql.MonitorRuleMonitorRuleLog); ok {
-		id := oid.OID{
-			Type: oid.TypeDataset,
-			Id:   *logRule.SourceLogDatasetId,
-		}
-		// check for existing version timestamp we can maintain
-		// same approach as in flattenAndSetQuery() for input datasets
-		if v, ok := data.GetOk("rule.0.log.0.source_log_dataset_id"); ok {
-			prv, err := oid.NewOID(v.(string))
-			if err == nil && id.Id == prv.Id {
-				id.Version = prv.Version
-			}
-		}
 		stageIndex := 0
 		for i, sId := range stageIds {
 			if sId == logRule.LogStageId {
@@ -930,12 +921,27 @@ func flattenRule(data *schema.ResourceData, input gql.MonitorRule, stageIds []st
 		}
 
 		log := map[string]interface{}{
-			"compare_function":      toSnake(string(logRule.CompareFunction)),
-			"compare_values":        logRule.CompareValues,
-			"lookback_time":         logRule.LookbackTime.String(),
-			"expression_summary":    logRule.ExpressionSummary,
-			"log_stage_index":       stageIndex,
-			"source_log_dataset_id": id.String(),
+			"compare_function":   toSnake(string(logRule.CompareFunction)),
+			"compare_values":     logRule.CompareValues,
+			"lookback_time":      logRule.LookbackTime.String(),
+			"expression_summary": logRule.ExpressionSummary,
+			"log_stage_index":    stageIndex,
 		}
+		if logRule.SourceLogDatasetId != nil {
+			id := oid.OID{
+				Type: oid.TypeDataset,
+				Id:   *logRule.SourceLogDatasetId,
+			}
+			// check for existing version timestamp we can maintain
+			// same approach as in flattenAndSetQuery() for input datasets
+			if v, ok := data.GetOk("rule.0.log.0.source_log_dataset_id"); ok {
+				prv, err := oid.NewOID(v.(string))
+				if err == nil && id.Id == prv.Id {
+					id.Version = prv.Version
+				}
+			}
+
+			log["source_log_dataset_id"] = id.String()
+		}
 
 		rule["log"] = []interface{}{log}
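Taken together, the resource_monitor.go changes make both log-rule pointers optional: the schema no longer forces Default: 0 on log_stage_index, newMonitorRuleConfig falls back to "stage-0" when the index is unset, and flattenRule only writes source_log_dataset_id back to state when the API returned one. A minimal HCL sketch of a log-rule monitor under the updated schema (attribute names come from the diff and the tests below; the workspace, dataset, and pipeline wiring is illustrative only):

resource "observe_monitor" "example" {
	workspace = data.observe_workspace.default.oid
	name      = "log-rule-example"
	inputs = {
		"test" = observe_dataset.example.oid
	}

	stage {
		pipeline = <<-EOF
			filter kind ~ "test"
		EOF
	}

	rule {
		log {
			compare_function   = "greater"
			compare_values     = [1]
			lookback_time      = "1m"
			expression_summary = "Some text"
			# log_stage_index and source_log_dataset_id are both optional;
			# omitting log_stage_index now targets "stage-0" (the first stage).
		}
	}

	notification_spec {
		merge = "separate"
	}
}

The acceptance tests below exercise both paths: one step pins the rule to a stage via log_stage_index, the other points it at a dataset via source_log_dataset_id.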
observe/resource_monitor_test.go (75 changes: 72 additions & 3 deletions)
@@ -562,14 +562,19 @@ func TestAccObserveMonitorLog(t *testing.T) {
		name = "%[1]s"
		inputs = {
			// "test" = observe_datastream.test.dataset
			"test" = observe_dataset.first.oid
		}
		stage {
			pipeline = <<-EOF
				colmake kind:"test", description:"test"
			EOF
			output_stage = true
		}
		stage {
			pipeline = <<-EOF
				filter kind ~ "test"
			EOF
		}
		rule {
@@ -580,7 +585,66 @@
				compare_values = [1]
				lookback_time = "1m"
				expression_summary = "Some text"
				log_stage_index = 0
				log_stage_index = 1
			}
		}
		notification_spec {
			merge = "separate"
		}
	}`, randomPrefix),
	Check: resource.ComposeTestCheckFunc(
		resource.TestCheckResourceAttr("observe_monitor.first", "name", randomPrefix),
		resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.compare_function", "greater"),
		resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.compare_values.0", "1"),
		resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.lookback_time", "1m0s"),
		resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.expression_summary", "Some text"),
		resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.log_stage_index", "1"),
	),
},
{
	Config: fmt.Sprintf(monitorConfigPreamble+`
resource "observe_dataset" "first" {
workspace = data.observe_workspace.default.oid
name = "%[1]s-first"
inputs = {
"test" = observe_datastream.test.dataset
}
stage {
pipeline = <<-EOF
make_col vt:BUNDLE_TIMESTAMP
make_interval vt
EOF
}
}
resource "observe_monitor" "first" {
workspace = data.observe_workspace.default.oid
name = "%[1]s"
inputs = {
"test" = observe_dataset.first.oid
}
stage {
pipeline = <<-EOF
filter OBSERVATION_INDEX != 0
EOF
}
stage {
pipeline = "timechart 1m, frame(back:10m), A_ContainerLogsClean_count:count(), group_by()"
}
rule {
source_column = "A_ContainerLogsClean_count"
log {
compare_function = "greater"
compare_values = [1]
lookback_time = "1m"
expression_summary = "Some text"
source_log_dataset_id = observe_dataset.first.oid
}
}
@@ -591,7 +655,12 @@
	}`, randomPrefix),
	Check: resource.ComposeTestCheckFunc(
		resource.TestCheckResourceAttr("observe_monitor.first", "name", randomPrefix),
		//TODO
		resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.compare_function", "greater"),
		resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.compare_values.0", "1"),
		resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.lookback_time", "1m0s"),
		resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.expression_summary", "Some text"),
		resource.TestCheckResourceAttr("observe_monitor.first", "rule.0.log.0.log_stage_index", "0"),
		resource.TestCheckResourceAttrPair("observe_monitor.first", "rule.0.log.0.source_log_dataset_id", "observe_dataset.first", "oid"),
	),
},
},