diff --git a/docs/resources/lts_metric_rule.md b/docs/resources/lts_metric_rule.md
new file mode 100644
index 00000000000..566bd95a247
--- /dev/null
+++ b/docs/resources/lts_metric_rule.md
@@ -0,0 +1,245 @@
+---
+subcategory: "Log Tank Service (LTS)"
+layout: "huaweicloud"
+page_title: "HuaweiCloud: huaweicloud_lts_metric_rule"
+description: |-
+ Manages an LTS log metric rule resource within HuaweiCloud.
+---
+
+# huaweicloud_lts_metric_rule
+
+Manages an LTS log metric rule resource within HuaweiCloud.
+
+## Example Usage
+
+```hcl
+variable "metric_rule_name" {}
+variable "log_group_id" {}
+variable "log_stream_id" {}
+variable "sinks_metric_name" {}
+variable "prometheus_instance_name" {}
+variable "prometheus_instance_id" {}
+variable "aggregator_field" {}
+variable "log_filters" {
+ type = list(object({
+ type = string
+ filters = list(object({
+ type = string
+ key = string
+ value = string
+ }))
+ }))
+}
+
+resource "huaweicloud_lts_metric_rule" "test" {
+ name = var.metric_rule_name
+ status = "enable"
+ log_group_id = var.log_group_id
+ log_stream_id = var.log_stream_id
+
+ sampler {
+ type = "random"
+ ratio = "0.5"
+ }
+
+ sinks {
+ type = "aom"
+ metric_name = var.sinks_metric_name
+ name = var.prometheus_instance_name
+ instance_id = var.prometheus_instance_id
+ }
+
+ aggregator {
+ type = "count"
+ field = var.aggregator_field
+ }
+
+ window_size = "PT1M"
+
+ filter {
+ type = "and"
+
+ dynamic "filters" {
+ for_each = var.log_filters
+ content {
+ type = filters.value.type
+
+ dynamic "filters" {
+ for_each = filters.value.filters
+ content {
+ type = filters.value.type
+ key = filters.value.key
+ value = filters.value.value
+ }
+ }
+ }
+ }
+ }
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `region` - (Optional, String, ForceNew) Specifies the region in which to create the resource.
+ If omitted, the provider-level region will be used.
+ Changing this creates a new resource.
+
+* `name` - (Required, String) Specifies the name of the log metric rule. The name must be unique.
+ The name can contain a maximum of `256` characters, and only English letters, digits, and hyphens (-) are allowed.
+
+* `status` - (Required, String) Specifies the status of the log metric rule.
+ The valid values are as follows:
+ + **enable**
+ + **disable**
+
+* `log_group_id` - (Required, String) Specifies the log group ID to which the log metric rule belongs.
+
+* `log_stream_id` - (Required, String) Specifies the log stream ID to which the log metric rule belongs.
+
+* `sampler` - (Required, List) Specifies the sampling configuration of the log.
+ The [sampler](#metric_rule_sampler) structure is documented below.
+
+* `sinks` - (Required, List) Specifies the storage location of the generated metrics.
+ The [sinks](#metric_rule_sinks) structure is documented below.
+
+* `aggregator` - (Required, List) Specifies the configuration of log statistics mode.
+ The [aggregator](#metric_rule_aggregator) structure is documented below.
+
+* `window_size` - (Required, String) Specifies the interval time for processing data windows.
+ The valid values are as follows:
+ + **PT5S**: Indicates `5` seconds.
+ + **PT1M**: Indicates `1` minute.
+ + **PT5M**: Indicates `5` minutes.
+
+* `report` - (Optional, Bool) Specifies whether to report data to sinks, defaults to **false**.
+
+* `filter` - (Optional, List) Specifies the configuration of log filtering rule.
+ The [filter](#metric_rule_filter) structure is documented below.
+
+* `description` - (Optional, String) Specifies the description of the log metric rule.
+
+
+The `sampler` block supports:
+
+* `type` - (Required, String) Specifies the type of the log sampling.
+ The valid values are as follows:
+ + **random**: Indicates random sampling of the logs, processing only part of the data.
+ + **none**: Indicates random sampling is disabled and all data is processed.
+
+* `ratio` - (Required, String) Specifies the sampling rate of the log.
+ + If `sampler.type` is set to **random**, the valid value ranges from `0.1` to `1`.
+ + If `sampler.type` is set to **none**, the value is set to `1`.
+
+
+The `sinks` block supports:
+
+* `type` - (Required, String) Specifies the type of the stored object.
+ The valid values are as follows:
+ + **aom**
+
+* `metric_name` - (Required, String) Specifies the name of the generated log metric. The name must be unique.
+ Only English letters, digits, hyphens (-) and colons (:) are allowed, and the name must start with an English letter.
+
+* `name` - (Optional, String) Specifies the name of the AOM Prometheus common instance.
+ This parameter is required and available only when the `sinks.type` parameter is set to **aom**.
+
+* `instance_id` - (Optional, String) Specifies the ID of the AOM Prometheus common instance.
+ This parameter is required and available only when the `sinks.type` parameter is set to **aom**.
+
+
+The `aggregator` block supports:
+
+* `type` - (Required, String) Specifies the type of the log statistics.
+ The valid values are as follows:
+ + **count**: Indicates the number of the logs.
+ + **countKeyword**: Indicates the number of times the keyword appears.
+ + **max**: Indicates the maximum value of the specified field.
+ + **min**: Indicates the minimum value of the specified field.
+ + **avg**: Indicates the average value of the specified field.
+ + **sum**: Indicates the sum value of the specified field.
+ + **p50**: Indicates the value below which `50%` of the data falls.
+ + **p70**: Indicates the value below which `70%` of the data falls.
+ + **p90**: Indicates the value below which `90%` of the data falls.
+ + **p95**: Indicates the value below which `95%` of the data falls.
+ + **p99**: Indicates the value below which `99%` of the data falls.
+
+* `field` - (Required, String) Specifies the field of the log statistics.
+
+* `group_by` - (Optional, List) Specifies the list of the group fields of the log statistics.
+
+* `keyword` - (Optional, String) Specifies the keyword of the log statistics. The keyword is case sensitive.
+ This parameter is required and available only when the `aggregator.type` parameter is set to **countKeyword**.
+
+
+The `filter` block supports:
+
+* `type` - (Optional, String) Specifies the filter type of the log.
+ The parameter must be used together with `filter.filters`.
+ The valid values are as follows:
+ + **or**
+ + **and**
+
+* `filters` - (Optional, List) Specifies the list of log filtering rule groups.
+ The [filters](#metric_rule_filter_groups) structure is documented below.
+
+
+The `filters` block supports:
+
+* `type` - (Optional, String) Specifies the filter type of the log.
+ The parameter must be used together with `filter.filters.filters`.
+ The valid values are as follows:
+ + **or**
+ + **and**
+
+* `filters` - (Optional, List) Specifies the list of the log filter rule associations.
+ The [filters](#metric_rule_associated_filters) structure is documented below.
+
+
+The `filters` block supports:
+
+* `key` - (Required, String) Specifies the filter field of the log.
+
+* `type` - (Required, String) Specifies the filter conditions of the log.
+ The valid values are as follows:
+ + **contains**: Applicable to `string` data type.
+ + **notContains**: Applicable to `string` data type.
+ + **fieldExist**: Applicable to `string` data type.
+ + **fieldNotExist**: Applicable to `string`, `float` and `long` data types.
+ + **equal**: Applicable to `string`, `float` and `long` data types.
+ + **notEqual**: Applicable to `string`, `float` and `long` data types.
+ + **gt**: Applicable to `float` and `long` data types.
+ + **gte**: Applicable to `float` and `long` data types.
+ + **lt**: Applicable to `float` and `long` data types.
+ + **lte**: Applicable to `float` and `long` data types.
+ + **range**: Applicable to `float` and `long` data types.
+ + **outRange**: Applicable to `float` and `long` data types.
+
+* `value` - (Optional, String) Specifies the value corresponding to the log filter field.
+ This parameter is required and available only when the `filters.filters.filters.type` parameter is set to **contains**,
+ **notContains**, **equal**, **notEqual**, **gt**, **gte**, **lt** or **lte**.
+
+* `lower` - (Optional, String) Specifies the minimum value corresponding to the log filter field.
+ This parameter is required and available only when the `filters.filters.filters.type` parameter is set to **range**
+ or **outRange**.
+
+* `upper` - (Optional, String) Specifies the maximum value corresponding to the log filter field.
+ This parameter is required and available only when the `filters.filters.filters.type` parameter is set to **range**
+ or **outRange**.
+
+## Attribute Reference
+
+In addition to all arguments above, the following attributes are exported:
+
+* `id` - The resource ID, which is also the log metric rule ID.
+
+* `created_at` - The creation time of the log metric rule, in RFC3339 format.
+
+## Import
+
+The log metric rule resource can be imported using `id`, e.g.
+
+```bash
+$ terraform import huaweicloud_lts_metric_rule.test <id>
+```
diff --git a/huaweicloud/provider.go b/huaweicloud/provider.go
index c0c78a5e5ab..19100ae0089 100644
--- a/huaweicloud/provider.go
+++ b/huaweicloud/provider.go
@@ -1837,6 +1837,7 @@ func Provider() *schema.Provider {
"huaweicloud_lts_host_access": lts.ResourceHostAccessConfig(),
"huaweicloud_lts_log_converge": lts.ResourceLogConverge(),
"huaweicloud_lts_log_converge_switch": lts.ResourceLogConvergeSwitch(),
+ "huaweicloud_lts_metric_rule": lts.ResourceMetricRule(),
"huaweicloud_lts_stream": lts.ResourceLTSStream(),
"huaweicloud_lts_structing_template": lts.ResourceStructConfig(),
"huaweicloud_lts_structuring_custom_configuration": lts.ResourceStructCustomConfig(),
diff --git a/huaweicloud/services/acceptance/lts/resource_huaweicloud_lts_metric_rule_test.go b/huaweicloud/services/acceptance/lts/resource_huaweicloud_lts_metric_rule_test.go
new file mode 100644
index 00000000000..64174dc3014
--- /dev/null
+++ b/huaweicloud/services/acceptance/lts/resource_huaweicloud_lts_metric_rule_test.go
@@ -0,0 +1,275 @@
+package lts
+
+import (
+ "fmt"
+ "regexp"
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
+
+ "github.com/huaweicloud/terraform-provider-huaweicloud/huaweicloud/config"
+ "github.com/huaweicloud/terraform-provider-huaweicloud/huaweicloud/services/acceptance"
+ "github.com/huaweicloud/terraform-provider-huaweicloud/huaweicloud/services/lts"
+)
+
+func getMetricRuleResourceFunc(cfg *config.Config, state *terraform.ResourceState) (interface{}, error) {
+ client, err := cfg.NewServiceClient("lts", acceptance.HW_REGION_NAME)
+ if err != nil {
+ return nil, fmt.Errorf("error creating LTS client: %s", err)
+ }
+
+ return lts.GetMetricRuleById(client, state.Primary.ID)
+}
+
+func TestAccMetricRule_basic(t *testing.T) {
+ var (
+ metricRule interface{}
+ rName = acceptance.RandomAccResourceName()
+ updateName = acceptance.RandomAccResourceName()
+ resourceName = "huaweicloud_lts_metric_rule.test"
+ rc = acceptance.InitResourceCheck(
+ resourceName,
+ &metricRule,
+ getMetricRuleResourceFunc)
+ )
+
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() {
+ acceptance.TestAccPreCheck(t)
+ },
+ ProviderFactories: acceptance.TestAccProviderFactories,
+ CheckDestroy: rc.CheckResourceDestroy(),
+ Steps: []resource.TestStep{
+ {
+ Config: testAccMetricRule_basic_step1(rName),
+ Check: resource.ComposeTestCheckFunc(
+ rc.CheckResourceExists(),
+ resource.TestCheckResourceAttr(resourceName, "name", rName),
+ resource.TestCheckResourceAttr(resourceName, "status", "disable"),
+ resource.TestCheckResourceAttrPair(resourceName, "log_group_id", "huaweicloud_lts_group.test.0", "id"),
+ resource.TestCheckResourceAttrPair(resourceName, "log_stream_id", "huaweicloud_lts_stream.test.0", "id"),
+ resource.TestCheckResourceAttr(resourceName, "sampler.0.type", "none"),
+ resource.TestCheckResourceAttr(resourceName, "sampler.0.ratio", "1"),
+ resource.TestCheckResourceAttr(resourceName, "sinks.0.type", "aom"),
+ resource.TestCheckResourceAttr(resourceName, "sinks.0.metric_name", rName),
+ resource.TestCheckResourceAttrPair(resourceName, "sinks.0.name", "huaweicloud_aom_prom_instance.test.0", "prom_name"),
+ resource.TestCheckResourceAttrPair(resourceName, "sinks.0.instance_id", "huaweicloud_aom_prom_instance.test.0", "id"),
+ resource.TestCheckResourceAttr(resourceName, "aggregator.0.type", "countKeyword"),
+ resource.TestCheckResourceAttr(resourceName, "aggregator.0.field", "event_type"),
+ resource.TestCheckResourceAttr(resourceName, "aggregator.0.group_by.0", "project_id"),
+ resource.TestCheckResourceAttr(resourceName, "aggregator.0.keyword", "global"),
+ resource.TestCheckResourceAttr(resourceName, "window_size", "PT5S"),
+ resource.TestCheckResourceAttr(resourceName, "report", "false"),
+ resource.TestMatchResourceAttr(resourceName, "created_at",
+ regexp.MustCompile(`^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}?(Z|([+-]\d{2}:\d{2}))$`)),
+ ),
+ },
+ {
+ Config: testAccMetricRule_basic_step2(rName, updateName),
+ Check: resource.ComposeTestCheckFunc(
+ rc.CheckResourceExists(),
+ resource.TestCheckResourceAttr(resourceName, "name", updateName),
+ resource.TestCheckResourceAttr(resourceName, "status", "enable"),
+ resource.TestCheckResourceAttrPair(resourceName, "log_group_id", "huaweicloud_lts_group.test.1", "id"),
+ resource.TestCheckResourceAttrPair(resourceName, "log_stream_id", "huaweicloud_lts_stream.test.1", "id"),
+ resource.TestCheckResourceAttr(resourceName, "sampler.0.type", "random"),
+ resource.TestCheckResourceAttr(resourceName, "sampler.0.ratio", "0.5"),
+ resource.TestCheckResourceAttr(resourceName, "sinks.0.type", "aom"),
+ resource.TestCheckResourceAttr(resourceName, "sinks.0.metric_name", updateName),
+ resource.TestCheckResourceAttrPair(resourceName, "sinks.0.name", "huaweicloud_aom_prom_instance.test.1", "prom_name"),
+ resource.TestCheckResourceAttrPair(resourceName, "sinks.0.instance_id", "huaweicloud_aom_prom_instance.test.1", "id"),
+ resource.TestCheckResourceAttr(resourceName, "aggregator.0.type", "count"),
+ resource.TestCheckResourceAttr(resourceName, "aggregator.0.field", "trace_id"),
+ resource.TestCheckResourceAttr(resourceName, "aggregator.0.group_by.0", "hostIP"),
+ resource.TestCheckResourceAttr(resourceName, "window_size", "PT1M"),
+ resource.TestCheckResourceAttr(resourceName, "report", "true"),
+ resource.TestCheckResourceAttr(resourceName, "filter.0.type", "and"),
+ resource.TestCheckResourceAttr(resourceName, "filter.0.filters.#", "1"),
+ resource.TestCheckResourceAttr(resourceName, "filter.0.filters.0.type", "and"),
+ resource.TestCheckResourceAttr(resourceName, "filter.0.filters.0.filters.#", "2"),
+ resource.TestCheckResourceAttr(resourceName, "description", "Updated by terrafrom script"),
+ ),
+ },
+ {
+ Config: testAccMetricRule_basic_step3(rName, updateName),
+ Check: resource.ComposeTestCheckFunc(
+ rc.CheckResourceExists(),
+ resource.TestCheckResourceAttr(resourceName, "name", updateName),
+ resource.TestCheckResourceAttr(resourceName, "aggregator.0.keyword", ""),
+ resource.TestCheckResourceAttr(resourceName, "aggregator.0.group_by.#", "0"),
+ resource.TestCheckResourceAttr(resourceName, "filter.0.filters.0.filters.#", "0"),
+ resource.TestCheckResourceAttr(resourceName, "description", ""),
+ ),
+ },
+ {
+ ResourceName: resourceName,
+ ImportState: true,
+ ImportStateVerify: true,
+ },
+ },
+ })
+}
+
+func testAccMetricRule_basic_base(rName string) string {
+ return fmt.Sprintf(`
+resource "huaweicloud_aom_prom_instance" "test" {
+ count = 2
+ prom_name = "%[1]s_${count.index}"
+ prom_type = "REMOTE_WRITE"
+}
+
+resource "huaweicloud_lts_group" "test" {
+ count = 2
+ group_name = "%[1]s_${count.index}"
+ ttl_in_days = 30
+}
+
+resource "huaweicloud_lts_stream" "test" {
+ count = 2
+ group_id = huaweicloud_lts_group.test[count.index].id
+ stream_name = "%[1]s_${count.index}"
+}
+
+resource "huaweicloud_lts_structing_template" "test" {
+ count = 2
+
+ log_group_id = huaweicloud_lts_group.test[count.index].id
+ log_stream_id = huaweicloud_lts_stream.test[count.index].id
+ template_name = "CTS"
+ template_type = "built_in"
+}
+`, rName)
+}
+
+func testAccMetricRule_basic_step1(rName string) string {
+ return fmt.Sprintf(`
+%[1]s
+
+resource "huaweicloud_lts_metric_rule" "test" {
+ name = "%[2]s"
+ status = "disable"
+ log_group_id = huaweicloud_lts_group.test[0].id
+ log_stream_id = huaweicloud_lts_stream.test[0].id
+
+ sampler {
+ type = "none"
+ ratio = "1"
+ }
+
+ sinks {
+ type = "aom"
+ metric_name = "%[2]s"
+ name = huaweicloud_aom_prom_instance.test[0].prom_name
+ instance_id = huaweicloud_aom_prom_instance.test[0].id
+ }
+
+ aggregator {
+ type = "countKeyword"
+ field = "event_type"
+ group_by = ["project_id"]
+ keyword = "global"
+ }
+
+ window_size = "PT5S"
+ report = false
+
+ filter {}
+
+ description = "Created by terraform script"
+}
+`, testAccMetricRule_basic_base(rName), rName)
+}
+
+func testAccMetricRule_basic_step2(rName, updateName string) string {
+ return fmt.Sprintf(`
+%[1]s
+
+resource "huaweicloud_lts_metric_rule" "test" {
+ name = "%[2]s"
+ status = "enable"
+ log_group_id = huaweicloud_lts_group.test[1].id
+ log_stream_id = huaweicloud_lts_stream.test[1].id
+
+ sampler {
+ type = "random"
+ ratio = "0.5"
+ }
+
+ sinks {
+ type = "aom"
+ metric_name = "%[2]s"
+ name = huaweicloud_aom_prom_instance.test[1].prom_name
+ instance_id = huaweicloud_aom_prom_instance.test[1].id
+ }
+
+ aggregator {
+ type = "count"
+ field = "trace_id"
+ group_by = ["hostIP"]
+ }
+
+ window_size = "PT1M"
+ report = true
+
+ filter {
+ type = "and"
+
+ filters {
+ type = "and"
+
+ filters {
+ type = "gt"
+ key = "code"
+ value = "200"
+ }
+ filters {
+ type = "fieldExist"
+ key = "event_type"
+ }
+ }
+ }
+
+ description = "Updated by terrafrom script"
+}
+`, testAccMetricRule_basic_base(rName), updateName)
+}
+
+func testAccMetricRule_basic_step3(rName, updateName string) string {
+ return fmt.Sprintf(`
+%[1]s
+
+resource "huaweicloud_lts_metric_rule" "test" {
+ name = "%[2]s"
+ status = "enable"
+ log_group_id = huaweicloud_lts_group.test[1].id
+ log_stream_id = huaweicloud_lts_stream.test[1].id
+
+ sampler {
+ type = "random"
+ ratio = "0.5"
+ }
+
+ sinks {
+ type = "aom"
+ metric_name = "%[2]s"
+ name = huaweicloud_aom_prom_instance.test[1].prom_name
+ instance_id = huaweicloud_aom_prom_instance.test[1].id
+ }
+
+ aggregator {
+ type = "count"
+ field = "trace_id"
+ }
+
+ window_size = "PT1M"
+ report = true
+
+ filter {
+ type = "and"
+
+ filters {}
+ }
+}
+`, testAccMetricRule_basic_base(rName), updateName)
+}
diff --git a/huaweicloud/services/lts/resource_huaweicloud_lts_metric_rule.go b/huaweicloud/services/lts/resource_huaweicloud_lts_metric_rule.go
new file mode 100644
index 00000000000..744b6063b10
--- /dev/null
+++ b/huaweicloud/services/lts/resource_huaweicloud_lts_metric_rule.go
@@ -0,0 +1,578 @@
+package lts
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "github.com/hashicorp/go-multierror"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+
+ "github.com/chnsz/golangsdk"
+
+ "github.com/huaweicloud/terraform-provider-huaweicloud/huaweicloud/common"
+ "github.com/huaweicloud/terraform-provider-huaweicloud/huaweicloud/config"
+ "github.com/huaweicloud/terraform-provider-huaweicloud/huaweicloud/utils"
+)
+
+// @API LTS POST /v2/{project_id}/lts/log2metric/rules
+// @API LTS GET /v2/{project_id}/lts/log2metric/rules/{rule_id}
+// @API LTS PUT /v2/{project_id}/lts/log2metric/rules/{rule_id}
+// @API LTS DELETE /v2/{project_id}/lts/log2metric/rules/{rule_id}
+func ResourceMetricRule() *schema.Resource {
+ return &schema.Resource{
+ CreateContext: resourceMetricRuleCreate,
+ ReadContext: resourceMetricRuleRead,
+ UpdateContext: resourceMetricRuleUpdate,
+ DeleteContext: resourceMetricRuleDelete,
+
+ Importer: &schema.ResourceImporter{
+ StateContext: schema.ImportStatePassthroughContext,
+ },
+
+ Schema: map[string]*schema.Schema{
+ "region": {
+ Type: schema.TypeString,
+ Optional: true,
+ Computed: true,
+ ForceNew: true,
+ },
+ "name": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The name of the log metric rule.`,
+ },
+ "status": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The status of the log metric rule.`,
+ },
+ "log_group_id": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The log group ID to which the log metric rule belongs.`,
+ },
+ "log_stream_id": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The log stream ID to which the log metric rule belongs.`,
+ },
+ "sampler": {
+ Type: schema.TypeList,
+ Required: true,
+ MaxItems: 1,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "type": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The type of the log sampling.`,
+ },
+ "ratio": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The sampling rate of the log.`,
+ },
+ },
+ },
+ Description: `The sampling configuration of the log.`,
+ },
+ "sinks": {
+ Type: schema.TypeSet,
+ Required: true,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "type": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The type of the stored object.`,
+ },
+ "metric_name": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The name of the generated log metric.`,
+ },
+ "name": {
+ Type: schema.TypeString,
+ Optional: true,
+ Description: `The name of the AOM Prometheus common instance.`,
+ },
+ "instance_id": {
+ Type: schema.TypeString,
+ Optional: true,
+ Description: `The ID of the AOM Prometheus common instance.`,
+ },
+ },
+ },
+ Description: `The storage location of the generated metrics.`,
+ },
+ "aggregator": {
+ Type: schema.TypeList,
+ Required: true,
+ MaxItems: 1,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "type": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The type of the log statistics.`,
+ },
+ "field": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The field of the log statistics.`,
+ },
+ "group_by": {
+ Type: schema.TypeList,
+ Optional: true,
+ Elem: &schema.Schema{Type: schema.TypeString},
+ Description: `The list of the group fields of the log statistics.`,
+ },
+ "keyword": {
+ Type: schema.TypeString,
+ Optional: true,
+ Description: `The keyword of the log statistics.`,
+ },
+ },
+ },
+ Description: `The configuration of log statistics mode.`,
+ },
+ "window_size": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The interval time for processing data windows.`,
+ },
+ "report": {
+ Type: schema.TypeBool,
+ Optional: true,
+ Description: `Whether to report data to sinks.`,
+ },
+ "filter": {
+ Type: schema.TypeList,
+ Optional: true,
+ MaxItems: 1,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "type": {
+ Type: schema.TypeString,
+ Optional: true,
+ RequiredWith: []string{"filter.0.filters"},
+ Description: `The filter type of the log.`,
+ },
+ "filters": {
+ Type: schema.TypeSet,
+ Optional: true,
+ Elem: metricRuleFiltersSchema(),
+ Description: `The list of log filtering rule groups.`,
+ },
+ },
+ },
+ DiffSuppressFunc: func(_, _, _ string, d *schema.ResourceData) bool {
+ oldRaw, newRaw := d.GetChange("filter")
+ // If filter is set to {}, the DiffSuppress function is needed to prevent changes.
+ return len(oldRaw.([]interface{})) == 0 && len(newRaw.([]interface{})) > 0 && newRaw.([]interface{})[0] == nil
+ },
+ Description: `The configuration of log filtering rule.`,
+ },
+ "description": {
+ Type: schema.TypeString,
+ Optional: true,
+ Description: `The description of the log metric rule.`,
+ },
+ "created_at": {
+ Type: schema.TypeString,
+ Computed: true,
+ Description: `The creation time of the log metric rule, in RFC3339 format.`,
+ },
+ },
+ }
+}
+
+func metricRuleFiltersSchema() *schema.Resource {
+ return &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "type": {
+ Type: schema.TypeString,
+ Optional: true,
+ Description: `The filter type of the log.`,
+ },
+ "filters": {
+ Type: schema.TypeSet,
+ Optional: true,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "key": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The filter field of the log.`,
+ },
+ "type": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: `The filter conditions of the log.`,
+ },
+ "value": {
+ Type: schema.TypeString,
+ Optional: true,
+ Description: `The value corresponding to the log filter field.`,
+ },
+ "lower": {
+ Type: schema.TypeString,
+ Optional: true,
+ Description: `The minimum value corresponding to the log filter field.`,
+ },
+ "upper": {
+ Type: schema.TypeString,
+ Optional: true,
+ Description: `The maximum value corresponding to the log filter field.`,
+ },
+ },
+ },
+ Description: `The list of the log filter rule associations.`,
+ },
+ },
+ }
+}
+
+func buildMetricRuleBodyParams(d *schema.ResourceData, domainId, projectId string) map[string]interface{} {
+ return map[string]interface{}{
+ "domain_id": domainId,
+ "project_id": projectId,
+ "name": d.Get("name"),
+ "status": d.Get("status"),
+ "log_group_id": d.Get("log_group_id"),
+ "log_stream_id": d.Get("log_stream_id"),
+ "sampler": buildMetricRuleSampler(d.Get("sampler.0")),
+ "report": d.Get("report"),
+ "sinks": buildMetricRuleSinks(d.Get("sinks").(*schema.Set).List()),
+ "aggregator": buildMetricRuleAggregator(d.Get("aggregator.0")),
+ "window_size": d.Get("window_size"),
+ "filter": buildMetricRuleFilter(d.Get("filter").([]interface{})),
+ "description": d.Get("description"),
+ }
+}
+
+func buildMetricRuleSampler(sampler interface{}) map[string]interface{} {
+ return map[string]interface{}{
+ "type": utils.PathSearch("type", sampler, nil),
+ "ratio": utils.PathSearch("ratio", sampler, nil),
+ }
+}
+
+func buildMetricRuleSinks(sinks []interface{}) []map[string]interface{} {
+ rest := make([]map[string]interface{}, len(sinks))
+ for i, item := range sinks {
+ rest[i] = map[string]interface{}{
+ "type": utils.PathSearch("type", item, nil),
+ "metric_name": utils.PathSearch("metric_name", item, nil),
+ "name": utils.PathSearch("name", item, nil),
+ "instance": utils.PathSearch("instance_id", item, nil),
+ }
+ }
+ return rest
+}
+
+func buildMetricRuleAggregator(aggregator interface{}) map[string]interface{} {
+ params := map[string]interface{}{
+ "type": utils.PathSearch("type", aggregator, nil),
+ "field": utils.PathSearch("field", aggregator, nil),
+ "group_by": utils.ValueIgnoreEmpty(utils.PathSearch("group_by", aggregator, nil)),
+ "keyword": utils.ValueIgnoreEmpty(utils.PathSearch("keyword", aggregator, nil)),
+ }
+ return utils.RemoveNil(params)
+}
+
+func buildMetricRuleFilter(filter []interface{}) map[string]interface{} {
+ if len(filter) == 0 || filter[0] == nil {
+ return nil
+ }
+ return map[string]interface{}{
+ "type": utils.PathSearch("type", filter[0], nil),
+ "filters": buildMetricRuleFilterRules(utils.PathSearch("filters", filter[0], schema.NewSet(schema.HashString, nil)).(*schema.Set)),
+ }
+}
+
+func buildMetricRuleFilterRules(filterRules *schema.Set) []map[string]interface{} {
+ rst := make([]map[string]interface{}, 0)
+ for _, v := range filterRules.List() {
+ filters := utils.PathSearch("filters", v, schema.NewSet(schema.HashString, nil)).(*schema.Set)
+ if filters.Len() > 0 {
+ rst = append(rst, map[string]interface{}{
+ "type": utils.PathSearch("type", v, nil),
+ "filters": buildMetricRuleFilters(utils.PathSearch("filters", v, schema.NewSet(schema.HashString, nil)).(*schema.Set)),
+ })
+ }
+ }
+ return rst
+}
+
+func buildMetricRuleFilters(filters *schema.Set) []map[string]interface{} {
+ if filters.Len() == 0 {
+ return nil
+ }
+
+ rst := make([]map[string]interface{}, filters.Len())
+ for i, v := range filters.List() {
+ params := map[string]interface{}{
+ "key": utils.PathSearch("key", v, nil),
+ "type": utils.PathSearch("type", v, nil),
+ "value": utils.ValueIgnoreEmpty(utils.PathSearch("value", v, nil)),
+ "lower": utils.ValueIgnoreEmpty(utils.PathSearch("lower", v, nil)),
+ "upper": utils.ValueIgnoreEmpty(utils.PathSearch("upper", v, nil)),
+ }
+ rst[i] = utils.RemoveNil(params)
+ }
+ return rst
+}
+
+func resourceMetricRuleCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+ var (
+ cfg = meta.(*config.Config)
+ region = cfg.GetRegion(d)
+ httpUrl = "v2/{project_id}/lts/log2metric/rules"
+ )
+
+ client, err := cfg.NewServiceClient("lts", region)
+ if err != nil {
+ return diag.Errorf("error creating LTS client: %s", err)
+ }
+ projectId := client.ProjectID
+ createPath := client.Endpoint + httpUrl
+ createPath = strings.ReplaceAll(createPath, "{project_id}", projectId)
+
+ createOpts := golangsdk.RequestOpts{
+ KeepResponseBody: true,
+ JSONBody: buildMetricRuleBodyParams(d, cfg.DomainID, projectId),
+ }
+
+ requestResp, err := client.Request("POST", createPath, &createOpts)
+ if err != nil {
+ return diag.Errorf("error creating log metric rule: %s", err)
+ }
+
+ respBody, err := utils.FlattenResponse(requestResp)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
+ ruleId := utils.PathSearch("rule_id", respBody, "").(string)
+ if ruleId == "" {
+ return diag.Errorf("unable to find the log metric rule ID from the API response")
+ }
+
+ d.SetId(ruleId)
+
+ return resourceMetricRuleRead(ctx, d, meta)
+}
+
+// GetMetricRuleById is a method used to get metric rule detail by rule ID.
+func GetMetricRuleById(client *golangsdk.ServiceClient, ruleId string) (interface{}, error) {
+ httpUrl := "v2/{project_id}/lts/log2metric/rules/{rule_id}"
+ getPath := client.Endpoint + httpUrl
+ getPath = strings.ReplaceAll(getPath, "{project_id}", client.ProjectID)
+ getPath = strings.ReplaceAll(getPath, "{rule_id}", ruleId)
+
+ getOpt := golangsdk.RequestOpts{
+ KeepResponseBody: true,
+ MoreHeaders: map[string]string{"Content-Type": "application/json"},
+ }
+
+ requestResp, err := client.Request("GET", getPath, &getOpt)
+ if err != nil {
+ return nil, err
+ }
+
+ return utils.FlattenResponse(requestResp)
+}
+
+func resourceMetricRuleRead(_ context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+ var (
+ cfg = meta.(*config.Config)
+ region = cfg.GetRegion(d)
+ ruleId = d.Id()
+ )
+
+ client, err := cfg.NewServiceClient("lts", region)
+ if err != nil {
+ return diag.Errorf("error creating LTS client: %s", err)
+ }
+
+ respBody, err := GetMetricRuleById(client, ruleId)
+ if err != nil {
+ return common.CheckDeletedDiag(d, err, fmt.Sprintf("error retrieving log metric rule (%s)", ruleId))
+ }
+
+ mErr := multierror.Append(nil,
+ d.Set("region", region),
+ d.Set("name", utils.PathSearch("name", respBody, nil)),
+ d.Set("status", utils.PathSearch("status", respBody, nil)),
+ d.Set("log_group_id", utils.PathSearch("log_group_id", respBody, nil)),
+ d.Set("log_stream_id", utils.PathSearch("log_stream_id", respBody, nil)),
+ d.Set("sampler", flattenMetricRuleSampler(utils.PathSearch("sampler", respBody, nil))),
+ d.Set("report", utils.PathSearch("report", respBody, nil)),
+ d.Set("sinks", flattenMetricRuleSinks(utils.PathSearch("sinks", respBody, make([]interface{}, 0)).([]interface{}))),
+ d.Set("aggregator", flattenMetricRuleAggregator(utils.PathSearch("aggregator", respBody, nil))),
+ d.Set("window_size", utils.PathSearch("window_size", respBody, nil)),
+ d.Set("filter", flattenMetricRuleFilter(utils.PathSearch("filter", respBody, nil))),
+ d.Set("description", utils.PathSearch("description", respBody, nil)),
+ d.Set("created_at", utils.FormatTimeStampRFC3339(int64(utils.PathSearch("create_time", respBody, float64(0)).(float64))/1000, false)),
+ )
+
+ return diag.FromErr(mErr.ErrorOrNil())
+}
+
+func flattenMetricRuleSampler(sampler interface{}) []map[string]interface{} {
+ if sampler == nil {
+ return nil
+ }
+
+ return []map[string]interface{}{
+ {
+ "type": utils.PathSearch("type", sampler, nil),
+ "ratio": utils.PathSearch("ratio", sampler, nil),
+ },
+ }
+}
+
+func flattenMetricRuleSinks(sinks []interface{}) []map[string]interface{} {
+ if len(sinks) == 0 {
+ return nil
+ }
+
+ rest := make([]map[string]interface{}, len(sinks))
+ for i, item := range sinks {
+ rest[i] = map[string]interface{}{
+ "type": utils.PathSearch("type", item, nil),
+ "metric_name": utils.PathSearch("metric_name", item, nil),
+ "name": utils.PathSearch("name", item, nil),
+ "instance_id": utils.PathSearch("instance", item, nil),
+ }
+ }
+ return rest
+}
+
+func flattenMetricRuleAggregator(aggregator interface{}) []map[string]interface{} {
+ if aggregator == nil {
+ return nil
+ }
+
+ return []map[string]interface{}{
+ {
+ "type": utils.PathSearch("type", aggregator, nil),
+ "field": utils.PathSearch("field", aggregator, nil),
+ "group_by": utils.PathSearch("group_by", aggregator, nil),
+ "keyword": utils.PathSearch("keyword", aggregator, nil),
+ },
+ }
+}
+
+func flattenMetricRuleFilter(filter interface{}) []map[string]interface{} {
+ if filter == nil {
+ return nil
+ }
+
+ return []map[string]interface{}{
+ {
+ "type": utils.PathSearch("type", filter, nil),
+ "filters": flattenmetricRuleFliter(utils.PathSearch("filters", filter, make([]interface{}, 0)).([]interface{})),
+ },
+ }
+}
+
+func flattenmetricRuleFliter(filters []interface{}) []map[string]interface{} {
+ if len(filters) == 0 {
+ // The outermost layer is an object, and the `filter.filters` parameter and sub parameters are optional. so a default value
+ // needs to be set to prevent change.
+ return []map[string]interface{}{
+ {
+ "type": nil,
+ "filters": nil,
+ },
+ }
+ }
+
+ rest := make([]map[string]interface{}, len(filters))
+ for i, v := range filters {
+ rest[i] = map[string]interface{}{
+ "type": utils.PathSearch("type", v, nil),
+ "filters": flattenmetricRuleAssociatedFliter(utils.PathSearch("filters", v, make([]interface{}, 0)).([]interface{})),
+ }
+ }
+ return rest
+}
+
+func flattenmetricRuleAssociatedFliter(associatedFilters []interface{}) []map[string]interface{} {
+ if len(associatedFilters) == 0 {
+ return nil
+ }
+ rest := make([]map[string]interface{}, len(associatedFilters))
+ for i, v := range associatedFilters {
+ rest[i] = map[string]interface{}{
+ "key": utils.PathSearch("key", v, nil),
+ "type": utils.PathSearch("type", v, nil),
+ "value": utils.PathSearch("value", v, nil),
+ "lower": utils.PathSearch("lower", v, nil),
+ "upper": utils.PathSearch("upper", v, nil),
+ }
+ }
+ return rest
+}
+
+func resourceMetricRuleUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+ var (
+ cfg = meta.(*config.Config)
+ region = cfg.GetRegion(d)
+ httpUrl = "v2/{project_id}/lts/log2metric/rules/{rule_id}"
+ ruleId = d.Id()
+ )
+
+ client, err := cfg.NewServiceClient("lts", region)
+ if err != nil {
+ return diag.Errorf("error creating LTS client: %s", err)
+ }
+
+ projectId := client.ProjectID
+ updatePath := client.Endpoint + httpUrl
+ updatePath = strings.ReplaceAll(updatePath, "{project_id}", client.ProjectID)
+ updatePath = strings.ReplaceAll(updatePath, "{rule_id}", ruleId)
+
+ updateOpt := golangsdk.RequestOpts{
+ KeepResponseBody: true,
+ JSONBody: buildMetricRuleBodyParams(d, cfg.DomainID, projectId),
+ }
+
+ _, err = client.Request("PUT", updatePath, &updateOpt)
+ if err != nil {
+ return diag.Errorf("error updating log metric rule (%s): %s", ruleId, err)
+ }
+
+ return resourceMetricRuleRead(ctx, d, meta)
+}
+
+func resourceMetricRuleDelete(_ context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+ var (
+ cfg = meta.(*config.Config)
+ region = cfg.GetRegion(d)
+ httpUrl = "v2/{project_id}/lts/log2metric/rules/{rule_id}"
+ ruleId = d.Id()
+ )
+
+ client, err := cfg.NewServiceClient("lts", region)
+ if err != nil {
+ return diag.Errorf("error creating LTS client: %s", err)
+ }
+ deletePath := client.Endpoint + httpUrl
+ deletePath = strings.ReplaceAll(deletePath, "{project_id}", client.ProjectID)
+ deletePath = strings.ReplaceAll(deletePath, "{rule_id}", ruleId)
+
+ deleteOpts := golangsdk.RequestOpts{
+ KeepResponseBody: true,
+ MoreHeaders: map[string]string{"Content-Type": "application/json"},
+ }
+ _, err = client.Request("DELETE", deletePath, &deleteOpts)
+ if err != nil {
+ return common.CheckDeletedDiag(d, err, fmt.Sprintf("error deleting log metric rule (%s)", ruleId))
+ }
+ return nil
+}