diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md
index 8f9dbb46c3..67a266ffbb 100644
--- a/MIGRATION_GUIDE.md
+++ b/MIGRATION_GUIDE.md
@@ -30,13 +30,16 @@ New fields:
- `change_tracking`
- `is_recursive`
- `is_temporary`
+ - `data_metric_schedule`
+ - `data_metric_function`
- added `show_output` field that holds the response from SHOW VIEWS.
- added `describe_output` field that holds the response from DESCRIBE VIEW. Note that one needs to grant sufficient privileges e.g. with [grant_ownership](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/grant_ownership) on the tables used in this view. Otherwise, this field is not filled.
#### *(breaking change)* Removed fields from snowflake_view resource
Removed fields:
-- `tag`
-The value of this field will be removed from the state automatically. Please, use [tag_association](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/tag_association) instead.
+- `or_replace` - `OR REPLACE` is now added by the provider automatically when `copy_grants` is set to `"true"`.
+- `tag` - Please, use [tag_association](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/tag_association) instead.
+The values of these fields will be removed from the state automatically.
#### *(breaking change)* Required warehouse
For this resource, the provider now uses [policy references](https://docs.snowflake.com/en/sql-reference/functions/policy_references) which requires a warehouse in the connection. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration.
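For reference, a minimal sketch of what a migrated configuration could look like after these changes. All identifiers below (database, schema, view, and warehouse names) are hypothetical placeholders, not taken from this changeset: the removed `or_replace` and `tag` fields are simply dropped, `copy_grants` alone now implies `OR REPLACE`, and a warehouse is supplied in the provider block so policy references can be queried.

```terraform
# Hypothetical names throughout; adjust to your environment.
provider "snowflake" {
  # ... authentication settings ...
  warehouse = "COMPUTE_WH" # or set a DEFAULT_WAREHOUSE for the user
}

resource "snowflake_view" "migrated" {
  database    = "EXAMPLE_DB"
  schema      = "EXAMPLE_SCHEMA"
  name        = "EXAMPLE_VIEW"
  copy_grants = true # OR REPLACE is added automatically; the or_replace field is gone
  statement   = <<-SQL
    select * from foo;
  SQL
  # The removed `tag` block is no longer part of this resource; tags are
  # managed separately with the snowflake_tag_association resource.
}
```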
diff --git a/docs/resources/view.md b/docs/resources/view.md
index a53ef624b5..e3f4c3dc52 100644
--- a/docs/resources/view.md
+++ b/docs/resources/view.md
@@ -38,7 +38,7 @@ resource "snowflake_view" "view" {
select * from foo;
SQL
}
-# resource with attached policies
+# resource with attached policies and data metric functions
resource "snowflake_view" "test" {
database = "database"
schema = "schema"
@@ -55,8 +55,15 @@ resource "snowflake_view" "test" {
policy_name = "aggregation_policy"
entity_key = ["id"]
}
+ data_metric_function {
+ function_name = "data_metric_function"
+ on = ["id"]
+ }
+ data_metric_schedule {
+ using_cron = "15 * * * * UTC"
+ }
statement = <<-SQL
- select id from foo;
+ SELECT id FROM TABLE;
SQL
}
```
@@ -78,11 +85,12 @@ SQL
- `aggregation_policy` (Block List, Max: 1) Specifies the aggregation policy to set on a view. (see [below for nested schema](#nestedblock--aggregation_policy))
- `change_tracking` (String) Specifies to enable or disable change tracking on the table. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value.
- `comment` (String) Specifies a comment for the view.
-- `copy_grants` (Boolean) Retains the access permissions from the original view when a new view is created using the OR REPLACE clause. OR REPLACE must be set when COPY GRANTS is set.
+- `copy_grants` (Boolean) Retains the access permissions from the original view when a new view is created using the OR REPLACE clause.
+- `data_metric_function` (Block Set) Data metric functions used for the view. (see [below for nested schema](#nestedblock--data_metric_function))
+- `data_metric_schedule` (Block List, Max: 1) Specifies the schedule to run the data metric functions periodically. (see [below for nested schema](#nestedblock--data_metric_schedule))
- `is_recursive` (String) Specifies that the view can refer to itself using recursive syntax without necessarily using a CTE (common table expression). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value.
- `is_secure` (String) Specifies that the view is secure. By design, the Snowflake's `SHOW VIEWS` command does not provide information about secure views (consult [view usage notes](https://docs.snowflake.com/en/sql-reference/sql/create-view#usage-notes)) which is essential to manage/import view with Terraform. Use the role owning the view while managing secure views. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value.
- `is_temporary` (String) Specifies that the view persists only for the duration of the session that you created it in. A temporary view and all its contents are dropped at the end of the session. In context of this provider, it means that it's dropped after a Terraform operation. This results in a permanent plan with object creation. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value.
-- `or_replace` (Boolean) Overwrites the View if it exists.
- `row_access_policy` (Block List, Max: 1) Specifies the row access policy to set on a view. (see [below for nested schema](#nestedblock--row_access_policy))
### Read-Only
@@ -104,6 +112,24 @@ Optional:
- `entity_key` (Set of String) Defines which columns uniquely identify an entity within the view.
+
+### Nested Schema for `data_metric_function`
+
+Required:
+
+- `function_name` (String) Identifier of the data metric function to add to or drop from the table or view. This function identifier must be provided without arguments in parentheses.
+- `on` (Set of String) The table or view columns on which to associate the data metric function. The data types of the columns must match the data types of the columns specified in the data metric function definition.
+
+
+
+### Nested Schema for `data_metric_schedule`
+
+Optional:
+
+- `minutes` (Number) Specifies an interval (in minutes) of wait time inserted between runs of the data metric function. Conflicts with `using_cron`. Valid values are: `5` | `15` | `30` | `60` | `720` | `1440`. Due to Snowflake limitations, changes in this field are not managed by the provider. Please consider using [taint](https://developer.hashicorp.com/terraform/cli/commands/taint) command, `using_cron` field, or [replace_triggered_by](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#replace_triggered_by) metadata argument.
+- `using_cron` (String) Specifies a cron expression and time zone for periodically running the data metric function. Supports a subset of standard cron utility syntax. Conflicts with `minutes`.
+
+
### Nested Schema for `row_access_policy`
@@ -156,6 +182,5 @@ Read-Only:
Import is supported using the following syntax:
```shell
-# format is database name | schema name | view name
-terraform import snowflake_view.example 'dbName|schemaName|viewName'
+terraform import snowflake_view.example '"<database_name>"."<schema_name>"."<view_name>"'
```
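As a complement to the cron-based example earlier in this document, a hedged sketch of the alternative interval-based schedule described by the `data_metric_schedule` schema above; object and function names are the same placeholders used in the existing example, not values from this changeset.

```terraform
resource "snowflake_view" "interval_scheduled" {
  database = "database"
  schema   = "schema"
  name     = "view"
  data_metric_function {
    function_name = "data_metric_function" # placeholder identifier
    on            = ["id"]
  }
  data_metric_schedule {
    # Fixed interval instead of using_cron; the two fields conflict.
    # Valid values: 5 | 15 | 30 | 60 | 720 | 1440. Per the docs above,
    # later changes to `minutes` are not managed by the provider.
    minutes = 15
  }
  statement = <<-SQL
    SELECT id FROM TABLE;
  SQL
}
```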
diff --git a/examples/resources/snowflake_view/import.sh b/examples/resources/snowflake_view/import.sh
index e6f3ed83d3..a5ddab454c 100644
--- a/examples/resources/snowflake_view/import.sh
+++ b/examples/resources/snowflake_view/import.sh
@@ -1,2 +1 @@
-# format is database name | schema name | view name
-terraform import snowflake_view.example 'dbName|schemaName|viewName'
+terraform import snowflake_view.example '"<database_name>"."<schema_name>"."<view_name>"'
diff --git a/examples/resources/snowflake_view/resource.tf b/examples/resources/snowflake_view/resource.tf
index d4506dfcf7..de20fb54cb 100644
--- a/examples/resources/snowflake_view/resource.tf
+++ b/examples/resources/snowflake_view/resource.tf
@@ -18,7 +18,7 @@ resource "snowflake_view" "view" {
select * from foo;
SQL
}
-# resource with attached policies
+# resource with attached policies and data metric functions
resource "snowflake_view" "test" {
database = "database"
schema = "schema"
@@ -35,7 +35,14 @@ resource "snowflake_view" "test" {
policy_name = "aggregation_policy"
entity_key = ["id"]
}
+ data_metric_function {
+ function_name = "data_metric_function"
+ on = ["id"]
+ }
+ data_metric_schedule {
+ using_cron = "15 * * * * UTC"
+ }
statement = <<-SQL
- select id from foo;
+ SELECT id FROM TABLE;
SQL
}
diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/view_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/view_resource_gen.go
index ebac0c6239..089d8fded2 100644
--- a/pkg/acceptance/bettertestspoc/assert/resourceassert/view_resource_gen.go
+++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/view_resource_gen.go
@@ -57,8 +57,8 @@ func (v *ViewResourceAssert) HasCopyGrantsString(expected string) *ViewResourceA
return v
}
-func (v *ViewResourceAssert) HasDataMetricFunctionsString(expected string) *ViewResourceAssert {
- v.AddAssertion(assert.ValueSet("data_metric_functions", expected))
+func (v *ViewResourceAssert) HasDataMetricFunctionString(expected string) *ViewResourceAssert {
+ v.AddAssertion(assert.ValueSet("data_metric_function", expected))
return v
}
@@ -141,8 +141,8 @@ func (v *ViewResourceAssert) HasNoCopyGrants() *ViewResourceAssert {
return v
}
-func (v *ViewResourceAssert) HasNoDataMetricFunctions() *ViewResourceAssert {
- v.AddAssertion(assert.ValueNotSet("data_metric_functions"))
+func (v *ViewResourceAssert) HasNoDataMetricFunction() *ViewResourceAssert {
+ v.AddAssertion(assert.ValueNotSet("data_metric_function"))
return v
}
diff --git a/pkg/acceptance/bettertestspoc/config/model/view_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/view_model_gen.go
index 8c1a2e0ff6..62e2d6cadb 100644
--- a/pkg/acceptance/bettertestspoc/config/model/view_model_gen.go
+++ b/pkg/acceptance/bettertestspoc/config/model/view_model_gen.go
@@ -15,7 +15,7 @@ type ViewModel struct {
Columns tfconfig.Variable `json:"columns,omitempty"`
Comment tfconfig.Variable `json:"comment,omitempty"`
CopyGrants tfconfig.Variable `json:"copy_grants,omitempty"`
- DataMetricFunctions tfconfig.Variable `json:"data_metric_functions,omitempty"`
+ DataMetricFunctions tfconfig.Variable `json:"data_metric_function,omitempty"`
DataMetricSchedule tfconfig.Variable `json:"data_metric_schedule,omitempty"`
Database tfconfig.Variable `json:"database,omitempty"`
IsRecursive tfconfig.Variable `json:"is_recursive,omitempty"`
@@ -80,7 +80,7 @@ func (v *ViewModel) WithCopyGrants(copyGrants bool) *ViewModel {
return v
}
-// data_metric_functions attribute type is not yet supported, so WithDataMetricFunctions can't be generated
+// data_metric_function attribute type is not yet supported, so WithDataMetricFunctions can't be generated
// data_metric_schedule attribute type is not yet supported, so WithDataMetricSchedule can't be generated
diff --git a/pkg/acceptance/helpers/data_metric_function_references_client.go b/pkg/acceptance/helpers/data_metric_function_references_client.go
index dcdbaacd6b..66914c4a0a 100644
--- a/pkg/acceptance/helpers/data_metric_function_references_client.go
+++ b/pkg/acceptance/helpers/data_metric_function_references_client.go
@@ -2,10 +2,10 @@ package helpers
import (
"context"
- "fmt"
"testing"
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
+ "github.com/stretchr/testify/require"
)
type DataMetricFunctionReferencesClient struct {
@@ -19,29 +19,12 @@ func NewDataMetricFunctionReferencesClient(context *TestClientContext) *DataMetr
}
// GetDataMetricFunctionReferences is based on https://docs.snowflake.com/en/sql-reference/functions/data_metric_function_references.
-func (c *DataMetricFunctionReferencesClient) GetDataMetricFunctionReferences(t *testing.T, id sdk.SchemaObjectIdentifier, objectType sdk.ObjectType) ([]DataMetricFunctionReference, error) {
+func (c *DataMetricFunctionReferencesClient) GetDataMetricFunctionReferences(t *testing.T, id sdk.SchemaObjectIdentifier, domain sdk.DataMetricFuncionRefEntityDomainOption) []sdk.DataMetricFunctionReference {
t.Helper()
ctx := context.Background()
- s := []DataMetricFunctionReference{}
- dmfReferencesId := sdk.NewSchemaObjectIdentifier(id.DatabaseName(), "INFORMATION_SCHEMA", "DATA_METRIC_FUNCTION_REFERENCES")
- err := c.context.client.QueryForTests(ctx, &s, fmt.Sprintf(`SELECT * FROM TABLE(%s(REF_ENTITY_NAME => '%s', REF_ENTITY_DOMAIN => '%v'))`, dmfReferencesId.FullyQualifiedName(), id.FullyQualifiedName(), objectType))
+ refs, err := c.context.client.DataMetricFunctionReferences.GetForEntity(ctx, sdk.NewGetForEntityDataMetricFunctionReferenceRequest(id, domain))
+ require.NoError(t, err)
- return s, err
-}
-
-type DataMetricFunctionReference struct {
- MetricDatabaseName string `db:"METRIC_DATABASE_NAME"`
- MetricSchemaName string `db:"METRIC_SCHEMA_NAME"`
- MetricName string `db:"METRIC_NAME"`
- MetricSignature string `db:"METRIC_SIGNATURE"`
- MetricDataType string `db:"METRIC_DATA_TYPE"`
- RefEntityDatabaseName string `db:"REF_ENTITY_DATABASE_NAME"`
- RefEntitySchemaName string `db:"REF_ENTITY_SCHEMA_NAME"`
- RefEntityName string `db:"REF_ENTITY_NAME"`
- RefEntityDomain string `db:"REF_ENTITY_DOMAIN"`
- RefArguments string `db:"REF_ARGUMENTS"`
- RefId string `db:"REF_ID"`
- Schedule string `db:"SCHEDULE"`
- ScheduleStatus string `db:"SCHEDULE_STATUS"`
+ return refs
}
diff --git a/pkg/acceptance/importchecks/import_checks.go b/pkg/acceptance/importchecks/import_checks.go
index e71c6c86bf..2a1a57d27b 100644
--- a/pkg/acceptance/importchecks/import_checks.go
+++ b/pkg/acceptance/importchecks/import_checks.go
@@ -46,7 +46,7 @@ func TestCheckResourceAttrInstanceState(id string, attributeName, attributeValue
if attrVal, ok := v.Attributes[attributeName]; ok {
if attrVal != attributeValue {
- return fmt.Errorf("expected: %s, got: %s", attributeValue, attrVal)
+ return fmt.Errorf("invalid value for attribute %s - expected: %s, got: %s", attributeName, attributeValue, attrVal)
}
return nil
diff --git a/pkg/datasources/views_acceptance_test.go b/pkg/datasources/views_acceptance_test.go
index ef5069cb31..7edce8f234 100644
--- a/pkg/datasources/views_acceptance_test.go
+++ b/pkg/datasources/views_acceptance_test.go
@@ -41,17 +41,11 @@ func TestAcc_Views(t *testing.T) {
func views(viewId sdk.SchemaObjectIdentifier) string {
return fmt.Sprintf(`
- resource "snowflake_unsafe_execute" "use_warehouse" {
- execute = "USE WAREHOUSE \"%v\""
- revert = "SELECT 1"
- }
-
resource snowflake_view "v"{
name = "%v"
schema = "%v"
database = "%v"
statement = "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES where ROLE_OWNER like 'foo%%'"
- depends_on = [snowflake_unsafe_execute.use_warehouse]
}
data snowflake_views "v" {
@@ -59,5 +53,5 @@ func views(viewId sdk.SchemaObjectIdentifier) string {
schema = snowflake_view.v.schema
depends_on = [snowflake_view.v]
}
- `, acc.TestWarehouseName, viewId.Name(), viewId.SchemaName(), viewId.DatabaseName())
+ `, viewId.Name(), viewId.SchemaName(), viewId.DatabaseName())
}
diff --git a/pkg/resources/doc_helpers.go b/pkg/resources/doc_helpers.go
index a7e8a278f1..9fe20cf817 100644
--- a/pkg/resources/doc_helpers.go
+++ b/pkg/resources/doc_helpers.go
@@ -5,10 +5,10 @@ import (
"strings"
)
-func possibleValuesListed[T ~string](values []T) string {
+func possibleValuesListed[T ~string | ~int](values []T) string {
valuesWrapped := make([]string, len(values))
for i, value := range values {
- valuesWrapped[i] = fmt.Sprintf("`%s`", value)
+ valuesWrapped[i] = fmt.Sprintf("`%v`", value)
}
return strings.Join(valuesWrapped, " | ")
}
diff --git a/pkg/resources/doc_helpers_test.go b/pkg/resources/doc_helpers_test.go
index 60842d565b..81ecbccd8f 100644
--- a/pkg/resources/doc_helpers_test.go
+++ b/pkg/resources/doc_helpers_test.go
@@ -6,7 +6,7 @@ import (
"github.com/stretchr/testify/assert"
)
-func Test_PossibleValuesListed(t *testing.T) {
+func Test_PossibleValuesListedStrings(t *testing.T) {
values := []string{"abc", "DEF"}
result := possibleValuesListed(values)
@@ -14,6 +14,14 @@ func Test_PossibleValuesListed(t *testing.T) {
assert.Equal(t, "`abc` | `DEF`", result)
}
+func Test_PossibleValuesListedInts(t *testing.T) {
+ values := []int{42, 21}
+
+ result := possibleValuesListed(values)
+
+ assert.Equal(t, "`42` | `21`", result)
+}
+
func Test_PossibleValuesListed_empty(t *testing.T) {
var values []string
diff --git a/pkg/resources/testdata/TestAcc_View/basic_update/test.tf b/pkg/resources/testdata/TestAcc_View/basic_update/test.tf
index 2bd1ef8145..e403c93692 100644
--- a/pkg/resources/testdata/TestAcc_View/basic_update/test.tf
+++ b/pkg/resources/testdata/TestAcc_View/basic_update/test.tf
@@ -11,6 +11,13 @@ resource "snowflake_view" "test" {
policy_name = var.aggregation_policy
entity_key = var.aggregation_policy_entity_key
}
+ data_metric_function {
+ function_name = var.data_metric_function
+ on = var.data_metric_function_on
+ }
+ data_metric_schedule {
+ using_cron = var.data_metric_schedule_using_cron
+ }
statement = var.statement
comment = var.comment
}
diff --git a/pkg/resources/testdata/TestAcc_View/basic_update/variables.tf b/pkg/resources/testdata/TestAcc_View/basic_update/variables.tf
index 42cc6286e8..e2da9f2f40 100644
--- a/pkg/resources/testdata/TestAcc_View/basic_update/variables.tf
+++ b/pkg/resources/testdata/TestAcc_View/basic_update/variables.tf
@@ -33,3 +33,15 @@ variable "aggregation_policy_entity_key" {
variable "comment" {
type = string
}
+
+variable "data_metric_schedule_using_cron" {
+ type = string
+}
+
+variable "data_metric_function" {
+ type = string
+}
+
+variable "data_metric_function_on" {
+ type = list(string)
+}
diff --git a/pkg/resources/testdata/TestAcc_View/complete/test.tf b/pkg/resources/testdata/TestAcc_View/complete/test.tf
index 45a4a42eb0..6e4c53c023 100644
--- a/pkg/resources/testdata/TestAcc_View/complete/test.tf
+++ b/pkg/resources/testdata/TestAcc_View/complete/test.tf
@@ -4,24 +4,23 @@ resource "snowflake_view" "test" {
database = var.database
schema = var.schema
is_secure = var.is_secure
- or_replace = var.or_replace
copy_grants = var.copy_grants
change_tracking = var.change_tracking
is_temporary = var.is_temporary
+ data_metric_function {
+ function_name = var.data_metric_function
+ on = var.data_metric_function_on
+ }
+ data_metric_schedule {
+ using_cron = var.data_metric_schedule_using_cron
+ }
row_access_policy {
policy_name = var.row_access_policy
on = var.row_access_policy_on
-
}
aggregation_policy {
policy_name = var.aggregation_policy
entity_key = var.aggregation_policy_entity_key
}
- statement = var.statement
- depends_on = [snowflake_unsafe_execute.use_warehouse]
-}
-
-resource "snowflake_unsafe_execute" "use_warehouse" {
- execute = "USE WAREHOUSE \"${var.warehouse}\""
- revert = "SELECT 1"
+ statement = var.statement
}
diff --git a/pkg/resources/testdata/TestAcc_View/complete/variables.tf b/pkg/resources/testdata/TestAcc_View/complete/variables.tf
index 4423777db3..4cdf99c64b 100644
--- a/pkg/resources/testdata/TestAcc_View/complete/variables.tf
+++ b/pkg/resources/testdata/TestAcc_View/complete/variables.tf
@@ -22,10 +22,6 @@ variable "change_tracking" {
type = string
}
-variable "or_replace" {
- type = bool
-}
-
variable "copy_grants" {
type = bool
}
@@ -57,3 +53,15 @@ variable "statement" {
variable "warehouse" {
type = string
}
+
+variable "data_metric_schedule_using_cron" {
+ type = string
+}
+
+variable "data_metric_function" {
+ type = string
+}
+
+variable "data_metric_function_on" {
+ type = list(string)
+}
diff --git a/pkg/resources/user_password_policy_attachment.go b/pkg/resources/user_password_policy_attachment.go
index 12882103a3..5ec96deebb 100644
--- a/pkg/resources/user_password_policy_attachment.go
+++ b/pkg/resources/user_password_policy_attachment.go
@@ -80,7 +80,7 @@ func ReadUserPasswordPolicyAttachment(d *schema.ResourceData, meta any) error {
passwordPolicyReferences := make([]sdk.PolicyReference, 0)
for _, policyReference := range policyReferences {
- if policyReference.PolicyKind == "PASSWORD_POLICY" {
+ if policyReference.PolicyKind == sdk.PolicyKindPasswordPolicy {
passwordPolicyReferences = append(passwordPolicyReferences, policyReference)
}
}
diff --git a/pkg/resources/validators.go b/pkg/resources/validators.go
index e43003b90c..b4bac596fd 100644
--- a/pkg/resources/validators.go
+++ b/pkg/resources/validators.go
@@ -174,6 +174,24 @@ func StringInSlice(valid []string, ignoreCase bool) schema.SchemaValidateDiagFun
}
}
+// IntInSlice has the same implementation as validation.StringInSlice, but adapted to schema.SchemaValidateDiagFunc
+func IntInSlice(valid []int) schema.SchemaValidateDiagFunc {
+ return func(i interface{}, path cty.Path) diag.Diagnostics {
+ v, ok := i.(int)
+ if !ok {
+ return diag.Errorf("expected type of %v to be integer", path)
+ }
+
+ for _, validInt := range valid {
+ if v == validInt {
+ return nil
+ }
+ }
+
+ return diag.Errorf("expected %v to be one of %q, got %d", path, valid, v)
+ }
+}
+
func sdkValidation[T any](normalize func(string) (T, error)) schema.SchemaValidateDiagFunc {
return func(val interface{}, _ cty.Path) diag.Diagnostics {
_, err := normalize(val.(string))
diff --git a/pkg/resources/view.go b/pkg/resources/view.go
index c61f823b1b..e9e929f2da 100644
--- a/pkg/resources/view.go
+++ b/pkg/resources/view.go
@@ -41,22 +41,14 @@ var viewSchema = map[string]*schema.Schema{
ForceNew: true,
DiffSuppressFunc: suppressIdentifierQuoting,
},
- "or_replace": {
- Type: schema.TypeBool,
- Optional: true,
- Default: false,
- Description: "Overwrites the View if it exists.",
- },
- // TODO [SNOW-1348118: this is used only during or_replace, we would like to change the behavior before v1
"copy_grants": {
Type: schema.TypeBool,
Optional: true,
Default: false,
- Description: "Retains the access permissions from the original view when a new view is created using the OR REPLACE clause. OR REPLACE must be set when COPY GRANTS is set.",
+ Description: "Retains the access permissions from the original view when a new view is created using the OR REPLACE clause.",
DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool {
return oldValue != "" && oldValue != newValue
},
- RequiredWith: []string{"or_replace"},
},
"is_secure": {
Type: schema.TypeString,
@@ -69,7 +61,6 @@ var viewSchema = map[string]*schema.Schema{
"is_temporary": {
Type: schema.TypeString,
Optional: true,
- ForceNew: true,
Default: BooleanDefault,
ValidateDiagFunc: validateBooleanString,
Description: booleanStringFieldDescription("Specifies that the view persists only for the duration of the session that you created it in. A temporary view and all its contents are dropped at the end of the session. In context of this provider, it means that it's dropped after a Terraform operation. This results in a permanent plan with object creation."),
@@ -77,7 +68,6 @@ var viewSchema = map[string]*schema.Schema{
"is_recursive": {
Type: schema.TypeString,
Optional: true,
- ForceNew: true,
Default: BooleanDefault,
ValidateDiagFunc: validateBooleanString,
Description: booleanStringFieldDescription("Specifies that the view can refer to itself using recursive syntax without necessarily using a CTE (common table expression)."),
@@ -92,59 +82,69 @@ var viewSchema = map[string]*schema.Schema{
}),
Description: booleanStringFieldDescription("Specifies to enable or disable change tracking on the table."),
},
- // TODO(next pr): support remaining fields
- // "data_metric_functions": {
- // Type: schema.TypeSet,
- // Optional: true,
- // Elem: &schema.Resource{
- // Schema: map[string]*schema.Schema{
- // "metric_name": {
- // Type: schema.TypeString,
- // Optional: true,
- // Description: "Identifier of the data metric function to add to the table or view or drop from the table or view.",
- // },
- // "column_name": {
- // Type: schema.TypeString,
- // Optional: true,
- // Description: "The table or view columns on which to associate the data metric function. The data types of the columns must match the data types of the columns specified in the data metric function definition.",
- // },
- // },
- // },
- // Description: "Data metric functions used for the view.",
- // },
- // "data_metric_schedule": {
- // Type: schema.TypeList,
- // Optional: true,
- // MaxItems: 1,
- // Elem: &schema.Resource{
- // Schema: map[string]*schema.Schema{
- // "minutes": {
- // Type: schema.TypeInt,
- // Optional: true,
- // Description: "Specifies an interval (in minutes) of wait time inserted between runs of the data metric function. Conflicts with `using_cron` and `trigger_on_changes`.",
- // // TODO: move to sdk
- // ValidateFunc: validation.IntInSlice([]int{5, 15, 30, 60, 720, 1440}),
- // ConflictsWith: []string{"data_metric_schedule.using_cron", "data_metric_schedule.trigger_on_changes"},
- // },
- // "using_cron": {
- // Type: schema.TypeString,
- // Optional: true,
- // Description: "Specifies a cron expression and time zone for periodically running the data metric function. Supports a subset of standard cron utility syntax. Conflicts with `minutes` and `trigger_on_changes`.",
- // // TODO: validate?
- // ConflictsWith: []string{"data_metric_schedule.minutes", "data_metric_schedule.trigger_on_changes"},
- // },
- // "trigger_on_changes": {
- // Type: schema.TypeString,
- // Optional: true,
- // Default: BooleanDefault,
- // Description: booleanStringFieldDescription("Specifies that the DMF runs when a DML operation modifies the table, such as inserting a new row or deleting a row. Conflicts with `minutes` and `using_cron`."),
- // ConflictsWith: []string{"data_metric_schedule.minutes", "data_metric_schedule.using_cron"},
- // },
- // },
- // },
- // Description: "Specifies the schedule to run the data metric function periodically.",
- // },
- // "columns": {
+ "data_metric_function": {
+ Type: schema.TypeSet,
+ Optional: true,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "function_name": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: "Identifier of the data metric function to add to the table or view or drop from the table or view. This function identifier must be provided without arguments in parenthesis.",
+ DiffSuppressFunc: suppressIdentifierQuoting,
+ },
+ "on": {
+ Type: schema.TypeSet,
+ Required: true,
+ Elem: &schema.Schema{
+ Type: schema.TypeString,
+ },
+ Description: "The table or view columns on which to associate the data metric function. The data types of the columns must match the data types of the columns specified in the data metric function definition.",
+ },
+ // TODO (SNOW-1348118 - next pr)
+ // "schedule_status": {
+ // Type: schema.TypeString,
+ // Optional: true,
+ // ValidateDiagFunc: sdkValidation(sdk.ToAllowedDataMetricScheduleStatusOption),
+ // Description: fmt.Sprintf("The status of the metrics association. Valid values are: %v. When status of a data metric function is changed, it is being reassigned with `DROP DATA METRIC FUNCTION` and `ADD DATA METRIC FUNCTION`, and then its status is changed by `MODIFY DATA METRIC FUNCTION` ", possibleValuesListed(sdk.AllAllowedDataMetricScheduleStatusOptions)),
+ // DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToAllowedDataMetricScheduleStatusOption), func(_, oldValue, newValue string, _ *schema.ResourceData) bool {
+ // if newValue == "" {
+ // return true
+ // }
+ // return false
+ // }),
+ // },
+ },
+ },
+ Description: "Data metric functions used for the view.",
+ RequiredWith: []string{"data_metric_schedule"},
+ },
+ "data_metric_schedule": {
+ Type: schema.TypeList,
+ Optional: true,
+ MaxItems: 1,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "minutes": {
+ Type: schema.TypeInt,
+ Optional: true,
+ Description: fmt.Sprintf("Specifies an interval (in minutes) of wait time inserted between runs of the data metric function. Conflicts with `using_cron`. Valid values are: %s. Due to Snowflake limitations, changes in this field is not managed by the provider. Please consider using [taint](https://developer.hashicorp.com/terraform/cli/commands/taint) command, `using_cron` field, or [replace_triggered_by](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#replace_triggered_by) metadata argument.", possibleValuesListed(sdk.AllViewDataMetricScheduleMinutes)),
+ ValidateDiagFunc: IntInSlice(sdk.AllViewDataMetricScheduleMinutes),
+ ConflictsWith: []string{"data_metric_schedule.using_cron"},
+ },
+ "using_cron": {
+ Type: schema.TypeString,
+ Optional: true,
+ Description: "Specifies a cron expression and time zone for periodically running the data metric function. Supports a subset of standard cron utility syntax. Conflicts with `minutes`.",
+ ConflictsWith: []string{"data_metric_schedule.minutes"},
+ },
+ },
+ },
+ Description: "Specifies the schedule to run the data metric functions periodically.",
+ RequiredWith: []string{"data_metric_function"},
+ },
+ // TODO (SNOW-1348118 - next pr): add columns
+ // "column": {
// Type: schema.TypeList,
// Optional: true,
// Elem: &schema.Resource{
@@ -159,7 +159,6 @@ var viewSchema = map[string]*schema.Schema{
// Optional: true,
// Elem: &schema.Resource{
// Schema: map[string]*schema.Schema{
- // // TODO: change to `name`? in other policies as well
// "policy_name": {
// Type: schema.TypeString,
// Required: true,
@@ -182,11 +181,11 @@ var viewSchema = map[string]*schema.Schema{
// DiffSuppressFunc: DiffSuppressStatement,
// Description: "Specifies the projection policy to set on a column.",
// },
- // "comment": {
- // Type: schema.TypeString,
- // Optional: true,
- // Description: "Specifies a comment for the column.",
- // },
+ // "comment": {
+ // Type: schema.TypeString,
+ // Optional: true,
+ // Description: "Specifies a comment for the column.",
+ // },
// },
// },
// Description: "If you want to change the name of a column or add a comment to a column in the new view, include a column list that specifies the column names and (if needed) comments about the columns. (You do not need to specify the data types of the columns.)",
@@ -304,16 +303,24 @@ func View() *schema.Resource {
func ImportView(ctx context.Context, d *schema.ResourceData, meta any) ([]*schema.ResourceData, error) {
log.Printf("[DEBUG] Starting view import")
client := meta.(*provider.Context).Client
- id := helpers.DecodeSnowflakeID(d.Id()).(sdk.SchemaObjectIdentifier)
+ id, err := sdk.ParseSchemaObjectIdentifier(d.Id())
+ if err != nil {
+ return nil, err
+ }
v, err := client.Views.ShowByID(ctx, id)
if err != nil {
return nil, err
}
- if err := d.Set("name", v.Name); err != nil {
+ if err := d.Set("name", id.Name()); err != nil {
+ return nil, err
+ }
+ if err := d.Set("database", id.DatabaseName()); err != nil {
+ return nil, err
+ }
+ if err := d.Set("schema", id.SchemaName()); err != nil {
return nil, err
}
-
if err := d.Set("change_tracking", booleanStringFromBool(v.IsChangeTracking())); err != nil {
return nil, err
}
@@ -340,13 +347,12 @@ func CreateView(orReplace bool) schema.CreateContextFunc {
statement := d.Get("statement").(string)
req := sdk.NewCreateViewRequest(id, statement)
- // TODO(next pr): remove or_replace field
- if v := d.Get("or_replace"); v.(bool) || orReplace {
+ if orReplace {
req.WithOrReplace(true)
}
if v := d.Get("copy_grants"); v.(bool) {
- req.WithCopyGrants(true)
+ req.WithCopyGrants(true).WithOrReplace(true)
}
if v := d.Get("is_secure").(string); v != BooleanDefault {
@@ -378,11 +384,18 @@ func CreateView(orReplace bool) schema.CreateContextFunc {
}
if v := d.Get("row_access_policy"); len(v.([]any)) > 0 {
- req.WithRowAccessPolicy(*sdk.NewViewRowAccessPolicyRequest(extractPolicyWithColumns(v, "on")))
+ id, columns, err := extractPolicyWithColumns(v, "on")
+ if err != nil {
+ return diag.FromErr(err)
+ }
+ req.WithRowAccessPolicy(*sdk.NewViewRowAccessPolicyRequest(id, columns))
}
if v := d.Get("aggregation_policy"); len(v.([]any)) > 0 {
- id, columns := extractPolicyWithColumns(v, "entity_key")
+ id, columns, err := extractPolicyWithColumns(v, "entity_key")
+ if err != nil {
+ return diag.FromErr(err)
+ }
aggregationPolicyReq := sdk.NewViewAggregationPolicyRequest(id)
if len(columns) > 0 {
aggregationPolicyReq.WithEntityKey(columns)
@@ -395,7 +408,7 @@ func CreateView(orReplace bool) schema.CreateContextFunc {
return diag.FromErr(fmt.Errorf("error creating view %v err = %w", id.Name(), err))
}
- d.SetId(helpers.EncodeSnowflakeID(id))
+ d.SetId(helpers.EncodeResourceIdentifier(id))
if v := d.Get("change_tracking").(string); v != BooleanDefault {
parsed, err := booleanStringToBool(v)
@@ -409,24 +422,93 @@ func CreateView(orReplace bool) schema.CreateContextFunc {
}
}
+ if v := d.Get("data_metric_schedule"); len(v.([]any)) > 0 {
+ var req *sdk.ViewSetDataMetricScheduleRequest
+ dmsConfig := v.([]any)[0].(map[string]any)
+ if v, ok := dmsConfig["minutes"]; ok && v.(int) > 0 {
+ req = sdk.NewViewSetDataMetricScheduleRequest(fmt.Sprintf("%d MINUTE", v.(int)))
+ } else if v, ok := dmsConfig["using_cron"]; ok {
+ req = sdk.NewViewSetDataMetricScheduleRequest(fmt.Sprintf("USING CRON %s", v.(string)))
+ }
+ err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithSetDataMetricSchedule(*req))
+ if err != nil {
+ return diag.FromErr(fmt.Errorf("error setting data matric schedule in view %v err = %w", id.Name(), err))
+ }
+ }
+
+ if v, ok := d.GetOk("data_metric_function"); ok {
+ addedRaw, err := extractDataMetricFunctions(v.(*schema.Set).List())
+ if err != nil {
+ return diag.FromErr(err)
+ }
+ added := make([]sdk.ViewDataMetricFunction, len(addedRaw))
+ for i := range addedRaw {
+ added[i] = sdk.ViewDataMetricFunction{
+ DataMetricFunction: addedRaw[i].DataMetricFunction,
+ On: addedRaw[i].On,
+ }
+ }
+ err = client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithAddDataMetricFunction(*sdk.NewViewAddDataMetricFunctionRequest(added)))
+ if err != nil {
+ return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err))
+ }
+ // TODO (SNOW-1348118 - next pr)
+ // changeSchedule := make([]sdk.ViewModifyDataMetricFunction, 0, len(addedRaw))
+ // for i := range addedRaw {
+ // if addedRaw[i].ScheduleStatus != "" {
+ // expectedStatus, err := sdk.ToAllowedDataMetricScheduleStatusOption(addedRaw[i].ScheduleStatus)
+ // if err != nil {
+ // return diag.FromErr(err)
+ // }
+ // var statusCmd sdk.ViewDataMetricScheduleStatusOperationOption
+ // switch expectedStatus {
+ // case sdk.DataMetricScheduleStatusStarted:
+ // statusCmd = sdk.ViewDataMetricScheduleStatusOperationResume
+ // case sdk.DataMetricScheduleStatusSuspended:
+ // statusCmd = sdk.ViewDataMetricScheduleStatusOperationSuspend
+ // default:
+ // return diag.FromErr(fmt.Errorf("unexpected data metric function status: %v", expectedStatus))
+ // }
+ // changeSchedule = append(changeSchedule, sdk.ViewModifyDataMetricFunction{
+ // DataMetricFunction: addedRaw[i].DataMetricFunction,
+ // On: addedRaw[i].On,
+ // ViewDataMetricScheduleStatusOperationOption: statusCmd,
+ // })
+ // }
+ // }
+ // if len(changeSchedule) > 0 {
+ // err = client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithModifyDataMetricFunction(*sdk.NewViewModifyDataMetricFunctionsRequest(changeSchedule)))
+ // if err != nil {
+ // return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err))
+ // }
+ // }
+ }
+
return ReadView(false)(ctx, d, meta)
}
}
-func extractPolicyWithColumns(v any, columnsKey string) (sdk.SchemaObjectIdentifier, []sdk.Column) {
+func extractPolicyWithColumns(v any, columnsKey string) (sdk.SchemaObjectIdentifier, []sdk.Column, error) {
policyConfig := v.([]any)[0].(map[string]any)
+ id, err := sdk.ParseSchemaObjectIdentifier(policyConfig["policy_name"].(string))
+ if err != nil {
+ return sdk.SchemaObjectIdentifier{}, nil, err
+ }
columnsRaw := expandStringList(policyConfig[columnsKey].(*schema.Set).List())
columns := make([]sdk.Column, len(columnsRaw))
for i := range columnsRaw {
columns[i] = sdk.Column{Value: columnsRaw[i]}
}
- return sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(policyConfig["policy_name"].(string)), columns
+ return id, columns, nil
}
func ReadView(withExternalChangesMarking bool) schema.ReadContextFunc {
return func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
client := meta.(*provider.Context).Client
- id := helpers.DecodeSnowflakeID(d.Id()).(sdk.SchemaObjectIdentifier)
+ id, err := sdk.ParseSchemaObjectIdentifier(d.Id())
+ if err != nil {
+ return diag.FromErr(err)
+ }
view, err := client.Views.ShowByID(ctx, id)
if err != nil {
@@ -443,18 +525,9 @@ func ReadView(withExternalChangesMarking bool) schema.ReadContextFunc {
return diag.FromErr(err)
}
- if err = d.Set("name", view.Name); err != nil {
- return diag.FromErr(err)
- }
if err := d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()); err != nil {
return diag.FromErr(err)
}
- if err = d.Set("database", view.DatabaseName); err != nil {
- return diag.FromErr(err)
- }
- if err = d.Set("schema", view.SchemaName); err != nil {
- return diag.FromErr(err)
- }
if err = d.Set("copy_grants", view.HasCopyGrants()); err != nil {
return diag.FromErr(err)
}
@@ -490,6 +563,10 @@ func ReadView(withExternalChangesMarking bool) schema.ReadContextFunc {
if err != nil {
return diag.FromErr(err)
}
+ err = handleDataMetricFunctions(ctx, client, id, d)
+ if err != nil {
+ return diag.FromErr(err)
+ }
if view.Text != "" {
// Want to only capture the SELECT part of the query because before that is the CREATE part of the view.
extractor := snowflake.NewViewSelectStatementExtractor(view.Text)
@@ -530,7 +607,7 @@ func handlePolicyReferences(ctx context.Context, client *sdk.Client, id sdk.Sche
for _, p := range policyRefs {
policyName := sdk.NewSchemaObjectIdentifier(*p.PolicyDb, *p.PolicySchema, p.PolicyName)
switch p.PolicyKind {
- case string(sdk.PolicyKindAggregationPolicy):
+ case sdk.PolicyKindAggregationPolicy:
var entityKey []string
if p.RefArgColumnNames != nil {
entityKey = sdk.ParseCommaSeparatedStringArray(*p.RefArgColumnNames, true)
@@ -539,7 +616,7 @@ func handlePolicyReferences(ctx context.Context, client *sdk.Client, id sdk.Sche
"policy_name": policyName.FullyQualifiedName(),
"entity_key": entityKey,
})
- case string(sdk.PolicyKindRowAccessPolicy):
+ case sdk.PolicyKindRowAccessPolicy:
var on []string
if p.RefArgColumnNames != nil {
on = sdk.ParseCommaSeparatedStringArray(*p.RefArgColumnNames, true)
@@ -561,12 +638,100 @@ func handlePolicyReferences(ctx context.Context, client *sdk.Client, id sdk.Sche
return err
}
+func handleDataMetricFunctions(ctx context.Context, client *sdk.Client, id sdk.SchemaObjectIdentifier, d *schema.ResourceData) error {
+ dataMetricFunctionReferences, err := client.DataMetricFunctionReferences.GetForEntity(ctx, sdk.NewGetForEntityDataMetricFunctionReferenceRequest(id, sdk.DataMetricFuncionRefEntityDomainView))
+ if err != nil {
+ return err
+ }
+ if len(dataMetricFunctionReferences) == 0 {
+ return d.Set("data_metric_schedule", nil)
+ }
+ dataMetricFunctions := make([]map[string]any, len(dataMetricFunctionReferences))
+ var schedule string
+ for i, dmfRef := range dataMetricFunctionReferences {
+ dmfName := sdk.NewSchemaObjectIdentifier(dmfRef.MetricDatabaseName, dmfRef.MetricSchemaName, dmfRef.MetricName)
+ var columns []string
+ for _, v := range dmfRef.RefArguments {
+ columns = append(columns, v.Name)
+ }
+ // TODO (SNOW-1348118 - next pr)
+ // var scheduleStatus sdk.DataMetricScheduleStatusOption
+ // status, err := sdk.ToDataMetricScheduleStatusOption(dmfRef.ScheduleStatus)
+ // if err != nil {
+ // return err
+ // }
+ // if slices.Contains(sdk.AllDataMetricScheduleStatusStartedOptions, status) {
+ // scheduleStatus = sdk.DataMetricScheduleStatusStarted
+ // }
+ // if slices.Contains(sdk.AllDataMetricScheduleStatusSuspendedOptions, status) {
+ // scheduleStatus = sdk.DataMetricScheduleStatusSuspended
+ // }
+ dataMetricFunctions[i] = map[string]any{
+ "function_name": dmfName.FullyQualifiedName(),
+ "on": columns,
+ // "schedule_status": string(scheduleStatus),
+ }
+ schedule = dmfRef.Schedule
+ }
+ if err = d.Set("data_metric_function", dataMetricFunctions); err != nil {
+ return err
+ }
+
+ return d.Set("data_metric_schedule", []map[string]any{
+ {
+ "using_cron": schedule,
+ },
+ })
+}
+
+type ViewDataMetricFunctionConfig struct {
+ DataMetricFunction sdk.SchemaObjectIdentifier
+ On []sdk.Column
+ ScheduleStatus string
+}
+
+func extractDataMetricFunctions(v any) (dmfs []ViewDataMetricFunctionConfig, err error) {
+ for _, v := range v.([]any) {
+ config := v.(map[string]any)
+ columnsRaw := expandStringList(config["on"].(*schema.Set).List())
+ columns := make([]sdk.Column, len(columnsRaw))
+ for i := range columnsRaw {
+ columns[i] = sdk.Column{Value: columnsRaw[i]}
+ }
+ id, err := sdk.ParseSchemaObjectIdentifier(config["function_name"].(string))
+ if err != nil {
+ return nil, err
+ }
+ dmfs = append(dmfs, ViewDataMetricFunctionConfig{
+ DataMetricFunction: id,
+ On: columns,
+ // TODO (SNOW-1348118 - next pr)
+ // ScheduleStatus: config["schedule_status"].(string),
+ })
+ }
+ return
+}
+
+func changedKeys(d *schema.ResourceData, keys []string) []string {
+ changed := make([]string, 0, len(keys))
+ for _, key := range keys {
+ if d.HasChange(key) {
+ changed = append(changed, key)
+ }
+ }
+ return changed
+}
+
func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
client := meta.(*provider.Context).Client
- id := helpers.DecodeSnowflakeID(d.Id()).(sdk.SchemaObjectIdentifier)
+ id, err := sdk.ParseSchemaObjectIdentifier(d.Id())
+ if err != nil {
+ return diag.FromErr(err)
+ }
// change on these fields can not be ForceNew because then view is dropped explicitly and copying grants does not have effect
if d.HasChange("statement") || d.HasChange("is_temporary") || d.HasChange("is_recursive") || d.HasChange("copy_grant") {
+ log.Printf("[DEBUG] Detected change on %q, recreating...", changedKeys(d, []string{"statement", "is_temporary", "is_recursive", "copy_grant"}))
return CreateView(true)(ctx, d, meta)
}
@@ -578,7 +743,7 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag
return diag.FromErr(fmt.Errorf("error renaming view %v err = %w", d.Id(), err))
}
- d.SetId(helpers.EncodeSnowflakeID(newId))
+ d.SetId(helpers.EncodeResourceIdentifier(newId))
id = newId
}
@@ -631,17 +796,83 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag
}
}
+ if d.HasChange("data_metric_schedule") {
+ if v := d.Get("data_metric_schedule"); len(v.([]any)) > 0 {
+ var req *sdk.ViewSetDataMetricScheduleRequest
+ dmsConfig := v.([]any)[0].(map[string]any)
+ if v := dmsConfig["minutes"]; v.(int) > 0 {
+ req = sdk.NewViewSetDataMetricScheduleRequest(fmt.Sprintf("%d MINUTE", v.(int)))
+ } else if v, ok := dmsConfig["using_cron"]; ok {
+ req = sdk.NewViewSetDataMetricScheduleRequest(fmt.Sprintf("USING CRON %s", v.(string)))
+ }
+ err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithSetDataMetricSchedule(*req))
+ if err != nil {
+ return diag.FromErr(fmt.Errorf("error setting data matric schedule in view %v err = %w", id.Name(), err))
+ }
+ } else {
+ err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithUnsetDataMetricSchedule(*sdk.NewViewUnsetDataMetricScheduleRequest()))
+ if err != nil {
+ return diag.FromErr(fmt.Errorf("error unsetting data matric schedule in view %v err = %w", id.Name(), err))
+ }
+ }
+ }
+
+ if d.HasChange("data_metric_function") {
+ old, new := d.GetChange("data_metric_function")
+ removedRaw, addedRaw := old.(*schema.Set).List(), new.(*schema.Set).List()
+ addedConfig, err := extractDataMetricFunctions(addedRaw)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+ removedConfig, err := extractDataMetricFunctions(removedRaw)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+ if len(removedConfig) > 0 {
+ removed := make([]sdk.ViewDataMetricFunction, len(removedConfig))
+ for i := range removedConfig {
+ removed[i] = sdk.ViewDataMetricFunction{
+ DataMetricFunction: removedConfig[i].DataMetricFunction,
+ On: removedConfig[i].On,
+ }
+ }
+ err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithDropDataMetricFunction(*sdk.NewViewDropDataMetricFunctionRequest(removed)))
+ if err != nil {
+ return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err))
+ }
+ }
+ if len(addedConfig) > 0 {
+ added := make([]sdk.ViewDataMetricFunction, len(addedConfig))
+ for i := range addedConfig {
+ added[i] = sdk.ViewDataMetricFunction{
+ DataMetricFunction: addedConfig[i].DataMetricFunction,
+ On: addedConfig[i].On,
+ }
+ }
+ err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithAddDataMetricFunction(*sdk.NewViewAddDataMetricFunctionRequest(added)))
+ if err != nil {
+ return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err))
+ }
+ }
+ }
+
if d.HasChange("row_access_policy") {
var addReq *sdk.ViewAddRowAccessPolicyRequest
var dropReq *sdk.ViewDropRowAccessPolicyRequest
oldRaw, newRaw := d.GetChange("row_access_policy")
if len(oldRaw.([]any)) > 0 {
- oldId, _ := extractPolicyWithColumns(oldRaw, "on")
+ oldId, _, err := extractPolicyWithColumns(oldRaw, "on")
+ if err != nil {
+ return diag.FromErr(err)
+ }
dropReq = sdk.NewViewDropRowAccessPolicyRequest(oldId)
}
if len(newRaw.([]any)) > 0 {
- newId, newColumns := extractPolicyWithColumns(newRaw, "on")
+ newId, newColumns, err := extractPolicyWithColumns(newRaw, "on")
+ if err != nil {
+ return diag.FromErr(err)
+ }
addReq = sdk.NewViewAddRowAccessPolicyRequest(newId, newColumns)
}
req := sdk.NewAlterViewRequest(id)
@@ -659,12 +890,15 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag
}
if d.HasChange("aggregation_policy") {
if v, ok := d.GetOk("aggregation_policy"); ok {
- newId, newColumns := extractPolicyWithColumns(v, "entity_key")
+ newId, newColumns, err := extractPolicyWithColumns(v, "entity_key")
+ if err != nil {
+ return diag.FromErr(err)
+ }
aggregationPolicyReq := sdk.NewViewSetAggregationPolicyRequest(newId)
if len(newColumns) > 0 {
aggregationPolicyReq.WithEntityKey(newColumns)
}
- err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithSetAggregationPolicy(*aggregationPolicyReq.WithForce(true)))
+ err = client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithSetAggregationPolicy(*aggregationPolicyReq.WithForce(true)))
if err != nil {
return diag.FromErr(fmt.Errorf("error setting aggregation policy for view %v: %w", d.Id(), err))
}
@@ -680,10 +914,14 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag
}
func DeleteView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
- id := helpers.DecodeSnowflakeID(d.Id()).(sdk.SchemaObjectIdentifier)
+ id, err := sdk.ParseSchemaObjectIdentifier(d.Id())
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
client := meta.(*provider.Context).Client
- err := client.Views.Drop(ctx, sdk.NewDropViewRequest(id).WithIfExists(true))
+ err = client.Views.Drop(ctx, sdk.NewDropViewRequest(id).WithIfExists(true))
if err != nil {
return diag.Diagnostics{
diag.Diagnostic{
diff --git a/pkg/resources/view_acceptance_test.go b/pkg/resources/view_acceptance_test.go
index be3b810735..8c5cd24590 100644
--- a/pkg/resources/view_acceptance_test.go
+++ b/pkg/resources/view_acceptance_test.go
@@ -25,44 +25,58 @@ import (
)
// TODO(SNOW-1423486): Fix using warehouse in all tests and remove unsetting testenvs.ConfigureClientOnce
-// TODO(next pr): cleanup setting warehouse with unsafe_execute
func TestAcc_View_basic(t *testing.T) {
t.Setenv(string(testenvs.ConfigureClientOnce), "")
_ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance)
acc.TestAccPreCheck(t)
- rowAccessPolicy, rowAccessPolicyCleanup := acc.TestClient().RowAccessPolicy.CreateRowAccessPolicyWithDataType(t, sdk.DataTypeVARCHAR)
+ rowAccessPolicy, rowAccessPolicyCleanup := acc.TestClient().RowAccessPolicy.CreateRowAccessPolicyWithDataType(t, sdk.DataTypeNumber)
t.Cleanup(rowAccessPolicyCleanup)
aggregationPolicy, aggregationPolicyCleanup := acc.TestClient().AggregationPolicy.CreateAggregationPolicy(t)
t.Cleanup(aggregationPolicyCleanup)
- rowAccessPolicy2, rowAccessPolicy2Cleanup := acc.TestClient().RowAccessPolicy.CreateRowAccessPolicyWithDataType(t, sdk.DataTypeVARCHAR)
+ rowAccessPolicy2, rowAccessPolicy2Cleanup := acc.TestClient().RowAccessPolicy.CreateRowAccessPolicyWithDataType(t, sdk.DataTypeNumber)
t.Cleanup(rowAccessPolicy2Cleanup)
aggregationPolicy2, aggregationPolicy2Cleanup := acc.TestClient().AggregationPolicy.CreateAggregationPolicy(t)
t.Cleanup(aggregationPolicy2Cleanup)
+ functionId := sdk.NewSchemaObjectIdentifier("SNOWFLAKE", "CORE", "AVG")
+ function2Id := sdk.NewSchemaObjectIdentifier("SNOWFLAKE", "CORE", "MAX")
+
+ cron, cron2 := "10 * * * * UTC", "20 * * * * UTC"
+
id := acc.TestClient().Ids.RandomSchemaObjectIdentifier()
- statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES"
- otherStatement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES where ROLE_OWNER like 'foo%%'"
+ resourceId := helpers.EncodeResourceIdentifier(id)
+ table, tableCleanup := acc.TestClient().Table.CreateTableWithColumns(t, []sdk.TableColumnRequest{
+ *sdk.NewTableColumnRequest("id", sdk.DataTypeNumber),
+ *sdk.NewTableColumnRequest("foo", sdk.DataTypeNumber),
+ })
+ t.Cleanup(tableCleanup)
+ statement := fmt.Sprintf("SELECT id, foo FROM %s", table.ID().FullyQualifiedName())
+ otherStatement := fmt.Sprintf("SELECT foo, id FROM %s", table.ID().FullyQualifiedName())
comment := "Terraform test resource'"
viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement)
- viewModelWithDependency := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"})
+ viewModelWithDependency := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement)
// generators currently don't handle lists, so use the old way
- basicUpdate := func(rap, ap sdk.SchemaObjectIdentifier, statement string) config.Variables {
+ basicUpdate := func(rap, ap, functionId sdk.SchemaObjectIdentifier, statement, cron string, scheduleStatus sdk.DataMetricScheduleStatusOption) config.Variables {
return config.Variables{
- "name": config.StringVariable(id.Name()),
- "database": config.StringVariable(id.DatabaseName()),
- "schema": config.StringVariable(id.SchemaName()),
- "statement": config.StringVariable(statement),
- "row_access_policy": config.StringVariable(rap.FullyQualifiedName()),
- "row_access_policy_on": config.ListVariable(config.StringVariable("ROLE_NAME")),
- "aggregation_policy": config.StringVariable(ap.FullyQualifiedName()),
- "aggregation_policy_entity_key": config.ListVariable(config.StringVariable("ROLE_NAME")),
- "comment": config.StringVariable(comment),
+ "name": config.StringVariable(id.Name()),
+ "database": config.StringVariable(id.DatabaseName()),
+ "schema": config.StringVariable(id.SchemaName()),
+ "statement": config.StringVariable(statement),
+ "row_access_policy": config.StringVariable(rap.FullyQualifiedName()),
+ "row_access_policy_on": config.ListVariable(config.StringVariable("ID")),
+ "aggregation_policy": config.StringVariable(ap.FullyQualifiedName()),
+ "aggregation_policy_entity_key": config.ListVariable(config.StringVariable("ID")),
+ "data_metric_function": config.StringVariable(functionId.FullyQualifiedName()),
+ "data_metric_function_on": config.ListVariable(config.StringVariable("ID")),
+ "data_metric_function_schedule_status": config.StringVariable(string(scheduleStatus)),
+ "data_metric_schedule_using_cron": config.StringVariable(cron),
+ "comment": config.StringVariable(comment),
}
}
@@ -75,7 +89,7 @@ func TestAcc_View_basic(t *testing.T) {
Steps: []resource.TestStep{
// without optionals
{
- Config: accconfig.FromModel(t, viewModelWithDependency) + useWarehouseConfig(acc.TestWarehouseName),
+ Config: accconfig.FromModel(t, viewModelWithDependency),
Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test").
HasNameString(id.Name()).
HasStatementString(statement).
@@ -87,18 +101,25 @@ func TestAcc_View_basic(t *testing.T) {
Config: accconfig.FromModel(t, viewModel),
ResourceName: "snowflake_view.test",
ImportState: true,
- ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "name", id.Name())),
- resourceassert.ImportedViewResource(t, helpers.EncodeSnowflakeID(id)).
+ ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "name", id.Name())),
+ resourceassert.ImportedViewResource(t, resourceId).
HasNameString(id.Name()).
HasDatabaseString(id.DatabaseName()).
HasSchemaString(id.SchemaName()).
HasStatementString(statement)),
},
- // set policies externally
+ // set policies and dmfs externally
{
PreConfig: func() {
- acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithAddRowAccessPolicy(*sdk.NewViewAddRowAccessPolicyRequest(rowAccessPolicy.ID(), []sdk.Column{{Value: "ROLE_NAME"}})))
+ acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithAddRowAccessPolicy(*sdk.NewViewAddRowAccessPolicyRequest(rowAccessPolicy.ID(), []sdk.Column{{Value: "ID"}})))
acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithSetAggregationPolicy(*sdk.NewViewSetAggregationPolicyRequest(aggregationPolicy)))
+ acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithSetDataMetricSchedule(*sdk.NewViewSetDataMetricScheduleRequest(fmt.Sprintf("USING CRON %s", cron))))
+ acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithAddDataMetricFunction(*sdk.NewViewAddDataMetricFunctionRequest([]sdk.ViewDataMetricFunction{
+ {
+ DataMetricFunction: functionId,
+ On: []sdk.Column{{Value: "ID"}},
+ },
+ })))
},
Config: accconfig.FromModel(t, viewModel),
Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test").
@@ -108,12 +129,14 @@ func TestAcc_View_basic(t *testing.T) {
HasSchemaString(id.SchemaName()),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "0")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "0")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "0")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "0")),
),
},
// set other fields
{
ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_update"),
- ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, statement),
+ ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, functionId, statement, cron, sdk.DataMetricScheduleStatusStarted),
ConfigPlanChecks: resource.ConfigPlanChecks{
PreApply: []plancheck.PlanCheck{
plancheck.ExpectResourceAction("snowflake_view.test", plancheck.ResourceActionUpdate),
@@ -128,17 +151,52 @@ func TestAcc_View_basic(t *testing.T) {
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")),
- assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ROLE_NAME")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")),
- assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ROLE_NAME")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron)),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", functionId.FullyQualifiedName())),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")),
+ ),
+ },
+ // change policies and dmfs
+ {
+ ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_update"),
+ ConfigVariables: basicUpdate(rowAccessPolicy2.ID(), aggregationPolicy2, function2Id, statement, cron2, sdk.DataMetricScheduleStatusStarted),
+ Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test").
+ HasNameString(id.Name()).
+ HasStatementString(statement).
+ HasDatabaseString(id.DatabaseName()).
+ HasSchemaString(id.SchemaName()).
+ HasCommentString(comment),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy2.FullyQualifiedName())),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy2.ID().FullyQualifiedName())),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron2)),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.schedule_status", string(sdk.DataMetricScheduleStatusStarted))),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", function2Id.FullyQualifiedName())),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")),
),
},
- // change policies
+ // change DMF status
{
ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_update"),
- ConfigVariables: basicUpdate(rowAccessPolicy2.ID(), aggregationPolicy2, statement),
+ ConfigVariables: basicUpdate(rowAccessPolicy2.ID(), aggregationPolicy2, function2Id, statement, cron2, sdk.DataMetricScheduleStatusSuspended),
Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test").
HasNameString(id.Name()).
HasStatementString(statement).
@@ -148,17 +206,25 @@ func TestAcc_View_basic(t *testing.T) {
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy2.FullyQualifiedName())),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")),
- assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ROLE_NAME")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy2.ID().FullyQualifiedName())),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")),
- assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ROLE_NAME")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron2)),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.schedule_status", string(sdk.DataMetricScheduleStatusSuspended))),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", function2Id.FullyQualifiedName())),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")),
),
},
// change statement and policies
{
ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_update"),
- ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, otherStatement),
+ ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, functionId, otherStatement, cron, sdk.DataMetricScheduleStatusStarted),
Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test").
HasNameString(id.Name()).
HasStatementString(otherStatement).
@@ -168,11 +234,18 @@ func TestAcc_View_basic(t *testing.T) {
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")),
- assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ROLE_NAME")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")),
- assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ROLE_NAME")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron)),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", functionId.FullyQualifiedName())),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")),
),
},
// change statements externally
@@ -181,7 +254,7 @@ func TestAcc_View_basic(t *testing.T) {
acc.TestClient().View.RecreateView(t, id, statement)
},
ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_update"),
- ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, otherStatement),
+ ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, functionId, otherStatement, cron, sdk.DataMetricScheduleStatusStarted),
Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test").
HasNameString(id.Name()).
HasStatementString(otherStatement).
@@ -191,11 +264,18 @@ func TestAcc_View_basic(t *testing.T) {
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")),
- assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ROLE_NAME")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")),
- assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ROLE_NAME")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron)),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", functionId.FullyQualifiedName())),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")),
),
},
// unset policies externally
@@ -205,7 +285,7 @@ func TestAcc_View_basic(t *testing.T) {
acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithUnsetAggregationPolicy(*sdk.NewViewUnsetAggregationPolicyRequest()))
},
ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_update"),
- ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, otherStatement),
+ ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, functionId, otherStatement, cron, sdk.DataMetricScheduleStatusStarted),
Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test").
HasNameString(id.Name()).
HasStatementString(otherStatement).
@@ -215,22 +295,29 @@ func TestAcc_View_basic(t *testing.T) {
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")),
- assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ROLE_NAME")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")),
- assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ROLE_NAME")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron)),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", functionId.FullyQualifiedName())),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")),
),
},
// import - with optionals
{
ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_update"),
- ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, otherStatement),
+ ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, functionId, otherStatement, cron, sdk.DataMetricScheduleStatusStarted),
ResourceName: "snowflake_view.test",
ImportState: true,
- ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "name", id.Name())),
- resourceassert.ImportedViewResource(t, helpers.EncodeSnowflakeID(id)).
+ ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "name", id.Name())),
+ resourceassert.ImportedViewResource(t, resourceId).
HasNameString(id.Name()).
HasStatementString(otherStatement).
HasDatabaseString(id.DatabaseName()).
@@ -239,14 +326,14 @@ func TestAcc_View_basic(t *testing.T) {
HasIsSecureString("false").
HasIsTemporaryString("false").
HasChangeTrackingString("false"),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.#", "1")),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.0.entity_key.#", "1")),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.0.entity_key.0", "ROLE_NAME")),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.#", "1")),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.0.on.#", "1")),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.0.on.0", "ROLE_NAME")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.#", "1")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.entity_key.#", "1")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.entity_key.0", "ID")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.#", "1")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.on.#", "1")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.on.0", "ID")),
),
},
// unset
@@ -261,6 +348,8 @@ func TestAcc_View_basic(t *testing.T) {
HasCommentString(""),
assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "aggregation_policy.#")),
assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "row_access_policy.#")),
+ assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_schedule.#")),
+ assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_function.#")),
),
},
// recreate - change is_recursive
@@ -277,6 +366,8 @@ func TestAcc_View_basic(t *testing.T) {
HasChangeTrackingString("default"),
assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "aggregation_policy.#")),
assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "row_access_policy.#")),
+ assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_schedule.#")),
+ assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_function.#")),
),
},
},
@@ -289,7 +380,7 @@ func TestAcc_View_recursive(t *testing.T) {
acc.TestAccPreCheck(t)
id := acc.TestClient().Ids.RandomSchemaObjectIdentifier()
statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES"
- viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"})
+ viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement)
resource.Test(t, resource.TestCase{
ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
@@ -299,7 +390,7 @@ func TestAcc_View_recursive(t *testing.T) {
CheckDestroy: acc.CheckDestroy(t, resources.View),
Steps: []resource.TestStep{
{
- Config: accconfig.FromModel(t, viewModel.WithIsRecursive("true")) + useWarehouseConfig(acc.TestWarehouseName),
+ Config: accconfig.FromModel(t, viewModel.WithIsRecursive("true")),
Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test").
HasNameString(id.Name()).
HasStatementString(statement).
@@ -308,11 +399,11 @@ func TestAcc_View_recursive(t *testing.T) {
HasIsRecursiveString("true")),
},
{
- Config: accconfig.FromModel(t, viewModel.WithIsRecursive("true")) + useWarehouseConfig(acc.TestWarehouseName),
+ Config: accconfig.FromModel(t, viewModel.WithIsRecursive("true")),
ResourceName: "snowflake_view.test",
ImportState: true,
- ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "name", id.Name())),
- resourceassert.ImportedViewResource(t, helpers.EncodeSnowflakeID(id)).
+ ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "name", id.Name())),
+ resourceassert.ImportedViewResource(t, helpers.EncodeResourceIdentifier(id)).
HasNameString(id.Name()).
HasDatabaseString(id.DatabaseName()).
HasSchemaString(id.SchemaName()).
@@ -331,7 +422,7 @@ func TestAcc_View_temporary(t *testing.T) {
acc.TestAccPreCheck(t)
id := acc.TestClient().Ids.RandomSchemaObjectIdentifier()
statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES"
- viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"})
+ viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement)
resource.Test(t, resource.TestCase{
ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
TerraformVersionChecks: []tfversion.TerraformVersionCheck{
@@ -340,7 +431,7 @@ func TestAcc_View_temporary(t *testing.T) {
CheckDestroy: acc.CheckDestroy(t, resources.View),
Steps: []resource.TestStep{
{
- Config: accconfig.FromModel(t, viewModel.WithIsTemporary("true")) + useWarehouseConfig(acc.TestWarehouseName),
+ Config: accconfig.FromModel(t, viewModel.WithIsTemporary("true")),
Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test").
HasNameString(id.Name()).
HasStatementString(statement).
@@ -357,33 +448,50 @@ func TestAcc_View_complete(t *testing.T) {
_ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance)
acc.TestAccPreCheck(t)
id := acc.TestClient().Ids.RandomSchemaObjectIdentifier()
- // use a simple table to test change_tracking, otherwise it fails with: Change tracking is not supported on queries with joins of type '[LEFT_OUTER_JOIN]'
- table, tableCleanup := acc.TestClient().Table.CreateTable(t)
+ resourceId := helpers.EncodeResourceIdentifier(id)
+ table, tableCleanup := acc.TestClient().Table.CreateTableWithColumns(t, []sdk.TableColumnRequest{
+ *sdk.NewTableColumnRequest("id", sdk.DataTypeNumber),
+ *sdk.NewTableColumnRequest("foo", sdk.DataTypeNumber),
+ })
t.Cleanup(tableCleanup)
- statement := fmt.Sprintf("SELECT id FROM %s", table.ID().FullyQualifiedName())
+ statement := fmt.Sprintf("SELECT id, foo FROM %s", table.ID().FullyQualifiedName())
rowAccessPolicy, rowAccessPolicyCleanup := acc.TestClient().RowAccessPolicy.CreateRowAccessPolicyWithDataType(t, sdk.DataTypeNumber)
t.Cleanup(rowAccessPolicyCleanup)
aggregationPolicy, aggregationPolicyCleanup := acc.TestClient().AggregationPolicy.CreateAggregationPolicy(t)
t.Cleanup(aggregationPolicyCleanup)
+ projectionPolicy, projectionPolicyCleanup := acc.TestClient().ProjectionPolicy.CreateProjectionPolicy(t)
+ t.Cleanup(projectionPolicyCleanup)
+
+ maskingPolicy, maskingPolicyCleanup := acc.TestClient().MaskingPolicy.CreateMaskingPolicyIdentity(t, sdk.DataTypeNumber)
+ t.Cleanup(maskingPolicyCleanup)
+
+ functionId := sdk.NewSchemaObjectIdentifier("SNOWFLAKE", "CORE", "AVG")
+
m := func() map[string]config.Variable {
return map[string]config.Variable{
- "name": config.StringVariable(id.Name()),
- "database": config.StringVariable(id.DatabaseName()),
- "schema": config.StringVariable(id.SchemaName()),
- "comment": config.StringVariable("Terraform test resource"),
- "is_secure": config.BoolVariable(true),
- "is_temporary": config.BoolVariable(false),
- "or_replace": config.BoolVariable(false),
- "copy_grants": config.BoolVariable(false),
- "change_tracking": config.BoolVariable(true),
- "row_access_policy": config.StringVariable(rowAccessPolicy.ID().FullyQualifiedName()),
- "row_access_policy_on": config.ListVariable(config.StringVariable("ID")),
- "aggregation_policy": config.StringVariable(aggregationPolicy.FullyQualifiedName()),
- "aggregation_policy_entity_key": config.ListVariable(config.StringVariable("ID")),
- "statement": config.StringVariable(statement),
- "warehouse": config.StringVariable(acc.TestWarehouseName),
+ "name": config.StringVariable(id.Name()),
+ "database": config.StringVariable(id.DatabaseName()),
+ "schema": config.StringVariable(id.SchemaName()),
+ "comment": config.StringVariable("Terraform test resource"),
+ "is_secure": config.BoolVariable(true),
+ "is_temporary": config.BoolVariable(false),
+ "copy_grants": config.BoolVariable(false),
+ "change_tracking": config.BoolVariable(true),
+ "row_access_policy": config.StringVariable(rowAccessPolicy.ID().FullyQualifiedName()),
+ "row_access_policy_on": config.ListVariable(config.StringVariable("ID")),
+ "aggregation_policy": config.StringVariable(aggregationPolicy.FullyQualifiedName()),
+ "aggregation_policy_entity_key": config.ListVariable(config.StringVariable("ID")),
+ "statement": config.StringVariable(statement),
+ "warehouse": config.StringVariable(acc.TestWarehouseName),
+ "column_name": config.StringVariable("ID"),
+ "masking_policy": config.StringVariable(maskingPolicy.ID().FullyQualifiedName()),
+ "masking_policy_using": config.ListVariable(config.StringVariable("ID")),
+ "projection_policy": config.StringVariable(projectionPolicy.FullyQualifiedName()),
+ "data_metric_function": config.StringVariable(functionId.FullyQualifiedName()),
+ "data_metric_function_on": config.ListVariable(config.StringVariable("ID")),
+ "data_metric_schedule_using_cron": config.StringVariable("5 * * * * UTC"),
}
}
resource.Test(t, resource.TestCase{
@@ -405,6 +513,13 @@ func TestAcc_View_complete(t *testing.T) {
HasIsSecureString("true").
HasIsTemporaryString("false").
HasChangeTrackingString("true"),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", "5 * * * * UTC")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", functionId.FullyQualifiedName())),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")),
+ assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())),
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")),
@@ -427,8 +542,8 @@ func TestAcc_View_complete(t *testing.T) {
ConfigVariables: m(),
ResourceName: "snowflake_view.test",
ImportState: true,
- ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "name", id.Name())),
- resourceassert.ImportedViewResource(t, helpers.EncodeSnowflakeID(id)).
+ ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "name", id.Name())),
+ resourceassert.ImportedViewResource(t, resourceId).
HasNameString(id.Name()).
HasStatementString(statement).
HasDatabaseString(id.DatabaseName()).
@@ -436,14 +551,21 @@ func TestAcc_View_complete(t *testing.T) {
HasCommentString("Terraform test resource").
HasIsSecureString("true").
HasIsTemporaryString("false").HasChangeTrackingString("true"),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.#", "1")),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.0.entity_key.#", "1")),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.0.entity_key.0", "ID")),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.#", "1")),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.0.on.#", "1")),
- assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.0.on.0", "ID")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_schedule.#", "1")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_schedule.0.using_cron", "5 * * * * UTC")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_schedule.0.minutes", "0")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_function.#", "1")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_function.0.function_name", functionId.FullyQualifiedName())),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_function.0.on.#", "1")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_function.0.on.0", "ID")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.#", "1")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.entity_key.#", "1")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.entity_key.0", "ID")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.#", "1")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.on.#", "1")),
+ assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.on.0", "ID")),
),
},
},
@@ -455,7 +577,7 @@ func TestAcc_View_Rename(t *testing.T) {
statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES"
id := acc.TestClient().Ids.RandomSchemaObjectIdentifier()
newId := acc.TestClient().Ids.RandomSchemaObjectIdentifier()
- viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithComment("foo").WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"})
+ viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithComment("foo")
newViewModel := model.View("test", newId.DatabaseName(), newId.Name(), newId.SchemaName(), statement).WithComment("foo")
resource.Test(t, resource.TestCase{
@@ -467,7 +589,7 @@ func TestAcc_View_Rename(t *testing.T) {
CheckDestroy: acc.CheckDestroy(t, resources.View),
Steps: []resource.TestStep{
{
- Config: accconfig.FromModel(t, viewModel) + useWarehouseConfig(acc.TestWarehouseName),
+ Config: accconfig.FromModel(t, viewModel),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr("snowflake_view.test", "name", id.Name()),
resource.TestCheckResourceAttr("snowflake_view.test", "comment", "foo"),
@@ -497,8 +619,7 @@ func TestAcc_ViewChangeCopyGrants(t *testing.T) {
id := acc.TestClient().Ids.RandomSchemaObjectIdentifier()
statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES"
- viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithIsSecure("true").WithOrReplace(false).WithCopyGrants(false).
- WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"})
+ viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithIsSecure("true").WithCopyGrants(false)
var createdOn string
@@ -511,7 +632,7 @@ func TestAcc_ViewChangeCopyGrants(t *testing.T) {
CheckDestroy: acc.CheckDestroy(t, resources.View),
Steps: []resource.TestStep{
{
- Config: accconfig.FromModel(t, viewModel) + useWarehouseConfig(acc.TestWarehouseName),
+ Config: accconfig.FromModel(t, viewModel),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr("snowflake_view.test", "name", id.Name()),
resource.TestCheckResourceAttr("snowflake_view.test", "database", id.DatabaseName()),
@@ -526,7 +647,7 @@ func TestAcc_ViewChangeCopyGrants(t *testing.T) {
},
// Checks that copy_grants changes don't trigger a drop
{
- Config: accconfig.FromModel(t, viewModel.WithCopyGrants(true).WithOrReplace(true)) + useWarehouseConfig(acc.TestWarehouseName),
+ Config: accconfig.FromModel(t, viewModel.WithCopyGrants(true)),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr("snowflake_view.test", "show_output.#", "1"),
resource.TestCheckResourceAttrWith("snowflake_view.test", "show_output.0.created_on", func(value string) error {
@@ -547,9 +668,7 @@ func TestAcc_ViewChangeCopyGrantsReversed(t *testing.T) {
id := acc.TestClient().Ids.RandomSchemaObjectIdentifier()
statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES"
- viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithIsSecure("true").WithOrReplace(true).WithCopyGrants(true).
- WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"})
-
+ viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithIsSecure("true").WithCopyGrants(true)
var createdOn string
resource.Test(t, resource.TestCase{
@@ -561,7 +680,7 @@ func TestAcc_ViewChangeCopyGrantsReversed(t *testing.T) {
CheckDestroy: acc.CheckDestroy(t, resources.View),
Steps: []resource.TestStep{
{
- Config: accconfig.FromModel(t, viewModel) + useWarehouseConfig(acc.TestWarehouseName),
+ Config: accconfig.FromModel(t, viewModel),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr("snowflake_view.test", "copy_grants", "true"),
resource.TestCheckResourceAttr("snowflake_view.test", "show_output.#", "1"),
@@ -573,7 +692,7 @@ func TestAcc_ViewChangeCopyGrantsReversed(t *testing.T) {
),
},
{
- Config: accconfig.FromModel(t, viewModel.WithCopyGrants(false)) + useWarehouseConfig(acc.TestWarehouseName),
+ Config: accconfig.FromModel(t, viewModel.WithCopyGrants(false)),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr("snowflake_view.test", "show_output.#", "1"),
resource.TestCheckResourceAttrWith("snowflake_view.test", "show_output.0.created_on", func(value string) error {
@@ -604,7 +723,7 @@ func TestAcc_ViewCopyGrantsStatementUpdate(t *testing.T) {
CheckDestroy: acc.CheckDestroy(t, resources.View),
Steps: []resource.TestStep{
{
- Config: viewConfigWithGrants(viewId, tableId, `\"name\"`) + useWarehouseConfig(acc.TestWarehouseName),
+ Config: viewConfigWithGrants(viewId, tableId, `\"name\"`),
Check: resource.ComposeAggregateTestCheckFunc(
// there should be more than one privilege, because we applied grant all privileges and initially there's always one which is ownership
resource.TestCheckResourceAttr("data.snowflake_grants.grants", "grants.#", "2"),
@@ -612,7 +731,7 @@ func TestAcc_ViewCopyGrantsStatementUpdate(t *testing.T) {
),
},
{
- Config: viewConfigWithGrants(viewId, tableId, "*") + useWarehouseConfig(acc.TestWarehouseName),
+ Config: viewConfigWithGrants(viewId, tableId, "*"),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr("data.snowflake_grants.grants", "grants.#", "2"),
resource.TestCheckResourceAttr("data.snowflake_grants.grants", "grants.1.privilege", "SELECT"),
@@ -626,7 +745,7 @@ func TestAcc_View_copyGrants(t *testing.T) {
t.Setenv(string(testenvs.ConfigureClientOnce), "")
id := acc.TestClient().Ids.RandomSchemaObjectIdentifier()
statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES"
- viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"})
+ viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement)
resource.Test(t, resource.TestCase{
ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
PreCheck: func() { acc.TestAccPreCheck(t) },
@@ -636,19 +755,10 @@ func TestAcc_View_copyGrants(t *testing.T) {
CheckDestroy: acc.CheckDestroy(t, resources.View),
Steps: []resource.TestStep{
{
- Config: accconfig.FromModel(t, viewModel.WithCopyGrants(true)) + useWarehouseConfig(acc.TestWarehouseName),
- ExpectError: regexp.MustCompile("all of `copy_grants,or_replace` must be specified"),
- },
- {
- Config: accconfig.FromModel(t, viewModel.WithCopyGrants(true).WithOrReplace(true)) + useWarehouseConfig(acc.TestWarehouseName),
- Check: resource.ComposeAggregateTestCheckFunc(
- resource.TestCheckResourceAttr("snowflake_view.test", "name", id.Name()),
- ),
- },
- {
- Config: accconfig.FromModel(t, viewModel.WithCopyGrants(false).WithOrReplace(true)) + useWarehouseConfig(acc.TestWarehouseName),
+ Config: accconfig.FromModel(t, viewModel.WithCopyGrants(true)),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr("snowflake_view.test", "name", id.Name()),
+ resource.TestCheckResourceAttr("snowflake_view.test", "copy_grants", "true"),
),
},
},
@@ -672,7 +782,7 @@ func TestAcc_View_Issue2640(t *testing.T) {
CheckDestroy: acc.CheckDestroy(t, resources.View),
Steps: []resource.TestStep{
{
- Config: viewConfigWithMultilineUnionStatement(id, part1, part2) + useWarehouseConfig(acc.TestWarehouseName),
+ Config: viewConfigWithMultilineUnionStatement(id, part1, part2),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr("snowflake_view.test", "name", id.Name()),
resource.TestCheckResourceAttr("snowflake_view.test", "statement", statement),
@@ -698,8 +808,8 @@ func TestAcc_View_Issue2640(t *testing.T) {
},
ResourceName: "snowflake_view.test",
ImportState: true,
- ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "name", id.Name())),
- resourceassert.ImportedViewResource(t, helpers.EncodeSnowflakeID(id)).
+ ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "name", id.Name())),
+ resourceassert.ImportedViewResource(t, helpers.EncodeResourceIdentifier(id)).
HasNameString(id.Name()).
HasStatementString(statement).
HasDatabaseString(id.DatabaseName()).
@@ -717,7 +827,7 @@ func TestAcc_view_migrateFromVersion_0_94_1(t *testing.T) {
id := acc.TestClient().Ids.RandomSchemaObjectIdentifier()
resourceName := "snowflake_view.test"
statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES"
- viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"})
+ viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement)
tag, tagCleanup := acc.TestClient().Tag.CreateTag(t)
t.Cleanup(tagCleanup)
@@ -741,36 +851,30 @@ func TestAcc_view_migrateFromVersion_0_94_1(t *testing.T) {
resource.TestCheckResourceAttr(resourceName, "tag.#", "1"),
resource.TestCheckResourceAttr(resourceName, "tag.0.name", tag.Name),
resource.TestCheckResourceAttr(resourceName, "tag.0.value", "foo"),
+ resource.TestCheckResourceAttr(resourceName, "or_replace", "true"),
),
},
{
ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
- Config: accconfig.FromModel(t, viewModel) + useWarehouseConfig(acc.TestWarehouseName),
+ Config: accconfig.FromModel(t, viewModel),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "name", id.Name()),
resource.TestCheckNoResourceAttr(resourceName, "tag.#"),
+ resource.TestCheckNoResourceAttr(resourceName, "or_replace"),
),
},
},
})
}
-func useWarehouseConfig(name string) string {
- return fmt.Sprintf(`
-resource "snowflake_unsafe_execute" "use_warehouse" {
- execute = "USE WAREHOUSE \"%s\""
- revert = "SELECT 1"
-}
-`, name)
-}
-
func viewv_0_94_1_WithTags(id sdk.SchemaObjectIdentifier, tagSchema, tagName, tagValue, statement string) string {
s := `
resource "snowflake_view" "test" {
name = "%[1]s"
database = "%[2]s"
- schema = "%[6]s"
+ schema = "%[6]s"
statement = "%[7]s"
+ or_replace = true
tag {
name = "%[4]s"
value = "%[5]s"
@@ -801,10 +905,8 @@ resource "snowflake_view" "test" {
database = "%[1]s"
schema = "%[2]s"
statement = "select %[5]s from \"%[1]s\".\"%[2]s\".\"${snowflake_table.table.name}\""
- or_replace = true
copy_grants = true
is_secure = true
- depends_on = [snowflake_unsafe_execute.use_warehouse, snowflake_table.table]
}
resource "snowflake_account_role" "test" {
@@ -821,7 +923,7 @@ resource "snowflake_grant_privileges_to_account_role" "grant" {
}
data "snowflake_grants" "grants" {
- depends_on = [snowflake_grant_privileges_to_account_role.grant, snowflake_view.test, snowflake_unsafe_execute.use_warehouse]
+ depends_on = [snowflake_grant_privileges_to_account_role.grant, snowflake_view.test]
grants_on {
object_name = "\"%[1]s\".\"%[2]s\".\"${snowflake_view.test.name}\""
object_type = "VIEW"
@@ -842,7 +944,6 @@ resource "snowflake_view" "test" {
%[5]s
SQL
is_secure = true
- depends_on = [snowflake_unsafe_execute.use_warehouse]
}
`, id.DatabaseName(), id.SchemaName(), id.Name(), part1, part2)
}
diff --git a/pkg/resources/view_state_upgraders.go b/pkg/resources/view_state_upgraders.go
index f48b54c568..d8a2366684 100644
--- a/pkg/resources/view_state_upgraders.go
+++ b/pkg/resources/view_state_upgraders.go
@@ -15,6 +15,7 @@ func v0_94_1_ViewStateUpgrader(ctx context.Context, rawState map[string]any, met
}
delete(rawState, "tag")
+ delete(rawState, "or_replace")
- return rawState, nil
+ return migratePipeSeparatedObjectIdentifierResourceIdToFullyQualifiedName(ctx, rawState, meta)
}
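For readers unfamiliar with the id migration referenced above, here is a conceptual sketch of the resource id format change. The helper below is hypothetical; the actual logic lives in `migratePipeSeparatedObjectIdentifierResourceIdToFullyQualifiedName`, and the exact old/new formats are assumptions based on the helper's name and the `helpers.EncodeSnowflakeID` to `helpers.EncodeResourceIdentifier` switch in the tests.

```go
package example // conceptual sketch only; the provider's helper differs in detail

import (
	"fmt"
	"strings"
)

// pipeSeparatedToFullyQualified illustrates, under the assumption that the old
// resource id looked like "db|schema|name", how such an id maps to the fully
// qualified form `"db"."schema"."name"` used after the state upgrade.
func pipeSeparatedToFullyQualified(oldId string) string {
	parts := strings.Split(oldId, "|")
	quoted := make([]string, len(parts))
	for i, p := range parts {
		quoted[i] = fmt.Sprintf("%q", p)
	}
	return strings.Join(quoted, ".")
}
```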
diff --git a/pkg/sdk/client.go b/pkg/sdk/client.go
index fe50185a18..c541793ef5 100644
--- a/pkg/sdk/client.go
+++ b/pkg/sdk/client.go
@@ -39,54 +39,55 @@ type Client struct {
ReplicationFunctions ReplicationFunctions
// DDL Commands
- Accounts Accounts
- Alerts Alerts
- ApiIntegrations ApiIntegrations
- ApplicationPackages ApplicationPackages
- ApplicationRoles ApplicationRoles
- Applications Applications
- Comments Comments
- CortexSearchServices CortexSearchServices
- DatabaseRoles DatabaseRoles
- Databases Databases
- DynamicTables DynamicTables
- ExternalFunctions ExternalFunctions
- ExternalTables ExternalTables
- EventTables EventTables
- FailoverGroups FailoverGroups
- FileFormats FileFormats
- Functions Functions
- Grants Grants
- ManagedAccounts ManagedAccounts
- MaskingPolicies MaskingPolicies
- MaterializedViews MaterializedViews
- NetworkPolicies NetworkPolicies
- NetworkRules NetworkRules
- NotificationIntegrations NotificationIntegrations
- Parameters Parameters
- PasswordPolicies PasswordPolicies
- Pipes Pipes
- PolicyReferences PolicyReferences
- Procedures Procedures
- ResourceMonitors ResourceMonitors
- Roles Roles
- RowAccessPolicies RowAccessPolicies
- Schemas Schemas
- SecurityIntegrations SecurityIntegrations
- Sequences Sequences
- SessionPolicies SessionPolicies
- Sessions Sessions
- Shares Shares
- Stages Stages
- StorageIntegrations StorageIntegrations
- Streamlits Streamlits
- Streams Streams
- Tables Tables
- Tags Tags
- Tasks Tasks
- Users Users
- Views Views
- Warehouses Warehouses
+ Accounts Accounts
+ Alerts Alerts
+ ApiIntegrations ApiIntegrations
+ ApplicationPackages ApplicationPackages
+ ApplicationRoles ApplicationRoles
+ Applications Applications
+ Comments Comments
+ CortexSearchServices CortexSearchServices
+ DatabaseRoles DatabaseRoles
+ Databases Databases
+ DataMetricFunctionReferences DataMetricFunctionReferences
+ DynamicTables DynamicTables
+ ExternalFunctions ExternalFunctions
+ ExternalTables ExternalTables
+ EventTables EventTables
+ FailoverGroups FailoverGroups
+ FileFormats FileFormats
+ Functions Functions
+ Grants Grants
+ ManagedAccounts ManagedAccounts
+ MaskingPolicies MaskingPolicies
+ MaterializedViews MaterializedViews
+ NetworkPolicies NetworkPolicies
+ NetworkRules NetworkRules
+ NotificationIntegrations NotificationIntegrations
+ Parameters Parameters
+ PasswordPolicies PasswordPolicies
+ Pipes Pipes
+ PolicyReferences PolicyReferences
+ Procedures Procedures
+ ResourceMonitors ResourceMonitors
+ Roles Roles
+ RowAccessPolicies RowAccessPolicies
+ Schemas Schemas
+ SecurityIntegrations SecurityIntegrations
+ Sequences Sequences
+ SessionPolicies SessionPolicies
+ Sessions Sessions
+ Shares Shares
+ Stages Stages
+ StorageIntegrations StorageIntegrations
+ Streamlits Streamlits
+ Streams Streams
+ Tables Tables
+ Tags Tags
+ Tasks Tasks
+ Users Users
+ Views Views
+ Warehouses Warehouses
}
func (c *Client) GetAccountLocator() string {
@@ -205,6 +206,7 @@ func (c *Client) initialize() {
c.CortexSearchServices = &cortexSearchServices{client: c}
c.DatabaseRoles = &databaseRoles{client: c}
c.Databases = &databases{client: c}
+ c.DataMetricFunctionReferences = &dataMetricFunctionReferences{client: c}
c.DynamicTables = &dynamicTables{client: c}
c.ExternalFunctions = &externalFunctions{client: c}
c.ExternalTables = &externalTables{client: c}
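For context, a minimal sketch of how the new `DataMetricFunctionReferences` client field might be consumed. It assumes a configured `*sdk.Client`; the database, schema, and view names are illustrative placeholders.

```go
package example // illustrative sketch, not part of the provider

import (
	"context"
	"log"

	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
)

// listViewDMFReferences shows one possible call path for the new client field.
// The identifier values are hypothetical.
func listViewDMFReferences(ctx context.Context, client *sdk.Client) error {
	viewId := sdk.NewSchemaObjectIdentifier("MY_DB", "MY_SCHEMA", "MY_VIEW")
	refs, err := client.DataMetricFunctionReferences.GetForEntity(
		ctx,
		sdk.NewGetForEntityDataMetricFunctionReferenceRequest(viewId, sdk.DataMetricFuncionRefEntityDomainView),
	)
	if err != nil {
		return err
	}
	for _, ref := range refs {
		log.Printf("%s.%s.%s schedule=%q status=%s",
			ref.MetricDatabaseName, ref.MetricSchemaName, ref.MetricName, ref.Schedule, ref.ScheduleStatus)
	}
	return nil
}
```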
diff --git a/pkg/sdk/data_metric_function_references_def.go b/pkg/sdk/data_metric_function_references_def.go
new file mode 100644
index 0000000000..ea24d761b3
--- /dev/null
+++ b/pkg/sdk/data_metric_function_references_def.go
@@ -0,0 +1,134 @@
+package sdk
+
+import (
+ "fmt"
+ "strings"
+
+ g "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/poc/generator"
+)
+
+//go:generate go run ./poc/main.go
+
+type DataMetricFuncionRefEntityDomainOption string
+
+const (
+ DataMetricFuncionRefEntityDomainView DataMetricFuncionRefEntityDomainOption = "VIEW"
+)
+
+type DataMetricScheduleStatusOption string
+
+const (
+ DataMetricScheduleStatusStarted DataMetricScheduleStatusOption = "STARTED"
+ DataMetricScheduleStatusStartedAndPendingScheduleUpdate DataMetricScheduleStatusOption = "STARTED_AND_PENDING_SCHEDULE_UPDATE"
+ DataMetricScheduleStatusSuspended DataMetricScheduleStatusOption = "SUSPENDED"
+ DataMetricScheduleStatusSuspendedTableDoesNotExistOrNotAuthorized DataMetricScheduleStatusOption = "SUSPENDED_TABLE_DOES_NOT_EXIST_OR_NOT_AUTHORIZED"
+ DataMetricScheduleStatusSuspendedDataMetricFunctionDoesNotExistOrNotAuthorized DataMetricScheduleStatusOption = "SUSPENDED_DATA_METRIC_FUNCTION_DOES_NOT_EXIST_OR_NOT_AUTHORIZED"
+ DataMetricScheduleStatusSuspendedTableColumnDoesNotExistOrNotAuthorized DataMetricScheduleStatusOption = "SUSPENDED_TABLE_COLUMN_DOES_NOT_EXIST_OR_NOT_AUTHORIZED"
+ DataMetricScheduleStatusSuspendedInsufficientPrivilegeToExecuteDataMetricFunction DataMetricScheduleStatusOption = "SUSPENDED_INSUFFICIENT_PRIVILEGE_TO_EXECUTE_DATA_METRIC_FUNCTION"
+ DataMetricScheduleStatusSuspendedActiveEventTableDoesNotExistOrNotAuthorized DataMetricScheduleStatusOption = "SUSPENDED_ACTIVE_EVENT_TABLE_DOES_NOT_EXIST_OR_NOT_AUTHORIZED"
+ DataMetricScheduleStatusSuspendedByUserAction DataMetricScheduleStatusOption = "SUSPENDED_BY_USER_ACTION"
+)
+
+var AllAllowedDataMetricScheduleStatusOptions = []DataMetricScheduleStatusOption{
+ DataMetricScheduleStatusStarted,
+ DataMetricScheduleStatusSuspended,
+}
+
+var AllDataMetricScheduleStatusStartedOptions = []DataMetricScheduleStatusOption{
+ DataMetricScheduleStatusStarted,
+ DataMetricScheduleStatusStartedAndPendingScheduleUpdate,
+}
+
+var AllDataMetricScheduleStatusSuspendedOptions = []DataMetricScheduleStatusOption{
+ DataMetricScheduleStatusSuspended,
+ DataMetricScheduleStatusSuspendedTableDoesNotExistOrNotAuthorized,
+ DataMetricScheduleStatusSuspendedDataMetricFunctionDoesNotExistOrNotAuthorized,
+ DataMetricScheduleStatusSuspendedTableColumnDoesNotExistOrNotAuthorized,
+ DataMetricScheduleStatusSuspendedInsufficientPrivilegeToExecuteDataMetricFunction,
+ DataMetricScheduleStatusSuspendedActiveEventTableDoesNotExistOrNotAuthorized,
+}
+
+func ToAllowedDataMetricScheduleStatusOption(s string) (DataMetricScheduleStatusOption, error) {
+ s = strings.ToUpper(s)
+ switch s {
+ case string(DataMetricScheduleStatusStarted):
+ return DataMetricScheduleStatusStarted, nil
+ case string(DataMetricScheduleStatusSuspended):
+ return DataMetricScheduleStatusSuspended, nil
+ default:
+ return "", fmt.Errorf("invalid DataMetricScheduleStatusOption: %s", s)
+ }
+}
+
+func ToDataMetricScheduleStatusOption(s string) (DataMetricScheduleStatusOption, error) {
+ s = strings.ToUpper(s)
+ switch s {
+ case string(DataMetricScheduleStatusStarted):
+ return DataMetricScheduleStatusStarted, nil
+ case string(DataMetricScheduleStatusStartedAndPendingScheduleUpdate):
+ return DataMetricScheduleStatusStartedAndPendingScheduleUpdate, nil
+ case string(DataMetricScheduleStatusSuspended):
+ return DataMetricScheduleStatusSuspended, nil
+ case string(DataMetricScheduleStatusSuspendedTableDoesNotExistOrNotAuthorized):
+ return DataMetricScheduleStatusSuspendedTableDoesNotExistOrNotAuthorized, nil
+ case string(DataMetricScheduleStatusSuspendedDataMetricFunctionDoesNotExistOrNotAuthorized):
+ return DataMetricScheduleStatusSuspendedDataMetricFunctionDoesNotExistOrNotAuthorized, nil
+ case string(DataMetricScheduleStatusSuspendedTableColumnDoesNotExistOrNotAuthorized):
+ return DataMetricScheduleStatusSuspendedTableColumnDoesNotExistOrNotAuthorized, nil
+ case string(DataMetricScheduleStatusSuspendedInsufficientPrivilegeToExecuteDataMetricFunction):
+ return DataMetricScheduleStatusSuspendedInsufficientPrivilegeToExecuteDataMetricFunction, nil
+ case string(DataMetricScheduleStatusSuspendedActiveEventTableDoesNotExistOrNotAuthorized):
+ return DataMetricScheduleStatusSuspendedActiveEventTableDoesNotExistOrNotAuthorized, nil
+ case string(DataMetricScheduleStatusSuspendedByUserAction):
+ return DataMetricScheduleStatusSuspendedByUserAction, nil
+ default:
+ return "", fmt.Errorf("invalid DataMetricScheduleStatusOption: %s", s)
+ }
+}
+
+var DataMetricFunctionReferenceDef = g.NewInterface(
+ "DataMetricFunctionReferences",
+ "DataMetricFunctionReference",
+ g.KindOfT[SchemaObjectIdentifier](),
+).CustomOperation(
+ "GetForEntity",
+ "https://docs.snowflake.com/en/sql-reference/functions/data_metric_function_references",
+ g.NewQueryStruct("GetForEntity").
+ SQL("SELECT * FROM TABLE(REF_ENTITY_NAME => ").
+ Identifier("refEntityName", g.KindOfT[SchemaObjectIdentifier](), g.IdentifierOptions().Required()).
+ SQL(", ").
+ Assignment(
+ "REF_ENTITY_DOMAIN",
+ g.KindOfT[DataMetricFuncionRefEntityDomainOption](),
+ g.ParameterOptions().SingleQuotes().ArrowEquals().Required(),
+ ).
+ SQL(")"),
+ g.DbStruct("dataMetricFunctionReferencesRow").
+ Text("metric_database_name").
+ Text("metric_schema_name").
+ Text("metric_name").
+ Text("argument_signature").
+ Text("data_type").
+ Text("ref_database_name").
+ Text("ref_schema_name").
+ Text("ref_entity_name").
+ Text("ref_entity_domain").
+ Text("ref_arguments").
+ Text("ref_id").
+ Text("schedule").
+ Text("schedule_status"),
+ g.PlainStruct("DataMetricFunctionReference").
+ Text("MetricDatabaseName").
+ Text("MetricSchemaName").
+ Text("MetricName").
+ Text("ArgumentSignature").
+ Text("DataType").
+ Text("RefDatabaseName").
+ Text("RefSchemaName").
+ Text("RefEntityName").
+ Text("RefEntityDomain").
+ Text("RefArguments").
+ Text("RefId").
+ Text("Schedule").
+ Text("ScheduleStatus"),
+)
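A short usage sketch of the two status conversion helpers defined above. The distinction is that only STARTED and SUSPENDED are accepted from user input, while the wider helper also parses the detailed statuses Snowflake reports back. The input strings are illustrative.

```go
package example // illustrative sketch

import (
	"fmt"

	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
)

func normalizeScheduleStatus(raw string) error {
	// Only STARTED and SUSPENDED are accepted from user configuration.
	allowed, err := sdk.ToAllowedDataMetricScheduleStatusOption(raw)
	if err != nil {
		return err
	}
	fmt.Println("configured status:", allowed)

	// Values coming back from Snowflake may carry extra detail,
	// e.g. STARTED_AND_PENDING_SCHEDULE_UPDATE, and are parsed with the wider helper.
	full, err := sdk.ToDataMetricScheduleStatusOption("STARTED_AND_PENDING_SCHEDULE_UPDATE")
	if err != nil {
		return err
	}
	fmt.Println("reported status:", full)
	return nil
}
```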
diff --git a/pkg/sdk/data_metric_function_references_dto_builders_gen.go b/pkg/sdk/data_metric_function_references_dto_builders_gen.go
new file mode 100644
index 0000000000..a78dd8844a
--- /dev/null
+++ b/pkg/sdk/data_metric_function_references_dto_builders_gen.go
@@ -0,0 +1,15 @@
+// Code generated by dto builder generator; DO NOT EDIT.
+
+package sdk
+
+import ()
+
+func NewGetForEntityDataMetricFunctionReferenceRequest(
+ refEntityName ObjectIdentifier,
+ RefEntityDomain DataMetricFuncionRefEntityDomainOption,
+) *GetForEntityDataMetricFunctionReferenceRequest {
+ s := GetForEntityDataMetricFunctionReferenceRequest{}
+ s.refEntityName = refEntityName
+ s.RefEntityDomain = RefEntityDomain
+ return &s
+}
diff --git a/pkg/sdk/data_metric_function_references_dto_gen.go b/pkg/sdk/data_metric_function_references_dto_gen.go
new file mode 100644
index 0000000000..68006a0545
--- /dev/null
+++ b/pkg/sdk/data_metric_function_references_dto_gen.go
@@ -0,0 +1,10 @@
+package sdk
+
+//go:generate go run ./dto-builder-generator/main.go
+
+var _ optionsProvider[GetForEntityDataMetricFunctionReferenceOptions] = new(GetForEntityDataMetricFunctionReferenceRequest)
+
+type GetForEntityDataMetricFunctionReferenceRequest struct {
+ refEntityName ObjectIdentifier // required
+ RefEntityDomain DataMetricFuncionRefEntityDomainOption // required
+}
diff --git a/pkg/sdk/data_metric_function_references_gen.go b/pkg/sdk/data_metric_function_references_gen.go
new file mode 100644
index 0000000000..7170ae8f59
--- /dev/null
+++ b/pkg/sdk/data_metric_function_references_gen.go
@@ -0,0 +1,85 @@
+package sdk
+
+import (
+ "context"
+ "encoding/json"
+ "log"
+ "strings"
+)
+
+type DataMetricFunctionReferences interface {
+ GetForEntity(ctx context.Context, request *GetForEntityDataMetricFunctionReferenceRequest) ([]DataMetricFunctionReference, error)
+}
+
+// GetForEntityDataMetricFunctionReferenceOptions is based on https://docs.snowflake.com/en/sql-reference/functions/data_metric_function_references.
+type GetForEntityDataMetricFunctionReferenceOptions struct {
+ selectEverythingFrom bool `ddl:"static" sql:"SELECT * FROM TABLE"`
+ parameters *dataMetricFunctionReferenceParameters `ddl:"list,parentheses,no_comma"`
+}
+type dataMetricFunctionReferenceParameters struct {
+ functionFullyQualifiedName bool `ddl:"static" sql:"SNOWFLAKE.INFORMATION_SCHEMA.DATA_METRIC_FUNCTION_REFERENCES"`
+ arguments *dataMetricFunctionReferenceFunctionArguments `ddl:"list,parentheses"`
+}
+type dataMetricFunctionReferenceFunctionArguments struct {
+ refEntityName []ObjectIdentifier `ddl:"parameter,single_quotes,arrow_equals" sql:"REF_ENTITY_NAME"`
+ refEntityDomain *DataMetricFuncionRefEntityDomainOption `ddl:"parameter,single_quotes,arrow_equals" sql:"REF_ENTITY_DOMAIN"`
+}
+
+type dataMetricFunctionReferencesRow struct {
+ MetricDatabaseName string `db:"METRIC_DATABASE_NAME"`
+ MetricSchemaName string `db:"METRIC_SCHEMA_NAME"`
+ MetricName string `db:"METRIC_NAME"`
+ ArgumentSignature string `db:"METRIC_SIGNATURE"`
+ DataType string `db:"METRIC_DATA_TYPE"`
+ RefDatabaseName string `db:"REF_ENTITY_DATABASE_NAME"`
+ RefSchemaName string `db:"REF_ENTITY_SCHEMA_NAME"`
+ RefEntityName string `db:"REF_ENTITY_NAME"`
+ RefEntityDomain string `db:"REF_ENTITY_DOMAIN"`
+ RefArguments string `db:"REF_ARGUMENTS"`
+ RefId string `db:"REF_ID"`
+ Schedule string `db:"SCHEDULE"`
+ ScheduleStatus string `db:"SCHEDULE_STATUS"`
+}
+
+type DataMetricFunctionRefArgument struct {
+ Domain string `json:"domain"`
+ Id string `json:"id"`
+ Name string `json:"name"`
+}
+type DataMetricFunctionReference struct {
+ MetricDatabaseName string
+ MetricSchemaName string
+ MetricName string
+ ArgumentSignature string
+ DataType string
+ RefEntityDatabaseName string
+ RefEntitySchemaName string
+ RefEntityName string
+ RefEntityDomain string
+ RefArguments []DataMetricFunctionRefArgument
+ RefId string
+ Schedule string
+ ScheduleStatus string
+}
+
+func (row dataMetricFunctionReferencesRow) convert() *DataMetricFunctionReference {
+ x := &DataMetricFunctionReference{
+ MetricDatabaseName: strings.Trim(row.MetricDatabaseName, `"`),
+ MetricSchemaName: strings.Trim(row.MetricSchemaName, `"`),
+ MetricName: strings.Trim(row.MetricName, `"`),
+ ArgumentSignature: row.ArgumentSignature,
+ DataType: row.DataType,
+ RefEntityDatabaseName: strings.Trim(row.RefDatabaseName, `"`),
+ RefEntitySchemaName: strings.Trim(row.RefSchemaName, `"`),
+ RefEntityName: strings.Trim(row.RefEntityName, `"`),
+ RefEntityDomain: row.RefEntityDomain,
+ RefId: row.RefId,
+ Schedule: row.Schedule,
+ ScheduleStatus: row.ScheduleStatus,
+ }
+ err := json.Unmarshal([]byte(row.RefArguments), &x.RefArguments)
+ if err != nil {
+ log.Println(err)
+ }
+ return x
+}
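A minimal standalone sketch of the REF_ARGUMENTS parsing that `convert()` performs above. The JSON payload is an assumed shape for a DMF attached to a single column, not captured from real Snowflake output, and the local struct simply mirrors `sdk.DataMetricFunctionRefArgument`.

```go
package main // illustrative sketch

import (
	"encoding/json"
	"fmt"
)

// refArgument mirrors sdk.DataMetricFunctionRefArgument for a standalone example.
type refArgument struct {
	Domain string `json:"domain"`
	Id     string `json:"id"`
	Name   string `json:"name"`
}

func main() {
	// Assumed example of what REF_ARGUMENTS might contain.
	raw := `[{"domain":"COLUMN","id":"1","name":"ID"}]`
	var args []refArgument
	if err := json.Unmarshal([]byte(raw), &args); err != nil {
		fmt.Println("unparsable REF_ARGUMENTS:", err) // convert() only logs this case
		return
	}
	fmt.Printf("%+v\n", args)
}
```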
diff --git a/pkg/sdk/data_metric_function_references_gen_test.go b/pkg/sdk/data_metric_function_references_gen_test.go
new file mode 100644
index 0000000000..db7d737167
--- /dev/null
+++ b/pkg/sdk/data_metric_function_references_gen_test.go
@@ -0,0 +1,56 @@
+package sdk
+
+import "testing"
+
+func TestDataMetricFunctionReferences_GetForEntity(t *testing.T) {
+ t.Run("validation: nil options", func(t *testing.T) {
+ var opts *GetForEntityDataMetricFunctionReferenceOptions
+ assertOptsInvalidJoinedErrors(t, opts, ErrNilOptions)
+ })
+
+ t.Run("validation: missing parameters", func(t *testing.T) {
+ opts := &GetForEntityDataMetricFunctionReferenceOptions{}
+ assertOptsInvalidJoinedErrors(t, opts, errNotSet("GetForEntityDataMetricFunctionReferenceOptions", "parameters"))
+ })
+
+ t.Run("validation: missing arguments", func(t *testing.T) {
+ opts := &GetForEntityDataMetricFunctionReferenceOptions{
+ parameters: &dataMetricFunctionReferenceParameters{},
+ }
+ assertOptsInvalidJoinedErrors(t, opts, errNotSet("dataMetricFunctionReferenceParameters", "arguments"))
+ })
+
+ t.Run("validation: missing refEntityName", func(t *testing.T) {
+ opts := &GetForEntityDataMetricFunctionReferenceOptions{
+ parameters: &dataMetricFunctionReferenceParameters{
+ arguments: &dataMetricFunctionReferenceFunctionArguments{
+ refEntityDomain: Pointer(DataMetricFuncionRefEntityDomainView),
+ },
+ },
+ }
+ assertOptsInvalidJoinedErrors(t, opts, errNotSet("dataMetricFunctionReferenceFunctionArguments", "refEntityName"))
+ })
+
+ t.Run("validation: missing refEntityDomain", func(t *testing.T) {
+ opts := &GetForEntityDataMetricFunctionReferenceOptions{
+ parameters: &dataMetricFunctionReferenceParameters{
+ arguments: &dataMetricFunctionReferenceFunctionArguments{
+ refEntityName: []ObjectIdentifier{NewSchemaObjectIdentifier("a", "b", "c")},
+ },
+ },
+ }
+ assertOptsInvalidJoinedErrors(t, opts, errNotSet("dataMetricFunctionReferenceFunctionArguments", "refEntityDomain"))
+ })
+
+ t.Run("view domain", func(t *testing.T) {
+ opts := &GetForEntityDataMetricFunctionReferenceOptions{
+ parameters: &dataMetricFunctionReferenceParameters{
+ arguments: &dataMetricFunctionReferenceFunctionArguments{
+ refEntityName: []ObjectIdentifier{NewSchemaObjectIdentifier("a", "b", "c")},
+ refEntityDomain: Pointer(DataMetricFuncionRefEntityDomainView),
+ },
+ },
+ }
+ assertOptsValidAndSQLEquals(t, opts, `SELECT * FROM TABLE (SNOWFLAKE.INFORMATION_SCHEMA.DATA_METRIC_FUNCTION_REFERENCES (REF_ENTITY_NAME => '\"a\".\"b\".\"c\"', REF_ENTITY_DOMAIN => 'VIEW'))`)
+ })
+}
diff --git a/pkg/sdk/data_metric_function_references_impl_gen.go b/pkg/sdk/data_metric_function_references_impl_gen.go
new file mode 100644
index 0000000000..c44e74eac5
--- /dev/null
+++ b/pkg/sdk/data_metric_function_references_impl_gen.go
@@ -0,0 +1,33 @@
+package sdk
+
+import (
+ "context"
+)
+
+var _ DataMetricFunctionReferences = (*dataMetricFunctionReferences)(nil)
+
+type dataMetricFunctionReferences struct {
+ client *Client
+}
+
+func (v *dataMetricFunctionReferences) GetForEntity(ctx context.Context, request *GetForEntityDataMetricFunctionReferenceRequest) ([]DataMetricFunctionReference, error) {
+ opts := request.toOpts()
+ dbRows, err := validateAndQuery[dataMetricFunctionReferencesRow](v.client, ctx, opts)
+ if err != nil {
+ return nil, err
+ }
+ resultList := convertRows[dataMetricFunctionReferencesRow, DataMetricFunctionReference](dbRows)
+ return resultList, nil
+}
+
+func (r *GetForEntityDataMetricFunctionReferenceRequest) toOpts() *GetForEntityDataMetricFunctionReferenceOptions {
+ opts := &GetForEntityDataMetricFunctionReferenceOptions{
+ parameters: &dataMetricFunctionReferenceParameters{
+ arguments: &dataMetricFunctionReferenceFunctionArguments{
+ refEntityName: []ObjectIdentifier{r.refEntityName},
+ refEntityDomain: Pointer(r.RefEntityDomain),
+ },
+ },
+ }
+ return opts
+}
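
A hedged usage sketch for the new `DataMetricFunctionReferences.GetForEntity` entry point, assuming an already-configured `*sdk.Client` and a view identifier; it mirrors the call made in the integration test later in this diff, with simplified error handling.

```go
// Sketch only; client construction and credentials are assumed.
package snippets

import (
	"context"
	"fmt"

	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
)

func listViewDMFs(ctx context.Context, client *sdk.Client, viewId sdk.SchemaObjectIdentifier) error {
	refs, err := client.DataMetricFunctionReferences.GetForEntity(
		ctx,
		sdk.NewGetForEntityDataMetricFunctionReferenceRequest(viewId, sdk.DataMetricFuncionRefEntityDomainView),
	)
	if err != nil {
		return err
	}
	for _, ref := range refs {
		// Each reference carries the metric's location, its schedule, and the schedule status.
		fmt.Printf("%s.%s.%s scheduled with %q (%s)\n",
			ref.MetricDatabaseName, ref.MetricSchemaName, ref.MetricName, ref.Schedule, ref.ScheduleStatus)
	}
	return nil
}
```
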
diff --git a/pkg/sdk/data_metric_function_references_validations_gen.go b/pkg/sdk/data_metric_function_references_validations_gen.go
new file mode 100644
index 0000000000..395a1f323c
--- /dev/null
+++ b/pkg/sdk/data_metric_function_references_validations_gen.go
@@ -0,0 +1,27 @@
+package sdk
+
+import "errors"
+
+var _ validatable = new(GetForEntityDataMetricFunctionReferenceOptions)
+
+func (opts *GetForEntityDataMetricFunctionReferenceOptions) validate() error {
+ if opts == nil {
+ return ErrNilOptions
+ }
+ var errs []error
+ if !valueSet(opts.parameters) {
+ errs = append(errs, errNotSet("GetForEntityDataMetricFunctionReferenceOptions", "parameters"))
+ } else {
+ if !valueSet(opts.parameters.arguments) {
+ errs = append(errs, errNotSet("dataMetricFunctionReferenceParameters", "arguments"))
+ } else {
+ if opts.parameters.arguments.refEntityDomain == nil {
+ errs = append(errs, errNotSet("dataMetricFunctionReferenceFunctionArguments", "refEntityDomain"))
+ }
+ if opts.parameters.arguments.refEntityName == nil {
+ errs = append(errs, errNotSet("dataMetricFunctionReferenceFunctionArguments", "refEntityName"))
+ }
+ }
+ }
+ return errors.Join(errs...)
+}
diff --git a/pkg/sdk/poc/main.go b/pkg/sdk/poc/main.go
index f8f1014bdb..e62a113123 100644
--- a/pkg/sdk/poc/main.go
+++ b/pkg/sdk/poc/main.go
@@ -17,31 +17,32 @@ import (
)
var definitionMapping = map[string]*generator.Interface{
- "database_role_def.go": example.DatabaseRole,
- "network_policies_def.go": sdk.NetworkPoliciesDef,
- "session_policies_def.go": sdk.SessionPoliciesDef,
- "tasks_def.go": sdk.TasksDef,
- "streams_def.go": sdk.StreamsDef,
- "application_roles_def.go": sdk.ApplicationRolesDef,
- "views_def.go": sdk.ViewsDef,
- "stages_def.go": sdk.StagesDef,
- "functions_def.go": sdk.FunctionsDef,
- "procedures_def.go": sdk.ProceduresDef,
- "event_tables_def.go": sdk.EventTablesDef,
- "application_packages_def.go": sdk.ApplicationPackagesDef,
- "storage_integration_def.go": sdk.StorageIntegrationDef,
- "managed_accounts_def.go": sdk.ManagedAccountsDef,
- "row_access_policies_def.go": sdk.RowAccessPoliciesDef,
- "applications_def.go": sdk.ApplicationsDef,
- "sequences_def.go": sdk.SequencesDef,
- "materialized_views_def.go": sdk.MaterializedViewsDef,
- "api_integrations_def.go": sdk.ApiIntegrationsDef,
- "notification_integrations_def.go": sdk.NotificationIntegrationsDef,
- "external_functions_def.go": sdk.ExternalFunctionsDef,
- "streamlits_def.go": sdk.StreamlitsDef,
- "network_rule_def.go": sdk.NetworkRuleDef,
- "security_integrations_def.go": sdk.SecurityIntegrationsDef,
- "cortex_search_services_def.go": sdk.CortexSearchServiceDef,
+ "database_role_def.go": example.DatabaseRole,
+ "network_policies_def.go": sdk.NetworkPoliciesDef,
+ "session_policies_def.go": sdk.SessionPoliciesDef,
+ "tasks_def.go": sdk.TasksDef,
+ "streams_def.go": sdk.StreamsDef,
+ "application_roles_def.go": sdk.ApplicationRolesDef,
+ "views_def.go": sdk.ViewsDef,
+ "stages_def.go": sdk.StagesDef,
+ "functions_def.go": sdk.FunctionsDef,
+ "procedures_def.go": sdk.ProceduresDef,
+ "event_tables_def.go": sdk.EventTablesDef,
+ "application_packages_def.go": sdk.ApplicationPackagesDef,
+ "storage_integration_def.go": sdk.StorageIntegrationDef,
+ "managed_accounts_def.go": sdk.ManagedAccountsDef,
+ "row_access_policies_def.go": sdk.RowAccessPoliciesDef,
+ "applications_def.go": sdk.ApplicationsDef,
+ "sequences_def.go": sdk.SequencesDef,
+ "materialized_views_def.go": sdk.MaterializedViewsDef,
+ "api_integrations_def.go": sdk.ApiIntegrationsDef,
+ "notification_integrations_def.go": sdk.NotificationIntegrationsDef,
+ "external_functions_def.go": sdk.ExternalFunctionsDef,
+ "streamlits_def.go": sdk.StreamlitsDef,
+ "network_rule_def.go": sdk.NetworkRuleDef,
+ "security_integrations_def.go": sdk.SecurityIntegrationsDef,
+ "cortex_search_services_def.go": sdk.CortexSearchServiceDef,
+ "data_metric_function_references_def.go": sdk.DataMetricFunctionReferenceDef,
}
func main() {
diff --git a/pkg/sdk/policy_references.go b/pkg/sdk/policy_references.go
index 9f5ee04e21..8decc63793 100644
--- a/pkg/sdk/policy_references.go
+++ b/pkg/sdk/policy_references.go
@@ -68,19 +68,20 @@ type policyReferenceFunctionArguments struct {
refEntityDomain *PolicyEntityDomain `ddl:"parameter,single_quotes,arrow_equals" sql:"REF_ENTITY_DOMAIN"`
}
-// TODO: use PolicyKind in PolicyReference
type PolicyKind string
const (
PolicyKindAggregationPolicy PolicyKind = "AGGREGATION_POLICY"
PolicyKindRowAccessPolicy PolicyKind = "ROW_ACCESS_POLICY"
+ PolicyKindPasswordPolicy PolicyKind = "PASSWORD_POLICY"
+ PolicyKindMaskingPolicy PolicyKind = "MASKING_POLICY"
)
type PolicyReference struct {
PolicyDb *string
PolicySchema *string
PolicyName string
- PolicyKind string
+ PolicyKind PolicyKind
RefDatabaseName *string
RefSchemaName *string
RefEntityName string
@@ -113,7 +114,7 @@ type policyReferenceDBRow struct {
func (row policyReferenceDBRow) convert() *PolicyReference {
policyReference := PolicyReference{
PolicyName: row.PolicyName,
- PolicyKind: row.PolicyKind,
+ PolicyKind: PolicyKind(row.PolicyKind),
RefEntityName: row.RefEntityName,
RefEntityDomain: row.RefEntityDomain,
}
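
Since `PolicyReference.PolicyKind` is now typed as `PolicyKind` rather than a raw string, callers can match on the exported constants directly; a small sketch under that assumption:

```go
// Sketch only; the helper name is hypothetical.
package snippets

import "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"

func describePolicyKind(ref sdk.PolicyReference) string {
	switch ref.PolicyKind {
	case sdk.PolicyKindMaskingPolicy:
		return "masking policy"
	case sdk.PolicyKindRowAccessPolicy:
		return "row access policy"
	case sdk.PolicyKindAggregationPolicy:
		return "aggregation policy"
	case sdk.PolicyKindPasswordPolicy:
		return "password policy"
	default:
		return string(ref.PolicyKind)
	}
}
```
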
diff --git a/pkg/sdk/testint/data_metric_function_references_gen_integration_test.go b/pkg/sdk/testint/data_metric_function_references_gen_integration_test.go
new file mode 100644
index 0000000000..cf10780f10
--- /dev/null
+++ b/pkg/sdk/testint/data_metric_function_references_gen_integration_test.go
@@ -0,0 +1,50 @@
+package testint
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+
+ "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
+ "github.com/stretchr/testify/require"
+)
+
+func TestInt_DataMetricFunctionReferences(t *testing.T) {
+ client := testClient(t)
+ ctx := testContext(t)
+
+ t.Run("view domain", func(t *testing.T) {
+ functionId := sdk.NewSchemaObjectIdentifier("SNOWFLAKE", "CORE", "BLANK_COUNT")
+ statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES"
+ view, viewCleanup := testClientHelper().View.CreateView(t, statement)
+ t.Cleanup(viewCleanup)
+
+ // when the schedule is specified as a number of minutes, Snowflake returns a cron expression instead - see SNOW-1640024
+ err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(view.ID()).WithSetDataMetricSchedule(*sdk.NewViewSetDataMetricScheduleRequest("5 MINUTE")))
+ require.NoError(t, err)
+ err = client.Views.Alter(ctx, sdk.NewAlterViewRequest(view.ID()).WithAddDataMetricFunction(*sdk.NewViewAddDataMetricFunctionRequest([]sdk.ViewDataMetricFunction{{
+ DataMetricFunction: functionId,
+ On: []sdk.Column{{Value: "ROLE_NAME"}},
+ }})))
+ require.NoError(t, err)
+
+ dmfs, err := client.DataMetricFunctionReferences.GetForEntity(ctx, sdk.NewGetForEntityDataMetricFunctionReferenceRequest(view.ID(), sdk.DataMetricFuncionRefEntityDomainView))
+ require.NoError(t, err)
+ require.Equal(t, 1, len(dmfs))
+ dmf := dmfs[0]
+ assert.Equal(t, string(sdk.DataMetricFuncionRefEntityDomainView), strings.ToUpper(dmf.RefEntityDomain))
+ assert.Equal(t, functionId.DatabaseName(), dmf.MetricDatabaseName)
+ assert.Equal(t, functionId.SchemaName(), dmf.MetricSchemaName)
+ assert.Equal(t, functionId.Name(), dmf.MetricName)
+ assert.Equal(t, view.ID().DatabaseName(), dmf.RefEntityDatabaseName)
+ assert.Equal(t, view.ID().SchemaName(), dmf.RefEntitySchemaName)
+ assert.Equal(t, view.ID().Name(), dmf.RefEntityName)
+ assert.Equal(t, "TABLE(VARCHAR)", dmf.ArgumentSignature)
+ assert.Equal(t, "NUMBER(38,0)", dmf.DataType)
+ assert.NotEmpty(t, dmf.RefArguments)
+ assert.NotEmpty(t, dmf.RefId)
+ assert.Equal(t, "*/5 * * * * UTC", dmf.Schedule)
+ assert.Equal(t, string(sdk.DataMetricScheduleStatusStarted), dmf.ScheduleStatus)
+ })
+}
diff --git a/pkg/sdk/testint/policy_references_integration_test.go b/pkg/sdk/testint/policy_references_integration_test.go
index c1d0d8bc2e..2f0c09e841 100644
--- a/pkg/sdk/testint/policy_references_integration_test.go
+++ b/pkg/sdk/testint/policy_references_integration_test.go
@@ -35,7 +35,7 @@ func TestInt_PolicyReferences(t *testing.T) {
require.NoError(t, err)
require.Equal(t, 1, len(policyReferences))
require.Equal(t, passwordPolicyId.Name(), policyReferences[0].PolicyName)
- require.Equal(t, "PASSWORD_POLICY", policyReferences[0].PolicyKind)
+ require.Equal(t, sdk.PolicyKindPasswordPolicy, policyReferences[0].PolicyKind)
})
t.Run("tag domain", func(t *testing.T) {
@@ -54,7 +54,7 @@ func TestInt_PolicyReferences(t *testing.T) {
require.NoError(t, err)
require.Equal(t, 1, len(policyReferences))
require.Equal(t, maskingPolicy.ID().Name(), policyReferences[0].PolicyName)
- require.Equal(t, "MASKING_POLICY", policyReferences[0].PolicyKind)
+ require.Equal(t, sdk.PolicyKindMaskingPolicy, policyReferences[0].PolicyKind)
err = client.Tags.Alter(ctx, sdk.NewAlterTagRequest(tag.ID()).WithUnset(
sdk.NewTagUnsetRequest().WithMaskingPolicies([]sdk.SchemaObjectIdentifier{maskingPolicy.ID()}),
diff --git a/pkg/sdk/testint/views_gen_integration_test.go b/pkg/sdk/testint/views_gen_integration_test.go
index 59e77f666b..78003a45d6 100644
--- a/pkg/sdk/testint/views_gen_integration_test.go
+++ b/pkg/sdk/testint/views_gen_integration_test.go
@@ -106,7 +106,7 @@ func TestInt_Views(t *testing.T) {
}
}
- assertDataMetricFunctionReference := func(t *testing.T, dataMetricFunctionReference helpers.DataMetricFunctionReference,
+ assertDataMetricFunctionReference := func(t *testing.T, dataMetricFunctionReference sdk.DataMetricFunctionReference,
viewId sdk.SchemaObjectIdentifier,
schedule string,
) {
@@ -405,12 +405,11 @@ func TestInt_Views(t *testing.T) {
err := client.Views.Alter(ctx, alterRequest)
require.NoError(t, err)
- alteredViewDetails, err := client.Views.Describe(ctx, id)
+ policyReferences, err := testClientHelper().PolicyReferences.GetPolicyReferences(t, view.ID(), sdk.ObjectTypeView)
require.NoError(t, err)
+ require.Len(t, policyReferences, 1)
- assert.Equal(t, 1, len(alteredViewDetails))
- // TODO [SNOW-1348118]: make nicer during the view rework
- assert.Equal(t, maskingPolicy.ID().FullyQualifiedName(), sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(*alteredViewDetails[0].PolicyName).FullyQualifiedName())
+ assertPolicyReference(t, policyReferences[0], maskingPolicy.ID(), "MASKING_POLICY", view.ID(), sdk.Pointer("ID"))
alterRequest = sdk.NewAlterViewRequest(id).WithUnsetMaskingPolicyOnColumn(
*sdk.NewViewUnsetColumnMaskingPolicyRequest("ID"),
@@ -418,11 +417,8 @@ func TestInt_Views(t *testing.T) {
err = client.Views.Alter(ctx, alterRequest)
require.NoError(t, err)
- alteredViewDetails, err = client.Views.Describe(ctx, id)
- require.NoError(t, err)
-
- assert.Equal(t, 1, len(alteredViewDetails))
- assert.Empty(t, alteredViewDetails[0].PolicyName)
+ _, err = testClientHelper().PolicyReferences.GetPolicyReference(t, view.ID(), sdk.ObjectTypeView)
+ require.ErrorContains(t, err, "no rows in result set")
})
t.Run("alter view: set and unset projection policy on column", func(t *testing.T) {
@@ -561,8 +557,8 @@ func TestInt_Views(t *testing.T) {
t.Cleanup(dataMetricFunction2Cleanup)
// set cron schedule
- cron := "5 * * * * UTC"
- alterRequest := sdk.NewAlterViewRequest(id).WithSetDataMetricSchedule(*sdk.NewViewSetDataMetricScheduleRequest().WithUsingCron(sdk.ViewUsingCronRequest{Cron: cron}))
+ cron := "*/5 * * * * UTC"
+ alterRequest := sdk.NewAlterViewRequest(id).WithSetDataMetricSchedule(*sdk.NewViewSetDataMetricScheduleRequest("USING CRON " + cron))
err := client.Views.Alter(ctx, alterRequest)
require.NoError(t, err)
@@ -576,8 +572,7 @@ func TestInt_Views(t *testing.T) {
err = client.Views.Alter(ctx, alterRequest)
require.NoError(t, err)
- dataMetricFunctionReferences, err := testClientHelper().DataMetricFunctionReferences.GetDataMetricFunctionReferences(t, view.ID(), sdk.ObjectTypeView)
- require.NoError(t, err)
+ dataMetricFunctionReferences := testClientHelper().DataMetricFunctionReferences.GetDataMetricFunctionReferences(t, view.ID(), sdk.DataMetricFuncionRefEntityDomainView)
require.Len(t, dataMetricFunctionReferences, 1)
assertDataMetricFunctionReference(t, dataMetricFunctionReferences[0], view.ID(), cron)
@@ -592,7 +587,7 @@ func TestInt_Views(t *testing.T) {
err = client.Views.Alter(ctx, alterRequest)
require.NoError(t, err)
- dataMetricFunctionReferences, err = testClientHelper().DataMetricFunctionReferences.GetDataMetricFunctionReferences(t, view.ID(), sdk.ObjectTypeView)
+ dataMetricFunctionReferences = testClientHelper().DataMetricFunctionReferences.GetDataMetricFunctionReferences(t, view.ID(), sdk.DataMetricFuncionRefEntityDomainView)
require.NoError(t, err)
require.Len(t, dataMetricFunctionReferences, 0)
@@ -610,8 +605,7 @@ func TestInt_Views(t *testing.T) {
err = client.Views.Alter(ctx, alterRequest)
require.NoError(t, err)
- dataMetricFunctionReferences, err = testClientHelper().DataMetricFunctionReferences.GetDataMetricFunctionReferences(t, view.ID(), sdk.ObjectTypeView)
- require.NoError(t, err)
+ dataMetricFunctionReferences = testClientHelper().DataMetricFunctionReferences.GetDataMetricFunctionReferences(t, view.ID(), sdk.DataMetricFuncionRefEntityDomainView)
require.Len(t, dataMetricFunctionReferences, 2)
assertDataMetricFunctionReference(t, dataMetricFunctionReferences[0], view.ID(), cron)
diff --git a/pkg/sdk/views_def.go b/pkg/sdk/views_def.go
index 680407fb5a..e981a19c70 100644
--- a/pkg/sdk/views_def.go
+++ b/pkg/sdk/views_def.go
@@ -1,9 +1,40 @@
package sdk
-import g "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/poc/generator"
+import (
+ "fmt"
+ "strings"
+
+ g "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/poc/generator"
+)
//go:generate go run ./poc/main.go
+var AllViewDataMetricScheduleMinutes = []int{5, 15, 30, 60, 720, 1440}
+
+type ViewDataMetricScheduleStatusOperationOption string
+
+const (
+ ViewDataMetricScheduleStatusOperationResume ViewDataMetricScheduleStatusOperationOption = "RESUME"
+ ViewDataMetricScheduleStatusOperationSuspend ViewDataMetricScheduleStatusOperationOption = "SUSPEND"
+)
+
+var AllViewDataMetricScheduleStatusOperationOptions = []ViewDataMetricScheduleStatusOperationOption{
+ ViewDataMetricScheduleStatusOperationResume,
+ ViewDataMetricScheduleStatusOperationSuspend,
+}
+
+func ToViewDataMetricScheduleStatusOperationOption(s string) (ViewDataMetricScheduleStatusOperationOption, error) {
+ s = strings.ToUpper(s)
+ switch s {
+ case string(ViewDataMetricScheduleStatusOperationResume):
+ return ViewDataMetricScheduleStatusOperationResume, nil
+ case string(ViewDataMetricScheduleStatusOperationSuspend):
+ return ViewDataMetricScheduleStatusOperationSuspend, nil
+ default:
+ return "", fmt.Errorf("invalid ViewDataMetricScheduleStatusOperationOption: %s", s)
+ }
+}
+
var viewDbRow = g.DbStruct("viewDBRow").
Text("created_on").
Text("name").
@@ -79,6 +110,16 @@ var dataMetricFunctionDef = g.NewQueryStruct("ViewDataMetricFunction").
ListAssignment("ON", "Column", g.ParameterOptions().Required().NoEquals().Parentheses()).
WithValidation(g.ValidIdentifier, "DataMetricFunction")
+var modifyDataMetricFunctionDef = g.NewQueryStruct("ViewModifyDataMetricFunction").
+ Identifier("DataMetricFunction", g.KindOfT[SchemaObjectIdentifier](), g.IdentifierOptions().Required()).
+ ListAssignment("ON", "Column", g.ParameterOptions().Required().NoEquals().Parentheses()).
+ Assignment(
+ "",
+ g.KindOfT[ViewDataMetricScheduleStatusOperationOption](),
+ g.ParameterOptions().NoEquals().NoQuotes(),
+ ).
+ WithValidation(g.ValidIdentifier, "DataMetricFunction")
+
var viewColumn = g.NewQueryStruct("ViewColumn").
Text("Name", g.KeywordOptions().Required().DoubleQuotes()).
OptionalQueryStructField("ProjectionPolicy", viewColumnProjectionPolicy, g.KeywordOptions()).
@@ -111,12 +152,13 @@ var viewDropDataMetricFunction = g.NewQueryStruct("ViewDropDataMetricFunction").
SQL("DROP").
ListAssignment("DATA METRIC FUNCTION", "ViewDataMetricFunction", g.ParameterOptions().NoEquals().Required())
+var viewModifyDataMetricFunction = g.NewQueryStruct("ViewModifyDataMetricFunctions").
+ SQL("MODIFY").
+ ListAssignment("DATA METRIC FUNCTION", "ViewModifyDataMetricFunction", g.ParameterOptions().NoEquals().Required())
+
var viewSetDataMetricSchedule = g.NewQueryStruct("ViewSetDataMetricSchedule").
- SQL("SET DATA_METRIC_SCHEDULE =").
- OptionalQueryStructField("Minutes", viewMinute, g.KeywordOptions()).
- OptionalQueryStructField("UsingCron", viewUsingCron, g.KeywordOptions()).
- OptionalSQL("TRIGGER_ON_CHANGES").
- WithValidation(g.ExactlyOneValueSet, "Minutes", "UsingCron", "TriggerOnChanges")
+ SQL("SET").
+ TextAssignment("DATA_METRIC_SCHEDULE", g.ParameterOptions().SingleQuotes())
var viewUnsetDataMetricSchedule = g.NewQueryStruct("ViewUnsetDataMetricSchedule").
SQL("UNSET DATA_METRIC_SCHEDULE")
@@ -244,6 +286,7 @@ var ViewsDef = g.NewInterface(
OptionalUnsetTags().
OptionalQueryStructField("AddDataMetricFunction", viewAddDataMetricFunction, g.KeywordOptions()).
OptionalQueryStructField("DropDataMetricFunction", viewDropDataMetricFunction, g.KeywordOptions()).
+ OptionalQueryStructField("ModifyDataMetricFunction", viewModifyDataMetricFunction, g.KeywordOptions()).
OptionalQueryStructField("SetDataMetricSchedule", viewSetDataMetricSchedule, g.KeywordOptions()).
OptionalQueryStructField("UnsetDataMetricSchedule", viewUnsetDataMetricSchedule, g.KeywordOptions()).
OptionalQueryStructField("AddRowAccessPolicy", viewAddRowAccessPolicy, g.KeywordOptions()).
@@ -260,7 +303,7 @@ var ViewsDef = g.NewInterface(
OptionalQueryStructField("UnsetTagsOnColumn", viewUnsetColumnTags, g.KeywordOptions()).
WithValidation(g.ValidIdentifier, "name").
WithValidation(g.ExactlyOneValueSet, "RenameTo", "SetComment", "UnsetComment", "SetSecure", "SetChangeTracking",
- "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule",
+ "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "ModifyDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule",
"AddRowAccessPolicy", "DropRowAccessPolicy", "DropAndAddRowAccessPolicy",
"DropAllRowAccessPolicies", "SetAggregationPolicy", "UnsetAggregationPolicy", "SetMaskingPolicyOnColumn",
"UnsetMaskingPolicyOnColumn", "SetProjectionPolicyOnColumn", "UnsetProjectionPolicyOnColumn", "SetTagsOnColumn",
@@ -269,6 +312,7 @@ var ViewsDef = g.NewInterface(
WithValidation(g.ConflictingFields, "IfExists", "UnsetSecure"),
columnDef,
dataMetricFunctionDef,
+ modifyDataMetricFunctionDef,
).
DropOperation(
"https://docs.snowflake.com/en/sql-reference/sql/drop-view",
diff --git a/pkg/sdk/views_dto_builders_gen.go b/pkg/sdk/views_dto_builders_gen.go
index d16848ec28..7b26eef482 100644
--- a/pkg/sdk/views_dto_builders_gen.go
+++ b/pkg/sdk/views_dto_builders_gen.go
@@ -204,6 +204,11 @@ func (s *AlterViewRequest) WithDropDataMetricFunction(DropDataMetricFunction Vie
return s
}
+func (s *AlterViewRequest) WithModifyDataMetricFunction(ModifyDataMetricFunction ViewModifyDataMetricFunctionsRequest) *AlterViewRequest {
+ s.ModifyDataMetricFunction = &ModifyDataMetricFunction
+ return s
+}
+
func (s *AlterViewRequest) WithSetDataMetricSchedule(SetDataMetricSchedule ViewSetDataMetricScheduleRequest) *AlterViewRequest {
s.SetDataMetricSchedule = &SetDataMetricSchedule
return s
@@ -290,38 +295,19 @@ func NewViewDropDataMetricFunctionRequest(
return &s
}
-func NewViewSetDataMetricScheduleRequest() *ViewSetDataMetricScheduleRequest {
- return &ViewSetDataMetricScheduleRequest{}
-}
-
-func (s *ViewSetDataMetricScheduleRequest) WithMinutes(Minutes ViewMinuteRequest) *ViewSetDataMetricScheduleRequest {
- s.Minutes = &Minutes
- return s
-}
-
-func (s *ViewSetDataMetricScheduleRequest) WithUsingCron(UsingCron ViewUsingCronRequest) *ViewSetDataMetricScheduleRequest {
- s.UsingCron = &UsingCron
- return s
-}
-
-func (s *ViewSetDataMetricScheduleRequest) WithTriggerOnChanges(TriggerOnChanges bool) *ViewSetDataMetricScheduleRequest {
- s.TriggerOnChanges = &TriggerOnChanges
- return s
-}
-
-func NewViewMinuteRequest(
- Minutes int,
-) *ViewMinuteRequest {
- s := ViewMinuteRequest{}
- s.Minutes = Minutes
+func NewViewModifyDataMetricFunctionsRequest(
+ DataMetricFunction []ViewModifyDataMetricFunction,
+) *ViewModifyDataMetricFunctionsRequest {
+ s := ViewModifyDataMetricFunctionsRequest{}
+ s.DataMetricFunction = DataMetricFunction
return &s
}
-func NewViewUsingCronRequest(
- Cron string,
-) *ViewUsingCronRequest {
- s := ViewUsingCronRequest{}
- s.Cron = Cron
+func NewViewSetDataMetricScheduleRequest(
+ DataMetricSchedule string,
+) *ViewSetDataMetricScheduleRequest {
+ s := ViewSetDataMetricScheduleRequest{}
+ s.DataMetricSchedule = DataMetricSchedule
return &s
}
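
A hedged sketch of how the reworked builders are intended to be used, assuming an already-configured `*sdk.Client` plus hypothetical view and DMF identifiers: `DATA_METRIC_SCHEDULE` is now set from a single string ("5 MINUTE" or "USING CRON ..."), and `MODIFY DATA METRIC FUNCTION` carries a per-function RESUME/SUSPEND operation.

```go
// Sketch only; identifiers and the column name "ID" are assumptions.
package snippets

import (
	"context"

	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
)

func suspendDMF(ctx context.Context, client *sdk.Client, viewId, dmfId sdk.SchemaObjectIdentifier) error {
	// Set the schedule with a plain string instead of the old Minutes/UsingCron structs.
	setSchedule := sdk.NewAlterViewRequest(viewId).
		WithSetDataMetricSchedule(*sdk.NewViewSetDataMetricScheduleRequest("USING CRON */5 * * * * UTC"))
	if err := client.Views.Alter(ctx, setSchedule); err != nil {
		return err
	}

	// Suspend a single attached data metric function on column ID.
	modify := sdk.NewAlterViewRequest(viewId).WithModifyDataMetricFunction(
		*sdk.NewViewModifyDataMetricFunctionsRequest([]sdk.ViewModifyDataMetricFunction{{
			DataMetricFunction: dmfId,
			On:                 []sdk.Column{{Value: "ID"}},
			ViewDataMetricScheduleStatusOperationOption: sdk.ViewDataMetricScheduleStatusOperationSuspend,
		}}),
	)
	return client.Views.Alter(ctx, modify)
}
```
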
diff --git a/pkg/sdk/views_dto_gen.go b/pkg/sdk/views_dto_gen.go
index 4b089e63d3..35d5c7b4ed 100644
--- a/pkg/sdk/views_dto_gen.go
+++ b/pkg/sdk/views_dto_gen.go
@@ -70,6 +70,7 @@ type AlterViewRequest struct {
UnsetTags []ObjectIdentifier
AddDataMetricFunction *ViewAddDataMetricFunctionRequest
DropDataMetricFunction *ViewDropDataMetricFunctionRequest
+ ModifyDataMetricFunction *ViewModifyDataMetricFunctionsRequest
SetDataMetricSchedule *ViewSetDataMetricScheduleRequest
UnsetDataMetricSchedule *ViewUnsetDataMetricScheduleRequest
AddRowAccessPolicy *ViewAddRowAccessPolicyRequest
@@ -94,18 +95,12 @@ type ViewDropDataMetricFunctionRequest struct {
DataMetricFunction []ViewDataMetricFunction // required
}
-type ViewSetDataMetricScheduleRequest struct {
- Minutes *ViewMinuteRequest
- UsingCron *ViewUsingCronRequest
- TriggerOnChanges *bool
-}
-
-type ViewMinuteRequest struct {
- Minutes int // required
+type ViewModifyDataMetricFunctionsRequest struct {
+ DataMetricFunction []ViewModifyDataMetricFunction // required
}
-type ViewUsingCronRequest struct {
- Cron string // required
+type ViewSetDataMetricScheduleRequest struct {
+ DataMetricSchedule string // required
}
type ViewUnsetDataMetricScheduleRequest struct{}
diff --git a/pkg/sdk/views_gen.go b/pkg/sdk/views_gen.go
index 2c85f8b30a..49b1e3952c 100644
--- a/pkg/sdk/views_gen.go
+++ b/pkg/sdk/views_gen.go
@@ -73,6 +73,7 @@ type AlterViewOptions struct {
UnsetTags []ObjectIdentifier `ddl:"keyword" sql:"UNSET TAG"`
AddDataMetricFunction *ViewAddDataMetricFunction `ddl:"keyword"`
DropDataMetricFunction *ViewDropDataMetricFunction `ddl:"keyword"`
+ ModifyDataMetricFunction *ViewModifyDataMetricFunctions `ddl:"keyword"`
SetDataMetricSchedule *ViewSetDataMetricSchedule `ddl:"keyword"`
UnsetDataMetricSchedule *ViewUnsetDataMetricSchedule `ddl:"keyword"`
AddRowAccessPolicy *ViewAddRowAccessPolicy `ddl:"keyword"`
@@ -95,6 +96,11 @@ type ViewDataMetricFunction struct {
DataMetricFunction SchemaObjectIdentifier `ddl:"identifier"`
On []Column `ddl:"parameter,parentheses,no_equals" sql:"ON"`
}
+type ViewModifyDataMetricFunction struct {
+ DataMetricFunction SchemaObjectIdentifier `ddl:"identifier"`
+ On []Column `ddl:"parameter,parentheses,no_equals" sql:"ON"`
+ ViewDataMetricScheduleStatusOperationOption `ddl:"parameter,no_quotes,no_equals"`
+}
type ViewAddDataMetricFunction struct {
add bool `ddl:"static" sql:"ADD"`
DataMetricFunction []ViewDataMetricFunction `ddl:"parameter,no_equals" sql:"DATA METRIC FUNCTION"`
@@ -103,24 +109,13 @@ type ViewDropDataMetricFunction struct {
drop bool `ddl:"static" sql:"DROP"`
DataMetricFunction []ViewDataMetricFunction `ddl:"parameter,no_equals" sql:"DATA METRIC FUNCTION"`
}
+type ViewModifyDataMetricFunctions struct {
+ modify bool `ddl:"static" sql:"MODIFY"`
+ DataMetricFunction []ViewModifyDataMetricFunction `ddl:"parameter,no_equals" sql:"DATA METRIC FUNCTION"`
+}
type ViewSetDataMetricSchedule struct {
- setDataMetricSchedule bool `ddl:"static" sql:"SET DATA_METRIC_SCHEDULE ="`
- Minutes *ViewMinute `ddl:"keyword"`
- UsingCron *ViewUsingCron `ddl:"keyword"`
- TriggerOnChanges *bool `ddl:"keyword,single_quotes" sql:"TRIGGER_ON_CHANGES"`
-}
-type ViewMinute struct {
- prefix bool `ddl:"static" sql:"'"`
- Minutes int `ddl:"keyword"`
- suffix bool `ddl:"static" sql:"MINUTE'"`
-}
-type ViewUsingCron struct {
- prefix bool `ddl:"static" sql:"'USING CRON"`
- Cron string `ddl:"keyword"`
- suffix bool `ddl:"static" sql:"'"`
-}
-type ViewTriggerOnChanges struct {
- triggerOnChanges bool `ddl:"static" sql:"TRIGGER_ON_CHANGES"`
+ set bool `ddl:"static" sql:"SET"`
+ DataMetricSchedule string `ddl:"parameter,single_quotes" sql:"DATA_METRIC_SCHEDULE"`
}
type ViewUnsetDataMetricSchedule struct {
unsetDataMetricSchedule bool `ddl:"static" sql:"UNSET DATA_METRIC_SCHEDULE"`
diff --git a/pkg/sdk/views_gen_test.go b/pkg/sdk/views_gen_test.go
index 94ef5d195d..919ab84e89 100644
--- a/pkg/sdk/views_gen_test.go
+++ b/pkg/sdk/views_gen_test.go
@@ -142,29 +142,16 @@ func TestViews_Alter(t *testing.T) {
assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier)
})
- t.Run("validation: exactly one field from [opts.RenameTo opts.SetComment opts.UnsetComment opts.SetSecure opts.SetChangeTracking opts.UnsetSecure opts.SetTags opts.UnsetTags opts.AddDataMetricFunction opts.DropDataMetricFunction opts.AddRowAccessPolicy opts.DropRowAccessPolicy opts.DropAndAddRowAccessPolicy opts.DropAllRowAccessPolicies opts.SetMaskingPolicyOnColumn opts.UnsetMaskingPolicyOnColumn opts.SetTagsOnColumn opts.UnsetTagsOnColumn] should be present", func(t *testing.T) {
+ t.Run("validation: exactly one field from [opts.RenameTo opts.SetComment opts.UnsetComment opts.SetSecure opts.SetChangeTracking opts.UnsetSecure opts.SetTags opts.UnsetTags opts.AddDataMetricFunction opts.DropDataMetricFunction opts.ModifyDataMetricFunction opts.AddRowAccessPolicy opts.DropRowAccessPolicy opts.DropAndAddRowAccessPolicy opts.DropAllRowAccessPolicies opts.SetMaskingPolicyOnColumn opts.UnsetMaskingPolicyOnColumn opts.SetTagsOnColumn opts.UnsetTagsOnColumn] should be present", func(t *testing.T) {
opts := defaultOpts()
- assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterViewOptions", "RenameTo", "SetComment", "UnsetComment", "SetSecure", "SetChangeTracking", "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule", "AddRowAccessPolicy", "DropRowAccessPolicy", "DropAndAddRowAccessPolicy", "DropAllRowAccessPolicies", "SetAggregationPolicy", "UnsetAggregationPolicy", "SetMaskingPolicyOnColumn", "UnsetMaskingPolicyOnColumn", "SetProjectionPolicyOnColumn", "UnsetProjectionPolicyOnColumn", "SetTagsOnColumn", "UnsetTagsOnColumn"))
+ assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterViewOptions", "RenameTo", "SetComment", "UnsetComment", "SetSecure", "SetChangeTracking", "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "ModifyDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule", "AddRowAccessPolicy", "DropRowAccessPolicy", "DropAndAddRowAccessPolicy", "DropAllRowAccessPolicies", "SetAggregationPolicy", "UnsetAggregationPolicy", "SetMaskingPolicyOnColumn", "UnsetMaskingPolicyOnColumn", "SetProjectionPolicyOnColumn", "UnsetProjectionPolicyOnColumn", "SetTagsOnColumn", "UnsetTagsOnColumn"))
})
- t.Run("validation: exactly one field from [opts.RenameTo opts.SetComment opts.UnsetComment opts.SetSecure opts.SetChangeTracking opts.UnsetSecure opts.SetTags opts.UnsetTags opts.AddDataMetricFunction opts.DropDataMetricFunction opts.AddRowAccessPolicy opts.DropRowAccessPolicy opts.DropAndAddRowAccessPolicy opts.DropAllRowAccessPolicies opts.SetMaskingPolicyOnColumn opts.UnsetMaskingPolicyOnColumn opts.SetTagsOnColumn opts.UnsetTagsOnColumn] should be present - more present", func(t *testing.T) {
+ t.Run("validation: exactly one field from [opts.RenameTo opts.SetComment opts.UnsetComment opts.SetSecure opts.SetChangeTracking opts.UnsetSecure opts.SetTags opts.UnsetTags opts.AddDataMetricFunction opts.DropDataMetricFunction opts.ModifyDataMetricFunction opts.AddRowAccessPolicy opts.DropRowAccessPolicy opts.DropAndAddRowAccessPolicy opts.DropAllRowAccessPolicies opts.SetMaskingPolicyOnColumn opts.UnsetMaskingPolicyOnColumn opts.SetTagsOnColumn opts.UnsetTagsOnColumn] should be present - more present", func(t *testing.T) {
opts := defaultOpts()
opts.SetChangeTracking = Bool(true)
opts.DropAllRowAccessPolicies = Bool(true)
- assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterViewOptions", "RenameTo", "SetComment", "UnsetComment", "SetSecure", "SetChangeTracking", "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule", "AddRowAccessPolicy", "DropRowAccessPolicy", "DropAndAddRowAccessPolicy", "DropAllRowAccessPolicies", "SetAggregationPolicy", "UnsetAggregationPolicy", "SetMaskingPolicyOnColumn", "UnsetMaskingPolicyOnColumn", "SetProjectionPolicyOnColumn", "UnsetProjectionPolicyOnColumn", "SetTagsOnColumn", "UnsetTagsOnColumn"))
- })
-
- t.Run("validation: exactly one field from [opts.SetDataMetricSchedule.UsingCron opts.SetDataMetricSchedule.TriggerOnChanges opts.SetDataMetricSchedule.Minutes] should be present - more present", func(t *testing.T) {
- opts := defaultOpts()
- opts.SetDataMetricSchedule = &ViewSetDataMetricSchedule{
- UsingCron: &ViewUsingCron{
- Cron: "5 * * * * UTC",
- },
- TriggerOnChanges: Pointer(true),
- }
-
- opts.DropAllRowAccessPolicies = Bool(true)
- assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterViewOptions.SetDataMetricSchedule", "Minutes", "UsingCron", "TriggerOnChanges"))
+ assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterViewOptions", "RenameTo", "SetComment", "UnsetComment", "SetSecure", "SetChangeTracking", "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "ModifyDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule", "AddRowAccessPolicy", "DropRowAccessPolicy", "DropAndAddRowAccessPolicy", "DropAllRowAccessPolicies", "SetAggregationPolicy", "UnsetAggregationPolicy", "SetMaskingPolicyOnColumn", "UnsetMaskingPolicyOnColumn", "SetProjectionPolicyOnColumn", "UnsetProjectionPolicyOnColumn", "SetTagsOnColumn", "UnsetTagsOnColumn"))
})
t.Run("validation: conflicting fields for [opts.IfExists opts.SetSecure]", func(t *testing.T) {
@@ -310,28 +297,22 @@ func TestViews_Alter(t *testing.T) {
assertOptsValidAndSQLEquals(t, opts, "ALTER VIEW %s DROP DATA METRIC FUNCTION %s ON (\"foo\")", id.FullyQualifiedName(), dmfId.FullyQualifiedName())
})
- t.Run("set data metric schedule", func(t *testing.T) {
- opts := defaultOpts()
- opts.SetDataMetricSchedule = &ViewSetDataMetricSchedule{
- Minutes: &ViewMinute{
- Minutes: 5,
- },
- }
- assertOptsValidAndSQLEquals(t, opts, "ALTER VIEW %s SET DATA_METRIC_SCHEDULE = ' 5 MINUTE'", id.FullyQualifiedName())
+ t.Run("modify data metric function", func(t *testing.T) {
+ dmfId := randomSchemaObjectIdentifier()
- opts = defaultOpts()
- opts.SetDataMetricSchedule = &ViewSetDataMetricSchedule{
- UsingCron: &ViewUsingCron{
- Cron: "5 * * * * UTC",
- },
+ opts := defaultOpts()
+ opts.ModifyDataMetricFunction = &ViewModifyDataMetricFunctions{
+ DataMetricFunction: []ViewModifyDataMetricFunction{{DataMetricFunction: dmfId, On: []Column{{"foo"}}, ViewDataMetricScheduleStatusOperationOption: ViewDataMetricScheduleStatusOperationSuspend}},
}
- assertOptsValidAndSQLEquals(t, opts, "ALTER VIEW %s SET DATA_METRIC_SCHEDULE = 'USING CRON 5 * * * * UTC '", id.FullyQualifiedName())
+ assertOptsValidAndSQLEquals(t, opts, "ALTER VIEW %s MODIFY DATA METRIC FUNCTION %s ON (\"foo\") SUSPEND", id.FullyQualifiedName(), dmfId.FullyQualifiedName())
+ })
- opts = defaultOpts()
+ t.Run("set data metric schedule", func(t *testing.T) {
+ opts := defaultOpts()
opts.SetDataMetricSchedule = &ViewSetDataMetricSchedule{
- TriggerOnChanges: Pointer(true),
+ DataMetricSchedule: "5 MINUTE",
}
- assertOptsValidAndSQLEquals(t, opts, "ALTER VIEW %s SET DATA_METRIC_SCHEDULE = 'TRIGGER_ON_CHANGES'", id.FullyQualifiedName())
+ assertOptsValidAndSQLEquals(t, opts, "ALTER VIEW %s SET DATA_METRIC_SCHEDULE = '5 MINUTE'", id.FullyQualifiedName())
})
t.Run("unset data metric schedule", func(t *testing.T) {
diff --git a/pkg/sdk/views_impl_gen.go b/pkg/sdk/views_impl_gen.go
index 0bf564d6b8..149dfbccbd 100644
--- a/pkg/sdk/views_impl_gen.go
+++ b/pkg/sdk/views_impl_gen.go
@@ -141,21 +141,15 @@ func (r *AlterViewRequest) toOpts() *AlterViewOptions {
}
}
- if r.SetDataMetricSchedule != nil {
- opts.SetDataMetricSchedule = &ViewSetDataMetricSchedule{
- TriggerOnChanges: r.SetDataMetricSchedule.TriggerOnChanges,
- }
-
- if r.SetDataMetricSchedule.Minutes != nil {
- opts.SetDataMetricSchedule.Minutes = &ViewMinute{
- Minutes: r.SetDataMetricSchedule.Minutes.Minutes,
- }
+ if r.ModifyDataMetricFunction != nil {
+ opts.ModifyDataMetricFunction = &ViewModifyDataMetricFunctions{
+ DataMetricFunction: r.ModifyDataMetricFunction.DataMetricFunction,
}
+ }
- if r.SetDataMetricSchedule.UsingCron != nil {
- opts.SetDataMetricSchedule.UsingCron = &ViewUsingCron{
- Cron: r.SetDataMetricSchedule.UsingCron.Cron,
- }
+ if r.SetDataMetricSchedule != nil {
+ opts.SetDataMetricSchedule = &ViewSetDataMetricSchedule{
+ DataMetricSchedule: r.SetDataMetricSchedule.DataMetricSchedule,
}
}
diff --git a/pkg/sdk/views_validations_gen.go b/pkg/sdk/views_validations_gen.go
index 7f8fd1fd55..376ec4cb91 100644
--- a/pkg/sdk/views_validations_gen.go
+++ b/pkg/sdk/views_validations_gen.go
@@ -59,8 +59,8 @@ func (opts *AlterViewOptions) validate() error {
if !ValidObjectIdentifier(opts.name) {
errs = append(errs, ErrInvalidObjectIdentifier)
}
- if !exactlyOneValueSet(opts.RenameTo, opts.SetComment, opts.UnsetComment, opts.SetSecure, opts.SetChangeTracking, opts.UnsetSecure, opts.SetTags, opts.UnsetTags, opts.AddDataMetricFunction, opts.DropDataMetricFunction, opts.SetDataMetricSchedule, opts.UnsetDataMetricSchedule, opts.AddRowAccessPolicy, opts.DropRowAccessPolicy, opts.DropAndAddRowAccessPolicy, opts.DropAllRowAccessPolicies, opts.SetAggregationPolicy, opts.UnsetAggregationPolicy, opts.SetMaskingPolicyOnColumn, opts.UnsetMaskingPolicyOnColumn, opts.SetProjectionPolicyOnColumn, opts.UnsetProjectionPolicyOnColumn, opts.SetTagsOnColumn, opts.UnsetTagsOnColumn) {
- errs = append(errs, errExactlyOneOf("AlterViewOptions", "RenameTo", "SetComment", "UnsetComment", "SetSecure", "SetChangeTracking", "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule", "AddRowAccessPolicy", "DropRowAccessPolicy", "DropAndAddRowAccessPolicy", "DropAllRowAccessPolicies", "SetAggregationPolicy", "UnsetAggregationPolicy", "SetMaskingPolicyOnColumn", "UnsetMaskingPolicyOnColumn", "SetProjectionPolicyOnColumn", "UnsetProjectionPolicyOnColumn", "SetTagsOnColumn", "UnsetTagsOnColumn"))
+ if !exactlyOneValueSet(opts.RenameTo, opts.SetComment, opts.UnsetComment, opts.SetSecure, opts.SetChangeTracking, opts.UnsetSecure, opts.SetTags, opts.UnsetTags, opts.AddDataMetricFunction, opts.DropDataMetricFunction, opts.ModifyDataMetricFunction, opts.SetDataMetricSchedule, opts.UnsetDataMetricSchedule, opts.AddRowAccessPolicy, opts.DropRowAccessPolicy, opts.DropAndAddRowAccessPolicy, opts.DropAllRowAccessPolicies, opts.SetAggregationPolicy, opts.UnsetAggregationPolicy, opts.SetMaskingPolicyOnColumn, opts.UnsetMaskingPolicyOnColumn, opts.SetProjectionPolicyOnColumn, opts.UnsetProjectionPolicyOnColumn, opts.SetTagsOnColumn, opts.UnsetTagsOnColumn) {
+ errs = append(errs, errExactlyOneOf("AlterViewOptions", "RenameTo", "SetComment", "UnsetComment", "SetSecure", "SetChangeTracking", "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "ModifyDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule", "AddRowAccessPolicy", "DropRowAccessPolicy", "DropAndAddRowAccessPolicy", "DropAllRowAccessPolicies", "SetAggregationPolicy", "UnsetAggregationPolicy", "SetMaskingPolicyOnColumn", "UnsetMaskingPolicyOnColumn", "SetProjectionPolicyOnColumn", "UnsetProjectionPolicyOnColumn", "SetTagsOnColumn", "UnsetTagsOnColumn"))
}
if everyValueSet(opts.IfExists, opts.SetSecure) {
errs = append(errs, errOneOf("AlterViewOptions", "IfExists", "SetSecure"))
@@ -68,11 +68,6 @@ func (opts *AlterViewOptions) validate() error {
if everyValueSet(opts.IfExists, opts.UnsetSecure) {
errs = append(errs, errOneOf("AlterViewOptions", "IfExists", "UnsetSecure"))
}
- if valueSet(opts.SetDataMetricSchedule) {
- if !exactlyOneValueSet(opts.SetDataMetricSchedule.Minutes, opts.SetDataMetricSchedule.UsingCron, opts.SetDataMetricSchedule.TriggerOnChanges) {
- errs = append(errs, errExactlyOneOf("AlterViewOptions.SetDataMetricSchedule", "Minutes", "UsingCron", "TriggerOnChanges"))
- }
- }
if valueSet(opts.AddRowAccessPolicy) {
if !ValidObjectIdentifier(opts.AddRowAccessPolicy.RowAccessPolicy) {
errs = append(errs, errInvalidIdentifier("AlterViewOptions.AddRowAccessPolicy", "RowAccessPolicy"))