Skip to content

Commit

Permalink
add desc to schema for logging resources (#3671)
Browse files Browse the repository at this point in the history
* add desc to schema for logging resources

* Update third_party/terraform/resources/resource_logging_project_bucket_config.go

Co-authored-by: emily <[email protected]>

* Update third_party/terraform/resources/resource_logging_bucket_config.go

Co-authored-by: emily <[email protected]>

Co-authored-by: Edward Sun <[email protected]>
Co-authored-by: emily <[email protected]>
  • Loading branch information
3 people authored Jun 24, 2020
1 parent 1befd05 commit 224ce38
Show file tree
Hide file tree
Showing 11 changed files with 101 additions and 74 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,10 @@ import (

// loggingBillingAccountBucketConfigSchema holds the schema fields specific to
// the billing-account flavor of the logging bucket config resource; the parent
// resource identifier forces recreation when changed.
var loggingBillingAccountBucketConfigSchema = map[string]*schema.Schema{
	"billing_account": {
		Type:        schema.TypeString,
		Required:    true,
		ForceNew:    true, // changing the parent requires a new bucket config
		Description: `The parent resource that contains the logging bucket.`,
	},
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,10 @@ func resourceLoggingBillingAccountSink() *schema.Resource {
},
}
schm.Schema["billing_account"] = &schema.Schema{
Type: schema.TypeString,
Required: true,
ForceNew: true,
Type: schema.TypeString,
Required: true,
ForceNew: true,
Description: `The billing account exported to the sink.`,
}
return schm
}
Expand Down
38 changes: 22 additions & 16 deletions third_party/terraform/resources/resource_logging_bucket_config.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,32 +11,38 @@ import (

// loggingBucketConfigSchema is the set of schema fields shared by every
// logging bucket config resource (project/folder/organization/billing
// account). Parent-specific fields are merged in by each variant.
var loggingBucketConfigSchema = map[string]*schema.Schema{
	"name": {
		Type:        schema.TypeString,
		Computed:    true, // server-assigned; derived from parent + bucket_id
		Description: `The resource name of the bucket`,
	},
	"location": {
		Type:        schema.TypeString,
		Required:    true,
		ForceNew:    true, // buckets cannot be relocated in place
		Description: `The location of the bucket. The supported locations are: "global" "us-central1"`,
	},
	"bucket_id": {
		Type:        schema.TypeString,
		Required:    true,
		ForceNew:    true,
		Description: `The name of the logging bucket. Logging automatically creates two log buckets: _Required and _Default.`,
	},
	"description": {
		Type:        schema.TypeString,
		Optional:    true,
		Computed:    true, // API may populate a default description
		Description: `An optional description for this bucket.`,
	},
	"retention_days": {
		Type:        schema.TypeInt,
		Optional:    true,
		Default:     30,
		Description: `Logs will be retained by default for this amount of time, after which they will automatically be deleted. The minimum retention period is 1 day. If this value is set to zero at bucket creation time, the default time of 30 days will be used.`,
	},
	"lifecycle_state": {
		Type:        schema.TypeString,
		Computed:    true, // output-only; reported by the API
		Description: `The bucket's lifecycle such as active or deleted.`,
	},
}

Expand Down
22 changes: 13 additions & 9 deletions third_party/terraform/resources/resource_logging_exclusion.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,21 +11,25 @@ import (

// LoggingExclusionBaseSchema is the parent-agnostic schema for logging
// exclusion resources; parent-specific fields (project, folder, etc.) are
// added by the concrete resource definitions.
var LoggingExclusionBaseSchema = map[string]*schema.Schema{
	"filter": {
		Type:        schema.TypeString,
		Required:    true,
		Description: `The filter to apply when excluding logs. Only log entries that match the filter are excluded.`,
	},
	"name": {
		Type:        schema.TypeString,
		Required:    true,
		ForceNew:    true, // exclusions cannot be renamed in place
		Description: `The name of the logging exclusion.`,
	},
	"description": {
		Type:        schema.TypeString,
		Optional:    true,
		Description: `A human-readable description.`,
	},
	"disabled": {
		Type:        schema.TypeBool,
		Optional:    true,
		Description: `Whether this exclusion rule should be disabled or not. This defaults to false.`,
	},
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,10 @@ import (

// loggingFolderBucketConfigSchema holds the schema fields specific to the
// folder flavor of the logging bucket config resource.
var loggingFolderBucketConfigSchema = map[string]*schema.Schema{
	"folder": {
		Type:        schema.TypeString,
		Required:    true,
		ForceNew:    true, // changing the parent requires a new bucket config
		Description: `The parent resource that contains the logging bucket.`,
	},
}

Expand Down
16 changes: 9 additions & 7 deletions third_party/terraform/resources/resource_logging_folder_sink.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,18 +19,20 @@ func resourceLoggingFolderSink() *schema.Resource {
},
}
schm.Schema["folder"] = &schema.Schema{
Type: schema.TypeString,
Required: true,
ForceNew: true,
Type: schema.TypeString,
Required: true,
ForceNew: true,
Description: `The folder to be exported to the sink. Note that either [FOLDER_ID] or "folders/[FOLDER_ID]" is accepted.`,
StateFunc: func(v interface{}) string {
return strings.Replace(v.(string), "folders/", "", 1)
},
}
schm.Schema["include_children"] = &schema.Schema{
Type: schema.TypeBool,
Optional: true,
ForceNew: true,
Default: false,
Type: schema.TypeBool,
Optional: true,
ForceNew: true,
Default: false,
Description: `Whether or not to include children folders in the sink export. If true, logs associated with child projects are also exported; otherwise only logs relating to the provided folder are included.`,
}

return schm
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,10 @@ import (

// loggingOrganizationBucketConfigSchema holds the schema fields specific to
// the organization flavor of the logging bucket config resource.
var loggingOrganizationBucketConfigSchema = map[string]*schema.Schema{
	"organization": {
		Type:        schema.TypeString,
		Required:    true,
		ForceNew:    true, // changing the parent requires a new bucket config
		Description: `The parent resource that contains the logging bucket.`,
	},
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,17 +19,19 @@ func resourceLoggingOrganizationSink() *schema.Resource {
},
}
schm.Schema["org_id"] = &schema.Schema{
Type: schema.TypeString,
Required: true,
Type: schema.TypeString,
Required: true,
Description: `The numeric ID of the organization to be exported to the sink.`,
StateFunc: func(v interface{}) string {
return strings.Replace(v.(string), "organizations/", "", 1)
},
}
schm.Schema["include_children"] = &schema.Schema{
Type: schema.TypeBool,
Optional: true,
ForceNew: true,
Default: false,
Type: schema.TypeBool,
Optional: true,
ForceNew: true,
Default: false,
Description: `Whether or not to include children organizations in the sink export. If true, logs associated with child projects are also exported; otherwise only logs relating to the provided organization are included.`,
}

return schm
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,10 @@ import (

// loggingProjectBucketConfigSchema holds the schema fields specific to the
// project flavor of the logging bucket config resource.
var loggingProjectBucketConfigSchema = map[string]*schema.Schema{
	"project": {
		Type:        schema.TypeString,
		Required:    true,
		ForceNew:    true, // changing the parent requires a new bucket config
		Description: `The parent project that contains the logging bucket.`,
	},
}

Expand Down
18 changes: 10 additions & 8 deletions third_party/terraform/resources/resource_logging_project_sink.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,16 +20,18 @@ func resourceLoggingProjectSink() *schema.Resource {
},
}
schm.Schema["project"] = &schema.Schema{
Type: schema.TypeString,
Optional: true,
Computed: true,
ForceNew: true,
Type: schema.TypeString,
Optional: true,
Computed: true,
ForceNew: true,
Description: `The ID of the project to create the sink in. If omitted, the project associated with the provider is used.`,
}
schm.Schema["unique_writer_identity"] = &schema.Schema{
Type: schema.TypeBool,
Optional: true,
Default: false,
ForceNew: true,
Type: schema.TypeBool,
Optional: true,
Default: false,
ForceNew: true,
Description: `Whether or not to create a unique identity associated with this sink. If false (the default), then the writer_identity used is serviceAccount:[email protected]. If true, then a unique service account is created and used for this sink. If you wish to publish logs across projects, you must set unique_writer_identity to true.`,
}
return schm
}
Expand Down
32 changes: 19 additions & 13 deletions third_party/terraform/resources/resource_logging_sink.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,37 +10,43 @@ import (
func resourceLoggingSinkSchema() map[string]*schema.Schema {
return map[string]*schema.Schema{
"name": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
Type: schema.TypeString,
Required: true,
ForceNew: true,
Description: `The name of the logging sink.`,
},

"destination": {
Type: schema.TypeString,
Required: true,
Type: schema.TypeString,
Required: true,
Description: `The destination of the sink (or, in other words, where logs are written to). Can be a Cloud Storage bucket, a PubSub topic, or a BigQuery dataset. Examples: "storage.googleapis.com/[GCS_BUCKET]" "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]" "pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" The writer associated with the sink must have access to write to the above resource.`,
},

"filter": {
Type: schema.TypeString,
Optional: true,
DiffSuppressFunc: optionalSurroundingSpacesSuppress,
Description: `The filter to apply when exporting logs. Only log entries that match the filter are exported.`,
},

"writer_identity": {
Type: schema.TypeString,
Computed: true,
Type: schema.TypeString,
Computed: true,
Description: `The identity associated with this sink. This identity must be granted write access to the configured destination.`,
},

"bigquery_options": {
Type: schema.TypeList,
Optional: true,
Computed: true,
MaxItems: 1,
Type: schema.TypeList,
Optional: true,
Computed: true,
MaxItems: 1,
Description: `Options that affect sinks exporting data to BigQuery.`,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"use_partitioned_tables": {
Type: schema.TypeBool,
Required: true,
Type: schema.TypeBool,
Required: true,
Description: `Whether to use BigQuery's partition tables. By default, Logging creates dated tables based on the log entries' timestamps, e.g. syslog_20170523. With partitioned tables the date suffix is no longer present and special query syntax has to be used instead. In both cases, tables are sharded based on UTC timezone.`,
},
},
},
Expand Down

0 comments on commit 224ce38

Please sign in to comment.