From 2b21127b0c3d9a3e077400843d2086feafaf3739 Mon Sep 17 00:00:00 2001 From: Steph Date: Fri, 18 Nov 2022 16:42:03 +0100 Subject: [PATCH 01/14] swap sdk for inputs and some outputs --- .../services/streamanalytics/client/client.go | 10 +- .../services/streamanalytics/helpers_input.go | 71 +++- .../streamanalytics/helpers_output.go | 36 +- .../services/streamanalytics/parse/cluster.go | 8 +- .../streamanalytics/parse/function.go | 12 +- .../services/streamanalytics/parse/output.go | 12 +- .../streamanalytics/parse/private_endpoint.go | 8 +- .../streamanalytics/parse/stream_input.go | 12 +- .../streamanalytics/parse/streaming_job.go | 8 +- .../parse/streaming_job_schedule.go | 12 +- .../services/streamanalytics/registration.go | 12 +- .../stream_analytics_cluster_resource.go | 20 +- .../stream_analytics_cluster_resource_test.go | 4 +- ...lytics_function_javascript_uda_resource.go | 22 +- ...s_function_javascript_uda_resource_test.go | 4 +- ...lytics_function_javascript_udf_resource.go | 22 +- ...s_function_javascript_udf_resource_test.go | 4 +- .../stream_analytics_job_data_source.go | 7 +- .../stream_analytics_job_resource.go | 22 +- .../stream_analytics_job_resource_test.go | 4 +- .../stream_analytics_job_schedule_resource.go | 23 +- ...am_analytics_job_schedule_resource_test.go | 4 +- ...ytics_managed_private_endpoint_resource.go | 16 +- ..._managed_private_endpoint_resource_test.go | 4 +- .../stream_analytics_output.go | 6 +- .../stream_analytics_output_blob_resource.go | 127 ++++--- ...eam_analytics_output_blob_resource_test.go | 15 +- ...ream_analytics_output_cosmosdb_resource.go | 136 ++++---- ...analytics_output_cosmosdb_resource_test.go | 9 +- ...ream_analytics_output_eventhub_resource.go | 104 +++--- ...analytics_output_eventhub_resource_test.go | 15 +- ...ream_analytics_output_function_resource.go | 130 ++++--- ...analytics_output_function_resource_test.go | 9 +- .../stream_analytics_output_mssql_resource.go | 145 ++++---- ...am_analytics_output_mssql_resource_test.go | 14 +- ...tream_analytics_output_powerbi_resource.go | 154 +++++---- ..._analytics_output_powerbi_resource_test.go | 9 +- ...lytics_output_servicebus_queue_resource.go | 117 ++++--- ...s_output_servicebus_queue_resource_test.go | 14 +- ...lytics_output_servicebus_topic_resource.go | 34 +- ...s_output_servicebus_topic_resource_test.go | 7 +- ...tream_analytics_output_synapse_resource.go | 33 +- ..._analytics_output_synapse_resource_test.go | 7 +- .../stream_analytics_output_table_resource.go | 44 ++- ...am_analytics_output_table_resource_test.go | 7 +- .../stream_analytics_reference_input.go | 49 +-- ...analytics_reference_input_blob_resource.go | 212 +++++++----- ...tics_reference_input_blob_resource_test.go | 251 +++++++------- ...nalytics_reference_input_mssql_resource.go | 122 ++++--- ...ics_reference_input_mssql_resource_test.go | 171 +++++----- ...am_analytics_stream_input_blob_resource.go | 119 ++++--- ...alytics_stream_input_blob_resource_test.go | 15 +- ...nalytics_stream_input_eventhub_resource.go | 154 +++++---- ...ics_stream_input_eventhub_resource_test.go | 319 +++++++++--------- ...ytics_stream_input_eventhub_v2_resource.go | 205 ++++++----- ..._stream_input_eventhub_v2_resource_test.go | 303 ++++++++--------- ..._analytics_stream_input_iothub_resource.go | 117 ++++--- ...ytics_stream_input_iothub_resource_test.go | 211 ++++++------ .../streamanalytics/validate/output_id.go | 4 +- 59 files changed, 2070 insertions(+), 1675 deletions(-) diff --git a/internal/services/streamanalytics/client/client.go 
b/internal/services/streamanalytics/client/client.go index 67ca2f180cda..7282e643006b 100644 --- a/internal/services/streamanalytics/client/client.go +++ b/internal/services/streamanalytics/client/client.go @@ -2,14 +2,16 @@ package client import ( "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/common" ) type Client struct { FunctionsClient *streamanalytics.FunctionsClient JobsClient *streamanalytics.StreamingJobsClient - InputsClient *streamanalytics.InputsClient - OutputsClient *streamanalytics.OutputsClient + InputsClient *inputs.InputsClient + OutputsClient *outputs.OutputsClient TransformationsClient *streamanalytics.TransformationsClient ClustersClient *streamanalytics.ClustersClient EndpointsClient *streamanalytics.PrivateEndpointsClient @@ -22,10 +24,10 @@ func NewClient(o *common.ClientOptions) *Client { jobsClient := streamanalytics.NewStreamingJobsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) o.ConfigureClient(&jobsClient.Client, o.ResourceManagerAuthorizer) - inputsClient := streamanalytics.NewInputsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + inputsClient := inputs.NewInputsClientWithBaseURI(o.ResourceManagerEndpoint) o.ConfigureClient(&inputsClient.Client, o.ResourceManagerAuthorizer) - outputsClient := streamanalytics.NewOutputsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + outputsClient := outputs.NewOutputsClientWithBaseURI(o.ResourceManagerEndpoint) o.ConfigureClient(&outputsClient.Client, o.ResourceManagerAuthorizer) transformationsClient := streamanalytics.NewTransformationsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) diff --git a/internal/services/streamanalytics/helpers_input.go b/internal/services/streamanalytics/helpers_input.go index a21423c97ea3..05728c305bd7 100644 --- a/internal/services/streamanalytics/helpers_input.go +++ b/internal/services/streamanalytics/helpers_input.go @@ -2,6 +2,7 @@ package streamanalytics import ( "fmt" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" @@ -178,33 +179,79 @@ func flattenStreamAnalyticsStreamInputSerialization(input streamanalytics.BasicS } } -func flattenStreamAnalyticsStreamInputSerializationTyped(input streamanalytics.BasicSerialization) Serialization { +func flattenStreamAnalyticsStreamInputSerialization2(input inputs.Serialization) []interface{} { var encoding string var fieldDelimiter string var inputType string - if _, ok := input.AsAvroSerialization(); ok { + if _, ok := input.(inputs.AvroSerialization); ok { inputType = string(streamanalytics.TypeAvro) } - if v, ok := input.AsCsvSerialization(); ok { - if props := v.CsvSerializationProperties; props != nil { - encoding = string(props.Encoding) + if csv, ok := input.(inputs.CsvSerialization); ok { + if props := csv.Properties; props != nil { + if v := props.Encoding; v != nil { + encoding = string(*v) + } - if props.FieldDelimiter != nil { - fieldDelimiter = *props.FieldDelimiter + if v := props.FieldDelimiter; v != nil { + fieldDelimiter = string(*v) } } - inputType = string(streamanalytics.TypeCsv) + inputType = 
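// The Track 1 SDK exposed this union through As...Serialization() helper
// methods; in hashicorp/go-azure-sdk each variant is a concrete type behind
// the inputs.Serialization interface, so the variants are recovered with
// plain type assertions as above. For reference, the same dispatch written
// as a single type switch - a sketch only, and it assumes the package
// defines inputs.EventSerializationTypeAvro alongside the Csv/Json
// constants used in this file:
//
//	switch v := input.(type) {
//	case inputs.AvroSerialization:
//		inputType = string(inputs.EventSerializationTypeAvro)
//	case inputs.CsvSerialization:
//		inputType = string(inputs.EventSerializationTypeCsv)
//		if props := v.Properties; props != nil && props.FieldDelimiter != nil {
//			fieldDelimiter = string(*props.FieldDelimiter)
//		}
//	case inputs.JsonSerialization:
//		inputType = string(inputs.EventSerializationTypeJson)
//	}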
string(inputs.EventSerializationTypeCsv) } - if v, ok := input.AsJSONSerialization(); ok { - if props := v.JSONSerializationProperties; props != nil { - encoding = string(props.Encoding) + if json, ok := input.(inputs.JsonSerialization); ok { + if props := json.Properties; props != nil { + if v := props.Encoding; v != nil { + encoding = string(*v) + } } - inputType = string(streamanalytics.TypeJSON) + inputType = string(inputs.EventSerializationTypeJson) + } + + return []interface{}{ + map[string]interface{}{ + "encoding": encoding, + "type": inputType, + "field_delimiter": fieldDelimiter, + }, + } +} + +func flattenStreamAnalyticsStreamInputSerializationTyped(input inputs.Serialization) Serialization { + var encoding string + var fieldDelimiter string + var inputType string + + if _, ok := input.(inputs.AvroSerialization); ok { + inputType = string(streamanalytics.TypeAvro) + } + + if csv, ok := input.(inputs.CsvSerialization); ok { + if props := csv.Properties; props != nil { + if v := props.Encoding; v != nil { + encoding = string(*v) + } + + if v := props.FieldDelimiter; v != nil { + fieldDelimiter = string(*v) + } + } + + inputType = string(inputs.EventSerializationTypeCsv) + } + + if json, ok := input.(inputs.JsonSerialization); ok { + if props := json.Properties; props != nil { + if v := props.Encoding; v != nil { + encoding = string(*v) + } + } + + inputType = string(inputs.EventSerializationTypeJson) } return Serialization{ diff --git a/internal/services/streamanalytics/helpers_output.go b/internal/services/streamanalytics/helpers_output.go index 856c8cc25dc4..86384f3dfdc3 100644 --- a/internal/services/streamanalytics/helpers_output.go +++ b/internal/services/streamanalytics/helpers_output.go @@ -2,6 +2,7 @@ package streamanalytics import ( "fmt" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" @@ -140,38 +141,45 @@ func expandStreamAnalyticsOutputSerialization(input []interface{}) (streamanalyt return nil, fmt.Errorf("Unsupported Output Type %q", outputType) } -func flattenStreamAnalyticsOutputSerialization(input streamanalytics.BasicSerialization) []interface{} { +func flattenStreamAnalyticsOutputSerialization(input outputs.Serialization) []interface{} { var encoding string var outputType string var fieldDelimiter string var format string - if _, ok := input.AsAvroSerialization(); ok { - outputType = string(streamanalytics.TypeAvro) + if _, ok := input.(outputs.AvroSerialization); ok { + outputType = string(outputs.EventSerializationTypeAvro) } - if v, ok := input.AsCsvSerialization(); ok { - if props := v.CsvSerializationProperties; props != nil { - encoding = string(props.Encoding) + if csv, ok := input.(outputs.CsvSerialization); ok { + if props := csv.Properties; props != nil { + if props.Encoding != nil { + encoding = string(*props.Encoding) + } + if props.FieldDelimiter != nil { fieldDelimiter = *props.FieldDelimiter } } - outputType = string(streamanalytics.TypeCsv) + outputType = string(outputs.EventSerializationTypeCsv) } - if v, ok := input.AsJSONSerialization(); ok { - if props := v.JSONSerializationProperties; props != nil { - encoding = string(props.Encoding) - format = string(props.Format) + if json, ok := input.(outputs.JsonSerialization); ok { + if props := json.Properties; props != nil { + if props.Encoding != nil { + encoding = string(*props.Encoding) + } + 
if props.Format != nil { + format = string(*props.Format) + } } - outputType = string(streamanalytics.TypeJSON) + outputType = string(outputs.EventSerializationTypeJson) } - if _, ok := input.AsParquetSerialization(); ok { - outputType = string(streamanalytics.TypeParquet) + if _, ok := input.(outputs.ParquetSerialization); ok { + outputType = string(outputs.EventSerializationTypeParquet) } return []interface{}{ diff --git a/internal/services/streamanalytics/parse/cluster.go b/internal/services/streamanalytics/parse/cluster.go index b8be8beb5c28..941ed4c44428 100644 --- a/internal/services/streamanalytics/parse/cluster.go +++ b/internal/services/streamanalytics/parse/cluster.go @@ -26,7 +26,7 @@ func NewClusterID(subscriptionId, resourceGroup, name string) ClusterId { func (id ClusterId) String() string { segments := []string{ fmt.Sprintf("Name %q", id.Name), - fmt.Sprintf("Resource Group %q", id.ResourceGroup), + fmt.Sprintf("Resource Group %q", id.ResourceGroupName), } segmentsStr := strings.Join(segments, " / ") return fmt.Sprintf("%s: (%s)", "Cluster", segmentsStr) @@ -34,7 +34,7 @@ func (id ClusterId) String() string { func (id ClusterId) ID() string { fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/clusters/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.Name) } // ClusterID parses a Cluster ID into an ClusterId struct @@ -46,14 +46,14 @@ func ClusterID(input string) (*ClusterId, error) { resourceId := ClusterId{ SubscriptionId: id.SubscriptionID, - ResourceGroup: id.ResourceGroup, + ResourceGroupName: id.ResourceGroupName, } if resourceId.SubscriptionId == "" { return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if resourceId.ResourceGroup == "" { + if resourceId.ResourceGroupName == "" { return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } diff --git a/internal/services/streamanalytics/parse/function.go b/internal/services/streamanalytics/parse/function.go index fec98d9ec428..1dad541213e4 100644 --- a/internal/services/streamanalytics/parse/function.go +++ b/internal/services/streamanalytics/parse/function.go @@ -28,8 +28,8 @@ func NewFunctionID(subscriptionId, resourceGroup, streamingjobName, name string) func (id FunctionId) String() string { segments := []string{ fmt.Sprintf("Name %q", id.Name), - fmt.Sprintf("Streamingjob Name %q", id.StreamingjobName), - fmt.Sprintf("Resource Group %q", id.ResourceGroup), + fmt.Sprintf("Streamingjob Name %q", id.JobName), + fmt.Sprintf("Resource Group %q", id.ResourceGroupName), } segmentsStr := strings.Join(segments, " / ") return fmt.Sprintf("%s: (%s)", "Function", segmentsStr) @@ -37,7 +37,7 @@ func (id FunctionId) String() string { func (id FunctionId) ID() string { fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingjobs/%s/functions/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.StreamingjobName, id.Name) + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.JobName, id.Name) } // FunctionID parses a Function ID into an FunctionId struct @@ -49,18 +49,18 @@ func FunctionID(input string) (*FunctionId, error) { resourceId := FunctionId{ SubscriptionId: id.SubscriptionID, - ResourceGroup: id.ResourceGroup, + ResourceGroupName: id.ResourceGroupName, } if resourceId.SubscriptionId == "" { return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if resourceId.ResourceGroup == "" { + if resourceId.ResourceGroupName == "" { return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if resourceId.StreamingjobName, err = id.PopSegment("streamingjobs"); err != nil { + if resourceId.JobName, err = id.PopSegment("streamingjobs"); err != nil { return nil, err } if resourceId.Name, err = id.PopSegment("functions"); err != nil { diff --git a/internal/services/streamanalytics/parse/output.go b/internal/services/streamanalytics/parse/output.go index 50fddb863a0a..bf0e9e28e987 100644 --- a/internal/services/streamanalytics/parse/output.go +++ b/internal/services/streamanalytics/parse/output.go @@ -28,8 +28,8 @@ func NewOutputID(subscriptionId, resourceGroup, streamingjobName, name string) O func (id OutputId) String() string { segments := []string{ fmt.Sprintf("Name %q", id.Name), - fmt.Sprintf("Streamingjob Name %q", id.StreamingjobName), - fmt.Sprintf("Resource Group %q", id.ResourceGroup), + fmt.Sprintf("Streamingjob Name %q", id.JobName), + fmt.Sprintf("Resource Group %q", id.ResourceGroupName), } segmentsStr := strings.Join(segments, " / ") return fmt.Sprintf("%s: (%s)", "Output", segmentsStr) @@ -37,7 +37,7 @@ func (id OutputId) String() string { func (id OutputId) ID() string { fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingjobs/%s/outputs/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.StreamingjobName, id.Name) + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.JobName, id.Name) } // OutputID parses a Output ID into an OutputId struct @@ -49,18 +49,18 @@ func OutputID(input string) (*OutputId, error) { resourceId := OutputId{ SubscriptionId: id.SubscriptionID, - ResourceGroup: id.ResourceGroup, + ResourceGroupName: id.ResourceGroupName, } if resourceId.SubscriptionId == "" { return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if resourceId.ResourceGroup == "" { + if resourceId.ResourceGroupName == "" { return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if resourceId.StreamingjobName, err = id.PopSegment("streamingjobs"); err != nil { + if resourceId.JobName, err = id.PopSegment("streamingjobs"); err != nil { return nil, err } if resourceId.Name, err = id.PopSegment("outputs"); err != nil { diff --git a/internal/services/streamanalytics/parse/private_endpoint.go b/internal/services/streamanalytics/parse/private_endpoint.go index 5e4cfb3a1ed5..80bdad2dbdb9 100644 --- a/internal/services/streamanalytics/parse/private_endpoint.go +++ b/internal/services/streamanalytics/parse/private_endpoint.go @@ -29,7 +29,7 @@ func (id PrivateEndpointId) String() string { segments := []string{ fmt.Sprintf("Name %q", id.Name), fmt.Sprintf("Cluster Name %q", id.ClusterName), - fmt.Sprintf("Resource Group %q", id.ResourceGroup), + fmt.Sprintf("Resource Group %q", id.ResourceGroupName), } segmentsStr := strings.Join(segments, " / ") return fmt.Sprintf("%s: (%s)", "Private Endpoint", segmentsStr) @@ -37,7 +37,7 @@ func (id PrivateEndpointId) String() string { func (id PrivateEndpointId) ID() string { fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/clusters/%s/privateEndpoints/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.ClusterName, id.Name) + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.ClusterName, id.Name) } // PrivateEndpointID parses a PrivateEndpoint ID into an PrivateEndpointId struct @@ -49,14 +49,14 @@ 
func PrivateEndpointID(input string) (*PrivateEndpointId, error) { resourceId := PrivateEndpointId{ SubscriptionId: id.SubscriptionID, - ResourceGroup: id.ResourceGroup, + ResourceGroupName: id.ResourceGroupName, } if resourceId.SubscriptionId == "" { return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if resourceId.ResourceGroup == "" { + if resourceId.ResourceGroupName == "" { return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } diff --git a/internal/services/streamanalytics/parse/stream_input.go b/internal/services/streamanalytics/parse/stream_input.go index 573cdf36bff4..dd5fd5b29df8 100644 --- a/internal/services/streamanalytics/parse/stream_input.go +++ b/internal/services/streamanalytics/parse/stream_input.go @@ -28,8 +28,8 @@ func NewStreamInputID(subscriptionId, resourceGroup, streamingjobName, inputName func (id StreamInputId) String() string { segments := []string{ fmt.Sprintf("Input Name %q", id.InputName), - fmt.Sprintf("Streamingjob Name %q", id.StreamingjobName), - fmt.Sprintf("Resource Group %q", id.ResourceGroup), + fmt.Sprintf("Streamingjob Name %q", id.JobName), + fmt.Sprintf("Resource Group %q", id.ResourceGroupName), } segmentsStr := strings.Join(segments, " / ") return fmt.Sprintf("%s: (%s)", "Stream Input", segmentsStr) @@ -37,7 +37,7 @@ func (id StreamInputId) String() string { func (id StreamInputId) ID() string { fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingjobs/%s/inputs/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.StreamingjobName, id.InputName) + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.JobName, id.InputName) } // StreamInputID parses a StreamInput ID into an StreamInputId struct @@ -49,18 +49,18 @@ func StreamInputID(input string) (*StreamInputId, error) { resourceId := StreamInputId{ SubscriptionId: id.SubscriptionID, - ResourceGroup: id.ResourceGroup, + ResourceGroupName: id.ResourceGroupName, } if resourceId.SubscriptionId == "" { return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if resourceId.ResourceGroup == "" { + if resourceId.ResourceGroupName == "" { return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if resourceId.StreamingjobName, err = id.PopSegment("streamingjobs"); err != nil { + if resourceId.JobName, err = id.PopSegment("streamingjobs"); err != nil { return nil, err } if resourceId.InputName, err = id.PopSegment("inputs"); err != nil { diff --git a/internal/services/streamanalytics/parse/streaming_job.go b/internal/services/streamanalytics/parse/streaming_job.go index ef6d70983ea7..9ec8cb8a98d8 100644 --- a/internal/services/streamanalytics/parse/streaming_job.go +++ b/internal/services/streamanalytics/parse/streaming_job.go @@ -26,7 +26,7 @@ func NewStreamingJobID(subscriptionId, resourceGroup, name string) StreamingJobI func (id StreamingJobId) String() string { segments := []string{ fmt.Sprintf("Name %q", id.Name), - fmt.Sprintf("Resource Group %q", id.ResourceGroup), + fmt.Sprintf("Resource Group %q", id.ResourceGroupName), } segmentsStr := strings.Join(segments, " / ") return fmt.Sprintf("%s: (%s)", "Streaming Job", segmentsStr) @@ -34,7 +34,7 @@ func (id StreamingJobId) String() string { func (id StreamingJobId) ID() string { fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingjobs/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) + return fmt.Sprintf(fmtString, 
id.SubscriptionId, id.ResourceGroupName, id.Name) } // StreamingJobID parses a StreamingJob ID into an StreamingJobId struct @@ -46,14 +46,14 @@ func StreamingJobID(input string) (*StreamingJobId, error) { resourceId := StreamingJobId{ SubscriptionId: id.SubscriptionID, - ResourceGroup: id.ResourceGroup, + ResourceGroupName: id.ResourceGroupName, } if resourceId.SubscriptionId == "" { return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if resourceId.ResourceGroup == "" { + if resourceId.ResourceGroupName == "" { return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } diff --git a/internal/services/streamanalytics/parse/streaming_job_schedule.go b/internal/services/streamanalytics/parse/streaming_job_schedule.go index 754ab8e9d6a3..808783096c6d 100644 --- a/internal/services/streamanalytics/parse/streaming_job_schedule.go +++ b/internal/services/streamanalytics/parse/streaming_job_schedule.go @@ -28,8 +28,8 @@ func NewStreamingJobScheduleID(subscriptionId, resourceGroup, streamingjobName, func (id StreamingJobScheduleId) String() string { segments := []string{ fmt.Sprintf("Schedule Name %q", id.ScheduleName), - fmt.Sprintf("Streamingjob Name %q", id.StreamingjobName), - fmt.Sprintf("Resource Group %q", id.ResourceGroup), + fmt.Sprintf("Streamingjob Name %q", id.JobName), + fmt.Sprintf("Resource Group %q", id.ResourceGroupName), } segmentsStr := strings.Join(segments, " / ") return fmt.Sprintf("%s: (%s)", "Streaming Job Schedule", segmentsStr) @@ -37,7 +37,7 @@ func (id StreamingJobScheduleId) String() string { func (id StreamingJobScheduleId) ID() string { fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingjobs/%s/schedule/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.StreamingjobName, id.ScheduleName) + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.JobName, id.ScheduleName) } // StreamingJobScheduleID parses a StreamingJobSchedule ID into an StreamingJobScheduleId struct @@ -49,18 +49,18 @@ func StreamingJobScheduleID(input string) (*StreamingJobScheduleId, error) { resourceId := StreamingJobScheduleId{ SubscriptionId: id.SubscriptionID, - ResourceGroup: id.ResourceGroup, + ResourceGroupName: id.ResourceGroupName, } if resourceId.SubscriptionId == "" { return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if resourceId.ResourceGroup == "" { + if resourceId.ResourceGroupName == "" { return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if resourceId.StreamingjobName, err = id.PopSegment("streamingjobs"); err != nil { + if resourceId.JobName, err = id.PopSegment("streamingjobs"); err != nil { return nil, err } if resourceId.ScheduleName, err = id.PopSegment("schedule"); err != nil { diff --git a/internal/services/streamanalytics/registration.go b/internal/services/streamanalytics/registration.go index 9b200e71f712..591174c2ee22 100644 --- a/internal/services/streamanalytics/registration.go +++ b/internal/services/streamanalytics/registration.go @@ -29,7 +29,7 @@ func (r Registration) Resources() []sdk.Resource { OutputTableResource{}, OutputPowerBIResource{}, OutputCosmosDBResource{}, - StreamInputEventHubV2Resource{}, + //StreamInputEventHubV2Resource{}, } } @@ -64,10 +64,10 @@ func (r Registration) SupportedResources() map[string]*pluginsdk.Resource { "azurerm_stream_analytics_output_servicebus_queue": resourceStreamAnalyticsOutputServiceBusQueue(), "azurerm_stream_analytics_output_servicebus_topic": 
resourceStreamAnalyticsOutputServiceBusTopic(), "azurerm_stream_analytics_output_synapse": resourceStreamAnalyticsOutputSynapse(), - "azurerm_stream_analytics_reference_input_blob": resourceStreamAnalyticsReferenceInputBlob(), - "azurerm_stream_analytics_reference_input_mssql": resourceStreamAnalyticsReferenceMsSql(), - "azurerm_stream_analytics_stream_input_blob": resourceStreamAnalyticsStreamInputBlob(), - "azurerm_stream_analytics_stream_input_eventhub": resourceStreamAnalyticsStreamInputEventHub(), - "azurerm_stream_analytics_stream_input_iothub": resourceStreamAnalyticsStreamInputIoTHub(), + //"azurerm_stream_analytics_reference_input_blob": resourceStreamAnalyticsReferenceInputBlob(), + //"azurerm_stream_analytics_reference_input_mssql": resourceStreamAnalyticsReferenceMsSql(), + "azurerm_stream_analytics_stream_input_blob": resourceStreamAnalyticsStreamInputBlob(), + //"azurerm_stream_analytics_stream_input_eventhub": resourceStreamAnalyticsStreamInputEventHub(), + //"azurerm_stream_analytics_stream_input_iothub": resourceStreamAnalyticsStreamInputIoTHub(), } } diff --git a/internal/services/streamanalytics/stream_analytics_cluster_resource.go b/internal/services/streamanalytics/stream_analytics_cluster_resource.go index f5a5ab3d8e48..03882e878aaa 100644 --- a/internal/services/streamanalytics/stream_analytics_cluster_resource.go +++ b/internal/services/streamanalytics/stream_analytics_cluster_resource.go @@ -85,11 +85,11 @@ func (r ClusterResource) Create() sdk.ResourceFunc { id := parse.NewClusterID(subscriptionId, model.ResourceGroup, model.Name) - existing, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil && !utils.ResponseWasNotFound(existing.Response) { + existing, err := client.Get(ctx, id.ResourceGroupName, id.Name) + if err != nil && !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return metadata.ResourceRequiresImport(r.ResourceType(), id) } @@ -103,7 +103,7 @@ func (r ClusterResource) Create() sdk.ResourceFunc { Tags: tags.Expand(model.Tags), } - future, err := client.CreateOrUpdate(ctx, props, id.ResourceGroup, id.Name, "", "") + future, err := client.CreateOrUpdate(ctx, props, id.ResourceGroupName, id.Name, "", "") if err != nil { return fmt.Errorf("creating %s: %+v", id, err) } @@ -129,9 +129,9 @@ func (r ClusterResource) Read() sdk.ResourceFunc { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + resp, err := client.Get(ctx, id.ResourceGroupName, id.Name) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return metadata.MarkAsGone(id) } return fmt.Errorf("reading %s: %+v", *id, err) @@ -139,7 +139,7 @@ func (r ClusterResource) Read() sdk.ResourceFunc { state := ClusterModel{ Name: id.Name, - ResourceGroup: id.ResourceGroup, + ResourceGroup: id.ResourceGroupName, Location: *resp.Location, StreamingCapacity: *resp.Sku.Capacity, Tags: tags.Flatten(resp.Tags), @@ -162,8 +162,8 @@ func (r ClusterResource) Delete() sdk.ResourceFunc { metadata.Logger.Infof("deleting %s", *id) - if resp, err := client.Delete(ctx, id.ResourceGroup, id.Name); err != nil { - if !response.WasNotFound(resp.Response()) { + if resp, err := client.Delete(ctx, id.ResourceGroupName, id.Name); err != nil { + if !response.WasNotFound(resp.HttpResponse()) { return fmt.Errorf("deleting %s: %+v", *id, err) } } @@ -196,7 +196,7 @@ func 
(r ClusterResource) Update() sdk.ResourceFunc { Tags: tags.Expand(state.Tags), } - future, err := client.Update(ctx, props, id.ResourceGroup, id.Name, "") + future, err := client.Update(ctx, props, id.ResourceGroupName, id.Name, "") if err != nil { return fmt.Errorf("updating %s: %+v", *id, err) } diff --git a/internal/services/streamanalytics/stream_analytics_cluster_resource_test.go b/internal/services/streamanalytics/stream_analytics_cluster_resource_test.go index 007d034e81ac..2c279abdd18d 100644 --- a/internal/services/streamanalytics/stream_analytics_cluster_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_cluster_resource_test.go @@ -94,9 +94,9 @@ func (r StreamAnalyticsClusterResource) Exists(ctx context.Context, client *clie return nil, err } - resp, err := client.StreamAnalytics.ClustersClient.Get(ctx, id.ResourceGroup, id.Name) + resp, err := client.StreamAnalytics.ClustersClient.Get(ctx, id.ResourceGroupName, id.Name) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } return nil, fmt.Errorf("retrieving %s: %+v", *id, err) diff --git a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go index 97d04c805858..b5d78e3ccb9a 100644 --- a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go +++ b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go @@ -123,16 +123,16 @@ func resourceStreamAnalyticsFunctionUDACreate(d *pluginsdk.ResourceData, meta in return err } - id := parse.NewFunctionID(subscriptionId, jobId.ResourceGroup, jobId.Name, d.Get("name").(string)) + id := parse.NewFunctionID(subscriptionId, jobId.ResourceGroupName, jobId.Name, d.Get("name").(string)) - existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + existing, err := client.Get(ctx, id) if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return tf.ImportAsExistsError("azurerm_stream_analytics_function_javascript_uda", id.ID()) } @@ -152,7 +152,7 @@ }, } - if _, err := client.CreateOrReplace(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, "", ""); err != nil { + if _, err := client.CreateOrReplace(ctx, id, props, opts); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } @@ -171,9 +171,9 @@ func resourceStreamAnalyticsFunctionUDARead(d *pluginsdk.ResourceData, meta inte return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.Get(ctx, id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { log.Printf("[DEBUG] %q was not found - removing from state!", *id) d.SetId("") return nil @@ -184,7 +184,7 @@ func resourceStreamAnalyticsFunctionUDARead(d *pluginsdk.ResourceData, meta inte d.Set("name", id.Name) - jobId := parse.NewStreamingJobID(id.SubscriptionId, id.ResourceGroup, id.StreamingjobName) + jobId := parse.NewStreamingJobID(id.SubscriptionId, id.ResourceGroupName, id.JobName) d.Set("stream_analytics_job_id", jobId.ID()) if 
props := resp.Properties; props != nil { @@ -240,7 +240,7 @@ func resourceStreamAnalyticsFunctionUDAUpdate(d *pluginsdk.ResourceData, meta in }, } - if _, err := client.Update(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, ""); err != nil { + if _, err := client.Update(ctx, *id, props, opts); err != nil { return fmt.Errorf("updating %s: %+v", id, err) } @@ -257,8 +257,8 @@ func resourceStreamAnalyticsFunctionUDADelete(d *pluginsdk.ResourceData, meta in return err } - if resp, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.Name); err != nil { - if !response.WasNotFound(resp.Response) { + if resp, err := client.Delete(ctx, *id); err != nil { + if !response.WasNotFound(resp.HttpResponse) { return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource_test.go b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource_test.go index ab693b482c8a..5abf4558b213 100644 --- a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource_test.go @@ -124,9 +124,9 @@ func (r StreamAnalyticsFunctionJavaScriptUDAResource) Exists(ctx context.Context return nil, err } - resp, err := client.StreamAnalytics.FunctionsClient.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.StreamAnalytics.FunctionsClient.Get(ctx, id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } return nil, fmt.Errorf("retrieving %s : %+v", *id, err) diff --git a/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go b/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go index 149c975bde75..784df9d8db46 100644 --- a/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go +++ b/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go @@ -122,14 +122,14 @@ func resourceStreamAnalyticsFunctionUDFCreateUpdate(d *pluginsdk.ResourceData, m id := parse.NewFunctionID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + existing, err := client.Get(ctx, id) if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return tf.ImportAsExistsError("azurerm_stream_analytics_function_javascript_udf", id.ID()) } } @@ -158,12 +158,12 @@ func resourceStreamAnalyticsFunctionUDFCreateUpdate(d *pluginsdk.ResourceData, m } if d.IsNewResource() { - if _, err := client.CreateOrReplace(ctx, function, id.ResourceGroup, id.StreamingjobName, id.Name, "", ""); err != nil { + if _, err := client.CreateOrReplace(ctx, function, id.ResourceGroupName, id.JobName, id.Name, "", ""); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } d.SetId(id.ID()) - } else if _, err := client.Update(ctx, function, id.ResourceGroup, id.StreamingjobName, id.Name, ""); err != nil { + } else if _, err := client.Update(ctx, function, id.ResourceGroupName, 
id.JobName, id.Name, ""); err != nil { return fmt.Errorf("updating %s: %+v", id, err) } @@ -180,9 +180,9 @@ func resourceStreamAnalyticsFunctionUDFRead(d *pluginsdk.ResourceData, meta inte return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.Get(ctx, id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { log.Printf("[DEBUG] %q was not found - removing from state!", id) d.SetId("") return nil @@ -192,8 +192,8 @@ func resourceStreamAnalyticsFunctionUDFRead(d *pluginsdk.ResourceData, meta inte } d.Set("name", id.Name) - d.Set("stream_analytics_job_name", id.StreamingjobName) - d.Set("resource_group_name", id.ResourceGroup) + d.Set("stream_analytics_job_name", id.JobName) + d.Set("resource_group_name", id.ResourceGroupName) if props := resp.Properties; props != nil { scalarProps, ok := props.AsScalarFunctionProperties() @@ -232,8 +232,8 @@ func resourceStreamAnalyticsFunctionUDFDelete(d *pluginsdk.ResourceData, meta in return err } - if resp, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.Name); err != nil { - if !response.WasNotFound(resp.Response) { + if resp, err := client.Delete(ctx, *id); err != nil { + if !response.WasNotFound(resp.HttpResponse) { return fmt.Errorf("deleting %s: %+v", id, err) } } diff --git a/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource_test.go b/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource_test.go index 87be43e31574..9187032720bd 100644 --- a/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource_test.go @@ -95,9 +95,9 @@ func (r StreamAnalyticsFunctionJavaScriptUDFResource) Exists(ctx context.Context return nil, err } - resp, err := client.StreamAnalytics.FunctionsClient.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.StreamAnalytics.FunctionsClient.Get(ctx, id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } return nil, fmt.Errorf("retrieving %s : %+v", *id, err) diff --git a/internal/services/streamanalytics/stream_analytics_job_data_source.go b/internal/services/streamanalytics/stream_analytics_job_data_source.go index cbda48dfa6f3..43328f7ce504 100644 --- a/internal/services/streamanalytics/stream_analytics_job_data_source.go +++ b/internal/services/streamanalytics/stream_analytics_job_data_source.go @@ -10,7 +10,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) func dataSourceStreamAnalyticsJob() *pluginsdk.Resource { @@ -103,9 +102,9 @@ func dataSourceStreamAnalyticsJobRead(d *pluginsdk.ResourceData, meta interface{ defer cancel() id := parse.NewStreamingJobID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) - resp, err := client.Get(ctx, id.ResourceGroup, id.Name, "transformation") + resp, err := client.Get(ctx, id.ResourceGroupName, id.Name, "transformation") if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return fmt.Errorf("%s was not found", id) } @@ 
-115,7 +114,7 @@ func dataSourceStreamAnalyticsJobRead(d *pluginsdk.ResourceData, meta interface{ d.SetId(id.ID()) d.Set("name", id.Name) - d.Set("resource_group_name", id.ResourceGroup) + d.Set("resource_group_name", id.ResourceGroupName) d.Set("location", location.NormalizeNilable(resp.Location)) if err := d.Set("identity", flattenJobIdentity(resp.Identity)); err != nil { diff --git a/internal/services/streamanalytics/stream_analytics_job_resource.go b/internal/services/streamanalytics/stream_analytics_job_resource.go index ed99958ca6ee..bf9454a55fec 100644 --- a/internal/services/streamanalytics/stream_analytics_job_resource.go +++ b/internal/services/streamanalytics/stream_analytics_job_resource.go @@ -204,14 +204,14 @@ func resourceStreamAnalyticsJobCreateUpdate(d *pluginsdk.ResourceData, meta inte defer locks.UnlockByID(id.ID()) if d.IsNewResource() { - existing, err := client.Get(ctx, id.ResourceGroup, id.Name, "") + existing, err := client.Get(ctx, id.ResourceGroupName, id.Name, "") if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return tf.ImportAsExistsError("azurerm_stream_analytics_job", id.ID()) } } @@ -302,7 +302,7 @@ func resourceStreamAnalyticsJobCreateUpdate(d *pluginsdk.ResourceData, meta inte if d.IsNewResource() { props.StreamingJobProperties.Transformation = &transformation - future, err := client.CreateOrReplace(ctx, props, id.ResourceGroup, id.Name, "", "") + future, err := client.CreateOrReplace(ctx, props, id.ResourceGroupName, id.Name, "", "") if err != nil { return fmt.Errorf("creating %s: %+v", id, err) } @@ -313,17 +313,17 @@ func resourceStreamAnalyticsJobCreateUpdate(d *pluginsdk.ResourceData, meta inte d.SetId(id.ID()) } else { - if _, err := client.Update(ctx, props, id.ResourceGroup, id.Name, ""); err != nil { + if _, err := client.Update(ctx, props, id.ResourceGroupName, id.Name, ""); err != nil { return fmt.Errorf("updating %s: %+v", id, err) } - job, err := client.Get(ctx, id.ResourceGroup, id.Name, "transformation") + job, err := client.Get(ctx, id.ResourceGroupName, id.Name, "transformation") if err != nil { return err } if readTransformation := job.Transformation; readTransformation != nil { - if _, err := transformationsClient.Update(ctx, transformation, id.ResourceGroup, id.Name, *readTransformation.Name, ""); err != nil { + if _, err := transformationsClient.Update(ctx, transformation, id.ResourceGroupName, id.Name, *readTransformation.Name, ""); err != nil { return fmt.Errorf("updating transformation for %s: %+v", id, err) } } @@ -342,9 +342,9 @@ func resourceStreamAnalyticsJobRead(d *pluginsdk.ResourceData, meta interface{}) return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.Name, "transformation") + resp, err := client.Get(ctx, id.ResourceGroupName, id.Name, "transformation") if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { log.Printf("[DEBUG] %s was not found - removing from state!", *id) d.SetId("") return nil @@ -354,7 +354,7 @@ func resourceStreamAnalyticsJobRead(d *pluginsdk.ResourceData, meta interface{}) } d.Set("name", id.Name) - d.Set("resource_group_name", id.ResourceGroup) + d.Set("resource_group_name", id.ResourceGroupName) if resp.Location != nil { d.Set("location", azure.NormalizeLocation(*resp.Location)) @@ 
-406,7 +406,7 @@ func resourceStreamAnalyticsJobDelete(d *pluginsdk.ResourceData, meta interface{ return err } - future, err := client.Delete(ctx, id.ResourceGroup, id.Name) + future, err := client.Delete(ctx, id.ResourceGroupName, id.Name) if err != nil { return fmt.Errorf("deleting %s: %+v", *id, err) } diff --git a/internal/services/streamanalytics/stream_analytics_job_resource_test.go b/internal/services/streamanalytics/stream_analytics_job_resource_test.go index cb405dd9198e..999b1d1895e8 100644 --- a/internal/services/streamanalytics/stream_analytics_job_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_job_resource_test.go @@ -154,9 +154,9 @@ func (r StreamAnalyticsJobResource) Exists(ctx context.Context, client *clients. return nil, err } - resp, err := client.StreamAnalytics.JobsClient.Get(ctx, id.ResourceGroup, id.Name, "") + resp, err := client.StreamAnalytics.JobsClient.Get(ctx, id.ResourceGroupName, id.Name, "") if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), err } return nil, fmt.Errorf("retrieving %s: %+v", *id, err) diff --git a/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go b/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go index 1d52259843ed..4b9cf42da8b9 100644 --- a/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go +++ b/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go @@ -14,7 +14,6 @@ import ( streamAnalyticsValidate "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) type JobScheduleResource struct{} @@ -91,13 +90,13 @@ func (r JobScheduleResource) Create() sdk.ResourceFunc { } // This is a virtual resource so the last segment is hardcoded - id := parse.NewStreamingJobScheduleID(streamAnalyticsId.SubscriptionId, streamAnalyticsId.ResourceGroup, streamAnalyticsId.Name, "default") + id := parse.NewStreamingJobScheduleID(streamAnalyticsId.SubscriptionId, streamAnalyticsId.ResourceGroupName, streamAnalyticsId.Name, "default") locks.ByID(id.ID()) defer locks.UnlockByID(id.ID()) - existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, "") - if err != nil && !utils.ResponseWasNotFound(existing.Response) { + existing, err := client.Get(ctx, id.ResourceGroupName, id.JobName, "") + if err != nil && !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } @@ -124,7 +123,7 @@ func (r JobScheduleResource) Create() sdk.ResourceFunc { } } - future, err := client.Start(ctx, id.ResourceGroup, id.StreamingjobName, props) + future, err := client.Start(ctx, id.ResourceGroupName, id.JobName, props) if err != nil { return fmt.Errorf("creating %s: %+v", id, err) } @@ -150,11 +149,11 @@ func (r JobScheduleResource) Read() sdk.ResourceFunc { return err } - streamAnalyticsId := parse.NewStreamingJobID(id.SubscriptionId, id.ResourceGroup, id.StreamingjobName) + streamAnalyticsId := parse.NewStreamingJobID(id.SubscriptionId, id.ResourceGroupName, id.JobName) - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, "") + resp, err := client.Get(ctx, id.ResourceGroupName, id.JobName, "") if err != nil { - if 
utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return metadata.MarkAsGone(id) } return fmt.Errorf("retrieving %s: %+v", *id, err) @@ -216,13 +215,13 @@ func (r JobScheduleResource) Update() sdk.ResourceFunc { props.OutputStartTime = outputStartTime } - existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, "") + existing, err := client.Get(ctx, id.ResourceGroupName, id.JobName, "") if err != nil { return fmt.Errorf("retrieving %s: %+v", *id, err) } if v := existing.StreamingJobProperties; v != nil && v.JobState != nil && *v.JobState == "Running" { - future, err := client.Stop(ctx, id.ResourceGroup, id.StreamingjobName) + future, err := client.Stop(ctx, id.ResourceGroupName, id.JobName) if err != nil { return err } @@ -231,7 +230,7 @@ func (r JobScheduleResource) Update() sdk.ResourceFunc { } } - future, err := client.Start(ctx, id.ResourceGroup, id.StreamingjobName, props) + future, err := client.Start(ctx, id.ResourceGroupName, id.JobName, props) if err != nil { return fmt.Errorf("updating %s: %+v", *id, err) } @@ -257,7 +256,7 @@ func (r JobScheduleResource) Delete() sdk.ResourceFunc { metadata.Logger.Infof("deleting %s", *id) - future, err := client.Stop(ctx, id.ResourceGroup, id.StreamingjobName) + future, err := client.Stop(ctx, id.ResourceGroupName, id.JobName) if err != nil { return fmt.Errorf("deleting %s: %+v", *id, err) } diff --git a/internal/services/streamanalytics/stream_analytics_job_schedule_resource_test.go b/internal/services/streamanalytics/stream_analytics_job_schedule_resource_test.go index bbcc6231d1a9..ea976a6b9e2b 100644 --- a/internal/services/streamanalytics/stream_analytics_job_schedule_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_job_schedule_resource_test.go @@ -74,9 +74,9 @@ func (r StreamAnalyticsJobScheduleResource) Exists(ctx context.Context, client * return nil, err } - resp, err := client.StreamAnalytics.JobsClient.Get(ctx, id.ResourceGroup, id.StreamingjobName, "") + resp, err := client.StreamAnalytics.JobsClient.Get(ctx, id.ResourceGroupName, id.JobName, "") if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), err } return nil, fmt.Errorf("retrieving %s: %+v", *id, err) diff --git a/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource.go b/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource.go index 570d6aa0eecd..97a1eed38c87 100644 --- a/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource.go +++ b/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource.go @@ -90,11 +90,11 @@ func (r ManagedPrivateEndpointResource) Create() sdk.ResourceFunc { id := parse.NewPrivateEndpointID(subscriptionId, model.ResourceGroup, model.StreamAnalyticsCluster, model.Name) - existing, err := client.Get(ctx, id.ResourceGroup, id.ClusterName, id.Name) - if err != nil && !utils.ResponseWasNotFound(existing.Response) { + existing, err := client.Get(ctx, id.ResourceGroupName, id.ClusterName, id.Name) + if err != nil && !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return metadata.ResourceRequiresImport(r.ResourceType(), id) } @@ -111,7 +111,7 @@ func (r ManagedPrivateEndpointResource) Create() 
sdk.ResourceFunc { }, } - if _, err := client.CreateOrUpdate(ctx, props, id.ResourceGroup, id.ClusterName, id.Name, "", ""); err != nil { + if _, err := client.CreateOrUpdate(ctx, props, id.ResourceGroupName, id.ClusterName, id.Name, "", ""); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } @@ -132,9 +132,9 @@ func (r ManagedPrivateEndpointResource) Read() sdk.ResourceFunc { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.ClusterName, id.Name) + resp, err := client.Get(ctx, id.ResourceGroupName, id.ClusterName, id.Name) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return metadata.MarkAsGone(id) } return fmt.Errorf("reading %s: %+v", *id, err) @@ -146,7 +146,7 @@ func (r ManagedPrivateEndpointResource) Read() sdk.ResourceFunc { state := ManagedPrivateEndpointModel{ Name: id.Name, - ResourceGroup: id.ResourceGroup, + ResourceGroup: id.ResourceGroupName, StreamAnalyticsCluster: id.ClusterName, } @@ -172,7 +172,7 @@ func (r ManagedPrivateEndpointResource) Delete() sdk.ResourceFunc { metadata.Logger.Infof("deleting %s", *id) - future, err := client.Delete(ctx, id.ResourceGroup, id.ClusterName, id.Name) + future, err := client.Delete(ctx, id.ResourceGroupName, id.ClusterName, id.Name) if err != nil { return fmt.Errorf("deleting %s: %+v", *id, err) } diff --git a/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource_test.go b/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource_test.go index 3129150bcf26..26ff6ca7fa9c 100644 --- a/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource_test.go @@ -51,9 +51,9 @@ func (r StreamAnalyticsManagedPrivateEndpointResource) Exists(ctx context.Contex return nil, err } - resp, err := client.StreamAnalytics.EndpointsClient.Get(ctx, id.ResourceGroup, id.ClusterName, id.Name) + resp, err := client.StreamAnalytics.EndpointsClient.Get(ctx, id.ResourceGroupName, id.ClusterName, id.Name) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } return nil, fmt.Errorf("retrieving %s: %+v", *id, err) diff --git a/internal/services/streamanalytics/stream_analytics_output.go b/internal/services/streamanalytics/stream_analytics_output.go index 8bfe12c77665..3aa4dc0a613f 100644 --- a/internal/services/streamanalytics/stream_analytics_output.go +++ b/internal/services/streamanalytics/stream_analytics_output.go @@ -3,22 +3,22 @@ package streamanalytics import ( "context" "fmt" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" ) func importStreamAnalyticsOutput(expectType streamanalytics.TypeBasicOutputDataSource) pluginsdk.ImporterFunc { return func(ctx context.Context, d *pluginsdk.ResourceData, meta interface{}) (data []*pluginsdk.ResourceData, err error) { - id, err := parse.OutputID(d.Id()) + id, err := outputs.ParseOutputID(d.Id()) if err != nil { return nil, err } client := meta.(*clients.Client).StreamAnalytics.OutputsClient - resp, err := 
client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.Get(ctx, *id) if err != nil { return nil, fmt.Errorf("retrieving %s: %+v", *id, err) } diff --git a/internal/services/streamanalytics/stream_analytics_output_blob_resource.go b/internal/services/streamanalytics/stream_analytics_output_blob_resource.go index 686266a261b8..ad47e912cc2e 100644 --- a/internal/services/streamanalytics/stream_analytics_output_blob_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_blob_resource.go @@ -2,15 +2,14 @@ package streamanalytics import ( "fmt" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "log" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" @@ -25,7 +24,7 @@ func resourceStreamAnalyticsOutputBlob() *pluginsdk.Resource { Update: resourceStreamAnalyticsOutputBlobCreateUpdate, Delete: resourceStreamAnalyticsOutputBlobDelete, Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { - _, err := parse.OutputID(id) + _, err := outputs.ParseOutputID(id) return err }), @@ -87,10 +86,10 @@ func resourceStreamAnalyticsOutputBlob() *pluginsdk.Resource { "authentication_mode": { Type: pluginsdk.TypeString, Optional: true, - Default: string(streamanalytics.AuthenticationModeConnectionString), + Default: string(outputs.AuthenticationModeConnectionString), ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.AuthenticationModeConnectionString), - string(streamanalytics.AuthenticationModeMsi), + string(outputs.AuthenticationModeConnectionString), + string(outputs.AuthenticationModeMsi), }, false), }, @@ -121,17 +120,17 @@ func resourceStreamAnalyticsOutputBlobCreateUpdate(d *pluginsdk.ResourceData, me ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewOutputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string)) + id := outputs.NewOutputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + existing, err := client.Get(ctx, id) if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return tf.ImportAsExistsError("azurerm_stream_analytics_output_blob", id.ID()) } } @@ -148,13 +147,12 @@ func resourceStreamAnalyticsOutputBlobCreateUpdate(d *pluginsdk.ResourceData, me return fmt.Errorf("expanding `serialization`: %+v", err) } - props := streamanalytics.Output{ - Name: utils.String(id.Name), - OutputProperties: 
&streamanalytics.OutputProperties{ - Datasource: &streamanalytics.BlobOutputDataSource{ - Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftStorageBlob, - BlobOutputDataSourceProperties: &streamanalytics.BlobOutputDataSourceProperties{ - StorageAccounts: &[]streamanalytics.StorageAccount{ + props := outputs.Output{ + Name: utils.String(id.OutputName), + Properties: &outputs.OutputProperties{ + Datasource: &outputs.BlobOutputDataSource{ + Properties: &outputs.BlobOutputDataSourceProperties{ + StorageAccounts: &[]outputs.StorageAccount{ { AccountKey: getStorageAccountKey(d.Get("storage_account_key").(string)), AccountName: utils.String(storageAccountName), @@ -164,7 +162,7 @@ func resourceStreamAnalyticsOutputBlobCreateUpdate(d *pluginsdk.ResourceData, me DateFormat: utils.String(dateFormat), PathPattern: utils.String(pathPattern), TimeFormat: utils.String(timeFormat), - AuthenticationMode: streamanalytics.AuthenticationMode(d.Get("authentication_mode").(string)), + AuthenticationMode: utils.ToPtr(outputs.AuthenticationMode(d.Get("authentication_mode").(string))), }, }, Serialization: serialization, @@ -172,26 +170,28 @@ func resourceStreamAnalyticsOutputBlobCreateUpdate(d *pluginsdk.ResourceData, me } if batchMaxWaitTime, ok := d.GetOk("batch_max_wait_time"); ok { - props.TimeWindow = utils.String(batchMaxWaitTime.(string)) + props.Properties.TimeWindow = utils.String(batchMaxWaitTime.(string)) } if batchMinRows, ok := d.GetOk("batch_min_rows"); ok { - props.SizeWindow = utils.Float(batchMinRows.(float64)) + props.Properties.SizeWindow = utils.Float(batchMinRows.(float64)) } // timeWindow and sizeWindow must be set for Parquet serialization _, isParquet := serialization.AsParquetSerialization() - if isParquet && (props.TimeWindow == nil || props.SizeWindow == nil) { + if isParquet && (props.Properties.TimeWindow == nil || props.Properties.SizeWindow == nil) { return fmt.Errorf("cannot create %s: batch_min_rows and batch_time_window must be set for Parquet serialization", id) } + var createOpts outputs.CreateOrReplaceOperationOptions + var updateOpts outputs.UpdateOperationOptions if d.IsNewResource() { - if _, err := client.CreateOrReplace(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, "", ""); err != nil { + if _, err := client.CreateOrReplace(ctx, id, props, createOpts); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } d.SetId(id.ID()) - } else if _, err := client.Update(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, ""); err != nil { + } else if _, err := client.Update(ctx, id, props, updateOpts); err != nil { return fmt.Errorf("updating %s: %+v", id, err) } @@ -203,14 +203,14 @@ func resourceStreamAnalyticsOutputBlobRead(d *pluginsdk.ResourceData, meta inter ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.OutputID(d.Id()) + id, err := outputs.ParseOutputID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { log.Printf("[DEBUG] %s was not found - removing from state!", id) d.SetId("") return nil @@ -219,34 +219,59 @@ func resourceStreamAnalyticsOutputBlobRead(d *pluginsdk.ResourceData, meta inter return fmt.Errorf("retrieving %s: %+v", id, err) } - d.Set("name", id.Name) - d.Set("stream_analytics_job_name", id.StreamingjobName) - d.Set("resource_group_name", id.ResourceGroup) 
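// Unlike the Track 1 response (a struct with the payload inlined), the
// hashicorp/go-azure-sdk Get response wraps the payload in a nilable Model
// field, so the read below nil-checks each layer before use. A minimal
// sketch of the envelope being unpacked (types as in the outputs package):
//
//	resp, err := client.Get(ctx, *id) // resp is outputs.GetOperationResponse
//	if err == nil && resp.Model != nil {
//		if props := resp.Model.Properties; props != nil {
//			// props.Datasource holds one variant of a discriminated union
//			if blob, ok := props.Datasource.(outputs.BlobOutputDataSource); ok {
//				_ = blob.Properties // *outputs.BlobOutputDataSourceProperties
//			}
//		}
//	}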
+ d.Set("name", id.OutputName) + d.Set("stream_analytics_job_name", id.JobName) + d.Set("resource_group_name", id.ResourceGroupName) - if props := resp.OutputProperties; props != nil { - v, ok := props.Datasource.AsBlobOutputDataSource() - if !ok { - return fmt.Errorf("converting Output Data Source to a Blob Output: %+v", err) - } + if model := resp.Model; model != nil { + if props := model.Properties; props != nil { + output, ok := props.Datasource.(outputs.BlobOutputDataSourceProperties) + if !ok { + return fmt.Errorf("converting to Blob Output") + } - d.Set("date_format", v.DateFormat) - d.Set("path_pattern", v.PathPattern) - d.Set("storage_container_name", v.Container) - d.Set("time_format", v.TimeFormat) - d.Set("authentication_mode", v.AuthenticationMode) + dateFormat := "" + if v := output.DateFormat; v != nil { + dateFormat = *v + } + d.Set("date_format", dateFormat) - if accounts := v.StorageAccounts; accounts != nil && len(*accounts) > 0 { - account := (*accounts)[0] - d.Set("storage_account_name", account.AccountName) - } + pathPattern := "" + if v := output.PathPattern; v != nil { + pathPattern = *v + } + d.Set("path_pattern", pathPattern) + + containerName := "" + if v := output.Container; v != nil { + containerName = *v + } + d.Set("storage_container_name", containerName) - if err := d.Set("serialization", flattenStreamAnalyticsOutputSerialization(props.Serialization)); err != nil { - return fmt.Errorf("setting `serialization`: %+v", err) + timeFormat := "" + if v := output.TimeFormat; v != nil { + timeFormat = *v + } + d.Set("time_format", timeFormat) + + authenticationMode := "" + if v := output.AuthenticationMode; v != nil { + authenticationMode = string(*v) + } + d.Set("authentication_mode", authenticationMode) + + if accounts := output.StorageAccounts; accounts != nil && len(*accounts) > 0 { + account := (*accounts)[0] + d.Set("storage_account_name", account.AccountName) + } + + if err := d.Set("serialization", flattenStreamAnalyticsOutputSerialization(props.Serialization)); err != nil { + return fmt.Errorf("setting `serialization`: %+v", err) + } + d.Set("batch_max_wait_time", props.TimeWindow) + d.Set("batch_min_rows", props.SizeWindow) } - d.Set("batch_max_wait_time", props.TimeWindow) - d.Set("batch_min_rows", props.SizeWindow) } - return nil } @@ -255,13 +280,13 @@ func resourceStreamAnalyticsOutputBlobDelete(d *pluginsdk.ResourceData, meta int ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.OutputID(d.Id()) + id, err := outputs.ParseOutputID(d.Id()) if err != nil { return err } - if resp, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.Name); err != nil { - if !response.WasNotFound(resp.Response) { + if resp, err := client.Delete(ctx, *id); err != nil { + if !response.WasNotFound(resp.HttpResponse) { return fmt.Errorf("deleting %s: %+v", id, err) } } diff --git a/internal/services/streamanalytics/stream_analytics_output_blob_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_blob_resource_test.go index 86bab171b238..8a9821d34b4f 100644 --- a/internal/services/streamanalytics/stream_analytics_output_blob_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_blob_resource_test.go @@ -3,6 +3,8 @@ package streamanalytics_test import ( "context" "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" 
"github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" @@ -134,16 +136,17 @@ func TestAccStreamAnalyticsOutputBlob_authenticationMode(t *testing.T) { } func (r StreamAnalyticsOutputBlobResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - name := state.Attributes["name"] - jobName := state.Attributes["stream_analytics_job_name"] - resourceGroup := state.Attributes["resource_group_name"] + id, err := outputs.ParseOutputID(state.ID) + if err != nil { + return utils.Bool(false), err + } - resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, resourceGroup, jobName, name) + resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } - return nil, fmt.Errorf("retrieving Stream Output %q (Stream Analytic Job %q / Resource Group %q): %+v", name, jobName, resourceGroup, err) + return nil, fmt.Errorf("retrieving %s: %+v", *id, err) } return utils.Bool(true), nil } diff --git a/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go b/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go index 3c3f4f3bb833..27f640115f0d 100644 --- a/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go @@ -3,9 +3,9 @@ package streamanalytics import ( "context" "fmt" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" @@ -109,14 +109,14 @@ func (r OutputCosmosDBResource) Create() sdk.ResourceFunc { if err != nil { return err } - id := parse.NewOutputID(subscriptionId, streamingJobId.ResourceGroup, streamingJobId.Name, model.Name) + id := outputs.NewOutputID(subscriptionId, streamingJobId.ResourceGroup, streamingJobId.Name, model.Name) - existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) - if err != nil && !utils.ResponseWasNotFound(existing.Response) { + existing, err := client.Get(ctx, id) + if err != nil && !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return metadata.ResourceRequiresImport(r.ResourceType(), id) } @@ -125,26 +125,26 @@ func (r OutputCosmosDBResource) Create() sdk.ResourceFunc { return err } - documentDbOutputProps := &streamanalytics.DocumentDbOutputDataSourceProperties{ - AccountID: utils.String(databaseId.DatabaseAccountName), + documentDbOutputProps := &outputs.DocumentDbOutputDataSourceProperties{ + AccountId: utils.String(databaseId.DatabaseAccountName), AccountKey: utils.String(model.AccountKey), Database: utils.String(databaseId.Name), CollectionNamePattern: utils.String(model.ContainerName), - DocumentID: utils.String(model.DocumentID), + DocumentId: utils.String(model.DocumentID), PartitionKey: utils.String(model.PartitionKey), } - props := streamanalytics.Output{ + props := outputs.Output{ Name: utils.String(model.Name), - OutputProperties: &streamanalytics.OutputProperties{ - Datasource: 
&streamanalytics.DocumentDbOutputDataSource{ - DocumentDbOutputDataSourceProperties: documentDbOutputProps, - Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB, + Properties: &outputs.OutputProperties{ + Datasource: &outputs.DocumentDbOutputDataSource{ + Properties: documentDbOutputProps, }, }, } - if _, err := client.CreateOrReplace(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, "", ""); err != nil { + var opts outputs.CreateOrReplaceOperationOptions + if _, err := client.CreateOrReplace(ctx, id, props, opts); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } metadata.SetID(id) @@ -160,49 +160,57 @@ func (r OutputCosmosDBResource) Read() sdk.ResourceFunc { Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { client := metadata.Client.StreamAnalytics.OutputsClient - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return metadata.MarkAsGone(id) } return fmt.Errorf("reading %s: %+v", *id, err) } - if props := resp.OutputProperties; props != nil && props.Datasource != nil { - v, ok := props.Datasource.AsDocumentDbOutputDataSource() - if !ok { - return fmt.Errorf("converting output data source to a document DB output: %+v", err) + if model := resp.Model; model != nil { + if props := model.Properties; props != nil { + output, ok := props.Datasource.(outputs.DocumentDbOutputDataSourceProperties) + if !ok { + return fmt.Errorf("converting to CosmosDb Output") + } + + streamingJobId := parse.NewStreamingJobID(id.SubscriptionId, id.ResourceGroupName, id.JobName) + state := OutputCosmosDBResourceModel{ + Name: id.OutputName, + StreamAnalyticsJob: streamingJobId.ID(), + } + + state.AccountKey = metadata.ResourceData.Get("cosmosdb_account_key").(string) + + databaseId := cosmosParse.NewSqlDatabaseID(id.SubscriptionId, id.ResourceGroupName, *output.AccountId, *output.Database) + state.Database = databaseId.ID() + + collectionName := "" + if v := output.CollectionNamePattern; v != nil { + collectionName = *v + } + state.ContainerName = collectionName + + document := "" + if v := output.DocumentId; v != nil { + document = *v + } + state.DocumentID = document + + partitionKey := "" + if v := output.PartitionKey; v != nil { + partitionKey = *v + } + state.PartitionKey = partitionKey + + return metadata.Encode(&state) } - - streamingJobId := parse.NewStreamingJobID(id.SubscriptionId, id.ResourceGroup, id.StreamingjobName) - state := OutputCosmosDBResourceModel{ - Name: id.Name, - StreamAnalyticsJob: streamingJobId.ID(), - } - - state.AccountKey = metadata.ResourceData.Get("cosmosdb_account_key").(string) - - databaseId := cosmosParse.NewSqlDatabaseID(id.SubscriptionId, id.ResourceGroup, *v.AccountID, *v.Database) - state.Database = databaseId.ID() - - if v.CollectionNamePattern != nil { - state.ContainerName = *v.CollectionNamePattern - } - - if v.DocumentID != nil { - state.DocumentID = *v.DocumentID - } - - if v.PartitionKey != nil { - state.PartitionKey = *v.PartitionKey - } - - return metadata.Encode(&state) } return nil }, @@ -214,15 +222,15 @@ func (r OutputCosmosDBResource) Delete() sdk.ResourceFunc { Timeout: 30 * time.Minute, Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { client := 
metadata.Client.StreamAnalytics.OutputsClient - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } metadata.Logger.Infof("deleting %s", *id) - if resp, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.Name); err != nil { - if !response.WasNotFound(resp.Response) { + if resp, err := client.Delete(ctx, *id); err != nil { + if !response.WasNotFound(resp.HttpResponse) { return fmt.Errorf("deleting %s: %+v", *id, err) } } @@ -232,7 +240,7 @@ func (r OutputCosmosDBResource) Delete() sdk.ResourceFunc { } func (r OutputCosmosDBResource) IDValidationFunc() pluginsdk.SchemaValidateFunc { - return validate.OutputID + return outputs.ValidateOutputID } func (r OutputCosmosDBResource) Update() sdk.ResourceFunc { @@ -240,7 +248,7 @@ func (r OutputCosmosDBResource) Update() sdk.ResourceFunc { Timeout: 30 * time.Minute, Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { client := metadata.Client.StreamAnalytics.OutputsClient - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } @@ -256,21 +264,21 @@ func (r OutputCosmosDBResource) Update() sdk.ResourceFunc { } if metadata.ResourceData.HasChangesExcept("name", "stream_analytics_job_id") { - props := streamanalytics.Output{ - OutputProperties: &streamanalytics.OutputProperties{ - Datasource: streamanalytics.DocumentDbOutputDataSource{ - Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB, - DocumentDbOutputDataSourceProperties: &streamanalytics.DocumentDbOutputDataSourceProperties{ + props := outputs.Output{ + Properties: &outputs.OutputProperties{ + Datasource: outputs.DocumentDbOutputDataSource{ + Properties: &outputs.DocumentDbOutputDataSourceProperties{ AccountKey: &state.AccountKey, Database: &databaseId.Name, CollectionNamePattern: &state.ContainerName, - DocumentID: &state.DocumentID, + DocumentId: &state.DocumentID, PartitionKey: &state.PartitionKey, }, }, }, } - if _, err := client.Update(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, ""); err != nil { + var opts outputs.UpdateOperationOptions + if _, err := client.Update(ctx, *id, props, opts); err != nil { return fmt.Errorf("updating %s: %+v", *id, err) } } @@ -282,20 +290,20 @@ func (r OutputCosmosDBResource) Update() sdk.ResourceFunc { func (r OutputCosmosDBResource) CustomImporter() sdk.ResourceRunFunc { return func(ctx context.Context, metadata sdk.ResourceMetaData) error { - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } client := metadata.Client.StreamAnalytics.OutputsClient - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) - if err != nil || resp.OutputProperties == nil { + resp, err := client.Get(ctx, *id) + if err != nil || resp.Model == nil || resp.Model.Properties == nil { return fmt.Errorf("reading %s: %+v", *id, err) } - props := resp.OutputProperties - if _, ok := props.Datasource.AsDocumentDbOutputDataSource(); !ok { - return fmt.Errorf("specified output is not of type %s", streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB) + props := resp.Model.Properties + if _, ok := props.Datasource.(outputs.DocumentDbOutputDataSourceProperties); !ok { + return fmt.Errorf("specified output is not a CosmosDb Output") } return nil } diff --git
a/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource_test.go index 6b564bd9e832..9cdc3fe18259 100644 --- a/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource_test.go @@ -3,12 +3,13 @@ package streamanalytics_test import ( "context" "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/utils" ) @@ -81,14 +82,14 @@ func TestAccStreamAnalyticsOutputCosmosDB_requiresImport(t *testing.T) { }) } func (r StreamAnalyticsOutputCosmosDBResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - id, err := parse.OutputID(state.ID) + id, err := outputs.ParseOutputID(state.ID) if err != nil { return nil, err } - resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } return nil, fmt.Errorf("retrieving %s: %+v", *id, err) diff --git a/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go b/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go index 5db898788a4d..faf9ca6420b6 100644 --- a/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go @@ -2,15 +2,14 @@ package streamanalytics import ( "fmt" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "log" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" @@ -24,7 +23,7 @@ func resourceStreamAnalyticsOutputEventHub() *pluginsdk.Resource { Update: resourceStreamAnalyticsOutputEventHubCreateUpdate, Delete: resourceStreamAnalyticsOutputEventHubDelete, Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { - _, err := parse.OutputID(id) + _, err := outputs.ParseOutputID(id) return err }), @@ -94,10 +93,10 @@ func resourceStreamAnalyticsOutputEventHub() *pluginsdk.Resource { "authentication_mode": { Type: pluginsdk.TypeString, Optional: true, - Default: 
string(streamanalytics.AuthenticationModeConnectionString), + Default: string(outputs.AuthenticationModeConnectionString), ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.AuthenticationModeMsi), - string(streamanalytics.AuthenticationModeConnectionString), + string(outputs.AuthenticationModeMsi), + string(outputs.AuthenticationModeConnectionString), }, false), }, @@ -112,16 +111,16 @@ func resourceStreamAnalyticsOutputEventHubCreateUpdate(d *pluginsdk.ResourceData ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewOutputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string)) + id := outputs.NewOutputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + existing, err := client.Get(ctx, id) if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return tf.ImportAsExistsError("azurerm_stream_analytics_output_eventhub", id.ID()) } } @@ -139,32 +138,33 @@ func resourceStreamAnalyticsOutputEventHubCreateUpdate(d *pluginsdk.ResourceData return fmt.Errorf("expanding `serialization`: %+v", err) } - props := streamanalytics.Output{ - Name: utils.String(id.Name), - OutputProperties: &streamanalytics.OutputProperties{ - Datasource: &streamanalytics.EventHubOutputDataSource{ - Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub, - EventHubOutputDataSourceProperties: &streamanalytics.EventHubOutputDataSourceProperties{ + props := outputs.Output{ + Name: utils.String(id.OutputName), + Properties: &outputs.OutputProperties{ + Datasource: &outputs.EventHubOutputDataSource{ + Properties: &outputs.EventHubOutputDataSourceProperties{ EventHubName: utils.String(eventHubName), ServiceBusNamespace: utils.String(serviceBusNamespace), SharedAccessPolicyKey: utils.String(sharedAccessPolicyKey), SharedAccessPolicyName: utils.String(sharedAccessPolicyName), PropertyColumns: utils.ExpandStringSlice(propertyColumns), PartitionKey: utils.String(partitionKey), - AuthenticationMode: streamanalytics.AuthenticationMode(d.Get("authentication_mode").(string)), + AuthenticationMode: utils.ToPtr(outputs.AuthenticationMode(d.Get("authentication_mode").(string))), }, }, Serialization: serialization, }, } + var createOpts outputs.CreateOrReplaceOperationOptions + var updateOpts outputs.UpdateOperationOptions if d.IsNewResource() { - if _, err := client.CreateOrReplace(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, "", ""); err != nil { + if _, err := client.CreateOrReplace(ctx, id, props, createOpts); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } d.SetId(id.ID()) - } else if _, err := client.Update(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, ""); err != nil { + } else if _, err := client.Update(ctx, id, props, updateOpts); err != nil { return fmt.Errorf("updating %s: %+v", id, err) } @@ -176,14 +176,14 @@ func resourceStreamAnalyticsOutputEventHubRead(d *pluginsdk.ResourceData, meta i ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := 
parse.OutputID(d.Id()) + id, err := outputs.ParseOutputID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { log.Printf("[DEBUG] %s was not found - removing from state!", id) d.SetId("") return nil @@ -192,28 +192,48 @@ func resourceStreamAnalyticsOutputEventHubRead(d *pluginsdk.ResourceData, meta i return fmt.Errorf("retrieving %s: %+v", id, err) } - d.Set("name", id.Name) - d.Set("stream_analytics_job_name", id.StreamingjobName) - d.Set("resource_group_name", id.ResourceGroup) + d.Set("name", id.OutputName) + d.Set("stream_analytics_job_name", id.JobName) + d.Set("resource_group_name", id.ResourceGroupName) - if props := resp.OutputProperties; props != nil { - v, ok := props.Datasource.AsEventHubOutputDataSource() - if !ok { - return fmt.Errorf("converting Output Data Source to a EventHub Output: %+v", err) - } + if model := resp.Model; model != nil { + if props := model.Properties; props != nil { + output, ok := props.Datasource.(outputs.EventHubOutputDataSourceProperties) + if !ok { + return fmt.Errorf("converting to EventHub Output") + } + + eventHubName := "" + if v := output.EventHubName; v != nil { + eventHubName = *v + } + d.Set("eventhub_name", eventHubName) - d.Set("eventhub_name", v.EventHubName) - d.Set("servicebus_namespace", v.ServiceBusNamespace) - d.Set("shared_access_policy_name", v.SharedAccessPolicyName) - d.Set("property_columns", v.PropertyColumns) - d.Set("partition_key", v.PartitionKey) - d.Set("authentication_mode", v.AuthenticationMode) + serviceBusNamespace := "" + if v := output.ServiceBusNamespace; v != nil { + serviceBusNamespace = *v + } + d.Set("servicebus_namespace", serviceBusNamespace) - if err := d.Set("serialization", flattenStreamAnalyticsOutputSerialization(props.Serialization)); err != nil { - return fmt.Errorf("setting `serialization`: %+v", err) + sharedAccessPolicyName := "" + if v := output.SharedAccessPolicyName; v != nil { + sharedAccessPolicyName = *v + } + d.Set("shared_access_policy_name", sharedAccessPolicyName) + + partitionKey := "" + if v := output.PartitionKey; v != nil { + partitionKey = *v + } + d.Set("partition_key", partitionKey) + + authMode := "" + if v := output.AuthenticationMode; v != nil { + authMode = string(*v) + } + d.Set("authentication_mode", authMode) + + d.Set("property_columns", output.PropertyColumns) + + if err := d.Set("serialization", flattenStreamAnalyticsOutputSerialization(props.Serialization)); err != nil { + return fmt.Errorf("setting `serialization`: %+v", err) + } } } - return nil } @@ -222,13 +242,13 @@ func resourceStreamAnalyticsOutputEventHubDelete(d *pluginsdk.ResourceData, meta ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.OutputID(d.Id()) + id, err := outputs.ParseOutputID(d.Id()) if err != nil { return err } - if resp, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.Name); err != nil { - if !response.WasNotFound(resp.Response) { + if resp, err := client.Delete(ctx, *id); err != nil { + if !response.WasNotFound(resp.HttpResponse) { return fmt.Errorf("deleting %s: %+v", id, err) } } diff --git a/internal/services/streamanalytics/stream_analytics_output_eventhub_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_eventhub_resource_test.go index 4a20399ed47c..60471418bcdb 100644 --- a/internal/services/streamanalytics/stream_analytics_output_eventhub_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_eventhub_resource_test.go @@ -3,6 +3,8 @@ package streamanalytics_test import ( "context"
"fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" @@ -168,16 +170,17 @@ func TestAccStreamAnalyticsOutputEventHub_authenticationMode(t *testing.T) { } func (r StreamAnalyticsOutputEventhubResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - name := state.Attributes["name"] - jobName := state.Attributes["stream_analytics_job_name"] - resourceGroup := state.Attributes["resource_group_name"] + id, err := outputs.ParseOutputID(state.ID) + if err != nil { + return utils.Bool(false), err + } - resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, resourceGroup, jobName, name) + resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } - return nil, fmt.Errorf("retrieving Stream Output %q (Stream Analytics Job %q / Resource Group %q): %+v", name, jobName, resourceGroup, err) + return nil, fmt.Errorf("retrieving %s: %+v", *id, err) } return utils.Bool(true), nil } diff --git a/internal/services/streamanalytics/stream_analytics_output_function_resource.go b/internal/services/streamanalytics/stream_analytics_output_function_resource.go index a2bf2d9a72cb..6297cb80316e 100644 --- a/internal/services/streamanalytics/stream_analytics_output_function_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_function_resource.go @@ -3,13 +3,12 @@ package streamanalytics import ( "context" "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/utils" @@ -95,7 +94,7 @@ func (r OutputFunctionResource) ResourceType() string { } func (r OutputFunctionResource) IDValidationFunc() pluginsdk.SchemaValidateFunc { - return validate.OutputID + return outputs.ValidateOutputID } func (r OutputFunctionResource) Create() sdk.ResourceFunc { @@ -110,26 +109,25 @@ func (r OutputFunctionResource) Create() sdk.ResourceFunc { client := metadata.Client.StreamAnalytics.OutputsClient subscriptionId := metadata.Client.Account.SubscriptionId - id := parse.NewOutputID(subscriptionId, model.ResourceGroup, model.StreamAnalyticsJob, model.Name) + id := outputs.NewOutputID(subscriptionId, model.ResourceGroup, model.StreamAnalyticsJob, model.Name) - existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) - if err != nil && !utils.ResponseWasNotFound(existing.Response) { + existing, err := client.Get(ctx, id) + if err != nil && !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } - if !utils.ResponseWasNotFound(existing.Response) { + if 
!response.WasNotFound(existing.HttpResponse) { return metadata.ResourceRequiresImport(r.ResourceType(), id) } - props := streamanalytics.Output{ + props := outputs.Output{ Name: utils.String(model.Name), - OutputProperties: &streamanalytics.OutputProperties{ - Datasource: &streamanalytics.AzureFunctionOutputDataSource{ - Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftAzureFunction, - AzureFunctionOutputDataSourceProperties: &streamanalytics.AzureFunctionOutputDataSourceProperties{ + Properties: &outputs.OutputProperties{ + Datasource: &outputs.AzureFunctionOutputDataSource{ + Properties: &outputs.AzureFunctionOutputDataSourceProperties{ FunctionAppName: utils.String(model.FunctionApp), FunctionName: utils.String(model.FunctionName), - APIKey: utils.String(model.ApiKey), + ApiKey: utils.String(model.ApiKey), MaxBatchSize: utils.Float(float64(model.BatchMaxInBytes)), MaxBatchCount: utils.Float(float64(model.BatchMaxCount)), }, @@ -137,7 +135,8 @@ func (r OutputFunctionResource) Create() sdk.ResourceFunc { }, } - if _, err = client.CreateOrReplace(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, "", ""); err != nil { + var opts outputs.CreateOrReplaceOperationOptions + if _, err = client.CreateOrReplace(ctx, id, props, opts); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } @@ -153,40 +152,63 @@ func (r OutputFunctionResource) Read() sdk.ResourceFunc { Timeout: 5 * time.Minute, Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { client := metadata.Client.StreamAnalytics.OutputsClient - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return metadata.MarkAsGone(id) } return fmt.Errorf("reading %s: %+v", *id, err) } - if props := resp.OutputProperties; props != nil && props.Datasource != nil { - v, ok := props.Datasource.AsAzureFunctionOutputDataSource() - if !ok { - return fmt.Errorf("converting output data source to a function output: %+v", err) + if model := resp.Model; model != nil { + if props := model.Properties; props != nil { + output, ok := props.Datasource.(outputs.AzureFunctionOutputDataSourceProperties) + if !ok { + return fmt.Errorf("converting to Function Output") + } + + if output.FunctionAppName == nil || output.FunctionName == nil || output.MaxBatchCount == nil || output.MaxBatchSize == nil { + return nil + } + + state := OutputFunctionResourceModel{ + Name: id.OutputName, + ResourceGroup: id.ResourceGroupName, + StreamAnalyticsJob: id.JobName, + ApiKey: metadata.ResourceData.Get("api_key").(string), + } + + functionApp := "" + if v := output.FunctionAppName; v != nil { + functionApp = *v + } + state.FunctionApp = functionApp + + functionName := "" + if v := output.FunctionName; v != nil { + functionName = *v + } + state.FunctionName = functionName + + batchMaxInBytes := 0 + if v := output.MaxBatchSize; v != nil { + batchMaxInBytes = int(*v) + } + state.BatchMaxInBytes = batchMaxInBytes + + batchMaxCount := 0 + if v := output.MaxBatchCount; v != nil { + batchMaxCount = int(*v) + } + state.BatchMaxCount = batchMaxCount + + return metadata.Encode(&state) } - - if v.FunctionAppName == nil || v.FunctionName == nil || v.MaxBatchCount == nil || v.MaxBatchSize == nil { - return nil - } - - state := 
OutputFunctionResourceModel{ - Name: id.Name, - ResourceGroup: id.ResourceGroup, - StreamAnalyticsJob: id.StreamingjobName, - FunctionApp: *v.FunctionAppName, - FunctionName: *v.FunctionName, - ApiKey: metadata.ResourceData.Get("api_key").(string), - BatchMaxInBytes: int(*v.MaxBatchSize), - BatchMaxCount: int(*v.MaxBatchCount), - } - return metadata.Encode(&state) } return nil }, @@ -198,7 +220,7 @@ func (r OutputFunctionResource) Update() sdk.ResourceFunc { Timeout: 30 * time.Minute, Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { client := metadata.Client.StreamAnalytics.OutputsClient - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } @@ -208,15 +230,14 @@ func (r OutputFunctionResource) Update() sdk.ResourceFunc { return fmt.Errorf("decoding: %+v", err) } - props := streamanalytics.Output{ + props := outputs.Output{ Name: utils.String(state.Name), - OutputProperties: &streamanalytics.OutputProperties{ - Datasource: &streamanalytics.AzureFunctionOutputDataSource{ - Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftStorageTable, - AzureFunctionOutputDataSourceProperties: &streamanalytics.AzureFunctionOutputDataSourceProperties{ + Properties: &outputs.OutputProperties{ + Datasource: &outputs.AzureFunctionOutputDataSource{ + Properties: &outputs.AzureFunctionOutputDataSourceProperties{ FunctionAppName: utils.String(state.FunctionApp), FunctionName: utils.String(state.FunctionName), - APIKey: utils.String(state.ApiKey), + ApiKey: utils.String(state.ApiKey), MaxBatchSize: utils.Float(float64(state.BatchMaxInBytes)), MaxBatchCount: utils.Float(float64(state.BatchMaxCount)), }, @@ -224,7 +245,8 @@ func (r OutputFunctionResource) Update() sdk.ResourceFunc { }, } - if _, err = client.Update(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, ""); err != nil { + var opts outputs.UpdateOperationOptions + if _, err = client.Update(ctx, *id, props, opts); err != nil { return fmt.Errorf("updating %s: %+v", *id, err) } @@ -238,14 +260,14 @@ func (r OutputFunctionResource) Delete() sdk.ResourceFunc { Timeout: 30 * time.Minute, Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { client := metadata.Client.StreamAnalytics.OutputsClient - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } metadata.Logger.Infof("deleting %s", *id) - if _, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.Name); err != nil { + if _, err := client.Delete(ctx, *id); err != nil { return fmt.Errorf("deleting %s: %+v", *id, err) } return nil @@ -255,20 +277,20 @@ func (r OutputFunctionResource) Delete() sdk.ResourceFunc { func (r OutputFunctionResource) CustomImporter() sdk.ResourceRunFunc { return func(ctx context.Context, metadata sdk.ResourceMetaData) error { - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } client := metadata.Client.StreamAnalytics.OutputsClient - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) - if err != nil || resp.OutputProperties == nil { + resp, err := client.Get(ctx, *id) + if err != nil || resp.Model == nil || resp.Model.Properties == nil { return fmt.Errorf("reading %s: %+v", *id, err) } - props := resp.OutputProperties - if _, ok := props.Datasource.AsAzureFunctionOutputDataSource(); !ok { - return 
fmt.Errorf("specified output is not of type %s", streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftAzureFunction) + props := resp.Model.Properties + if _, ok := props.Datasource.(outputs.AzureFunctionOutputDataSourceProperties); !ok { + return fmt.Errorf("specified output is not of type") } return nil } diff --git a/internal/services/streamanalytics/stream_analytics_output_function_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_function_resource_test.go index fec7856c8265..9a3c9be472ec 100644 --- a/internal/services/streamanalytics/stream_analytics_output_function_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_function_resource_test.go @@ -3,12 +3,13 @@ package streamanalytics_test import ( "context" "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/utils" ) @@ -83,14 +84,14 @@ func TestAccStreamAnalyticsOutputFunction_requiresImport(t *testing.T) { } func (r StreamAnalyticsOutputFunctionResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - id, err := parse.OutputID(state.ID) + id, err := outputs.ParseOutputID(state.ID) if err != nil { return nil, err } - resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } return nil, fmt.Errorf("retrieving %s: %+v", *id, err) diff --git a/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go b/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go index 789cb2e6fe1b..5032ed2c8cf2 100644 --- a/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go @@ -2,14 +2,13 @@ package streamanalytics import ( "fmt" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "log" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" @@ -23,7 +22,7 @@ func resourceStreamAnalyticsOutputSql() *pluginsdk.Resource { Update: resourceStreamAnalyticsOutputSqlCreateUpdate, Delete: resourceStreamAnalyticsOutputSqlDelete, Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { - _, err := parse.OutputID(id) + _, err := outputs.ParseOutputID(id) return 
err }), @@ -108,10 +107,10 @@ func resourceStreamAnalyticsOutputSql() *pluginsdk.Resource { "authentication_mode": { Type: pluginsdk.TypeString, Optional: true, - Default: string(streamanalytics.AuthenticationModeConnectionString), + Default: string(outputs.AuthenticationModeConnectionString), ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.AuthenticationModeMsi), - string(streamanalytics.AuthenticationModeConnectionString), + string(outputs.AuthenticationModeMsi), + string(outputs.AuthenticationModeConnectionString), }, false), }, }, @@ -124,50 +123,45 @@ func resourceStreamAnalyticsOutputSqlCreateUpdate(d *pluginsdk.ResourceData, met ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewOutputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string)) + id := outputs.NewOutputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) - if err != nil && !utils.ResponseWasNotFound(existing.Response) { + existing, err := client.Get(ctx, id) + if err != nil && !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for existing %s: %+v", id, err) } - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return tf.ImportAsExistsError("azurerm_stream_analytics_output_mssql", id.ID()) } } - server := d.Get("server").(string) - databaseName := d.Get("database").(string) - tableName := d.Get("table").(string) - sqlUser := d.Get("user").(string) - sqlUserPassword := d.Get("password").(string) - - props := streamanalytics.Output{ - Name: utils.String(id.Name), - OutputProperties: &streamanalytics.OutputProperties{ - Datasource: &streamanalytics.AzureSQLDatabaseOutputDataSource{ - Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase, - AzureSQLDatabaseOutputDataSourceProperties: &streamanalytics.AzureSQLDatabaseOutputDataSourceProperties{ - Server: utils.String(server), - Database: utils.String(databaseName), - User: utils.String(sqlUser), - Password: utils.String(sqlUserPassword), - Table: utils.String(tableName), + props := outputs.Output{ + Name: utils.String(id.OutputName), + Properties: &outputs.OutputProperties{ + Datasource: &outputs.AzureSqlDatabaseOutputDataSource{ + Properties: &outputs.AzureSqlDatabaseDataSourceProperties{ + Server: utils.String(d.Get("server").(string)), + Database: utils.String(d.Get("database").(string)), + User: utils.String(d.Get("user").(string)), + Password: utils.String(d.Get("password").(string)), + Table: utils.String(d.Get("table").(string)), MaxBatchCount: utils.Float(d.Get("max_batch_count").(float64)), MaxWriterCount: utils.Float(d.Get("max_writer_count").(float64)), - AuthenticationMode: streamanalytics.AuthenticationMode(d.Get("authentication_mode").(string)), + AuthenticationMode: utils.ToPtr(outputs.AuthenticationMode(d.Get("authentication_mode").(string))), }, }, }, } + var createOpts outputs.CreateOrReplaceOperationOptions + var updateOpts outputs.UpdateOperationOptions if d.IsNewResource() { - if _, err := client.CreateOrReplace(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, "", ""); err != nil { + if _, err := client.CreateOrReplace(ctx, id, props, createOpts); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } 
d.SetId(id.ID()) - } else if _, err := client.Update(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, ""); err != nil { + } else if _, err := client.Update(ctx, id, props, updateOpts); err != nil { return fmt.Errorf("updating %s: %+v", id, err) } @@ -179,14 +173,14 @@ func resourceStreamAnalyticsOutputSqlRead(d *pluginsdk.ResourceData, meta interf ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.OutputID(d.Id()) + id, err := outputs.ParseOutputID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { log.Printf("[DEBUG] %s was not found - removing from state!", id) d.SetId("") return nil @@ -195,35 +189,60 @@ func resourceStreamAnalyticsOutputSqlRead(d *pluginsdk.ResourceData, meta interf return fmt.Errorf("retrieving %s: %+v", id, err) } - d.Set("name", id.Name) - d.Set("stream_analytics_job_name", id.StreamingjobName) - d.Set("resource_group_name", id.ResourceGroup) - - if props := resp.OutputProperties; props != nil { - v, ok := props.Datasource.AsAzureSQLDatabaseOutputDataSource() - if !ok { - return fmt.Errorf("converting Output Data Source to SQL Output: %+v", err) - } - - d.Set("server", v.Server) - d.Set("database", v.Database) - d.Set("table", v.Table) - d.Set("user", v.User) - d.Set("authentication_mode", v.AuthenticationMode) - - maxBatchCount := float64(10000) - if v.MaxBatchCount != nil { - maxBatchCount = *v.MaxBatchCount + d.Set("name", id.OutputName) + d.Set("stream_analytics_job_name", id.JobName) + d.Set("resource_group_name", id.ResourceGroupName) + + if model := resp.Model; model != nil { + if props := model.Properties; props != nil { + output, ok := props.Datasource.(outputs.AzureSqlDatabaseDataSourceProperties) + if !ok { + return fmt.Errorf("converting to SQL Output") + } + + server := "" + if v := output.Server; v != nil { + server = *v + } + d.Set("server", server) + + database := "" + if v := output.Database; v != nil { + database = *v + } + d.Set("database", database) + + table := "" + if v := output.Table; v != nil { + table = *v + } + d.Set("table", table) + + user := "" + if v := output.User; v != nil { + user = *v + } + d.Set("user", user) + + authMode := "" + if v := output.AuthenticationMode; v != nil { + authMode = string(*v) + } + d.Set("authentication_mode", authMode) + + maxBatchCount := float64(10000) + if v := output.MaxBatchCount; v != nil { + maxBatchCount = *v + } + d.Set("max_batch_count", maxBatchCount) + + maxWriterCount := float64(1) + if v := output.MaxWriterCount; v != nil { + maxWriterCount = *v + } + d.Set("max_writer_count", maxWriterCount) } - d.Set("max_batch_count", maxBatchCount) - - maxWriterCount := float64(1) - if v.MaxWriterCount != nil { - maxWriterCount = *v.MaxWriterCount - } - d.Set("max_writer_count", maxWriterCount) } - return nil } @@ -232,13 +251,13 @@ func resourceStreamAnalyticsOutputSqlDelete(d *pluginsdk.ResourceData, meta inte ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.OutputID(d.Id()) + id, err := outputs.ParseOutputID(d.Id()) if err != nil { return err } - if resp, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.Name); err != nil { - if !response.WasNotFound(resp.Response) { + if resp, err := client.Delete(ctx, *id); err != nil { + if
!response.WasNotFound(resp.HttpResponse) { return fmt.Errorf("deleting %s: %+v", id, err) } } diff --git a/internal/services/streamanalytics/stream_analytics_output_mssql_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_mssql_resource_test.go index 9e9c02352ffc..1d49a7358321 100644 --- a/internal/services/streamanalytics/stream_analytics_output_mssql_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_mssql_resource_test.go @@ -3,6 +3,8 @@ package streamanalytics_test import ( "context" "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" @@ -117,16 +119,16 @@ func TestAccStreamAnalyticsOutputSql_maxBatchCountAndMaxWriterCount(t *testing.T } func (r StreamAnalyticsOutputSqlResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - name := state.Attributes["name"] - jobName := state.Attributes["stream_analytics_job_name"] - resourceGroup := state.Attributes["resource_group_name"] + id, err := outputs.ParseOutputID(state.ID) + if err != nil { + return nil, err + } - resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, resourceGroup, jobName, name) + resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } - return nil, fmt.Errorf("retrieving Stream Output %q (Stream Analytics Job %q / Resource Group %q): %+v", name, jobName, resourceGroup, err) + return nil, fmt.Errorf("retrieving %s: %+v", *id, err) } return utils.Bool(true), nil } diff --git a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go index 9377820ab185..81d0e73bde2a 100644 --- a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go @@ -3,9 +3,9 @@ package streamanalytics import ( "context" "fmt" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" @@ -109,49 +109,49 @@ func (r OutputPowerBIResource) Create() sdk.ResourceFunc { client := metadata.Client.StreamAnalytics.OutputsClient subscriptionId := metadata.Client.Account.SubscriptionId - streamingJobStruct, err := parse.StreamingJobID(model.StreamAnalyticsJob) + streamingJobId, err := parse.StreamingJobID(model.StreamAnalyticsJob) if err != nil { return err } - id := parse.NewOutputID(subscriptionId, streamingJobStruct.ResourceGroup, streamingJobStruct.Name, model.Name) + id := outputs.NewOutputID(subscriptionId, streamingJobId.ResourceGroup, streamingJobId.Name, model.Name) - existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) - if err != nil && !utils.ResponseWasNotFound(existing.Response) { + existing, err := client.Get(ctx, id) + if err != nil && !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } - if
!utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return metadata.ResourceRequiresImport(r.ResourceType(), id) } - powerbiOutputProps := &streamanalytics.PowerBIOutputDataSourceProperties{ + powerBIOutputProps := &outputs.PowerBIOutputDataSourceProperties{ Dataset: utils.String(model.DataSet), Table: utils.String(model.Table), - GroupID: utils.String(model.GroupID), + GroupId: utils.String(model.GroupID), GroupName: utils.String(model.GroupName), - RefreshToken: utils.String("someRefreshToken"), // A valid refresh token is currently only obtainable via the Azure Portal. Put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. - AuthenticationMode: streamanalytics.AuthenticationMode("Msi"), // Set authentication mode as "Msi" here since other modes requires params obtainable from portal only. + RefreshToken: utils.String("someRefreshToken"), // A valid refresh token is currently only obtainable via the Azure Portal. Put a dummy string value here when creating the data source, then go to the Azure Portal to authenticate the data source, which will update this property with a valid refresh token. + AuthenticationMode: utils.ToPtr(outputs.AuthenticationMode("Msi")), // Set authentication mode as "Msi" here since other modes require params obtainable from the portal only. } if model.TokenUserDisplayName != "" { - powerbiOutputProps.TokenUserDisplayName = utils.String(model.TokenUserDisplayName) + powerBIOutputProps.TokenUserDisplayName = utils.String(model.TokenUserDisplayName) } if model.TokenUserPrincipalName != "" { - powerbiOutputProps.TokenUserPrincipalName = utils.String(model.TokenUserPrincipalName) + powerBIOutputProps.TokenUserPrincipalName = utils.String(model.TokenUserPrincipalName) } - props := streamanalytics.Output{ + props := outputs.Output{ Name: utils.String(model.Name), - OutputProperties: &streamanalytics.OutputProperties{ - Datasource: &streamanalytics.PowerBIOutputDataSource{ - Type: streamanalytics.TypeBasicOutputDataSourceTypePowerBI, - PowerBIOutputDataSourceProperties: powerbiOutputProps, + Properties: &outputs.OutputProperties{ + Datasource: &outputs.PowerBIOutputDataSource{ + Properties: powerBIOutputProps, }, }, } - if _, err = client.CreateOrReplace(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, "", ""); err != nil { + var opts outputs.CreateOrReplaceOperationOptions + if _, err = client.CreateOrReplace(ctx, id, props, opts); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } @@ -167,7 +167,7 @@ func (r OutputPowerBIResource) Update() sdk.ResourceFunc { Timeout: 30 * time.Minute, Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { client := metadata.Client.StreamAnalytics.OutputsClient - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } @@ -178,7 +178,7 @@ func (r OutputPowerBIResource) Update() sdk.ResourceFunc { } needUpdateDataSourceProps := false - dataSourceProps := streamanalytics.PowerBIOutputDataSourceProperties{} + dataSourceProps := outputs.PowerBIOutputDataSourceProperties{} d := metadata.ResourceData if d.HasChange("dataset") { @@ -198,7 +198,7 @@ func (r OutputPowerBIResource) Update() sdk.ResourceFunc { if d.HasChange("group_id") { needUpdateDataSourceProps = true - dataSourceProps.GroupID = &state.GroupID + dataSourceProps.GroupId =
&state.GroupID } if d.HasChange("token_user_principal_name") { @@ -215,18 +215,18 @@ func (r OutputPowerBIResource) Update() sdk.ResourceFunc { return nil } - updateDataSource := streamanalytics.PowerBIOutputDataSource{ - Type: streamanalytics.TypeBasicOutputDataSourceTypePowerBI, - PowerBIOutputDataSourceProperties: &dataSourceProps, + updateDataSource := outputs.PowerBIOutputDataSource{ + Properties: &dataSourceProps, } - props := streamanalytics.Output{ - OutputProperties: &streamanalytics.OutputProperties{ + props := outputs.Output{ + Properties: &outputs.OutputProperties{ Datasource: updateDataSource, }, } - if _, err = client.Update(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, ""); err != nil { + var opts outputs.UpdateOperationOptions + if _, err = client.Update(ctx, *id, props, opts); err != nil { return fmt.Errorf("updating %s: %+v", *id, err) } @@ -240,52 +240,62 @@ func (r OutputPowerBIResource) Read() sdk.ResourceFunc { Timeout: 5 * time.Minute, Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { client := metadata.Client.StreamAnalytics.OutputsClient - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return metadata.MarkAsGone(id) } return fmt.Errorf("reading %s: %+v", *id, err) } - if props := resp.OutputProperties; props != nil && props.Datasource != nil { - v, ok := props.Datasource.AsPowerBIOutputDataSource() - if !ok { - return fmt.Errorf("converting output data source to a powerBI output: %+v", err) + if model := resp.Model; model != nil { + if props := model.Properties; props != nil { + output, ok := props.Datasource.(outputs.PowerBIOutputDataSourceProperties) + if !ok { + return fmt.Errorf("converting to PowerBI Output") + } + + streamingJobId := parse.NewStreamingJobID(id.SubscriptionId, id.ResourceGroupName, id.JobName) + + state := OutputPowerBIResourceModel{ + Name: id.OutputName, + StreamAnalyticsJob: streamingJobId.ID(), + } + + dataset := "" + if v := output.Dataset; v != nil { + dataset = *v + } + state.DataSet = dataset + + table := "" + if v := output.Table; v != nil { + table = *v + } + state.Table = table + + groupId := "" + if v := output.GroupId; v != nil { + groupId = *v + } + state.GroupID = groupId + + groupName := "" + if v := output.GroupName; v != nil { + groupName = *v + } + state.GroupName = groupName + + state.TokenUserDisplayName = metadata.ResourceData.Get("token_user_display_name").(string) + state.TokenUserPrincipalName = metadata.ResourceData.Get("token_user_principal_name").(string) + + return metadata.Encode(&state) } - - streamingJobId := parse.NewStreamingJobID(id.SubscriptionId, id.ResourceGroup, id.StreamingjobName) - - state := OutputPowerBIResourceModel{ - Name: id.Name, - StreamAnalyticsJob: streamingJobId.ID(), - } - - if v.Dataset != nil { - state.DataSet = *v.Dataset - } - - if v.Table != nil { - state.Table = *v.Table - } - - if v.GroupID != nil { - state.GroupID = *v.GroupID - } - - if v.GroupName != nil { - state.GroupName = *v.GroupName - } - - state.TokenUserDisplayName = metadata.ResourceData.Get("token_user_display_name").(string) - state.TokenUserPrincipalName = metadata.ResourceData.Get("token_user_principal_name").(string) - - return metadata.Encode(&state) } return nil 
}, @@ -297,15 +307,15 @@ func (r OutputPowerBIResource) Delete() sdk.ResourceFunc { Timeout: 30 * time.Minute, Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { client := metadata.Client.StreamAnalytics.OutputsClient - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } metadata.Logger.Infof("deleting %s", *id) - if resp, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.Name); err != nil { - if !response.WasNotFound(resp.Response) { + if resp, err := client.Delete(ctx, *id); err != nil { + if !response.WasNotFound(resp.HttpResponse) { return fmt.Errorf("deleting %s: %+v", *id, err) } } @@ -315,25 +325,25 @@ func (r OutputPowerBIResource) Delete() sdk.ResourceFunc { } func (r OutputPowerBIResource) IDValidationFunc() pluginsdk.SchemaValidateFunc { - return validate.OutputID + return outputs.ValidateOutputID } func (r OutputPowerBIResource) CustomImporter() sdk.ResourceRunFunc { return func(ctx context.Context, metadata sdk.ResourceMetaData) error { - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } client := metadata.Client.StreamAnalytics.OutputsClient - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) - if err != nil || resp.OutputProperties == nil { + resp, err := client.Get(ctx, *id) + if err != nil || resp.Model == nil || resp.Model.Properties == nil { return fmt.Errorf("reading %s: %+v", *id, err) } - props := resp.OutputProperties - if _, ok := props.Datasource.AsPowerBIOutputDataSource(); !ok { - return fmt.Errorf("specified output is not of type %s", streamanalytics.TypeBasicOutputDataSourceTypePowerBI) + props := resp.Model.Properties + if _, ok := props.Datasource.(outputs.PowerBIOutputDataSourceProperties); !ok { + return fmt.Errorf("specified output is not a PowerBI Output") } return nil } diff --git a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource_test.go index 786dadc0a82b..801d9d7a4b3a 100644 --- a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource_test.go @@ -3,12 +3,13 @@ package streamanalytics_test import ( "context" "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/utils" ) @@ -16,14 +17,14 @@ import ( type StreamAnalyticsOutputPowerBIResource struct{} func (r StreamAnalyticsOutputPowerBIResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - id, err := parse.OutputID(state.ID) + id, err := outputs.ParseOutputID(state.ID) if err != nil { return nil, err } - resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err :=
client.StreamAnalytics.OutputsClient.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } return nil, fmt.Errorf("retrieving %s: %+v", *id, err) diff --git a/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go b/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go index 0279657bf78b..0a7814eb0c42 100644 --- a/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go @@ -2,15 +2,14 @@ package streamanalytics import ( "fmt" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "log" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" @@ -24,7 +23,7 @@ func resourceStreamAnalyticsOutputServiceBusQueue() *pluginsdk.Resource { Update: resourceStreamAnalyticsOutputServiceBusQueueCreateUpdate, Delete: resourceStreamAnalyticsOutputServiceBusQueueDelete, Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { - _, err := parse.OutputID(id) + _, err := outputs.ParseOutputID(id) return err }), @@ -100,10 +99,10 @@ func resourceStreamAnalyticsOutputServiceBusQueue() *pluginsdk.Resource { "authentication_mode": { Type: pluginsdk.TypeString, Optional: true, - Default: string(streamanalytics.AuthenticationModeConnectionString), + Default: string(outputs.AuthenticationModeConnectionString), ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.AuthenticationModeConnectionString), - string(streamanalytics.AuthenticationModeMsi), + string(outputs.AuthenticationModeConnectionString), + string(outputs.AuthenticationModeMsi), }, false), }, }, @@ -116,16 +115,16 @@ func resourceStreamAnalyticsOutputServiceBusQueueCreateUpdate(d *pluginsdk.Resou ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewOutputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string)) + id := outputs.NewOutputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + existing, err := client.Get(ctx, id) if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return tf.ImportAsExistsError("azurerm_stream_analytics_output_servicebus_queue", id.ID()) } } @@ -141,19 +140,19 @@ func resourceStreamAnalyticsOutputServiceBusQueueCreateUpdate(d 
*pluginsdk.Resou
 		return fmt.Errorf("expanding `serialization`: %+v", err)
 	}

-	props := streamanalytics.Output{
-		Name: utils.String(id.Name),
-		OutputProperties: &streamanalytics.OutputProperties{
-			Datasource: &streamanalytics.ServiceBusQueueOutputDataSource{
-				Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue,
-				ServiceBusQueueOutputDataSourceProperties: &streamanalytics.ServiceBusQueueOutputDataSourceProperties{
+	systemPropertyColumns := d.Get("system_property_columns").(interface{})
+	props := outputs.Output{
+		Name: utils.String(id.OutputName),
+		Properties: &outputs.OutputProperties{
+			Datasource: &outputs.ServiceBusQueueOutputDataSource{
+				Properties: &outputs.ServiceBusQueueOutputDataSourceProperties{
 					QueueName:              utils.String(queueName),
 					ServiceBusNamespace:    utils.String(serviceBusNamespace),
 					SharedAccessPolicyKey:  utils.String(sharedAccessPolicyKey),
 					SharedAccessPolicyName: utils.String(sharedAccessPolicyName),
 					PropertyColumns:        utils.ExpandStringSlice(d.Get("property_columns").([]interface{})),
-					SystemPropertyColumns:  d.Get("system_property_columns").(map[string]interface{}),
-					AuthenticationMode:     streamanalytics.AuthenticationMode(d.Get("authentication_mode").(string)),
+					SystemPropertyColumns:  &systemPropertyColumns,
+					AuthenticationMode:     utils.ToPtr(outputs.AuthenticationMode(d.Get("authentication_mode").(string))),
 				},
 			},
 			Serialization: serialization,
@@ -161,13 +160,13 @@ func resourceStreamAnalyticsOutputServiceBusQueueCreateUpdate(d *pluginsdk.Resou
 	}

 	// TODO: split the create/update functions to allow for ignore changes etc
+	var createOpts outputs.CreateOrReplaceOperationOptions
+	var updateOpts outputs.UpdateOperationOptions
 	if d.IsNewResource() {
-		if _, err := client.CreateOrReplace(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, "", ""); err != nil {
+		if _, err := client.CreateOrReplace(ctx, id, props, createOpts); err != nil {
 			return fmt.Errorf("creating %s: %+v", id, err)
 		}

 		d.SetId(id.ID())
-	} else if _, err := client.Update(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, ""); err != nil {
+	} else if _, err := client.Update(ctx, id, props, updateOpts); err != nil {
 		return fmt.Errorf("updating %s: %+v", id, err)
 	}

@@ -179,14 +180,14 @@ func resourceStreamAnalyticsOutputServiceBusQueueRead(d *pluginsdk.ResourceData,
 	ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
 	defer cancel()

-	id, err := parse.OutputID(d.Id())
+	id, err := outputs.ParseOutputID(d.Id())
 	if err != nil {
 		return err
 	}

-	resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name)
+	resp, err := client.Get(ctx, *id)
 	if err != nil {
-		if utils.ResponseWasNotFound(resp.Response) {
+		if response.WasNotFound(resp.HttpResponse) {
 			log.Printf("[DEBUG] %s was not found - removing from state!", id)
 			d.SetId("")
 			return nil
@@ -195,28 +196,58 @@ func resourceStreamAnalyticsOutputServiceBusQueueRead(d *pluginsdk.ResourceData,
 		return fmt.Errorf("retrieving %s: %+v", id, err)
 	}

-	d.Set("name", id.Name)
-	d.Set("stream_analytics_job_name", id.StreamingjobName)
-	d.Set("resource_group_name", id.ResourceGroup)
+	d.Set("name", id.OutputName)
+	d.Set("stream_analytics_job_name", id.JobName)
+	d.Set("resource_group_name", id.ResourceGroupName)

-	if props := resp.OutputProperties; props != nil {
-		v, ok := props.Datasource.AsServiceBusQueueOutputDataSource()
-		if !ok {
-			return fmt.Errorf("converting Output Data Source to a ServiceBus Queue Output: %+v", err)
-		}
+	if model := resp.Model; model != nil {
+		if props := model.Properties; props != nil {
+			output, ok := props.Datasource.(outputs.ServiceBusQueueOutputDataSourceProperties)
+			if !ok {
+				return fmt.Errorf("converting to ServiceBus Queue Output")
+			}
+
+			queue := ""
+			if v := output.QueueName; v != nil {
+				queue = *v
+			}
+			d.Set("queue_name", queue)

-		d.Set("queue_name", v.QueueName)
-		d.Set("servicebus_namespace", v.ServiceBusNamespace)
-		d.Set("shared_access_policy_name", v.SharedAccessPolicyName)
-		d.Set("property_columns", v.PropertyColumns)
-		d.Set("system_property_columns", v.SystemPropertyColumns)
-		d.Set("authentication_mode", v.AuthenticationMode)
+			namespace := ""
+			if v := output.ServiceBusNamespace; v != nil {
+				namespace = *v
+			}
+			d.Set("servicebus_namespace", namespace)
+
+			policyName := ""
+			if v := output.SharedAccessPolicyName; v != nil {
+				policyName = *v
+			}
+			d.Set("shared_access_policy_name", policyName)

-		if err := d.Set("serialization", flattenStreamAnalyticsOutputSerialization(props.Serialization)); err != nil {
-			return fmt.Errorf("setting `serialization`: %+v", err)
+			var columns []string
+			if v := output.PropertyColumns; v != nil {
+				columns = *v
+			}
+			d.Set("property_columns", columns)
+
+			var systemColumns interface{}
+			if v := output.SystemPropertyColumns; v != nil {
+				systemColumns = *v
+			}
+			d.Set("system_property_columns", systemColumns)
+
+			authMode := ""
+			if v := output.AuthenticationMode; v != nil {
+				authMode = string(*v)
+			}
+			d.Set("authentication_mode", authMode)
+
+			if err := d.Set("serialization", flattenStreamAnalyticsOutputSerialization(props.Serialization)); err != nil {
+				return fmt.Errorf("setting `serialization`: %+v", err)
+			}
 		}
 	}
-
 	return nil
 }

@@ -225,13 +256,13 @@ func resourceStreamAnalyticsOutputServiceBusQueueDelete(d *pluginsdk.ResourceDat
 	ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
 	defer cancel()

-	id, err := parse.OutputID(d.Id())
+	id, err := outputs.ParseOutputID(d.Id())
 	if err != nil {
 		return err
 	}

-	if resp, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.Name); err != nil {
-		if !response.WasNotFound(resp.Response) {
+	if resp, err := client.Delete(ctx, *id); err != nil {
+		if !response.WasNotFound(resp.HttpResponse) {
 			return fmt.Errorf("deleting %s: %+v", id, err)
 		}
 	}
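Reviewer note, not part of the diff: the test file that follows moves its Exists helper onto the same typed-ID and response helpers. A self-contained sketch of the shape it converges on, illustrative only; outputsClient stands in for client.StreamAnalytics.OutputsClient, and a plain bool replaces the provider's *bool convention.

    package streamanalytics_test

    import (
    	"context"
    	"fmt"

    	"github.com/hashicorp/go-azure-helpers/lang/response"
    	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs"
    )

    // outputExists parses the typed ID out of state, issues a Get with it, and
    // translates a 404 into "does not exist" via response.WasNotFound. A parse
    // failure is returned rather than swallowed.
    func outputExists(ctx context.Context, outputsClient *outputs.OutputsClient, stateID string) (bool, error) {
    	id, err := outputs.ParseOutputID(stateID)
    	if err != nil {
    		return false, err
    	}

    	resp, err := outputsClient.Get(ctx, *id)
    	if err != nil {
    		if response.WasNotFound(resp.HttpResponse) {
    			return false, nil
    		}
    		return false, fmt.Errorf("retrieving %s: %+v", *id, err)
    	}
    	return true, nil
    }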
diff --git a/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource_test.go
index 99afcc5c621b..a6dca9e1bfd3 100644
--- a/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource_test.go
+++ b/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource_test.go
@@ -3,6 +3,8 @@ package streamanalytics_test
 import (
 	"context"
 	"fmt"
+	"github.com/hashicorp/go-azure-helpers/lang/response"
+	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs"
 	"testing"

 	"github.com/hashicorp/terraform-provider-azurerm/internal/acceptance"
@@ -184,16 +186,16 @@ func TestAccStreamAnalyticsOutputServiceBusQueue_systemPropertyColumns(t *testin
 }

 func (r StreamAnalyticsOutputServiceBusQueueResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) {
-	name := state.Attributes["name"]
-	jobName := state.Attributes["stream_analytics_job_name"]
-	resourceGroup := state.Attributes["resource_group_name"]
+	id, err := outputs.ParseOutputID(state.ID)
+	if err != nil {
+		return nil, err
+	}

-	resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, resourceGroup, jobName, name)
+	resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, *id)
 	if err != nil {
-		if utils.ResponseWasNotFound(resp.Response) {
+		if response.WasNotFound(resp.HttpResponse) {
 			return utils.Bool(false), nil
 		}
-		return nil, fmt.Errorf("retrieving Stream Output %q (Stream Analytics Job %q / Resource Group %q): %+v", name, jobName, resourceGroup, err)
+		return nil, fmt.Errorf("retrieving %s: %+v", *id, err)
 	}
 	return utils.Bool(true), nil
 }
diff --git a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go
index 51f7c73946b9..b7bc5ea2c4f0 100644
--- a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go
+++ b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go
@@ -2,6 +2,7 @@ package streamanalytics

 import (
 	"fmt"
+	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs"
 	"log"
 	"time"

@@ -10,7 +11,6 @@ import (
 	"github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema"
 	"github.com/hashicorp/terraform-provider-azurerm/helpers/tf"
 	"github.com/hashicorp/terraform-provider-azurerm/internal/clients"
-	"github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse"
 	"github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk"
 	"github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation"
 	"github.com/hashicorp/terraform-provider-azurerm/internal/timeouts"
@@ -24,7 +24,7 @@ func resourceStreamAnalyticsOutputServiceBusTopic() *pluginsdk.Resource {
 		Update: resourceStreamAnalyticsOutputServiceBusTopicCreateUpdate,
 		Delete: resourceStreamAnalyticsOutputServiceBusTopicDelete,
 		Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error {
-			_, err := parse.OutputID(id)
+			_, err := outputs.ParseOutputID(id)
 			return err
 		}),

@@ -117,17 +117,17 @@ func resourceStreamAnalyticsOutputServiceBusTopicCreateUpdate(d *pluginsdk.Resou
 	defer cancel()

 	log.Printf("[INFO] preparing arguments for Azure Stream Analytics Output ServiceBus Topic creation.")
-	id := parse.NewOutputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string))
+	id := outputs.NewOutputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string))

 	if d.IsNewResource() {
-		existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name)
+		existing, err := client.Get(ctx, id)
 		if err != nil {
-			if !utils.ResponseWasNotFound(existing.Response) {
+			if !response.WasNotFound(existing.HttpResponse) {
 				return fmt.Errorf("checking for presence of %s: %+v", id, err)
 			}
 		}

-		if !utils.ResponseWasNotFound(existing.Response) {
+		if !response.WasNotFound(existing.HttpResponse) {
 			return tf.ImportAsExistsError("azurerm_stream_analytics_output_servicebus_topic", id.ID())
 		}
 	}
@@ -140,7 +140,7 @@ func resourceStreamAnalyticsOutputServiceBusTopicCreateUpdate(d *pluginsdk.Resou

 	props := streamanalytics.Output{
 		Name: utils.String(id.Name),
-		OutputProperties: &streamanalytics.OutputProperties{
+		Properties: &streamanalytics.OutputProperties{
 			Datasource: &streamanalytics.ServiceBusTopicOutputDataSource{
 				Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic,
 				ServiceBusTopicOutputDataSourceProperties: &streamanalytics.ServiceBusTopicOutputDataSourceProperties{
@@ -158,10 +158,10 @@ func
resourceStreamAnalyticsOutputServiceBusTopicCreateUpdate(d *pluginsdk.Resou } if d.IsNewResource() { - if _, err := client.CreateOrReplace(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, "", ""); err != nil { + if _, err := client.CreateOrReplace(ctx, id, props, opts); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } - } else if _, err := client.Update(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, ""); err != nil { + } else if _, err := client.Update(ctx, *id, props, opts); err != nil { return fmt.Errorf("updating %s: %+v", id, err) } @@ -175,14 +175,14 @@ func resourceStreamAnalyticsOutputServiceBusTopicRead(d *pluginsdk.ResourceData, ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.OutputID(d.Id()) + id, err := outputs.ParseOutputID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.Get(ctx, id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { log.Printf("[DEBUG] %s was not found - removing from state!", *id) d.SetId("") return nil @@ -192,8 +192,8 @@ func resourceStreamAnalyticsOutputServiceBusTopicRead(d *pluginsdk.ResourceData, } d.Set("name", id.Name) - d.Set("stream_analytics_job_name", id.StreamingjobName) - d.Set("resource_group_name", id.ResourceGroup) + d.Set("stream_analytics_job_name", id.JobName) + d.Set("resource_group_name", id.ResourceGroupName) if props := resp.OutputProperties; props != nil { v, ok := props.Datasource.AsServiceBusTopicOutputDataSource() @@ -224,13 +224,13 @@ func resourceStreamAnalyticsOutputServiceBusTopicDelete(d *pluginsdk.ResourceDat ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.OutputID(d.Id()) + id, err := outputs.ParseOutputID(d.Id()) if err != nil { return err } - if resp, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.Name); err != nil { - if !response.WasNotFound(resp.Response) { + if resp, err := client.Delete(ctx, *id); err != nil { + if !response.WasNotFound(resp.HttpResponse) { return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource_test.go index b9227e564298..9df99d3059ad 100644 --- a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource_test.go @@ -8,7 +8,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/utils" ) @@ -165,14 +164,14 @@ func TestAccStreamAnalyticsOutputServiceBusTopic_systemPropertyColumns(t *testin } func (r StreamAnalyticsOutputServiceBusTopicResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - id, err := parse.OutputID(state.ID) + id, err := outputs.ParseOutputID(state.ID) if err != nil { return nil, err } - resp, err := 
client.StreamAnalytics.OutputsClient.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } return nil, fmt.Errorf("retrieving (%s): %+v", *id, err) diff --git a/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go b/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go index 30dabd45d5ce..2573b2e2f0de 100644 --- a/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go @@ -9,7 +9,6 @@ import ( "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" @@ -23,7 +22,7 @@ func resourceStreamAnalyticsOutputSynapse() *pluginsdk.Resource { Update: resourceStreamAnalyticsOutputSynapseCreateUpdate, Delete: resourceStreamAnalyticsOutputSynapseDelete, Importer: pluginsdk.ImporterValidatingResourceIdThen(func(id string) error { - _, err := parse.OutputID(id) + _, err := outputs.ParseOutputID(id) return err }, importStreamAnalyticsOutput(streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse)), @@ -100,22 +99,22 @@ func resourceStreamAnalyticsOutputSynapseCreateUpdate(d *pluginsdk.ResourceData, defer cancel() subscriptionId := meta.(*clients.Client).Account.SubscriptionId - id := parse.NewOutputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string)) + id := outputs.NewOutputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) - if err != nil && !utils.ResponseWasNotFound(existing.Response) { + existing, err := client.Get(ctx, id) + if err != nil && !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of %s: %+v", id, err) } - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return tf.ImportAsExistsError("azurerm_stream_analytics_output_synapse", id.ID()) } } props := streamanalytics.Output{ Name: utils.String(id.Name), - OutputProperties: &streamanalytics.OutputProperties{ + Properties: &streamanalytics.OutputProperties{ Datasource: &streamanalytics.AzureSynapseOutputDataSource{ Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse, AzureSynapseOutputDataSourceProperties: &streamanalytics.AzureSynapseOutputDataSourceProperties{ @@ -130,12 +129,12 @@ func resourceStreamAnalyticsOutputSynapseCreateUpdate(d *pluginsdk.ResourceData, } if d.IsNewResource() { - if _, err := client.CreateOrReplace(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, "", ""); err != nil { + if _, err := client.CreateOrReplace(ctx, id, props, opts); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } d.SetId(id.ID()) - } else if _, err := 
client.Update(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, ""); err != nil { + } else if _, err := client.Update(ctx, *id, props, opts); err != nil { return fmt.Errorf("updating %s: %+v", id, err) } @@ -147,14 +146,14 @@ func resourceStreamAnalyticsOutputSynapseRead(d *pluginsdk.ResourceData, meta in ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.OutputID(d.Id()) + id, err := outputs.ParseOutputID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.Get(ctx, id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { log.Printf("[DEBUG] %s was not found - removing from state!", *id) d.SetId("") return nil @@ -164,8 +163,8 @@ func resourceStreamAnalyticsOutputSynapseRead(d *pluginsdk.ResourceData, meta in } d.Set("name", id.Name) - d.Set("stream_analytics_job_name", id.StreamingjobName) - d.Set("resource_group_name", id.ResourceGroup) + d.Set("stream_analytics_job_name", id.JobName) + d.Set("resource_group_name", id.ResourceGroupName) if props := resp.OutputProperties; props != nil { v, ok := props.Datasource.AsAzureSynapseOutputDataSource() @@ -187,13 +186,13 @@ func resourceStreamAnalyticsOutputSynapseDelete(d *pluginsdk.ResourceData, meta ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.OutputID(d.Id()) + id, err := outputs.ParseOutputID(d.Id()) if err != nil { return err } - if resp, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.Name); err != nil { - if !response.WasNotFound(resp.Response) { + if resp, err := client.Delete(ctx, *id); err != nil { + if !response.WasNotFound(resp.HttpResponse) { return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/streamanalytics/stream_analytics_output_synapse_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_synapse_resource_test.go index 3444702f6759..fb336be0a31f 100644 --- a/internal/services/streamanalytics/stream_analytics_output_synapse_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_synapse_resource_test.go @@ -8,7 +8,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/utils" ) @@ -82,14 +81,14 @@ func TestAccStreamAnalyticsOutputSynapse_requiresImport(t *testing.T) { } func (r StreamAnalyticsOutputSynapseResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - id, err := parse.OutputID(state.ID) + id, err := outputs.ParseOutputID(state.ID) if err != nil { return nil, err } - resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } return nil, fmt.Errorf("retrieving %s: %+v", *id, err) diff --git a/internal/services/streamanalytics/stream_analytics_output_table_resource.go 
b/internal/services/streamanalytics/stream_analytics_output_table_resource.go index f29d574e0e12..6e8a0965c737 100644 --- a/internal/services/streamanalytics/stream_analytics_output_table_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_table_resource.go @@ -9,8 +9,6 @@ import ( "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/utils" @@ -112,7 +110,7 @@ func (r OutputTableResource) ResourceType() string { } func (r OutputTableResource) IDValidationFunc() pluginsdk.SchemaValidateFunc { - return validate.OutputID + return outputs.ValidateOutputID } func (r OutputTableResource) Create() sdk.ResourceFunc { @@ -127,14 +125,14 @@ func (r OutputTableResource) Create() sdk.ResourceFunc { client := metadata.Client.StreamAnalytics.OutputsClient subscriptionId := metadata.Client.Account.SubscriptionId - id := parse.NewOutputID(subscriptionId, model.ResourceGroup, model.StreamAnalyticsJob, model.Name) + id := outputs.NewOutputID(subscriptionId, model.ResourceGroup, model.StreamAnalyticsJob, model.Name) - existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) - if err != nil && !utils.ResponseWasNotFound(existing.Response) { + existing, err := client.Get(ctx, id) + if err != nil && !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } - if !utils.ResponseWasNotFound(existing.Response) { + if !response.WasNotFound(existing.HttpResponse) { return metadata.ResourceRequiresImport(r.ResourceType(), id) } @@ -153,7 +151,7 @@ func (r OutputTableResource) Create() sdk.ResourceFunc { props := streamanalytics.Output{ Name: utils.String(model.Name), - OutputProperties: &streamanalytics.OutputProperties{ + Properties: &streamanalytics.OutputProperties{ Datasource: &streamanalytics.AzureTableOutputDataSource{ Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftStorageTable, AzureTableOutputDataSourceProperties: tableOutputProps, @@ -161,7 +159,7 @@ func (r OutputTableResource) Create() sdk.ResourceFunc { }, } - if _, err = client.CreateOrReplace(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, "", ""); err != nil { + if _, err = client.CreateOrReplace(ctx, id, props, opts); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } @@ -177,14 +175,14 @@ func (r OutputTableResource) Read() sdk.ResourceFunc { Timeout: 5 * time.Minute, Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { client := metadata.Client.StreamAnalytics.OutputsClient - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.Get(ctx, id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return metadata.MarkAsGone(id) } return fmt.Errorf("reading %s: %+v", *id, err) @@ -202,8 +200,8 @@ func (r OutputTableResource) Read() 
sdk.ResourceFunc { state := OutputTableResourceModel{ Name: id.Name, - ResourceGroup: id.ResourceGroup, - StreamAnalyticsJob: id.StreamingjobName, + ResourceGroup: id.ResourceGroupName, + StreamAnalyticsJob: id.JobName, StorageAccount: *v.AccountName, StorageAccountKey: metadata.ResourceData.Get("storage_account_key").(string), Table: *v.Table, @@ -230,7 +228,7 @@ func (r OutputTableResource) Update() sdk.ResourceFunc { Timeout: 30 * time.Minute, Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { client := metadata.Client.StreamAnalytics.OutputsClient - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } @@ -242,7 +240,7 @@ func (r OutputTableResource) Update() sdk.ResourceFunc { props := streamanalytics.Output{ Name: utils.String(state.Name), - OutputProperties: &streamanalytics.OutputProperties{ + Properties: &streamanalytics.OutputProperties{ Datasource: &streamanalytics.AzureTableOutputDataSource{ Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftStorageTable, AzureTableOutputDataSourceProperties: &streamanalytics.AzureTableOutputDataSourceProperties{ @@ -265,7 +263,7 @@ func (r OutputTableResource) Update() sdk.ResourceFunc { tableOutput.ColumnsToRemove = &state.ColumnsToRemove } - if _, err = client.Update(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, ""); err != nil { + if _, err = client.Update(ctx, *id, props, opts); err != nil { return fmt.Errorf("updating %s: %+v", *id, err) } @@ -279,15 +277,15 @@ func (r OutputTableResource) Delete() sdk.ResourceFunc { Timeout: 30 * time.Minute, Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { client := metadata.Client.StreamAnalytics.OutputsClient - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } metadata.Logger.Infof("deleting %s", *id) - if resp, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.Name); err != nil { - if !response.WasNotFound(resp.Response) { + if resp, err := client.Delete(ctx, *id); err != nil { + if !response.WasNotFound(resp.HttpResponse) { return fmt.Errorf("deleting %s: %+v", *id, err) } } @@ -298,14 +296,14 @@ func (r OutputTableResource) Delete() sdk.ResourceFunc { func (r OutputTableResource) CustomImporter() sdk.ResourceRunFunc { return func(ctx context.Context, metadata sdk.ResourceMetaData) error { - id, err := parse.OutputID(metadata.ResourceData.Id()) + id, err := outputs.ParseOutputID(metadata.ResourceData.Id()) if err != nil { return err } client := metadata.Client.StreamAnalytics.OutputsClient - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) - if err != nil || resp.OutputProperties == nil { + resp, err := client.Get(ctx, id) + if err != nil || resp.Model == nil || resp.Model.Properties == nil { return fmt.Errorf("reading %s: %+v", *id, err) } diff --git a/internal/services/streamanalytics/stream_analytics_output_table_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_table_resource_test.go index f4666eea0bd2..50d6b9e0d6fe 100644 --- a/internal/services/streamanalytics/stream_analytics_output_table_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_table_resource_test.go @@ -9,7 +9,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" 
"github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/utils" ) @@ -104,14 +103,14 @@ func TestAccStreamAnalyticsOutputTable_columnsToRemove(t *testing.T) { } func (r StreamAnalyticsOutputTableResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - id, err := parse.OutputID(state.ID) + id, err := outputs.ParseOutputID(state.ID) if err != nil { return nil, err } - resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name) + resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } return nil, fmt.Errorf("retrieving %s: %+v", *id, err) diff --git a/internal/services/streamanalytics/stream_analytics_reference_input.go b/internal/services/streamanalytics/stream_analytics_reference_input.go index bd24cf919c7c..52a4b6a1bc7c 100644 --- a/internal/services/streamanalytics/stream_analytics_reference_input.go +++ b/internal/services/streamanalytics/stream_analytics_reference_input.go @@ -4,43 +4,48 @@ import ( "context" "fmt" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" ) -func importStreamAnalyticsReferenceInput(expectType streamanalytics.TypeBasicReferenceInputDataSource) pluginsdk.ImporterFunc { +func importStreamAnalyticsReferenceInput(expectType string) pluginsdk.ImporterFunc { return func(ctx context.Context, d *pluginsdk.ResourceData, meta interface{}) (data []*pluginsdk.ResourceData, err error) { - id, err := parse.StreamInputID(d.Id()) + id, err := inputs.ParseInputID(d.Id()) if err != nil { return nil, err } client := meta.(*clients.Client).StreamAnalytics.InputsClient - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.InputName) + resp, err := client.Get(ctx, *id) if err != nil { return nil, fmt.Errorf("retrieving %s: %+v", *id, err) } - if props := resp.Properties; props != nil { - v, ok := props.AsReferenceInputProperties() - if !ok { - return nil, fmt.Errorf("converting properties to a Reference Input: %+v", err) - } - - var actualType streamanalytics.TypeBasicReferenceInputDataSource - - if inputMsSql, ok := v.Datasource.AsAzureSQLReferenceInputDataSource(); ok { - actualType = inputMsSql.Type - } else if inputBlob, ok := v.Datasource.AsBlobReferenceInputDataSource(); ok { - actualType = inputBlob.Type - } else { - return nil, fmt.Errorf("unable to convert input data source: %+v", v) - } + if model := resp.Model; model != nil { + if props := model.Properties; props != nil { + input, ok := props.(inputs.Input) + if !ok { + return nil, fmt.Errorf("failed to convert to Input") + } + reference, ok := input.Properties.(inputs.ReferenceInputProperties) + if !ok { + return nil, fmt.Errorf("failed to convert to Reference Input") + } + + var actualType string + + if _, ok := reference.Datasource.(inputs.BlobDataSourceProperties); ok { + 
actualType = "Microsoft.Storage/Blob"
+				}
+				if _, ok := reference.Datasource.(inputs.AzureSqlReferenceInputDataSource); ok {
+					actualType = "Microsoft.Sql/Server/Database"
+				}
+
+				if actualType != expectType {
+					return nil, fmt.Errorf("stream analytics reference input has mismatched type, expected: %q, got %q", expectType, actualType)
+				}

-		if actualType != expectType {
-			return nil, fmt.Errorf("stream analytics reference input has mismatched type, expected: %q, got %q", expectType, actualType)
 			}
 		}
 		return []*pluginsdk.ResourceData{d}, nil
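Reviewer note, not part of the diff: with the discriminators now plain strings, the two if-blocks above could collapse into a type switch. A sketch, illustrative only: the concrete datasource types and both discriminator strings appear elsewhere in this patch, but the inputs.ReferenceInputDataSource interface name is assumed from the SDK's naming convention, and whether responses carry the wrapper types or their Properties structs (the hunk above asserts BlobDataSourceProperties) still needs confirming against the generated code.

    package streamanalytics

    import (
    	"fmt"

    	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs"
    )

    // referenceInputDataSourceType maps a concrete reference-input datasource
    // back to the ARM discriminator string the importer compares against. The
    // interface name in the signature is an assumption, see the note above.
    func referenceInputDataSourceType(datasource inputs.ReferenceInputDataSource) (string, error) {
    	switch datasource.(type) {
    	case inputs.BlobReferenceInputDataSource:
    		return "Microsoft.Storage/Blob", nil
    	case inputs.AzureSqlReferenceInputDataSource:
    		return "Microsoft.Sql/Server/Database", nil
    	default:
    		return "", fmt.Errorf("unsupported reference input datasource type: %T", datasource)
    	}
    }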
diff --git a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go
index 3712dcceb5c5..c9d34e1540a4 100644
--- a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go
+++ b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go
@@ -2,6 +2,7 @@ package streamanalytics

 import (
 	"fmt"
+	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs"
 	"log"
 	"time"

@@ -10,7 +11,6 @@ import (
 	"github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema"
 	"github.com/hashicorp/terraform-provider-azurerm/helpers/tf"
 	"github.com/hashicorp/terraform-provider-azurerm/internal/clients"
-	"github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse"
 	"github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk"
 	"github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation"
 	"github.com/hashicorp/terraform-provider-azurerm/internal/timeouts"
@@ -25,9 +25,9 @@ func resourceStreamAnalyticsReferenceInputBlob() *pluginsdk.Resource {
 		Delete: resourceStreamAnalyticsReferenceInputBlobDelete,

 		Importer: pluginsdk.ImporterValidatingResourceIdThen(func(id string) error {
-			_, err := parse.StreamInputID(id)
+			_, err := inputs.ParseInputID(id)
 			return err
-		}, importStreamAnalyticsReferenceInput(streamanalytics.TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob)),
+		}, importStreamAnalyticsReferenceInput("Microsoft.Storage/Blob")),

 		Timeouts: &pluginsdk.ResourceTimeout{
 			Create: pluginsdk.DefaultTimeout(30 * time.Minute),
@@ -112,26 +112,52 @@ func resourceStreamAnalyticsReferenceInputBlobCreate(d *pluginsdk.ResourceData,
 	defer cancel()

 	log.Printf("[INFO] preparing arguments for Azure Stream Analytics Reference Input Blob creation.")
-	id := parse.NewStreamInputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string))
+	id := inputs.NewInputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string))

 	if d.IsNewResource() {
-		existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.InputName)
+		existing, err := client.Get(ctx, id)
 		if err != nil {
-			if !utils.ResponseWasNotFound(existing.Response) {
+			if !response.WasNotFound(existing.HttpResponse) {
 				return fmt.Errorf("checking for presence of existing %s: %+v", id, err)
 			}
 		}

-		if !utils.ResponseWasNotFound(existing.Response) {
+		if !response.WasNotFound(existing.HttpResponse) {
 			return tf.ImportAsExistsError("azurerm_stream_analytics_reference_input_blob", id.ID())
 		}
 	}

-	props, err := getBlobReferenceInputProps(d)
+	serializationRaw := d.Get("serialization").([]interface{})
+	serialization, err := expandStreamAnalyticsStreamInputSerialization(serializationRaw)
 	if err != nil {
-		return fmt.Errorf("creating the input props for resource creation: %v", err)
+		return fmt.Errorf("expanding `serialization`: %+v", err)
 	}

-	if _, err := client.CreateOrReplace(ctx, props, id.ResourceGroup, id.StreamingjobName, id.InputName, "", ""); err != nil {
+	props := inputs.Input{
+		Name: utils.String(id.InputName),
+		Properties: &inputs.ReferenceInputProperties{
+			//Type: streamanalytics.TypeBasicInputPropertiesTypeReference,
+			Datasource: &inputs.BlobReferenceInputDataSource{
+				//Type: streamanalytics.TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob,
+				Properties: &inputs.BlobDataSourceProperties{
+					Container:   utils.String(d.Get("storage_container_name").(string)),
+					DateFormat:  utils.String(d.Get("date_format").(string)),
+					PathPattern: utils.String(d.Get("path_pattern").(string)),
+					TimeFormat:  utils.String(d.Get("time_format").(string)),
+					StorageAccounts: &[]inputs.StorageAccount{
+						{
+							AccountName: utils.String(d.Get("storage_account_name").(string)),
+							AccountKey:  utils.String(d.Get("storage_account_key").(string)),
+						},
+					},
+					AuthenticationMode: utils.ToPtr(inputs.AuthenticationMode(d.Get("authentication_mode").(string))),
+				},
+			},
+			Serialization: serialization,
+		},
+	}
+
+	var opts inputs.CreateOrReplaceOperationOptions
+	if _, err := client.CreateOrReplace(ctx, id, props, opts); err != nil {
 		return fmt.Errorf("creating %s: %+v", id, err)
 	}

@@ -145,17 +171,44 @@ func resourceStreamAnalyticsReferenceInputBlobUpdate(d *pluginsdk.ResourceData,
 	defer cancel()

 	log.Printf("[INFO] preparing arguments for Azure Stream Analytics Reference Input Blob update.")
-	id, err := parse.StreamInputID(d.Id())
+	id, err := inputs.ParseInputID(d.Id())
 	if err != nil {
 		return err
 	}

-	props, err := getBlobReferenceInputProps(d)
+	serializationRaw := d.Get("serialization").([]interface{})
+	serialization, err := expandStreamAnalyticsStreamInputSerialization(serializationRaw)
 	if err != nil {
-		return fmt.Errorf("creating the input props for resource update: %v", err)
+		return fmt.Errorf("expanding `serialization`: %+v", err)
 	}

-	if _, err := client.Update(ctx, props, id.ResourceGroup, id.StreamingjobName, id.InputName, ""); err != nil {
+	// TODO d.HasChanges()
+	props := inputs.Input{
+		Name: utils.String(id.InputName),
+		Properties: &inputs.ReferenceInputProperties{
+			//Type: streamanalytics.TypeBasicInputPropertiesTypeReference,
+			Datasource: &inputs.BlobReferenceInputDataSource{
+				//Type: streamanalytics.TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob,
+				Properties: &inputs.BlobDataSourceProperties{
+					Container:   utils.String(d.Get("storage_container_name").(string)),
+					DateFormat:  utils.String(d.Get("date_format").(string)),
+					PathPattern: utils.String(d.Get("path_pattern").(string)),
+					TimeFormat:  utils.String(d.Get("time_format").(string)),
+					StorageAccounts: &[]inputs.StorageAccount{
+						{
+							AccountName: utils.String(d.Get("storage_account_name").(string)),
+							AccountKey:  utils.String(d.Get("storage_account_key").(string)),
+						},
+					},
+					AuthenticationMode: utils.ToPtr(inputs.AuthenticationMode(d.Get("authentication_mode").(string))),
+				},
+			},
+			Serialization: serialization,
+		},
+	}
+
+	var opts inputs.UpdateOperationOptions
+	if _, err := client.Update(ctx, *id, props, opts); err != nil {
 		return fmt.Errorf("updating %s: %+v", *id, err)
 	}

@@ -167,14 +220,14 @@ func resourceStreamAnalyticsReferenceInputBlobRead(d *pluginsdk.ResourceData, me
 	ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d)
 	defer cancel()

-	id, err := parse.StreamInputID(d.Id())
+	id, err := inputs.ParseInputID(d.Id())
 	if err != nil {
 		return err
 	}
-	resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.InputName)
+	resp, err := client.Get(ctx, *id)
 	if err != nil {
-		if utils.ResponseWasNotFound(resp.Response) {
+		if response.WasNotFound(resp.HttpResponse) {
 			log.Printf("[DEBUG] %s was not found - removing from state!", *id)
 			d.SetId("")
 			return nil
@@ -184,33 +237,65 @@ func resourceStreamAnalyticsReferenceInputBlobRead(d *pluginsdk.ResourceData, me
 	}

 	d.Set("name", id.InputName)
-	d.Set("stream_analytics_job_name", id.StreamingjobName)
-	d.Set("resource_group_name", id.ResourceGroup)
+	d.Set("stream_analytics_job_name", id.JobName)
+	d.Set("resource_group_name", id.ResourceGroupName)
+
+	if model := resp.Model; model != nil {
+		if props := model.Properties; props != nil {
+			input, ok := props.(inputs.Input)
+			if !ok {
+				return fmt.Errorf("converting %s to an Input", *id)
+			}

-	if props := resp.Properties; props != nil {
-		v, ok := props.AsReferenceInputProperties()
-		if !ok {
-			return fmt.Errorf("converting Reference Input Blob to a Reference Input: %+v", err)
-		}
+			dataSource, ok := input.Properties.(inputs.ReferenceInputProperties)
+			if !ok {
+				return fmt.Errorf("converting %s to a Reference Input", *id)
+			}

-		blobInputDataSource, ok := v.Datasource.AsBlobReferenceInputDataSource()
-		if !ok {
-			return fmt.Errorf("converting Reference Input Blob to an Blob Stream Input: %+v", err)
-		}
+			referenceInputBlob, ok := dataSource.Datasource.(inputs.BlobDataSourceProperties)
+			if !ok {
+				return fmt.Errorf("converting %s to a Blob Reference Input", *id)
+			}

-		d.Set("date_format", blobInputDataSource.DateFormat)
-		d.Set("path_pattern", blobInputDataSource.PathPattern)
-		d.Set("storage_container_name", blobInputDataSource.Container)
-		d.Set("time_format", blobInputDataSource.TimeFormat)
-		d.Set("authentication_mode", blobInputDataSource.AuthenticationMode)
+			dateFormat := ""
+			if v := referenceInputBlob.DateFormat; v != nil {
+				dateFormat = *v
+			}
+			d.Set("date_format", dateFormat)

+			pathPattern := ""
+			if v := referenceInputBlob.PathPattern; v != nil {
+				pathPattern = *v
+			}
+			d.Set("path_pattern", pathPattern)
+
+			containerName := ""
+			if v := referenceInputBlob.Container; v != nil {
+				containerName = *v
+			}
+			d.Set("storage_container_name", containerName)
+
+			timeFormat := ""
+			if v := referenceInputBlob.TimeFormat; v != nil {
+				timeFormat = *v
+			}
+			d.Set("time_format", timeFormat)
+
+			authMode := ""
+			if v := referenceInputBlob.AuthenticationMode; v != nil {
+				authMode = string(*v)
+			}
+			d.Set("authentication_mode", authMode)

-		if accounts := blobInputDataSource.StorageAccounts; accounts != nil && len(*accounts) > 0 {
-			account := (*accounts)[0]
-			d.Set("storage_account_name", account.AccountName)
-		}
+			if accounts := referenceInputBlob.StorageAccounts; accounts != nil && len(*accounts) > 0 {
+				account := (*accounts)[0]
+				d.Set("storage_account_name", account.AccountName)
+			}
+
+			if err := d.Set("serialization", flattenStreamAnalyticsStreamInputSerialization2(dataSource.Serialization)); err != nil {
+				return fmt.Errorf("setting `serialization`: %+v", err)
+			}

-		if err := d.Set("serialization", flattenStreamAnalyticsStreamInputSerialization(v.Serialization)); err != nil {
-			return fmt.Errorf("setting `serialization`: %+v", err)
+		}
 	}

@@ -222,59 +307,16 @@ func resourceStreamAnalyticsReferenceInputBlobDelete(d *pluginsdk.ResourceData,
 	ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d)
 	defer cancel()

-	id, err := parse.StreamInputID(d.Id())
+	id, err := inputs.ParseInputID(d.Id())
 	if err != nil {
 		return err
 	}

-	if resp, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.InputName); err
!= nil { - if !response.WasNotFound(resp.Response) { + if resp, err := client.Delete(ctx, *id); err != nil { + if !response.WasNotFound(resp.HttpResponse) { return fmt.Errorf("deleting %s: %+v", *id, err) } } return nil } - -func getBlobReferenceInputProps(d *pluginsdk.ResourceData) (streamanalytics.Input, error) { - name := d.Get("name").(string) - containerName := d.Get("storage_container_name").(string) - dateFormat := d.Get("date_format").(string) - pathPattern := d.Get("path_pattern").(string) - storageAccountKey := d.Get("storage_account_key").(string) - storageAccountName := d.Get("storage_account_name").(string) - timeFormat := d.Get("time_format").(string) - authenticationMode := d.Get("authentication_mode").(string) - - serializationRaw := d.Get("serialization").([]interface{}) - serialization, err := expandStreamAnalyticsStreamInputSerialization(serializationRaw) - if err != nil { - return streamanalytics.Input{}, fmt.Errorf("expanding `serialization`: %+v", err) - } - - props := streamanalytics.Input{ - Name: utils.String(name), - Properties: &streamanalytics.ReferenceInputProperties{ - Type: streamanalytics.TypeBasicInputPropertiesTypeReference, - Datasource: &streamanalytics.BlobReferenceInputDataSource{ - Type: streamanalytics.TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob, - BlobReferenceInputDataSourceProperties: &streamanalytics.BlobReferenceInputDataSourceProperties{ - Container: utils.String(containerName), - DateFormat: utils.String(dateFormat), - PathPattern: utils.String(pathPattern), - TimeFormat: utils.String(timeFormat), - StorageAccounts: &[]streamanalytics.StorageAccount{ - { - AccountName: utils.String(storageAccountName), - AccountKey: utils.String(storageAccountKey), - }, - }, - AuthenticationMode: streamanalytics.AuthenticationMode(authenticationMode), - }, - }, - Serialization: serialization, - }, - } - - return props, nil -} diff --git a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource_test.go b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource_test.go index fba70a3420a9..5deb74a7e70f 100644 --- a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource_test.go @@ -3,12 +3,13 @@ package streamanalytics_test import ( "context" "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "testing" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/utils" ) @@ -112,17 +113,17 @@ func TestAccStreamAnalyticsReferenceInputBlob_requiresImport(t *testing.T) { } func (r StreamAnalyticsReferenceInputBlobResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - id, err := parse.StreamInputID(state.ID) + id, err := inputs.ParseInputID(state.ID) if err != nil { return nil, err } - resp, err := client.StreamAnalytics.InputsClient.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.InputName) + resp, err := client.StreamAnalytics.InputsClient.Get(ctx, *id) if err != nil { 
- if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } - return nil, fmt.Errorf("retrieving (%s): %+v", *id, err) + return nil, fmt.Errorf("retrieving %s: %+v", *id, err) } return utils.Bool(true), nil } @@ -133,19 +134,19 @@ func (r StreamAnalyticsReferenceInputBlobResource) avro(data acceptance.TestData %s resource "azurerm_stream_analytics_reference_input_blob" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = azurerm_stream_analytics_job.test.resource_group_name - storage_account_name = azurerm_storage_account.test.name - storage_account_key = azurerm_storage_account.test.primary_access_key - storage_container_name = azurerm_storage_container.test.name - path_pattern = "some-random-pattern" - date_format = "yyyy/MM/dd" - time_format = "HH" - - serialization { - type = "Avro" - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + storage_account_name = azurerm_storage_account.test.name + storage_account_key = azurerm_storage_account.test.primary_access_key + storage_container_name = azurerm_storage_container.test.name + path_pattern = "some-random-pattern" + date_format = "yyyy/MM/dd" + time_format = "HH" + + serialization { + type = "Avro" + } } `, template, data.RandomInteger) } @@ -156,21 +157,21 @@ func (r StreamAnalyticsReferenceInputBlobResource) csv(data acceptance.TestData) %s resource "azurerm_stream_analytics_reference_input_blob" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = azurerm_stream_analytics_job.test.resource_group_name - storage_account_name = azurerm_storage_account.test.name - storage_account_key = azurerm_storage_account.test.primary_access_key - storage_container_name = azurerm_storage_container.test.name - path_pattern = "some-random-pattern" - date_format = "yyyy/MM/dd" - time_format = "HH" - - serialization { - type = "Csv" - encoding = "UTF8" - field_delimiter = "," - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + storage_account_name = azurerm_storage_account.test.name + storage_account_key = azurerm_storage_account.test.primary_access_key + storage_container_name = azurerm_storage_container.test.name + path_pattern = "some-random-pattern" + date_format = "yyyy/MM/dd" + time_format = "HH" + + serialization { + type = "Csv" + encoding = "UTF8" + field_delimiter = "," + } } `, template, data.RandomInteger) } @@ -181,20 +182,20 @@ func (r StreamAnalyticsReferenceInputBlobResource) json(data acceptance.TestData %s resource "azurerm_stream_analytics_reference_input_blob" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = azurerm_stream_analytics_job.test.resource_group_name - storage_account_name = azurerm_storage_account.test.name - storage_account_key = azurerm_storage_account.test.primary_access_key - storage_container_name = azurerm_storage_container.test.name - path_pattern = "some-random-pattern" - date_format = "yyyy/MM/dd" - time_format = "HH" - - serialization { - type = "Json" - encoding = "UTF8" - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + 
resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + storage_account_name = azurerm_storage_account.test.name + storage_account_key = azurerm_storage_account.test.primary_access_key + storage_container_name = azurerm_storage_container.test.name + path_pattern = "some-random-pattern" + date_format = "yyyy/MM/dd" + time_format = "HH" + + serialization { + type = "Json" + encoding = "UTF8" + } } `, template, data.RandomInteger) } @@ -205,33 +206,33 @@ func (r StreamAnalyticsReferenceInputBlobResource) updated(data acceptance.TestD %s resource "azurerm_storage_account" "updated" { - name = "acctestsa2%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" + name = "acctestsa2%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" } resource "azurerm_storage_container" "updated" { - name = "example2" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" + name = "example2" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" } resource "azurerm_stream_analytics_reference_input_blob" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = azurerm_stream_analytics_job.test.resource_group_name - storage_account_name = azurerm_storage_account.updated.name - storage_account_key = azurerm_storage_account.updated.primary_access_key - storage_container_name = azurerm_storage_container.updated.name - path_pattern = "some-other-pattern" - date_format = "yyyy-MM-dd" - time_format = "HH" - - serialization { - type = "Avro" - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + storage_account_name = azurerm_storage_account.updated.name + storage_account_key = azurerm_storage_account.updated.primary_access_key + storage_container_name = azurerm_storage_container.updated.name + path_pattern = "some-other-pattern" + date_format = "yyyy-MM-dd" + time_format = "HH" + + serialization { + type = "Avro" + } } `, template, data.RandomString, data.RandomInteger) } @@ -242,21 +243,21 @@ func (r StreamAnalyticsReferenceInputBlobResource) authenticationMode(data accep %s resource "azurerm_stream_analytics_reference_input_blob" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = azurerm_stream_analytics_job.test.resource_group_name - storage_account_name = azurerm_storage_account.test.name - storage_account_key = azurerm_storage_account.test.primary_access_key - storage_container_name = azurerm_storage_container.test.name - path_pattern = "some-random-pattern" - date_format = "yyyy/MM/dd" - time_format = "HH" - authentication_mode = "Msi" - - serialization { - type = "Json" - encoding = "UTF8" - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + storage_account_name = azurerm_storage_account.test.name + storage_account_key = azurerm_storage_account.test.primary_access_key + storage_container_name = azurerm_storage_container.test.name + path_pattern = "some-random-pattern" + date_format = 
"yyyy/MM/dd" + time_format = "HH" + authentication_mode = "Msi" + + serialization { + type = "Json" + encoding = "UTF8" + } } `, template, data.RandomInteger) } @@ -267,22 +268,22 @@ func (r StreamAnalyticsReferenceInputBlobResource) requiresImport(data acceptanc %s resource "azurerm_stream_analytics_reference_input_blob" "import" { - name = azurerm_stream_analytics_reference_input_blob.test.name - stream_analytics_job_name = azurerm_stream_analytics_reference_input_blob.test.stream_analytics_job_name - resource_group_name = azurerm_stream_analytics_reference_input_blob.test.resource_group_name - storage_account_name = azurerm_stream_analytics_reference_input_blob.test.storage_account_name - storage_account_key = azurerm_stream_analytics_reference_input_blob.test.storage_account_key - storage_container_name = azurerm_stream_analytics_reference_input_blob.test.storage_container_name - path_pattern = azurerm_stream_analytics_reference_input_blob.test.path_pattern - date_format = azurerm_stream_analytics_reference_input_blob.test.date_format - time_format = azurerm_stream_analytics_reference_input_blob.test.time_format - dynamic "serialization" { - for_each = azurerm_stream_analytics_reference_input_blob.test.serialization - content { - encoding = lookup(serialization.value, "encoding", null) - type = serialization.value.type - } - } + name = azurerm_stream_analytics_reference_input_blob.test.name + stream_analytics_job_name = azurerm_stream_analytics_reference_input_blob.test.stream_analytics_job_name + resource_group_name = azurerm_stream_analytics_reference_input_blob.test.resource_group_name + storage_account_name = azurerm_stream_analytics_reference_input_blob.test.storage_account_name + storage_account_key = azurerm_stream_analytics_reference_input_blob.test.storage_account_key + storage_container_name = azurerm_stream_analytics_reference_input_blob.test.storage_container_name + path_pattern = azurerm_stream_analytics_reference_input_blob.test.path_pattern + date_format = azurerm_stream_analytics_reference_input_blob.test.date_format + time_format = azurerm_stream_analytics_reference_input_blob.test.time_format + dynamic "serialization" { + for_each = azurerm_stream_analytics_reference_input_blob.test.serialization + content { + encoding = lookup(serialization.value, "encoding", null) + type = serialization.value.type + } + } } `, template) } @@ -290,44 +291,44 @@ resource "azurerm_stream_analytics_reference_input_blob" "import" { func (r StreamAnalyticsReferenceInputBlobResource) template(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { - features {} + features {} } resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" + name = "acctestRG-%d" + location = "%s" } resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" + name = "acctestsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" } resource "azurerm_storage_container" "test" { - name = "example" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" + name = "example" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" } resource "azurerm_stream_analytics_job" "test" { - name = 
"acctestjob-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - compatibility_level = "1.0" - data_locale = "en-GB" - events_late_arrival_max_delay_in_seconds = 60 - events_out_of_order_max_delay_in_seconds = 50 - events_out_of_order_policy = "Adjust" - output_error_policy = "Drop" - streaming_units = 3 - - transformation_query = < 0 { - account := (*accounts)[0] - d.Set("storage_account_name", account.AccountName) - } + streamBlobInput, ok := streamInput.Datasource.(inputs.BlobStreamInputDataSource) + if !ok { + return fmt.Errorf("converting Stream Input Blob to an Stream Input: %+v", err) + } - if err := d.Set("serialization", flattenStreamAnalyticsStreamInputSerialization(v.Serialization)); err != nil { - return fmt.Errorf("setting `serialization`: %+v", err) + if streamBlobInputProps := streamBlobInput.Properties; streamBlobInputProps != nil { + dateFormat := "" + if v := streamBlobInput.Properties.DateFormat; v != nil { + dateFormat = *v + } + d.Set("date_format", dateFormat) + + pathPattern := "" + if v := streamBlobInputProps.PathPattern; v != nil { + pathPattern = *v + } + d.Set("path_pattern", pathPattern) + + containerName := "" + if v := streamBlobInputProps.Container; v != nil { + containerName = *v + } + d.Set("storage_container_name", containerName) + + timeFormat := "" + if v := streamBlobInputProps.TimeFormat; v != nil { + timeFormat = *v + } + d.Set("time_format", timeFormat) + + if accounts := streamBlobInputProps.StorageAccounts; accounts != nil && len(*accounts) > 0 { + account := (*accounts)[0] + d.Set("storage_account_name", account.AccountName) + } + + if err := d.Set("serialization", flattenStreamAnalyticsStreamInputSerialization2(streamInput.Serialization)); err != nil { + return fmt.Errorf("setting `serialization`: %+v", err) + } + } } } @@ -223,13 +252,13 @@ func resourceStreamAnalyticsStreamInputBlobDelete(d *pluginsdk.ResourceData, met ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.StreamInputID(d.Id()) + id, err := inputs.ParseInputID(d.Id()) if err != nil { return err } - if resp, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.InputName); err != nil { - if !response.WasNotFound(resp.Response) { + if resp, err := client.Delete(ctx, *id); err != nil { + if !response.WasNotFound(resp.HttpResponse) { return fmt.Errorf("deleting %s: %+v", id, err) } } diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource_test.go b/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource_test.go index 00ec315c208b..ff2f88479dc3 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource_test.go @@ -3,6 +3,8 @@ package streamanalytics_test import ( "context" "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "testing" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" @@ -96,16 +98,17 @@ func TestAccStreamAnalyticsStreamInputBlob_requiresImport(t *testing.T) { } func (r StreamAnalyticsStreamInputBlobResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - name := state.Attributes["name"] - jobName := state.Attributes["stream_analytics_job_name"] - resourceGroup := 
state.Attributes["resource_group_name"] + id, err := inputs.ParseInputID(state.ID) + if err != nil { + return utils.Bool(false), err + } - resp, err := client.StreamAnalytics.InputsClient.Get(ctx, resourceGroup, jobName, name) + resp, err := client.StreamAnalytics.InputsClient.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } - return nil, fmt.Errorf("retrieving Stream Output %q (Stream Analytics Job %q / Resource Group %q): %+v", name, jobName, resourceGroup, err) + return nil, fmt.Errorf("retrieving %s: %+v", *id, err) } return utils.Bool(true), nil } diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go index ea0b298942cb..36f5b1868369 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go @@ -2,6 +2,7 @@ package streamanalytics import ( "fmt" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "log" "time" @@ -10,7 +11,6 @@ import ( "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" @@ -24,7 +24,7 @@ func resourceStreamAnalyticsStreamInputEventHub() *pluginsdk.Resource { Update: resourceStreamAnalyticsStreamInputEventHubCreateUpdate, Delete: resourceStreamAnalyticsStreamInputEventHubDelete, Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { - _, err := parse.StreamInputID(id) + _, err := inputs.ParseInputID(id) return err }), @@ -111,17 +111,17 @@ func resourceStreamAnalyticsStreamInputEventHubCreateUpdate(d *pluginsdk.Resourc defer cancel() log.Printf("[INFO] preparing arguments for Azure Stream Analytics Stream Input EventHub creation.") - resourceId := parse.NewStreamInputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string)) + id := inputs.NewInputID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string)) if d.IsNewResource() { - existing, err := client.Get(ctx, resourceId.ResourceGroup, resourceId.StreamingjobName, resourceId.InputName) + existing, err := client.Get(ctx, id) if err != nil { - if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing %s: %+v", resourceId, err) + if !response.WasNotFound(existing.HttpResponse) { + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } - if !utils.ResponseWasNotFound(existing.Response) { - return tf.ImportAsExistsError("azurerm_stream_analytics_stream_input_eventhub", resourceId.ID()) + if !response.WasNotFound(existing.HttpResponse) { + return tf.ImportAsExistsError("azurerm_stream_analytics_stream_input_eventhub", id.ID()) } } @@ -131,11 +131,11 @@ func resourceStreamAnalyticsStreamInputEventHubCreateUpdate(d *pluginsdk.Resourc return fmt.Errorf("expanding 
`serialization`: %+v", err) } - eventHubDataSourceProps := &streamanalytics.EventHubStreamInputDataSourceProperties{ + eventHubDataSourceProps := &inputs.EventHubStreamInputDataSourceProperties{ EventHubName: utils.String(d.Get("eventhub_name").(string)), ServiceBusNamespace: utils.String(d.Get("servicebus_namespace").(string)), ConsumerGroupName: utils.String(d.Get("eventhub_consumer_group_name").(string)), - AuthenticationMode: streamanalytics.AuthenticationMode(d.Get("authentication_mode").(string)), + AuthenticationMode: utils.ToPtr(inputs.AuthenticationMode(d.Get("authentication_mode").(string))), } if v, ok := d.GetOk("shared_access_policy_key"); ok { @@ -146,27 +146,29 @@ func resourceStreamAnalyticsStreamInputEventHubCreateUpdate(d *pluginsdk.Resourc eventHubDataSourceProps.SharedAccessPolicyName = utils.String(v.(string)) } - props := streamanalytics.Input{ - Name: utils.String(resourceId.InputName), - Properties: &streamanalytics.StreamInputProperties{ - Type: streamanalytics.TypeBasicInputPropertiesTypeStream, - Datasource: &streamanalytics.EventHubStreamInputDataSource{ - Type: streamanalytics.TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub, - EventHubStreamInputDataSourceProperties: eventHubDataSourceProps, + props := inputs.Input{ + Name: utils.String(id.InputName), + Properties: &inputs.StreamInputProperties{ + //Type: streamanalytics.TypeBasicInputPropertiesTypeStream, + Datasource: &inputs.EventHubStreamInputDataSource{ + //Type: streamanalytics.TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub, + Properties: eventHubDataSourceProps, }, Serialization: serialization, PartitionKey: utils.String(d.Get("partition_key").(string)), }, } + var createOpts inputs.CreateOrReplaceOperationOptions + var updateOpts inputs.UpdateOperationOptions if d.IsNewResource() { - if _, err := client.CreateOrReplace(ctx, props, resourceId.ResourceGroup, resourceId.StreamingjobName, resourceId.InputName, "", ""); err != nil { - return fmt.Errorf("creating %s: %+v", resourceId, err) + if _, err := client.CreateOrReplace(ctx, id, props, createOpts); err != nil { + return fmt.Errorf("creating %s: %+v", id, err) } - d.SetId(resourceId.ID()) - } else if _, err := client.Update(ctx, props, resourceId.ResourceGroup, resourceId.StreamingjobName, resourceId.InputName, ""); err != nil { - return fmt.Errorf("updating %s: %+v", resourceId, err) + d.SetId(id.ID()) + } else if _, err := client.Update(ctx, id, props, updateOpts); err != nil { + return fmt.Errorf("updating %s: %+v", id, err) } return resourceStreamAnalyticsStreamInputEventHubRead(d, meta) @@ -177,14 +179,14 @@ func resourceStreamAnalyticsStreamInputEventHubRead(d *pluginsdk.ResourceData, m ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.StreamInputID(d.Id()) + id, err := inputs.ParseInputID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.InputName) + resp, err := client.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { log.Printf("[DEBUG] %s was not found - removing from state!", *id) d.SetId("") return nil @@ -194,47 +196,67 @@ func resourceStreamAnalyticsStreamInputEventHubRead(d *pluginsdk.ResourceData, m } d.Set("name", id.InputName) - d.Set("stream_analytics_job_name", id.StreamingjobName) - d.Set("resource_group_name", id.ResourceGroup) - - if props := resp.Properties; props != nil { - v, ok := props.AsStreamInputProperties() 
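// Illustrative sketch, not part of this patch: the Track 1 SDK (removed lines
// above) unwraps discriminated unions via generated As* helpers such as
// props.AsStreamInputProperties(), whereas hashicorp/go-azure-sdk models the
// same unions as Go interfaces, so the replacement code below uses plain type
// assertions. A minimal helper showing that pattern, assuming the 2020-03-01
// `inputs` package imported elsewhere in this patch plus `fmt`; the helper
// name is hypothetical:
func eventHubDataSourceFromProps(props inputs.InputProperties) (*inputs.EventHubStreamInputDataSource, error) {
	// the concrete implementations are value types, matching the read paths in this patch
	streamInput, ok := props.(inputs.StreamInputProperties)
	if !ok {
		return nil, fmt.Errorf("expected a Stream Input")
	}
	dataSource, ok := streamInput.Datasource.(inputs.EventHubStreamInputDataSource)
	if !ok {
		return nil, fmt.Errorf("expected an Event Hub Stream Input data source")
	}
	return &dataSource, nil
}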
-		if !ok {
-			return fmt.Errorf("converting Stream Input EventHub to an Stream Input: %+v", err)
-		}
-
-		eventHub, ok := v.Datasource.AsEventHubStreamInputDataSource()
-		if !ok {
-			return fmt.Errorf("converting Stream Input EventHub to an EventHub Stream Input: %+v", err)
-		}
-
-		d.Set("eventhub_name", eventHub.EventHubName)
-		d.Set("servicebus_namespace", eventHub.ServiceBusNamespace)
-		d.Set("authentication_mode", eventHub.AuthenticationMode)
-
-		consumerGroupName := ""
-		if eventHub.ConsumerGroupName != nil {
-			consumerGroupName = *eventHub.ConsumerGroupName
-		}
-
-		d.Set("eventhub_consumer_group_name", consumerGroupName)
-
-		sharedAccessPolicyName := ""
-		if eventHub.SharedAccessPolicyName != nil {
-			sharedAccessPolicyName = *eventHub.SharedAccessPolicyName
-		}
-
-		d.Set("shared_access_policy_name", sharedAccessPolicyName)
+	d.Set("stream_analytics_job_name", id.JobName)
+	d.Set("resource_group_name", id.ResourceGroupName)
+
+	if model := resp.Model; model != nil {
+		if props := model.Properties; props != nil {
+			input, ok := props.(inputs.InputProperties)
+			if !ok {
+				return fmt.Errorf("failed to convert to Input")
+			}
 
-		partitionKey := ""
-		if v.PartitionKey != nil {
-			partitionKey = *v.PartitionKey
-		}
+			streamInput, ok := input.(inputs.StreamInputProperties)
+			if !ok {
+				return fmt.Errorf("failed to convert to Stream Input")
+			}
 
-		d.Set("partition_key", partitionKey)
+			streamEventHubInput, ok := streamInput.Datasource.(inputs.EventHubStreamInputDataSource)
+			if !ok {
+				return fmt.Errorf("failed to convert to an Event Hub Stream Input")
+			}
 
-		if err := d.Set("serialization", flattenStreamAnalyticsStreamInputSerialization(v.Serialization)); err != nil {
-			return fmt.Errorf("setting `serialization`: %+v", err)
+			if streamEventHubInputProps := streamEventHubInput.Properties; streamEventHubInputProps != nil {
+				eventHubName := ""
+				if v := streamEventHubInputProps.EventHubName; v != nil {
+					eventHubName = *v
+				}
+				d.Set("eventhub_name", eventHubName)
+
+				serviceBusNameSpace := ""
+				if v := streamEventHubInputProps.ServiceBusNamespace; v != nil {
+					serviceBusNameSpace = *v
+				}
+				d.Set("servicebus_namespace", serviceBusNameSpace)
+
+				authMode := ""
+				if v := streamEventHubInputProps.AuthenticationMode; v != nil {
+					authMode = string(*v)
+				}
+				d.Set("authentication_mode", authMode)
+
+				consumerGroupName := ""
+				if v := streamEventHubInputProps.ConsumerGroupName; v != nil {
+					consumerGroupName = *v
+				}
+				d.Set("eventhub_consumer_group_name", consumerGroupName)
+
+				sharedAccessPolicyName := ""
+				if v := streamEventHubInputProps.SharedAccessPolicyName; v != nil {
+					sharedAccessPolicyName = *v
+				}
+				d.Set("shared_access_policy_name", sharedAccessPolicyName)
+
+				partitionKey := ""
+				if v := streamInput.PartitionKey; v != nil {
+					partitionKey = *v
+				}
+				d.Set("partition_key", partitionKey)
+
+				if err := d.Set("serialization", flattenStreamAnalyticsStreamInputSerialization2(streamInput.Serialization)); err != nil {
+					return fmt.Errorf("setting `serialization`: %+v", err)
+				}
+			}
 		}
 	}
 
@@ -246,13 +268,13 @@ func resourceStreamAnalyticsStreamInputEventHubDelete(d *pluginsdk.ResourceData,
 	ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
 	defer cancel()
 
-	id, err := parse.StreamInputID(d.Id())
+	id, err := inputs.ParseInputID(d.Id())
 	if err != nil {
 		return err
 	}
 
-	if resp, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.InputName); err != nil {
-		if !response.WasNotFound(resp.Response) {
+	if resp, err := client.Delete(ctx, *id); err != nil {
+		if 
!response.WasNotFound(resp.HttpResponse) { return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource_test.go b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource_test.go index a32e586fba4a..80fe913aff26 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource_test.go @@ -3,12 +3,13 @@ package streamanalytics_test import ( "context" "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "testing" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/utils" ) @@ -158,17 +159,17 @@ func TestAccStreamAnalyticsStreamInputEventHub_msiWithoutSharedAccessPolicy(t *t } func (r StreamAnalyticsStreamInputEventHubResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - id, err := parse.StreamInputID(state.ID) + id, err := inputs.ParseInputID(state.ID) if err != nil { return nil, err } - resp, err := client.StreamAnalytics.InputsClient.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.InputName) + resp, err := client.StreamAnalytics.InputsClient.Get(ctx, *id) if err != nil { - if utils.ResponseWasNotFound(resp.Response) { + if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil } - return nil, fmt.Errorf("retrieving (%s): %+v", *id, err) + return nil, fmt.Errorf("retrieving %s: %+v", *id, err) } return utils.Bool(true), nil } @@ -179,18 +180,18 @@ func (r StreamAnalyticsStreamInputEventHubResource) avro(data acceptance.TestDat %s resource "azurerm_stream_analytics_stream_input_eventhub" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = azurerm_stream_analytics_job.test.resource_group_name - eventhub_name = azurerm_eventhub.test.name - servicebus_namespace = azurerm_eventhub_namespace.test.name - shared_access_policy_key = azurerm_eventhub_namespace.test.default_primary_key - shared_access_policy_name = "RootManageSharedAccessKey" - partition_key = "partitionKey" - - serialization { - type = "Avro" - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + eventhub_name = azurerm_eventhub.test.name + servicebus_namespace = azurerm_eventhub_namespace.test.name + shared_access_policy_key = azurerm_eventhub_namespace.test.default_primary_key + shared_access_policy_name = "RootManageSharedAccessKey" + partition_key = "partitionKey" + + serialization { + type = "Avro" + } } `, template, data.RandomInteger) } @@ -201,21 +202,21 @@ func (r StreamAnalyticsStreamInputEventHubResource) csv(data acceptance.TestData %s resource "azurerm_stream_analytics_stream_input_eventhub" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = 
azurerm_stream_analytics_job.test.resource_group_name - eventhub_consumer_group_name = azurerm_eventhub_consumer_group.test.name - eventhub_name = azurerm_eventhub.test.name - servicebus_namespace = azurerm_eventhub_namespace.test.name - shared_access_policy_key = azurerm_eventhub_namespace.test.default_primary_key - shared_access_policy_name = "RootManageSharedAccessKey" - partition_key = "partitionKey" - - serialization { - type = "Csv" - encoding = "UTF8" - field_delimiter = "," - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + eventhub_consumer_group_name = azurerm_eventhub_consumer_group.test.name + eventhub_name = azurerm_eventhub.test.name + servicebus_namespace = azurerm_eventhub_namespace.test.name + shared_access_policy_key = azurerm_eventhub_namespace.test.default_primary_key + shared_access_policy_name = "RootManageSharedAccessKey" + partition_key = "partitionKey" + + serialization { + type = "Csv" + encoding = "UTF8" + field_delimiter = "," + } } `, template, data.RandomInteger) } @@ -226,20 +227,20 @@ func (r StreamAnalyticsStreamInputEventHubResource) json(data acceptance.TestDat %s resource "azurerm_stream_analytics_stream_input_eventhub" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = azurerm_stream_analytics_job.test.resource_group_name - eventhub_consumer_group_name = azurerm_eventhub_consumer_group.test.name - eventhub_name = azurerm_eventhub.test.name - servicebus_namespace = azurerm_eventhub_namespace.test.name - shared_access_policy_key = azurerm_eventhub_namespace.test.default_primary_key - shared_access_policy_name = "RootManageSharedAccessKey" - partition_key = "partitionKey" - - serialization { - type = "Json" - encoding = "UTF8" - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + eventhub_consumer_group_name = azurerm_eventhub_consumer_group.test.name + eventhub_name = azurerm_eventhub.test.name + servicebus_namespace = azurerm_eventhub_namespace.test.name + shared_access_policy_key = azurerm_eventhub_namespace.test.default_primary_key + shared_access_policy_name = "RootManageSharedAccessKey" + partition_key = "partitionKey" + + serialization { + type = "Json" + encoding = "UTF8" + } } `, template, data.RandomInteger) } @@ -250,18 +251,18 @@ func (r StreamAnalyticsStreamInputEventHubResource) jsonNoOptional(data acceptan %s resource "azurerm_stream_analytics_stream_input_eventhub" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = azurerm_stream_analytics_job.test.resource_group_name - eventhub_name = azurerm_eventhub.test.name - servicebus_namespace = azurerm_eventhub_namespace.test.name - shared_access_policy_key = azurerm_eventhub_namespace.test.default_primary_key - shared_access_policy_name = "RootManageSharedAccessKey" - - serialization { - type = "Json" - encoding = "UTF8" - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + eventhub_name = azurerm_eventhub.test.name + servicebus_namespace = azurerm_eventhub_namespace.test.name + shared_access_policy_key = azurerm_eventhub_namespace.test.default_primary_key + 
shared_access_policy_name = "RootManageSharedAccessKey" + + serialization { + type = "Json" + encoding = "UTF8" + } } `, template, data.RandomInteger) } @@ -272,42 +273,42 @@ func (r StreamAnalyticsStreamInputEventHubResource) updated(data acceptance.Test %s resource "azurerm_eventhub_namespace" "updated" { - name = "acctestehn2-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - capacity = 1 + name = "acctestehn2-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + capacity = 1 } resource "azurerm_eventhub" "updated" { - name = "acctesteh2-%d" - namespace_name = azurerm_eventhub_namespace.updated.name - resource_group_name = azurerm_resource_group.test.name - partition_count = 2 - message_retention = 1 + name = "acctesteh2-%d" + namespace_name = azurerm_eventhub_namespace.updated.name + resource_group_name = azurerm_resource_group.test.name + partition_count = 2 + message_retention = 1 } resource "azurerm_eventhub_consumer_group" "updated" { - name = "acctesteventhubcg2-%d" - namespace_name = azurerm_eventhub_namespace.updated.name - eventhub_name = azurerm_eventhub.updated.name - resource_group_name = azurerm_resource_group.test.name + name = "acctesteventhubcg2-%d" + namespace_name = azurerm_eventhub_namespace.updated.name + eventhub_name = azurerm_eventhub.updated.name + resource_group_name = azurerm_resource_group.test.name } resource "azurerm_stream_analytics_stream_input_eventhub" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = azurerm_stream_analytics_job.test.resource_group_name - eventhub_consumer_group_name = azurerm_eventhub_consumer_group.updated.name - eventhub_name = azurerm_eventhub.updated.name - servicebus_namespace = azurerm_eventhub_namespace.updated.name - shared_access_policy_key = azurerm_eventhub_namespace.updated.default_primary_key - shared_access_policy_name = "RootManageSharedAccessKey" - partition_key = "updatedPartitionKey" - - serialization { - type = "Avro" - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + eventhub_consumer_group_name = azurerm_eventhub_consumer_group.updated.name + eventhub_name = azurerm_eventhub.updated.name + servicebus_namespace = azurerm_eventhub_namespace.updated.name + shared_access_policy_key = azurerm_eventhub_namespace.updated.default_primary_key + shared_access_policy_name = "RootManageSharedAccessKey" + partition_key = "updatedPartitionKey" + + serialization { + type = "Avro" + } } `, template, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) } @@ -318,21 +319,21 @@ func (r StreamAnalyticsStreamInputEventHubResource) requiresImport(data acceptan %s resource "azurerm_stream_analytics_stream_input_eventhub" "import" { - name = azurerm_stream_analytics_stream_input_eventhub.test.name - stream_analytics_job_name = azurerm_stream_analytics_stream_input_eventhub.test.stream_analytics_job_name - resource_group_name = azurerm_stream_analytics_stream_input_eventhub.test.resource_group_name - eventhub_consumer_group_name = azurerm_stream_analytics_stream_input_eventhub.test.eventhub_consumer_group_name - eventhub_name = azurerm_stream_analytics_stream_input_eventhub.test.eventhub_name - servicebus_namespace = 
azurerm_stream_analytics_stream_input_eventhub.test.servicebus_namespace - shared_access_policy_key = azurerm_stream_analytics_stream_input_eventhub.test.shared_access_policy_key - shared_access_policy_name = azurerm_stream_analytics_stream_input_eventhub.test.shared_access_policy_name - dynamic "serialization" { - for_each = azurerm_stream_analytics_stream_input_eventhub.test.serialization - content { - encoding = lookup(serialization.value, "encoding", null) - type = serialization.value.type - } - } + name = azurerm_stream_analytics_stream_input_eventhub.test.name + stream_analytics_job_name = azurerm_stream_analytics_stream_input_eventhub.test.stream_analytics_job_name + resource_group_name = azurerm_stream_analytics_stream_input_eventhub.test.resource_group_name + eventhub_consumer_group_name = azurerm_stream_analytics_stream_input_eventhub.test.eventhub_consumer_group_name + eventhub_name = azurerm_stream_analytics_stream_input_eventhub.test.eventhub_name + servicebus_namespace = azurerm_stream_analytics_stream_input_eventhub.test.servicebus_namespace + shared_access_policy_key = azurerm_stream_analytics_stream_input_eventhub.test.shared_access_policy_key + shared_access_policy_name = azurerm_stream_analytics_stream_input_eventhub.test.shared_access_policy_name + dynamic "serialization" { + for_each = azurerm_stream_analytics_stream_input_eventhub.test.serialization + content { + encoding = lookup(serialization.value, "encoding", null) + type = serialization.value.type + } + } } `, template) } @@ -343,21 +344,21 @@ func (r StreamAnalyticsStreamInputEventHubResource) authenticationMode(data acce %s resource "azurerm_stream_analytics_stream_input_eventhub" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = azurerm_stream_analytics_job.test.resource_group_name - eventhub_consumer_group_name = azurerm_eventhub_consumer_group.test.name - eventhub_name = azurerm_eventhub.test.name - servicebus_namespace = azurerm_eventhub_namespace.test.name - shared_access_policy_key = azurerm_eventhub_namespace.test.default_primary_key - shared_access_policy_name = "RootManagedSharedAccessKey" - partition_key = "partitionKey" - authentication_mode = "ConnectionString" - - serialization { - type = "Json" - encoding = "UTF8" - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + eventhub_consumer_group_name = azurerm_eventhub_consumer_group.test.name + eventhub_name = azurerm_eventhub.test.name + servicebus_namespace = azurerm_eventhub_namespace.test.name + shared_access_policy_key = azurerm_eventhub_namespace.test.default_primary_key + shared_access_policy_name = "RootManagedSharedAccessKey" + partition_key = "partitionKey" + authentication_mode = "ConnectionString" + + serialization { + type = "Json" + encoding = "UTF8" + } } `, template, data.RandomInteger) } @@ -367,18 +368,18 @@ func (r StreamAnalyticsStreamInputEventHubResource) msiWithoutSharedAccessPolicy %s resource "azurerm_stream_analytics_stream_input_eventhub" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = azurerm_stream_analytics_job.test.resource_group_name - eventhub_consumer_group_name = azurerm_eventhub_consumer_group.test.name - eventhub_name = azurerm_eventhub.test.name - servicebus_namespace = azurerm_eventhub_namespace.test.name - authentication_mode = 
"Msi" - - serialization { - type = "Json" - encoding = "UTF8" - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + eventhub_consumer_group_name = azurerm_eventhub_consumer_group.test.name + eventhub_name = azurerm_eventhub.test.name + servicebus_namespace = azurerm_eventhub_namespace.test.name + authentication_mode = "Msi" + + serialization { + type = "Json" + encoding = "UTF8" + } } `, r.template(data), data.RandomInteger) } @@ -386,53 +387,53 @@ resource "azurerm_stream_analytics_stream_input_eventhub" "test" { func (r StreamAnalyticsStreamInputEventHubResource) template(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { - features {} + features {} } resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" + name = "acctestRG-%d" + location = "%s" } resource "azurerm_eventhub_namespace" "test" { - name = "acctestehn-%d" - location = azurerm_resource_group.test.location - resource_group_name = azurerm_resource_group.test.name - sku = "Standard" - capacity = 1 + name = "acctestehn-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + sku = "Standard" + capacity = 1 } resource "azurerm_eventhub" "test" { - name = "acctesteh-%d" - namespace_name = azurerm_eventhub_namespace.test.name - resource_group_name = azurerm_resource_group.test.name - partition_count = 2 - message_retention = 1 + name = "acctesteh-%d" + namespace_name = azurerm_eventhub_namespace.test.name + resource_group_name = azurerm_resource_group.test.name + partition_count = 2 + message_retention = 1 } resource "azurerm_eventhub_consumer_group" "test" { - name = "acctesteventhubcg-%d" - namespace_name = azurerm_eventhub_namespace.test.name - eventhub_name = azurerm_eventhub.test.name - resource_group_name = azurerm_resource_group.test.name + name = "acctesteventhubcg-%d" + namespace_name = azurerm_eventhub_namespace.test.name + eventhub_name = azurerm_eventhub.test.name + resource_group_name = azurerm_resource_group.test.name } resource "azurerm_stream_analytics_job" "test" { - name = "acctestjob-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - compatibility_level = "1.0" - data_locale = "en-GB" - events_late_arrival_max_delay_in_seconds = 60 - events_out_of_order_max_delay_in_seconds = 50 - events_out_of_order_policy = "Adjust" - output_error_policy = "Drop" - streaming_units = 3 - - transformation_query = < Date: Mon, 21 Nov 2022 16:51:33 +0100 Subject: [PATCH 02/14] swap remaining resources --- .../services/streamanalytics/client/client.go | 26 +- .../stream_analytics_cluster_resource.go | 73 ++-- .../stream_analytics_cluster_resource_test.go | 7 +- ...lytics_function_javascript_uda_resource.go | 109 +++--- ...s_function_javascript_uda_resource_test.go | 7 +- ...lytics_function_javascript_udf_resource.go | 98 +++--- ...s_function_javascript_udf_resource_test.go | 3 +- .../stream_analytics_job_data_source.go | 118 +++++-- .../stream_analytics_job_resource.go | 323 +++++++++++------- .../stream_analytics_job_resource_test.go | 3 +- .../stream_analytics_job_schedule_resource.go | 121 ++++--- ...am_analytics_job_schedule_resource_test.go | 9 +- ...ytics_managed_private_endpoint_resource.go | 54 ++- ..._managed_private_endpoint_resource_test.go | 7 +- ...ream_analytics_output_cosmosdb_resource.go | 5 +- 
...tream_analytics_output_powerbi_resource.go | 5 +- ...lytics_output_servicebus_topic_resource.go | 91 +++-- ...s_output_servicebus_topic_resource_test.go | 4 +- ...tream_analytics_output_synapse_resource.go | 67 ++-- ..._analytics_output_synapse_resource_test.go | 2 + .../stream_analytics_output_table_resource.go | 130 ++++--- ...am_analytics_output_table_resource_test.go | 2 + ...analytics_reference_input_blob_resource.go | 2 +- ...nalytics_reference_input_mssql_resource.go | 2 +- ...am_analytics_stream_input_blob_resource.go | 2 +- ...nalytics_stream_input_eventhub_resource.go | 2 +- ...ytics_stream_input_eventhub_v2_resource.go | 5 +- ..._analytics_stream_input_iothub_resource.go | 2 +- .../streamanalytics/validate/function_id.go | 4 +- .../validate/streaming_job_id.go | 4 +- 30 files changed, 742 insertions(+), 545 deletions(-) diff --git a/internal/services/streamanalytics/client/client.go b/internal/services/streamanalytics/client/client.go index 7282e643006b..f75f6959d85c 100644 --- a/internal/services/streamanalytics/client/client.go +++ b/internal/services/streamanalytics/client/client.go @@ -1,27 +1,31 @@ package client import ( - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations" "github.com/hashicorp/terraform-provider-azurerm/internal/common" ) type Client struct { - FunctionsClient *streamanalytics.FunctionsClient - JobsClient *streamanalytics.StreamingJobsClient + FunctionsClient *functions.FunctionsClient + JobsClient *streamingjobs.StreamingJobsClient InputsClient *inputs.InputsClient OutputsClient *outputs.OutputsClient - TransformationsClient *streamanalytics.TransformationsClient - ClustersClient *streamanalytics.ClustersClient - EndpointsClient *streamanalytics.PrivateEndpointsClient + TransformationsClient *transformations.TransformationsClient + ClustersClient *clusters.ClustersClient + EndpointsClient *privateendpoints.PrivateEndpointsClient } func NewClient(o *common.ClientOptions) *Client { - functionsClient := streamanalytics.NewFunctionsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + functionsClient := functions.NewFunctionsClientWithBaseURI(o.ResourceManagerEndpoint) o.ConfigureClient(&functionsClient.Client, o.ResourceManagerAuthorizer) - jobsClient := streamanalytics.NewStreamingJobsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + jobsClient := streamingjobs.NewStreamingJobsClientWithBaseURI(o.ResourceManagerEndpoint) o.ConfigureClient(&jobsClient.Client, o.ResourceManagerAuthorizer) inputsClient := inputs.NewInputsClientWithBaseURI(o.ResourceManagerEndpoint) @@ -30,13 +34,13 @@ func NewClient(o *common.ClientOptions) *Client { outputsClient := outputs.NewOutputsClientWithBaseURI(o.ResourceManagerEndpoint) o.ConfigureClient(&outputsClient.Client, o.ResourceManagerAuthorizer) - transformationsClient := 
streamanalytics.NewTransformationsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + transformationsClient := transformations.NewTransformationsClientWithBaseURI(o.ResourceManagerEndpoint) o.ConfigureClient(&transformationsClient.Client, o.ResourceManagerAuthorizer) - clustersClient := streamanalytics.NewClustersClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + clustersClient := clusters.NewClustersClientWithBaseURI(o.ResourceManagerEndpoint) o.ConfigureClient(&clustersClient.Client, o.ResourceManagerAuthorizer) - endpointsClient := streamanalytics.NewPrivateEndpointsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + endpointsClient := privateendpoints.NewPrivateEndpointsClientWithBaseURI(o.ResourceManagerEndpoint) o.ConfigureClient(&endpointsClient.Client, o.ResourceManagerAuthorizer) return &Client{ diff --git a/internal/services/streamanalytics/stream_analytics_cluster_resource.go b/internal/services/streamanalytics/stream_analytics_cluster_resource.go index 03882e878aaa..f5955eafa8c4 100644 --- a/internal/services/streamanalytics/stream_analytics_cluster_resource.go +++ b/internal/services/streamanalytics/stream_analytics_cluster_resource.go @@ -3,15 +3,14 @@ package streamanalytics import ( "context" "fmt" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-helpers/resourcemanager/tags" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" - "github.com/hashicorp/terraform-provider-azurerm/internal/tags" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/utils" @@ -23,7 +22,7 @@ type ClusterModel struct { Name string `tfschema:"name"` ResourceGroup string `tfschema:"resource_group_name"` Location string `tfschema:"location"` - StreamingCapacity int32 `tfschema:"streaming_capacity"` + StreamingCapacity int64 `tfschema:"streaming_capacity"` Tags map[string]interface{} `tfschema:"tags"` } @@ -63,7 +62,7 @@ func (r ClusterResource) Arguments() map[string]*pluginsdk.Schema { ), }, - "tags": tags.Schema(), + "tags": commonschema.Tags(), } } @@ -83,9 +82,9 @@ func (r ClusterResource) Create() sdk.ResourceFunc { client := metadata.Client.StreamAnalytics.ClustersClient subscriptionId := metadata.Client.Account.SubscriptionId - id := parse.NewClusterID(subscriptionId, model.ResourceGroup, model.Name) + id := clusters.NewClusterID(subscriptionId, model.ResourceGroup, model.Name) - existing, err := client.Get(ctx, id.ResourceGroupName, id.Name) + existing, err := client.Get(ctx, id) if err != nil && !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } @@ -93,25 +92,21 @@ func (r ClusterResource) Create() sdk.ResourceFunc { return metadata.ResourceRequiresImport(r.ResourceType(), id) } - props := streamanalytics.Cluster{ + props := clusters.Cluster{ Name: utils.String(model.Name), Location: utils.String(model.Location), - Sku: &streamanalytics.ClusterSku{ - Name: 
streamanalytics.ClusterSkuNameDefault,
-			Capacity: utils.Int32(model.StreamingCapacity),
+		Sku: &clusters.ClusterSku{
+			Name:     utils.ToPtr(clusters.ClusterSkuNameDefault),
+			Capacity: utils.ToPtr(model.StreamingCapacity),
 		},
 		Tags: tags.Expand(model.Tags),
 	}
 
-	future, err := client.CreateOrUpdate(ctx, props, id.ResourceGroupName, id.Name, "", "")
-	if err != nil {
+	var opts clusters.CreateOrUpdateOperationOptions
+	if err := client.CreateOrUpdateThenPoll(ctx, id, props, opts); err != nil {
 		return fmt.Errorf("creating %s: %+v", id, err)
 	}
 
-	if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
-		return fmt.Errorf("waiting for creation of %s: %+v", id, err)
-	}
-
 	metadata.SetID(id)
 
 	return nil
@@ -124,12 +119,12 @@ func (r ClusterResource) Read() sdk.ResourceFunc {
 		Timeout: 5 * time.Minute,
 		Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error {
 			client := metadata.Client.StreamAnalytics.ClustersClient
-			id, err := parse.ClusterID(metadata.ResourceData.Id())
+			id, err := clusters.ParseClusterID(metadata.ResourceData.Id())
 			if err != nil {
 				return err
 			}
 
-			resp, err := client.Get(ctx, id.ResourceGroupName, id.Name)
+			resp, err := client.Get(ctx, *id)
 			if err != nil {
 				if response.WasNotFound(resp.HttpResponse) {
 					return metadata.MarkAsGone(id)
@@ -138,11 +133,20 @@
 			}
 
 			state := ClusterModel{
-				Name:              id.Name,
+				Name:          id.ClusterName,
 				ResourceGroup: id.ResourceGroupName,
-				Location:          *resp.Location,
-				StreamingCapacity: *resp.Sku.Capacity,
-				Tags:              tags.Flatten(resp.Tags),
+			}
+
+			if model := resp.Model; model != nil {
+				state.Location = *model.Location
+				state.Tags = tags.Flatten(model.Tags)
+
+				var capacity int64
+				if v := model.Sku.Capacity; v != nil {
+					capacity = *v
+				}
+				state.StreamingCapacity = capacity
 			}
 
 			return metadata.Encode(&state)
@@ -155,15 +159,15 @@ func (r ClusterResource) Delete() sdk.ResourceFunc {
 		Timeout: 90 * time.Minute,
 		Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error {
 			client := metadata.Client.StreamAnalytics.ClustersClient
-			id, err := parse.ClusterID(metadata.ResourceData.Id())
+			id, err := clusters.ParseClusterID(metadata.ResourceData.Id())
 			if err != nil {
 				return err
 			}
 
 			metadata.Logger.Infof("deleting %s", *id)
 
-			if resp, err := client.Delete(ctx, id.ResourceGroupName, id.Name); err != nil {
-				if !response.WasNotFound(resp.HttpResponse()) {
+			if resp, err := client.Delete(ctx, *id); err != nil {
+				if !response.WasNotFound(resp.HttpResponse) {
 					return fmt.Errorf("deleting %s: %+v", *id, err)
 				}
 			}
@@ -176,7 +180,7 @@ func (r ClusterResource) Update() sdk.ResourceFunc {
 	return sdk.ResourceFunc{
 		Timeout: 90 * time.Minute,
 		Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error {
-			id, err := parse.ClusterID(metadata.ResourceData.Id())
+			id, err := clusters.ParseClusterID(metadata.ResourceData.Id())
 			if err != nil {
 				return err
 			}
@@ -189,23 +193,18 @@
 			}
 
 			if metadata.ResourceData.HasChange("streaming_capacity") || metadata.ResourceData.HasChange("tags") {
-				props := streamanalytics.Cluster{
-					Sku: &streamanalytics.ClusterSku{
-						Capacity: utils.Int32(state.StreamingCapacity),
+				props := clusters.Cluster{
+					Sku: &clusters.ClusterSku{
+						Capacity: utils.ToPtr(state.StreamingCapacity),
 					},
 					Tags: tags.Expand(state.Tags),
 				}
 
-				future, err := client.Update(ctx, props, id.ResourceGroupName, id.Name, "")
-				if err != nil {
+				var opts clusters.UpdateOperationOptions
+				if err := 
client.UpdateThenPoll(ctx, *id, props, opts); err != nil { return fmt.Errorf("updating %s: %+v", *id, err) } - - if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting for update to %s: %+v", *id, err) - } } - return nil }, } diff --git a/internal/services/streamanalytics/stream_analytics_cluster_resource_test.go b/internal/services/streamanalytics/stream_analytics_cluster_resource_test.go index 2c279abdd18d..a5383bba5ac4 100644 --- a/internal/services/streamanalytics/stream_analytics_cluster_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_cluster_resource_test.go @@ -3,12 +3,13 @@ package streamanalytics_test import ( "context" "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters" "testing" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/utils" ) @@ -89,12 +90,12 @@ func TestAccStreamAnalyticsCluster_requiresImport(t *testing.T) { } func (r StreamAnalyticsClusterResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - id, err := parse.ClusterID(state.ID) + id, err := clusters.ParseClusterID(state.ID) if err != nil { return nil, err } - resp, err := client.StreamAnalytics.ClustersClient.Get(ctx, id.ResourceGroupName, id.Name) + resp, err := client.StreamAnalytics.ClustersClient.Get(ctx, *id) if err != nil { if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil diff --git a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go index b5d78e3ccb9a..410b5ebdd89e 100644 --- a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go +++ b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go @@ -2,14 +2,14 @@ package streamanalytics import ( "fmt" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "log" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" @@ -25,7 +25,7 @@ func resourceStreamAnalyticsFunctionUDA() *pluginsdk.Resource { Delete: resourceStreamAnalyticsFunctionUDADelete, Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { - _, err := parse.FunctionID(id) + _, err := functions.ParseFunctionID(id) return err }), @@ -118,12 +118,12 @@ func resourceStreamAnalyticsFunctionUDACreate(d 
*pluginsdk.ResourceData, meta in ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - jobId, err := parse.StreamingJobID(d.Get("stream_analytics_job_id").(string)) + jobId, err := streamingjobs.ParseStreamingJobID(d.Get("stream_analytics_job_id").(string)) if err != nil { return err } - id := parse.NewFunctionID(subscriptionId, jobid.ResourceGroupName, jobId.Name, d.Get("name").(string)) + id := functions.NewFunctionID(subscriptionId, jobId.ResourceGroupName, jobId.JobName, d.Get("name").(string)) existing, err := client.Get(ctx, id) if err != nil { @@ -136,13 +136,11 @@ func resourceStreamAnalyticsFunctionUDACreate(d *pluginsdk.ResourceData, meta in return tf.ImportAsExistsError("azurerm_stream_analytics_function_javascript_uda", id.ID()) } - props := streamanalytics.Function{ - Properties: &streamanalytics.AggregateFunctionProperties{ - Type: streamanalytics.TypeBasicFunctionPropertiesTypeAggregate, - FunctionConfiguration: &streamanalytics.FunctionConfiguration{ - Binding: &streamanalytics.JavaScriptFunctionBinding{ - Type: streamanalytics.TypeBasicFunctionBindingTypeMicrosoftStreamAnalyticsJavascriptUdf, - JavaScriptFunctionBindingProperties: &streamanalytics.JavaScriptFunctionBindingProperties{ + props := functions.Function{ + Properties: &functions.AggregateFunctionProperties{ + Properties: &functions.FunctionConfiguration{ + Binding: &functions.JavaScriptFunctionBinding{ + Properties: &functions.JavaScriptFunctionBindingProperties{ Script: utils.String(d.Get("script").(string)), }, }, @@ -152,6 +150,7 @@ func resourceStreamAnalyticsFunctionUDACreate(d *pluginsdk.ResourceData, meta in }, } + var opts functions.CreateOrReplaceOperationOptions if _, err := client.CreateOrReplace(ctx, id, props, opts); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } @@ -166,12 +165,12 @@ func resourceStreamAnalyticsFunctionUDARead(d *pluginsdk.ResourceData, meta inte ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.FunctionID(d.Id()) + id, err := functions.ParseFunctionID(d.Id()) if err != nil { return err } - resp, err := client.Get(ctx, id) + resp, err := client.Get(ctx, *id) if err != nil { if response.WasNotFound(resp.HttpResponse) { log.Printf("[DEBUG] %q was not found - removing from state!", *id) @@ -182,35 +181,38 @@ func resourceStreamAnalyticsFunctionUDARead(d *pluginsdk.ResourceData, meta inte return fmt.Errorf("retrieving %s: %+v", *id, err) } - d.Set("name", id.Name) + d.Set("name", id.FunctionName) - jobId := parse.NewStreamingJobID(id.SubscriptionId, id.ResourceGroupName, id.JobName) + jobId := streamingjobs.NewStreamingJobID(id.SubscriptionId, id.ResourceGroupName, id.JobName) d.Set("stream_analytics_job_id", jobId.ID()) - if props := resp.Properties; props != nil { - aggregateProps, ok := props.AsAggregateFunctionProperties() - if !ok { - return fmt.Errorf("converting Props to a Aggregate Function") - } - - binding, ok := aggregateProps.Binding.AsJavaScriptFunctionBinding() - if !ok { - return fmt.Errorf("converting Binding to a JavaScript Function Binding") - } - - if bindingProps := binding.JavaScriptFunctionBindingProperties; bindingProps != nil { - d.Set("script", bindingProps.Script) - } - - if err := d.Set("input", flattenStreamAnalyticsFunctionUDAInputs(aggregateProps.Inputs)); err != nil { - return fmt.Errorf("flattening `input`: %+v", err) - } - - if err := d.Set("output", flattenStreamAnalyticsFunctionUDAOutput(aggregateProps.Output)); err != nil { - return 
fmt.Errorf("flattening `output`: %+v", err)
+	if model := resp.Model; model != nil {
+		if props := model.Properties; props != nil {
+			function, ok := props.(functions.AggregateFunctionProperties)
+			if !ok {
+				return fmt.Errorf("converting to an Aggregate Function")
+			}
+
+			binding, ok := function.Properties.Binding.(functions.JavaScriptFunctionBinding)
+			if !ok {
+				return fmt.Errorf("converting Binding to a JavaScript Function Binding")
+			}
+
+			script := ""
+			if bindingProps := binding.Properties; bindingProps != nil && bindingProps.Script != nil {
+				script = *bindingProps.Script
+			}
+			d.Set("script", script)
+
+			if err := d.Set("input", flattenStreamAnalyticsFunctionUDAInputs(function.Properties.Inputs)); err != nil {
+				return fmt.Errorf("flattening `input`: %+v", err)
+			}
+
+			if err := d.Set("output", flattenStreamAnalyticsFunctionUDAOutput(function.Properties.Output)); err != nil {
+				return fmt.Errorf("flattening `output`: %+v", err)
+			}
+		}
 	}
-
 	return nil
 }

@@ -219,18 +221,16 @@ func resourceStreamAnalyticsFunctionUDAUpdate(d *pluginsdk.ResourceData, meta in
 	ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d)
 	defer cancel()
 
-	id, err := parse.FunctionID(d.Id())
+	id, err := functions.ParseFunctionID(d.Id())
 	if err != nil {
 		return err
 	}
 
-	props := streamanalytics.Function{
-		Properties: &streamanalytics.AggregateFunctionProperties{
-			Type: streamanalytics.TypeBasicFunctionPropertiesTypeAggregate,
-			FunctionConfiguration: &streamanalytics.FunctionConfiguration{
-				Binding: &streamanalytics.JavaScriptFunctionBinding{
-					Type: streamanalytics.TypeBasicFunctionBindingTypeMicrosoftStreamAnalyticsJavascriptUdf,
-					JavaScriptFunctionBindingProperties: &streamanalytics.JavaScriptFunctionBindingProperties{
+	props := functions.Function{
+		Properties: &functions.AggregateFunctionProperties{
+			Properties: &functions.FunctionConfiguration{
+				Binding: &functions.JavaScriptFunctionBinding{
+					Properties: &functions.JavaScriptFunctionBindingProperties{
 						Script: utils.String(d.Get("script").(string)),
 					},
 				},
@@ -240,6 +240,7 @@ func resourceStreamAnalyticsFunctionUDAUpdate(d *pluginsdk.ResourceData, meta in
 		},
 	}
 
+	var opts functions.UpdateOperationOptions
 	if _, err := client.Update(ctx, *id, props, opts); err != nil {
 		return fmt.Errorf("updating %s: %+v", id, err)
 	}
@@ -252,7 +253,7 @@ func resourceStreamAnalyticsFunctionUDADelete(d *pluginsdk.ResourceData, meta in
 	ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
 	defer cancel()
 
-	id, err := parse.FunctionID(d.Id())
+	id, err := functions.ParseFunctionID(d.Id())
 	if err != nil {
 		return err
 	}
@@ -266,13 +267,13 @@ func resourceStreamAnalyticsFunctionUDADelete(d *pluginsdk.ResourceData, meta in
 	return nil
 }
 
-func expandStreamAnalyticsFunctionUDAInputs(input []interface{}) *[]streamanalytics.FunctionInput {
-	outputs := make([]streamanalytics.FunctionInput, 0)
+func expandStreamAnalyticsFunctionUDAInputs(input []interface{}) *[]functions.FunctionInput {
+	outputs := make([]functions.FunctionInput, 0)
 	for _, raw := range input {
 		v := raw.(map[string]interface{})
 		variableType := v["type"].(string)
-		outputs = append(outputs, streamanalytics.FunctionInput{
+		outputs = append(outputs, functions.FunctionInput{
 			DataType:                 utils.String(variableType),
 			IsConfigurationParameter: utils.Bool(v["configuration_parameter"].(bool)),
 		})
@@ -281,7 +282,7 @@ func expandStreamAnalyticsFunctionUDAInputs(input []interface{}) *[]streamanalyt
 	return &outputs
 }
 
-func flattenStreamAnalyticsFunctionUDAInputs(input *[]streamanalytics.FunctionInput) []interface{} {
+func 
flattenStreamAnalyticsFunctionUDAInputs(input *[]functions.FunctionInput) []interface{} { if input == nil { return []interface{}{} } @@ -308,16 +309,16 @@ func flattenStreamAnalyticsFunctionUDAInputs(input *[]streamanalytics.FunctionIn return outputs } -func expandStreamAnalyticsFunctionUDAOutput(input []interface{}) *streamanalytics.FunctionOutput { +func expandStreamAnalyticsFunctionUDAOutput(input []interface{}) *functions.FunctionOutput { output := input[0].(map[string]interface{}) dataType := output["type"].(string) - return &streamanalytics.FunctionOutput{ + return &functions.FunctionOutput{ DataType: utils.String(dataType), } } -func flattenStreamAnalyticsFunctionUDAOutput(input *streamanalytics.FunctionOutput) []interface{} { +func flattenStreamAnalyticsFunctionUDAOutput(input *functions.FunctionOutput) []interface{} { if input == nil { return []interface{}{} } diff --git a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource_test.go b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource_test.go index 5abf4558b213..3ee37f911af2 100644 --- a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource_test.go @@ -3,12 +3,13 @@ package streamanalytics_test import ( "context" "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions" "testing" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/utils" ) @@ -119,12 +120,12 @@ func TestAccStreamAnalyticsFunctionJavaScriptUDA_isConfigurationParameter(t *tes } func (r StreamAnalyticsFunctionJavaScriptUDAResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - id, err := parse.FunctionID(state.ID) + id, err := functions.ParseFunctionID(state.ID) if err != nil { return nil, err } - resp, err := client.StreamAnalytics.Functionsclient.Get(ctx, id) + resp, err := client.StreamAnalytics.FunctionsClient.Get(ctx, *id) if err != nil { if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil diff --git a/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go b/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go index 784df9d8db46..5637ee5691d7 100644 --- a/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go +++ b/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go @@ -2,15 +2,14 @@ package streamanalytics import ( "fmt" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions" "log" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - 
"github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" @@ -24,7 +23,7 @@ func resourceStreamAnalyticsFunctionUDF() *pluginsdk.Resource { Update: resourceStreamAnalyticsFunctionUDFCreateUpdate, Delete: resourceStreamAnalyticsFunctionUDFDelete, Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { - _, err := parse.FunctionID(id) + _, err := functions.ParseFunctionID(id) return err }), @@ -120,7 +119,7 @@ func resourceStreamAnalyticsFunctionUDFCreateUpdate(d *pluginsdk.ResourceData, m ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewFunctionID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string)) + id := functions.NewFunctionID(subscriptionId, d.Get("resource_group_name").(string), d.Get("stream_analytics_job_name").(string), d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id) if err != nil { @@ -134,21 +133,15 @@ func resourceStreamAnalyticsFunctionUDFCreateUpdate(d *pluginsdk.ResourceData, m } } - script := d.Get("script").(string) - inputsRaw := d.Get("input").([]interface{}) - inputs := expandStreamAnalyticsFunctionInputs(inputsRaw) - - outputRaw := d.Get("output").([]interface{}) - output := expandStreamAnalyticsFunctionOutput(outputRaw) - - function := streamanalytics.Function{ - Properties: &streamanalytics.ScalarFunctionProperties{ - Type: streamanalytics.TypeBasicFunctionPropertiesTypeScalar, - FunctionConfiguration: &streamanalytics.FunctionConfiguration{ - Binding: &streamanalytics.JavaScriptFunctionBinding{ - Type: streamanalytics.TypeBasicFunctionBindingTypeMicrosoftStreamAnalyticsJavascriptUdf, - JavaScriptFunctionBindingProperties: &streamanalytics.JavaScriptFunctionBindingProperties{ - Script: utils.String(script), + inputs := expandStreamAnalyticsFunctionInputs(d.Get("input").([]interface{})) + output := expandStreamAnalyticsFunctionOutput(d.Get("output").([]interface{})) + + function := functions.Function{ + Properties: &functions.ScalarFunctionProperties{ + Properties: &functions.FunctionConfiguration{ + Binding: &functions.JavaScriptFunctionBinding{ + Properties: &functions.JavaScriptFunctionBindingProperties{ + Script: utils.String(d.Get("script").(string)), }, }, Inputs: inputs, @@ -157,13 +150,15 @@ func resourceStreamAnalyticsFunctionUDFCreateUpdate(d *pluginsdk.ResourceData, m }, } + var createOpts functions.CreateOrReplaceOperationOptions + var updateOpts functions.UpdateOperationOptions if d.IsNewResource() { - if _, err := client.CreateOrReplace(ctx, function, id.ResourceGroupName, id.JobName, id.Name, "", ""); err != nil { + if _, err := client.CreateOrReplace(ctx, id, function, createOpts); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } d.SetId(id.ID()) - } else if _, err := client.Update(ctx, function, id.ResourceGroupName, id.JobName, id.Name, ""); err != nil { + } else if _, err := client.Update(ctx, id, function, updateOpts); err != nil { return fmt.Errorf("updating %s: %+v", id, err) } @@ -175,12 +170,12 @@ func resourceStreamAnalyticsFunctionUDFRead(d *pluginsdk.ResourceData, meta inte ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.FunctionID(d.Id()) + id, err 
:= functions.ParseFunctionID(d.Id())
	if err != nil {
		return err
	}

-	resp, err := client.Get(ctx, id)
+	resp, err := client.Get(ctx, *id)
	if err != nil {
		if response.WasNotFound(resp.HttpResponse) {
			log.Printf("[DEBUG] %q was not found - removing from state!", id)
@@ -191,34 +186,37 @@ func resourceStreamAnalyticsFunctionUDFRead(d *pluginsdk.ResourceData, meta inte
		return fmt.Errorf("retrieving %s: %+v", id, err)
	}

-	d.Set("name", id.Name)
+	d.Set("name", id.FunctionName)
	d.Set("stream_analytics_job_name", id.JobName)
	d.Set("resource_group_name", id.ResourceGroupName)

-	if props := resp.Properties; props != nil {
-		scalarProps, ok := props.AsScalarFunctionProperties()
-		if !ok {
-			return fmt.Errorf("converting Props to a Scalar Function")
-		}
+	if model := resp.Model; model != nil {
+		if props := model.Properties; props != nil {
+			function, ok := props.(functions.ScalarFunctionProperties)
+			if !ok {
+				return fmt.Errorf("converting to Scalar Function")
+			}

-		binding, ok := scalarProps.Binding.AsJavaScriptFunctionBinding()
-		if !ok {
-			return fmt.Errorf("converting Binding to a JavaScript Function Binding")
-		}
+			binding, ok := function.Properties.Binding.(functions.JavaScriptFunctionBinding)
+			if !ok {
+				return fmt.Errorf("converting to a JavaScript Function Binding")
+			}

-		if bindingProps := binding.JavaScriptFunctionBindingProperties; bindingProps != nil {
-			d.Set("script", bindingProps.Script)
-		}
+			script := ""
+			if v := binding.Properties; v != nil && v.Script != nil {
+				script = *v.Script
+			}
+			d.Set("script", script)

-		if err := d.Set("input", flattenStreamAnalyticsFunctionInputs(scalarProps.Inputs)); err != nil {
-			return fmt.Errorf("flattening `input`: %+v", err)
-		}
+			if err := d.Set("input", flattenStreamAnalyticsFunctionInputs(function.Properties.Inputs)); err != nil {
+				return fmt.Errorf("flattening `input`: %+v", err)
+			}

-		if err := d.Set("output", flattenStreamAnalyticsFunctionOutput(scalarProps.Output)); err != nil {
-			return fmt.Errorf("flattening `output`: %+v", err)
+			if err := d.Set("output", flattenStreamAnalyticsFunctionOutput(function.Properties.Output)); err != nil {
+				return fmt.Errorf("flattening `output`: %+v", err)
+			}
		}
	}
-
	return nil
}

@@ -227,7 +225,7 @@ func resourceStreamAnalyticsFunctionUDFDelete(d *pluginsdk.ResourceData, meta in
	ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
	defer cancel()

-	id, err := parse.FunctionID(d.Id())
+	id, err := functions.ParseFunctionID(d.Id())
	if err != nil {
		return err
	}
@@ -241,13 +239,13 @@ func resourceStreamAnalyticsFunctionUDFDelete(d *pluginsdk.ResourceData, meta in
	return nil
}

-func expandStreamAnalyticsFunctionInputs(input []interface{}) *[]streamanalytics.FunctionInput {
-	outputs := make([]streamanalytics.FunctionInput, 0)
+func expandStreamAnalyticsFunctionInputs(input []interface{}) *[]functions.FunctionInput {
+	outputs := make([]functions.FunctionInput, 0)
	for _, raw := range input {
		v := raw.(map[string]interface{})
		variableType := v["type"].(string)

-		outputs = append(outputs, streamanalytics.FunctionInput{
+		outputs = append(outputs, functions.FunctionInput{
			DataType:                 utils.String(variableType),
			IsConfigurationParameter: utils.Bool(v["configuration_parameter"].(bool)),
		})
@@ -256,7 +254,7 @@ func expandStreamAnalyticsFunctionInputs(input []interface{}) *[]streamanalytics
	return &outputs
}

-func flattenStreamAnalyticsFunctionInputs(input *[]streamanalytics.FunctionInput) []interface{} {
+func flattenStreamAnalyticsFunctionInputs(input *[]functions.FunctionInput) []interface{} {
	if input == nil {
		return []interface{}{}
	}
@@
-283,16 +281,16 @@ func flattenStreamAnalyticsFunctionInputs(input *[]streamanalytics.FunctionInput return outputs } -func expandStreamAnalyticsFunctionOutput(input []interface{}) *streamanalytics.FunctionOutput { +func expandStreamAnalyticsFunctionOutput(input []interface{}) *functions.FunctionOutput { output := input[0].(map[string]interface{}) dataType := output["type"].(string) - return &streamanalytics.FunctionOutput{ + return &functions.FunctionOutput{ DataType: utils.String(dataType), } } -func flattenStreamAnalyticsFunctionOutput(input *streamanalytics.FunctionOutput) []interface{} { +func flattenStreamAnalyticsFunctionOutput(input *functions.FunctionOutput) []interface{} { if input == nil { return []interface{}{} } diff --git a/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource_test.go b/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource_test.go index 9187032720bd..c661039a0963 100644 --- a/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource_test.go @@ -8,7 +8,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/utils" ) @@ -90,7 +89,7 @@ func TestAccStreamAnalyticsFunctionJavaScriptUDF_isConfigurationParameter(t *tes } func (r StreamAnalyticsFunctionJavaScriptUDFResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - id, err := parse.FunctionID(state.ID) + id, err := functions.ParseFunctionID(state.ID) if err != nil { return nil, err } diff --git a/internal/services/streamanalytics/stream_analytics_job_data_source.go b/internal/services/streamanalytics/stream_analytics_job_data_source.go index 43328f7ce504..bfb9575ad915 100644 --- a/internal/services/streamanalytics/stream_analytics_job_data_source.go +++ b/internal/services/streamanalytics/stream_analytics_job_data_source.go @@ -2,12 +2,13 @@ package streamanalytics import ( "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "time" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-helpers/resourcemanager/location" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" ) @@ -101,8 +102,9 @@ func dataSourceStreamAnalyticsJobRead(d *pluginsdk.ResourceData, meta interface{ ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewStreamingJobID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) - resp, err := client.Get(ctx, id.ResourceGroupName, id.Name, "transformation") + id := streamingjobs.NewStreamingJobID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) + var opts streamingjobs.GetOperationOptions + 
resp, err := client.Get(ctx, id, opts)
	if err != nil {
		if response.WasNotFound(resp.HttpResponse) {
			return fmt.Errorf("%s was not found", id)
@@ -113,42 +115,90 @@ func dataSourceStreamAnalyticsJobRead(d *pluginsdk.ResourceData, meta interface{
	d.SetId(id.ID())

-	d.Set("name", id.Name)
+	d.Set("name", id.JobName)
	d.Set("resource_group_name", id.ResourceGroupName)
-	d.Set("location", location.NormalizeNilable(resp.Location))

-	if err := d.Set("identity", flattenJobIdentity(resp.Identity)); err != nil {
-		return fmt.Errorf("setting `identity`: %v", err)
-	}
-
-	if props := resp.StreamingJobProperties; props != nil {
-		d.Set("compatibility_level", string(props.CompatibilityLevel))
-		d.Set("data_locale", props.DataLocale)
-		if v := props.EventsLateArrivalMaxDelayInSeconds; v != nil {
-			d.Set("events_late_arrival_max_delay_in_seconds", int(*v))
-		}
-		if v := props.EventsOutOfOrderMaxDelayInSeconds; v != nil {
-			d.Set("events_out_of_order_max_delay_in_seconds", int(*v))
-		}
-		d.Set("events_out_of_order_policy", string(props.EventsOutOfOrderPolicy))
-		d.Set("job_id", props.JobID)
-		d.Set("output_error_policy", string(props.OutputErrorPolicy))
-
-		if v := props.LastOutputEventTime; v != nil {
-			d.Set("last_output_time", v.String())
+	if model := resp.Model; model != nil {
+		d.Set("location", location.NormalizeNilable(model.Location))
+		if err := d.Set("identity", flattenJobIdentity(model.Identity)); err != nil {
+			return fmt.Errorf("setting `identity`: %v", err)
		}

-		if v := props.OutputStartTime; v != nil {
-			d.Set("start_time", v.String())
-		}
-
-		d.Set("start_mode", props.OutputStartMode)
-
-		if props.Transformation != nil && props.Transformation.TransformationProperties != nil {
-			d.Set("streaming_units", props.Transformation.TransformationProperties.StreamingUnits)
-			d.Set("transformation_query", props.Transformation.TransformationProperties.Query)
+		if props := model.Properties; props != nil {
+			compatibilityLevel := ""
+			if v := props.CompatibilityLevel; v != nil {
+				compatibilityLevel = string(*v)
+			}
+			d.Set("compatibility_level", compatibilityLevel)
+
+			dataLocale := ""
+			if v := props.DataLocale; v != nil {
+				dataLocale = *v
+			}
+			d.Set("data_locale", dataLocale)
+
+			var lateArrival int64
+			if v := props.EventsLateArrivalMaxDelayInSeconds; v != nil {
+				lateArrival = *v
+			}
+			d.Set("events_late_arrival_max_delay_in_seconds", lateArrival)
+
+			var maxDelay int64
+			if v := props.EventsOutOfOrderMaxDelayInSeconds; v != nil {
+				maxDelay = *v
+			}
+			d.Set("events_out_of_order_max_delay_in_seconds", maxDelay)
+
+			orderPolicy := ""
+			if v := props.EventsOutOfOrderPolicy; v != nil {
+				orderPolicy = string(*v)
+			}
+			d.Set("events_out_of_order_policy", orderPolicy)
+
+			outputPolicy := ""
+			if v := props.OutputErrorPolicy; v != nil {
+				outputPolicy = string(*v)
+			}
+			d.Set("output_error_policy", outputPolicy)
+
+			lastOutputTime := ""
+			if v := props.LastOutputEventTime; v != nil {
+				lastOutputTime = *v
+			}
+			d.Set("last_output_time", lastOutputTime)
+
+			startTime := ""
+			if v := props.OutputStartTime; v != nil {
+				startTime = *v
+			}
+			d.Set("start_time", startTime)
+
+			startMode := ""
+			if v := props.OutputStartMode; v != nil {
+				startMode = string(*v)
+			}
+			d.Set("start_mode", startMode)
+
+			jobId := ""
+			if v := props.JobId; v != nil {
+				jobId = *v
+			}
+			d.Set("job_id", jobId)
+
+			if props.Transformation != nil && props.Transformation.Properties != nil {
+				var streamingUnits int64
+				if v := props.Transformation.Properties.StreamingUnits; v != nil {
+					streamingUnits = *v
+				}
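// The deref-or-zero blocks above are the idiomatic guard for the new SDK's
// pointer-typed optional fields. A hypothetical generic helper (not part of
// this change; assumes Go 1.18+) expressing the same pattern once:
func pointerValue[T any](input *T) T {
	var zero T
	if input != nil {
		return *input
	}
	return zero
}

// e.g. d.Set("data_locale", pointerValue(props.DataLocale))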
d.Set("streaming_units", streamingUnits) + + query := "" + if v := props.Transformation.Properties.Query; v != nil { + query = *v + } + d.Set("transformation_query", query) + } } } - return nil } diff --git a/internal/services/streamanalytics/stream_analytics_job_resource.go b/internal/services/streamanalytics/stream_analytics_job_resource.go index bf9454a55fec..a5c9bc3d8643 100644 --- a/internal/services/streamanalytics/stream_analytics_job_resource.go +++ b/internal/services/streamanalytics/stream_analytics_job_resource.go @@ -2,20 +2,21 @@ package streamanalytics import ( "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations" "log" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-helpers/resourcemanager/identity" + "github.com/hashicorp/go-azure-helpers/resourcemanager/tags" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-azurerm/helpers/azure" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/locks" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" - "github.com/hashicorp/terraform-provider-azurerm/internal/tags" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" @@ -29,7 +30,7 @@ func resourceStreamAnalyticsJob() *pluginsdk.Resource { Update: resourceStreamAnalyticsJobCreateUpdate, Delete: resourceStreamAnalyticsJobDelete, Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { - _, err := parse.StreamingJobID(id) + _, err := streamingjobs.ParseStreamingJobID(id) return err }), @@ -64,9 +65,9 @@ func resourceStreamAnalyticsJob() *pluginsdk.Resource { Computed: true, ValidateFunc: validation.StringInSlice([]string{ // values found in the other API the portal uses - string(streamanalytics.CompatibilityLevelOneFullStopZero), + string(streamingjobs.CompatibilityLevelOnePointZero), "1.1", - "1.2", + string(streamingjobs.CompatibilityLevelOnePointTwo), }, false), }, @@ -97,10 +98,10 @@ func resourceStreamAnalyticsJob() *pluginsdk.Resource { Type: pluginsdk.TypeString, Optional: true, ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.EventsOutOfOrderPolicyAdjust), - string(streamanalytics.EventsOutOfOrderPolicyDrop), + string(streamingjobs.EventsOutOfOrderPolicyAdjust), + string(streamingjobs.EventsOutOfOrderPolicyDrop), }, false), - Default: string(streamanalytics.EventsOutOfOrderPolicyAdjust), + Default: string(streamingjobs.EventsOutOfOrderPolicyAdjust), }, "type": { @@ -108,20 +109,20 @@ func resourceStreamAnalyticsJob() *pluginsdk.Resource { Optional: true, ForceNew: true, ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.JobTypeCloud), - string(streamanalytics.JobTypeEdge), + string(streamingjobs.JobTypeCloud), + string(streamingjobs.JobTypeEdge), }, false), - Default: 
string(streamanalytics.JobTypeCloud), + Default: string(streamingjobs.JobTypeCloud), }, "output_error_policy": { Type: pluginsdk.TypeString, Optional: true, ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.OutputErrorPolicyDrop), - string(streamanalytics.OutputErrorPolicyStop), + string(streamingjobs.OutputErrorPolicyDrop), + string(streamingjobs.OutputErrorPolicyStop), }, false), - Default: string(streamanalytics.OutputErrorPolicyDrop), + Default: string(streamingjobs.OutputErrorPolicyDrop), }, "streaming_units": { @@ -133,10 +134,10 @@ func resourceStreamAnalyticsJob() *pluginsdk.Resource { "content_storage_policy": { Type: pluginsdk.TypeString, Optional: true, - Default: string(streamanalytics.ContentStoragePolicySystemAccount), + Default: string(streamingjobs.ContentStoragePolicySystemAccount), ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.ContentStoragePolicySystemAccount), - string(streamanalytics.ContentStoragePolicyJobStorageAccount), + string(streamingjobs.ContentStoragePolicySystemAccount), + string(streamingjobs.ContentStoragePolicyJobStorageAccount), }, false), }, @@ -149,9 +150,9 @@ func resourceStreamAnalyticsJob() *pluginsdk.Resource { Type: pluginsdk.TypeString, Required: true, ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.AuthenticationModeConnectionString), - string(streamanalytics.AuthenticationModeMsi), - string(streamanalytics.AuthenticationModeUserToken), + string(streamingjobs.AuthenticationModeConnectionString), + string(streamingjobs.AuthenticationModeMsi), + string(streamingjobs.AuthenticationModeUserToken), }, false), }, @@ -184,7 +185,7 @@ func resourceStreamAnalyticsJob() *pluginsdk.Resource { Computed: true, }, - "tags": tags.Schema(), + "tags": commonschema.Tags(), }, } } @@ -198,13 +199,14 @@ func resourceStreamAnalyticsJobCreateUpdate(d *pluginsdk.ResourceData, meta inte log.Printf("[INFO] preparing arguments for Azure Stream Analytics Job creation.") - id := parse.NewStreamingJobID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) + id := streamingjobs.NewStreamingJobID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) locks.ByID(id.ID()) defer locks.UnlockByID(id.ID()) if d.IsNewResource() { - existing, err := client.Get(ctx, id.ResourceGroupName, id.Name, "") + var opts streamingjobs.GetOperationOptions + existing, err := client.Get(ctx, id, opts) if err != nil { if !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) @@ -216,32 +218,24 @@ func resourceStreamAnalyticsJobCreateUpdate(d *pluginsdk.ResourceData, meta inte } } - compatibilityLevel := d.Get("compatibility_level").(string) - eventsLateArrivalMaxDelayInSeconds := d.Get("events_late_arrival_max_delay_in_seconds").(int) - eventsOutOfOrderMaxDelayInSeconds := d.Get("events_out_of_order_max_delay_in_seconds").(int) - eventsOutOfOrderPolicy := d.Get("events_out_of_order_policy").(string) - jobType := d.Get("type").(string) - location := azure.NormalizeLocation(d.Get("location").(string)) - outputErrorPolicy := d.Get("output_error_policy").(string) - transformationQuery := d.Get("transformation_query").(string) - contentStoragePolicy := d.Get("content_storage_policy").(string) - t := d.Get("tags").(map[string]interface{}) - // needs to be defined inline for a Create but via a separate API for Update - transformation := streamanalytics.Transformation{ + transformation := streamingjobs.Transformation{ 
Name: utils.String("main"), - TransformationProperties: &streamanalytics.TransformationProperties{ - Query: utils.String(transformationQuery), + Properties: &streamingjobs.TransformationProperties{ + Query: utils.String(d.Get("transformation_query").(string)), }, } - if jobType == string(streamanalytics.JobTypeEdge) { + contentStoragePolicy := d.Get("content_storage_policy").(string) + jobType := d.Get("type").(string) + + if jobType == string(streamingjobs.JobTypeEdge) { if _, ok := d.GetOk("streaming_units"); ok { return fmt.Errorf("the job type `Edge` doesn't support `streaming_units`") } } else { if v, ok := d.GetOk("streaming_units"); ok { - transformation.TransformationProperties.StreamingUnits = utils.Int32(int32(v.(int))) + transformation.Properties.StreamingUnits = utils.Int64(int64(v.(int))) } else { return fmt.Errorf("`streaming_units` must be set when `type` is `Cloud`") } @@ -252,79 +246,88 @@ func resourceStreamAnalyticsJobCreateUpdate(d *pluginsdk.ResourceData, meta inte return fmt.Errorf("expanding `identity`: %+v", err) } - props := streamanalytics.StreamingJob{ - Name: utils.String(id.Name), - Location: utils.String(location), - StreamingJobProperties: &streamanalytics.StreamingJobProperties{ - Sku: &streamanalytics.Sku{ - Name: streamanalytics.SkuNameStandard, + props := streamingjobs.StreamingJob{ + Name: utils.String(id.JobName), + Location: utils.String(azure.NormalizeLocation(d.Get("location").(string))), + Properties: &streamingjobs.StreamingJobProperties{ + Sku: &streamingjobs.Sku{ + Name: utils.ToPtr(streamingjobs.SkuNameStandard), }, - ContentStoragePolicy: streamanalytics.ContentStoragePolicy(contentStoragePolicy), - CompatibilityLevel: streamanalytics.CompatibilityLevel(compatibilityLevel), - EventsLateArrivalMaxDelayInSeconds: utils.Int32(int32(eventsLateArrivalMaxDelayInSeconds)), - EventsOutOfOrderMaxDelayInSeconds: utils.Int32(int32(eventsOutOfOrderMaxDelayInSeconds)), - EventsOutOfOrderPolicy: streamanalytics.EventsOutOfOrderPolicy(eventsOutOfOrderPolicy), - OutputErrorPolicy: streamanalytics.OutputErrorPolicy(outputErrorPolicy), - JobType: streamanalytics.JobType(jobType), + ContentStoragePolicy: utils.ToPtr(streamingjobs.ContentStoragePolicy(contentStoragePolicy)), + CompatibilityLevel: utils.ToPtr(streamingjobs.CompatibilityLevel(d.Get("compatibility_level").(string))), + EventsLateArrivalMaxDelayInSeconds: utils.Int64(int64(d.Get("events_late_arrival_max_delay_in_seconds").(int))), + EventsOutOfOrderMaxDelayInSeconds: utils.Int64(int64(d.Get("events_out_of_order_max_delay_in_seconds").(int))), + EventsOutOfOrderPolicy: utils.ToPtr(streamingjobs.EventsOutOfOrderPolicy(d.Get("events_out_of_order_policy").(string))), + OutputErrorPolicy: utils.ToPtr(streamingjobs.OutputErrorPolicy(d.Get("output_error_policy").(string))), + JobType: utils.ToPtr(streamingjobs.JobType(jobType)), }, Identity: expandedIdentity, - Tags: tags.Expand(t), + Tags: tags.Expand(d.Get("tags").(map[string]interface{})), } - if contentStoragePolicy == string(streamanalytics.ContentStoragePolicyJobStorageAccount) { + if contentStoragePolicy == string(streamingjobs.ContentStoragePolicyJobStorageAccount) { if v, ok := d.GetOk("job_storage_account"); ok { - props.JobStorageAccount = expandJobStorageAccount(v.([]interface{})) + props.Properties.JobStorageAccount = expandJobStorageAccount(v.([]interface{})) } else { return fmt.Errorf("`job_storage_account` must be set when `content_storage_policy` is `JobStorageAccount`") } } - if jobType == string(streamanalytics.JobTypeEdge) { + if jobType 
== string(streamingjobs.JobTypeEdge) {
		if _, ok := d.GetOk("stream_analytics_cluster_id"); ok {
			return fmt.Errorf("the job type `Edge` doesn't support `stream_analytics_cluster_id`")
		}
	} else {
		if streamAnalyticsCluster := d.Get("stream_analytics_cluster_id"); streamAnalyticsCluster != "" {
-			props.StreamingJobProperties.Cluster = &streamanalytics.ClusterInfo{
-				ID: utils.String(streamAnalyticsCluster.(string)),
+			props.Properties.Cluster = &streamingjobs.ClusterInfo{
+				Id: utils.String(streamAnalyticsCluster.(string)),
			}
		} else {
-			props.StreamingJobProperties.Cluster = &streamanalytics.ClusterInfo{
-				ID: nil,
+			props.Properties.Cluster = &streamingjobs.ClusterInfo{
+				Id: nil,
			}
		}
	}

	if dataLocale, ok := d.GetOk("data_locale"); ok {
-		props.StreamingJobProperties.DataLocale = utils.String(dataLocale.(string))
+		props.Properties.DataLocale = utils.String(dataLocale.(string))
	}

	if d.IsNewResource() {
-		props.StreamingJobProperties.Transformation = &transformation
+		props.Properties.Transformation = &transformation

-		future, err := client.CreateOrReplace(ctx, props, id.ResourceGroupName, id.Name, "", "")
-		if err != nil {
+		var opts streamingjobs.CreateOrReplaceOperationOptions
+		if err := client.CreateOrReplaceThenPoll(ctx, id, props, opts); err != nil {
			return fmt.Errorf("creating %s: %+v", id, err)
		}

-		if err = future.WaitForCompletionRef(ctx, client.Client); err != nil {
-			return fmt.Errorf("waiting for creation of %s: %+v", id, err)
-		}
-
		d.SetId(id.ID())
	} else {
-		if _, err := client.Update(ctx, props, id.ResourceGroupName, id.Name, ""); err != nil {
+		var updateOpts streamingjobs.UpdateOperationOptions
+		if _, err := client.Update(ctx, id, props, updateOpts); err != nil {
			return fmt.Errorf("updating %s: %+v", id, err)
		}

-		job, err := client.Get(ctx, id.ResourceGroupName, id.Name, "transformation")
+		var getOpts streamingjobs.GetOperationOptions
+		job, err := client.Get(ctx, id, getOpts)
		if err != nil {
			return err
		}

-		if readTransformation := job.Transformation; readTransformation != nil {
-			if _, err := transformationsClient.Update(ctx, transformation, id.ResourceGroupName, id.Name, *readTransformation.Name, ""); err != nil {
-				return fmt.Errorf("updating transformation for %s: %+v", id, err)
+		if job.Model != nil && job.Model.Properties != nil {
+			if job.Model.Properties.Transformation != nil && job.Model.Properties.Transformation.Name != nil {
+				transformationId := transformations.NewTransformationID(subscriptionId, id.ResourceGroupName, id.JobName, *job.Model.Properties.Transformation.Name)
+				existingTransformation, err := transformationsClient.Get(ctx, transformationId)
+				if err != nil {
+					return fmt.Errorf("retrieving %s: %+v", transformationId, err)
+				}
+
+				if existingTransformation.Model != nil && existingTransformation.Model.Properties != nil {
+					// carry the updated query/streaming units over - updating with the unmodified model would be a no-op
+					existingTransformation.Model.Properties.Query = transformation.Properties.Query
+					existingTransformation.Model.Properties.StreamingUnits = transformation.Properties.StreamingUnits
+
+					var updateOpts transformations.UpdateOperationOptions
+					if _, err := transformationsClient.Update(ctx, transformationId, *existingTransformation.Model, updateOpts); err != nil {
+						return fmt.Errorf("updating transformation for %s: %+v", id, err)
+					}
+				}
			}
		}
	}
@@ -337,12 +340,13 @@ func resourceStreamAnalyticsJobRead(d *pluginsdk.ResourceData, meta interface{})
	ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
	defer cancel()

-	id, err := parse.StreamingJobID(d.Id())
+	id, err := streamingjobs.ParseStreamingJobID(d.Id())
	if err != nil {
		return err
	}

-	resp, err := client.Get(ctx, id.ResourceGroupName, id.Name, "transformation")
+	var opts streamingjobs.GetOperationOptions
+	resp, err := client.Get(ctx, *id, opts)
	if err != nil {
		if response.WasNotFound(resp.HttpResponse) {
			log.Printf("[DEBUG] %s was not found - removing from state!", *id)
- removing from state!", *id) @@ -353,47 +357,113 @@ func resourceStreamAnalyticsJobRead(d *pluginsdk.ResourceData, meta interface{}) return fmt.Errorf("retrieving %s: %+v", *id, err) } - d.Set("name", id.Name) + d.Set("name", id.JobName) d.Set("resource_group_name", id.ResourceGroupName) - if resp.Location != nil { - d.Set("location", azure.NormalizeLocation(*resp.Location)) - } - - if err := d.Set("identity", flattenJobIdentity(resp.Identity)); err != nil { - return fmt.Errorf("setting `identity`: %v", err) - } + if model := resp.Model; model != nil { + d.Set("location", azure.NormalizeLocation(*model.Location)) - if props := resp.StreamingJobProperties; props != nil { - d.Set("compatibility_level", string(props.CompatibilityLevel)) - d.Set("data_locale", props.DataLocale) - if props.EventsLateArrivalMaxDelayInSeconds != nil { - d.Set("events_late_arrival_max_delay_in_seconds", int(*props.EventsLateArrivalMaxDelayInSeconds)) - } - if props.EventsOutOfOrderMaxDelayInSeconds != nil { - d.Set("events_out_of_order_max_delay_in_seconds", int(*props.EventsOutOfOrderMaxDelayInSeconds)) - } - if props.Cluster != nil { - d.Set("stream_analytics_cluster_id", props.Cluster.ID) + if err := d.Set("identity", flattenJobIdentity(model.Identity)); err != nil { + return fmt.Errorf("setting `identity`: %v", err) } - d.Set("events_out_of_order_policy", string(props.EventsOutOfOrderPolicy)) - d.Set("output_error_policy", string(props.OutputErrorPolicy)) - d.Set("type", string(props.JobType)) - d.Set("content_storage_policy", string(props.ContentStoragePolicy)) - d.Set("job_storage_account", flattenJobStorageAccount(d, props.JobStorageAccount)) - - // Computed - d.Set("job_id", props.JobID) - - if transformation := props.Transformation; transformation != nil { - if units := transformation.StreamingUnits; units != nil { - d.Set("streaming_units", int(*units)) + if props := model.Properties; props != nil { + compatibilityLevel := "" + if v := props.CompatibilityLevel; v != nil { + compatibilityLevel = string(*v) + } + d.Set("compatibility_level", compatibilityLevel) + + dataLocale := "" + if v := props.DataLocale; v != nil { + dataLocale = *v + } + d.Set("data_locale", dataLocale) + + var lateArrival int64 + if v := props.EventsLateArrivalMaxDelayInSeconds; v != nil { + lateArrival = *v + } + d.Set("events_late_arrival_max_delay_in_seconds", lateArrival) + + var maxDelay int64 + if v := props.EventsLateArrivalMaxDelayInSeconds; v != nil { + maxDelay = *v + } + d.Set("events_out_of_order_max_delay_in_seconds", maxDelay) + + orderPolicy := "" + if v := props.EventsOutOfOrderPolicy; v != nil { + orderPolicy = string(*v) } - d.Set("transformation_query", transformation.Query) + d.Set("events_out_of_order_policy", orderPolicy) + + outputPolicy := "" + if v := props.OutputErrorPolicy; v != nil { + outputPolicy = string(*v) + } + d.Set("output_error_policy", outputPolicy) + + lastOutputTime := "" + if v := props.LastOutputEventTime; v != nil { + lastOutputTime = *v + } + d.Set("last_output_time", lastOutputTime) + + startTime := "" + if v := props.OutputStartTime; v != nil { + startTime = *v + } + d.Set("start_time", startTime) + + startMode := "" + if v := props.OutputStartMode; v != nil { + startMode = string(*v) + } + d.Set("start_mode", startMode) + + cluster := "" + if props.Cluster != nil && props.Cluster.Id != nil { + cluster = *props.Cluster.Id + } + d.Set("stream_analytics_cluster_id", cluster) + + jobType := "" + if v := props.JobType; v != nil { + jobType = string(*v) + } + d.Set("type", jobType) + + 
storagePolicy := "" + if v := props.ContentStoragePolicy; v != nil { + storagePolicy = string(*v) + } + d.Set("content_storage_policy", storagePolicy) + + jobId := "" + if v := props.JobId; v != nil { + jobId = *v + } + d.Set("job_id", jobId) + + d.Set("job_storage_account", flattenJobStorageAccount(d, props.JobStorageAccount)) + + if transformation := props.Transformation; transformation != nil { + var streamingUnits int64 + if v := props.Transformation.Properties.StreamingUnits; v != nil { + streamingUnits = *v + } + d.Set("streaming_units", streamingUnits) + + query := "" + if v := props.Transformation.Properties.Query; v != nil { + query = *v + } + d.Set("transformation_query", query) + } + return tags.FlattenAndSet(d, model.Tags) } } - - return tags.FlattenAndSet(d, resp.Tags) + return nil } func resourceStreamAnalyticsJobDelete(d *pluginsdk.ResourceData, meta interface{}) error { @@ -401,24 +471,19 @@ func resourceStreamAnalyticsJobDelete(d *pluginsdk.ResourceData, meta interface{ ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() - id, err := parse.StreamingJobID(d.Id()) + id, err := streamingjobs.ParseStreamingJobID(d.Id()) if err != nil { return err } - future, err := client.Delete(ctx, id.ResourceGroupName, id.Name) - if err != nil { + if err := client.DeleteThenPoll(ctx, *id); err != nil { return fmt.Errorf("deleting %s: %+v", *id, err) } - if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting for deletion of %s: %+v", *id, err) - } - return nil } -func expandStreamAnalyticsJobIdentity(input []interface{}) (*streamanalytics.Identity, error) { +func expandStreamAnalyticsJobIdentity(input []interface{}) (*streamingjobs.Identity, error) { expanded, err := identity.ExpandSystemAssigned(input) if err != nil { return nil, err @@ -433,12 +498,12 @@ func expandStreamAnalyticsJobIdentity(input []interface{}) (*streamanalytics.Ide return nil, nil } - return &streamanalytics.Identity{ + return &streamingjobs.Identity{ Type: utils.String(string(expanded.Type)), }, nil } -func flattenJobIdentity(identity *streamanalytics.Identity) []interface{} { +func flattenJobIdentity(identity *streamingjobs.Identity) []interface{} { if identity == nil { return nil } @@ -449,13 +514,13 @@ func flattenJobIdentity(identity *streamanalytics.Identity) []interface{} { } var tenantId string - if identity.TenantID != nil { - tenantId = *identity.TenantID + if identity.TenantId != nil { + tenantId = *identity.TenantId } var principalId string - if identity.PrincipalID != nil { - principalId = *identity.PrincipalID + if identity.PrincipalId != nil { + principalId = *identity.PrincipalId } return []interface{}{ @@ -467,7 +532,7 @@ func flattenJobIdentity(identity *streamanalytics.Identity) []interface{} { } } -func expandJobStorageAccount(input []interface{}) *streamanalytics.JobStorageAccount { +func expandJobStorageAccount(input []interface{}) *streamingjobs.JobStorageAccount { if input == nil { return nil } @@ -477,21 +542,21 @@ func expandJobStorageAccount(input []interface{}) *streamanalytics.JobStorageAcc accountName := v["account_name"].(string) accountKey := v["account_key"].(string) - return &streamanalytics.JobStorageAccount{ - AuthenticationMode: streamanalytics.AuthenticationMode(authenticationMode), + return &streamingjobs.JobStorageAccount{ + AuthenticationMode: utils.ToPtr(streamingjobs.AuthenticationMode(authenticationMode)), AccountName: utils.String(accountName), AccountKey: utils.String(accountKey), } } -func 
-func flattenJobStorageAccount(d *pluginsdk.ResourceData, input *streamanalytics.JobStorageAccount) []interface{} {
+func flattenJobStorageAccount(d *pluginsdk.ResourceData, input *streamingjobs.JobStorageAccount) []interface{} {
	if input == nil {
		return []interface{}{}
	}

+	authenticationMode := ""
+	if v := input.AuthenticationMode; v != nil {
+		authenticationMode = string(*v)
+	}
+
	return []interface{}{
		map[string]interface{}{
-			"authentication_mode": string(input.AuthenticationMode),
+			"authentication_mode": authenticationMode,
			"account_name":        *input.AccountName,
			"account_key":         d.Get("job_storage_account.0.account_key").(string),
		},
diff --git a/internal/services/streamanalytics/stream_analytics_job_resource_test.go b/internal/services/streamanalytics/stream_analytics_job_resource_test.go
index 999b1d1895e8..0ee008d2ef03 100644
--- a/internal/services/streamanalytics/stream_analytics_job_resource_test.go
+++ b/internal/services/streamanalytics/stream_analytics_job_resource_test.go
@@ -8,7 +8,6 @@ import (
	"github.com/hashicorp/terraform-provider-azurerm/internal/acceptance"
	"github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check"
	"github.com/hashicorp/terraform-provider-azurerm/internal/clients"
-	"github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse"
	"github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk"
	"github.com/hashicorp/terraform-provider-azurerm/utils"
)
@@ -149,7 +148,7 @@ func TestAccStreamAnalyticsJob_jobStorageAccount(t *testing.T) {
}

func (r StreamAnalyticsJobResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) {
-	id, err := parse.StreamingJobID(state.ID)
+	id, err := streamingjobs.ParseStreamingJobID(state.ID)
	if err != nil {
		return nil, err
	}
diff --git a/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go b/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go
index 4b9cf42da8b9..4119c22a460d 100644
--- a/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go
+++ b/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go
@@ -3,9 +3,11 @@ package streamanalytics
import (
	"context"
	"fmt"
+	"github.com/hashicorp/go-azure-helpers/lang/response"
+	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs"
+	"github.com/hashicorp/terraform-provider-azurerm/utils"
	"time"

-	"github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics"
	"github.com/Azure/go-autorest/autorest/date"
	"github.com/hashicorp/terraform-provider-azurerm/helpers/validate"
	"github.com/hashicorp/terraform-provider-azurerm/internal/locks"
@@ -38,9 +40,9 @@ func (r JobScheduleResource) Arguments() map[string]*pluginsdk.Schema {
			Type:     pluginsdk.TypeString,
			Required: true,
			ValidateFunc: validation.StringInSlice([]string{
-				string(streamanalytics.OutputStartModeCustomTime),
-				string(streamanalytics.OutputStartModeJobStartTime),
-				string(streamanalytics.OutputStartModeLastOutputEventTime),
+				string(streamingjobs.OutputStartModeCustomTime),
+				string(streamingjobs.OutputStartModeJobStartTime),
+				string(streamingjobs.OutputStartModeLastOutputEventTime),
			}, false),
		},

@@ -84,34 +86,35 @@ func (r JobScheduleResource) Create() sdk.ResourceFunc {
			}
			client := metadata.Client.StreamAnalytics.JobsClient

-			streamAnalyticsId, err := parse.StreamingJobID(model.StreamAnalyticsJob)
+			streamAnalyticsId, err := streamingjobs.ParseStreamingJobID(model.StreamAnalyticsJob)
			if err != nil {
				return err
			}

			// This is a virtual resource so the last segment is hardcoded
-			id := parse.NewStreamingJobScheduleID(streamAnalyticsId.SubscriptionId, streamAnalyticsId.ResourceGroup, streamAnalyticsId.Name, "default")
+			id := parse.NewStreamingJobScheduleID(streamAnalyticsId.SubscriptionId, streamAnalyticsId.ResourceGroupName, streamAnalyticsId.JobName, "default")

			locks.ByID(id.ID())
			defer locks.UnlockByID(id.ID())

-			existing, err := client.Get(ctx, id.ResourceGroupName, id.JobName, "")
+			var opts streamingjobs.GetOperationOptions
+			existing, err := client.Get(ctx, *streamAnalyticsId, opts)
			if err != nil && !response.WasNotFound(existing.HttpResponse) {
				return fmt.Errorf("checking for presence of existing %s: %+v", id, err)
			}

-			outputStartMode := streamanalytics.OutputStartMode(model.StartMode)
-			if outputStartMode == streamanalytics.OutputStartModeLastOutputEventTime {
-				if v := existing.StreamingJobProperties.LastOutputEventTime; v == nil {
+			outputStartMode := streamingjobs.OutputStartMode(model.StartMode)
+			if outputStartMode == streamingjobs.OutputStartModeLastOutputEventTime {
+				if existing.Model == nil || existing.Model.Properties == nil || existing.Model.Properties.LastOutputEventTime == nil {
					return fmt.Errorf("`start_mode` can only be set to `LastOutputEventTime` if this job was previously started")
				}
			}

-			props := &streamanalytics.StartStreamingJobParameters{
-				OutputStartMode: outputStartMode,
+			props := &streamingjobs.StartStreamingJobParameters{
+				OutputStartMode: utils.ToPtr(outputStartMode),
			}

-			if outputStartMode == streamanalytics.OutputStartModeCustomTime {
+			if outputStartMode == streamingjobs.OutputStartModeCustomTime {
				if model.StartTime == "" {
					return fmt.Errorf("`start_time` must be specified if `start_mode` is set to `CustomTime`")
				} else {
@@ -119,19 +122,14 @@ func (r JobScheduleResource) Create() sdk.ResourceFunc {
					outputStartTime := &date.Time{
						Time: startTime,
					}
-					props.OutputStartTime = outputStartTime
+					props.OutputStartTime = utils.String(outputStartTime.String())
				}
			}

-			future, err := client.Start(ctx, id.ResourceGroupName, id.JobName, props)
-			if err != nil {
+			if err := client.StartThenPoll(ctx, *streamAnalyticsId, *props); err != nil {
				return fmt.Errorf("creating %s: %+v", id, err)
			}

-			if err = future.WaitForCompletionRef(ctx, client.Client); err != nil {
-				return fmt.Errorf("waiting on create/update for %s: %+v", id, err)
-			}
-
			metadata.SetID(id)

			return nil
@@ -149,9 +147,10 @@ func (r JobScheduleResource) Read() sdk.ResourceFunc {
				return err
			}

-			streamAnalyticsId := parse.NewStreamingJobID(id.SubscriptionId, id.ResourceGroupName, id.JobName)
+			streamAnalyticsId := streamingjobs.NewStreamingJobID(id.SubscriptionId, id.ResourceGroup, id.StreamingjobName)

-			resp, err := client.Get(ctx, id.ResourceGroupName, id.JobName, "")
+			var opts streamingjobs.GetOperationOptions
+			resp, err := client.Get(ctx, streamAnalyticsId, opts)
			if err != nil {
				if response.WasNotFound(resp.HttpResponse) {
					return metadata.MarkAsGone(id)
@@ -159,27 +158,33 @@ func (r JobScheduleResource) Read() sdk.ResourceFunc {
				return fmt.Errorf("retrieving %s: %+v", *id, err)
			}

-			if props := resp.StreamingJobProperties; props != nil {
-				startTime := ""
-				if v := props.OutputStartTime; v != nil {
-					startTime = v.String()
-				}
+			if model := resp.Model; model != nil {
+				if props := model.Properties; props != nil {
+					startTime := ""
+					if v := props.OutputStartTime; v != nil {
+						startTime = *v
+					}

-				lastOutputTime := ""
-				if v := props.LastOutputEventTime; v != nil {
-					lastOutputTime = v.String()
-				}
+					lastOutputTime := ""
+					if v := props.LastOutputEventTime; v != nil {
+						lastOutputTime = *v
+					}
-				state := JobScheduleResourceModel{
-					StreamAnalyticsJob: streamAnalyticsId.ID(),
-					StartMode:          string(props.OutputStartMode),
-					StartTime:          startTime,
-					LastOutputTime:     lastOutputTime,
-				}
-
-				return metadata.Encode(&state)
-			}
+					startMode := ""
+					if v := props.OutputStartMode; v != nil {
+						startMode = string(*v)
+					}

+					state := JobScheduleResourceModel{
+						StreamAnalyticsJob: streamAnalyticsId.ID(),
+						StartMode:          startMode,
+						StartTime:          startTime,
+						LastOutputTime:     lastOutputTime,
+					}
+					return metadata.Encode(&state)
+				}
+			}
			return nil
		},
	}
}

@@ -201,42 +206,36 @@ func (r JobScheduleResource) Update() sdk.ResourceFunc {
			}

			if metadata.ResourceData.HasChanges("start_mode", "start_time") {
-				outputStartMode := streamanalytics.OutputStartMode(state.StartMode)
+				outputStartMode := streamingjobs.OutputStartMode(state.StartMode)
				startTime, _ := date.ParseTime(time.RFC3339, state.StartTime)
				outputStartTime := &date.Time{
					Time: startTime,
				}

-				props := &streamanalytics.StartStreamingJobParameters{
-					OutputStartMode: outputStartMode,
+				props := &streamingjobs.StartStreamingJobParameters{
+					OutputStartMode: utils.ToPtr(outputStartMode),
				}

-				if outputStartMode == streamanalytics.OutputStartModeCustomTime {
-					props.OutputStartTime = outputStartTime
+				if outputStartMode == streamingjobs.OutputStartModeCustomTime {
+					props.OutputStartTime = utils.String(outputStartTime.String())
				}

-				existing, err := client.Get(ctx, id.ResourceGroupName, id.JobName, "")
+				var opts streamingjobs.GetOperationOptions
+				streamingJobId := streamingjobs.NewStreamingJobID(id.SubscriptionId, id.ResourceGroup, id.StreamingjobName)
+				existing, err := client.Get(ctx, streamingJobId, opts)
				if err != nil {
					return fmt.Errorf("retrieving %s: %+v", *id, err)
				}

-				if v := existing.StreamingJobProperties; v != nil && v.JobState != nil && *v.JobState == "Running" {
-					future, err := client.Stop(ctx, id.ResourceGroupName, id.JobName)
-					if err != nil {
-						return err
-					}
-					if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
-						return fmt.Errorf("waiting for %s to stop: %+v", *id, err)
+				if existing.Model != nil && existing.Model.Properties != nil && existing.Model.Properties.JobState != nil && *existing.Model.Properties.JobState == "Running" {
+					if err := client.StopThenPoll(ctx, streamingJobId); err != nil {
+						return fmt.Errorf("stopping %s: %+v", *id, err)
					}
				}

-				future, err := client.Start(ctx, id.ResourceGroupName, id.JobName, props)
-				if err != nil {
+				if err := client.StartThenPoll(ctx, streamingJobId, *props); err != nil {
					return fmt.Errorf("updating %s: %+v", *id, err)
				}
-				if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
-					return fmt.Errorf("waiting for update of %q: %+v", *id, err)
-				}
			}

			return nil
@@ -256,14 +255,10 @@ func (r JobScheduleResource) Delete() sdk.ResourceFunc {

			metadata.Logger.Infof("deleting %s", *id)

-			future, err := client.Stop(ctx, id.ResourceGroupName, id.JobName)
-			if err != nil {
+			streamingJobId := streamingjobs.NewStreamingJobID(id.SubscriptionId, id.ResourceGroup, id.StreamingjobName)
+			if err := client.StopThenPoll(ctx, streamingJobId); err != nil {
				return fmt.Errorf("deleting %s: %+v", *id, err)
			}
-
-			if err = future.WaitForCompletionRef(ctx, client.Client); err != nil {
-				return fmt.Errorf("waiting for deletion of %s: %+v", id, err)
-			}
			return nil
		},
	}
}
diff --git a/internal/services/streamanalytics/stream_analytics_job_schedule_resource_test.go b/internal/services/streamanalytics/stream_analytics_job_schedule_resource_test.go
index ea976a6b9e2b..500a1030583b 100644
--- a/internal/services/streamanalytics/stream_analytics_job_schedule_resource_test.go
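// The schedule resource above swaps the old future-based calls
// (client.Start + future.WaitForCompletionRef) for the new SDK's combined
// call-and-poll helpers. A minimal sketch, assuming the streamingjobs client
// wired up in client.go:
func exampleStartJob(ctx context.Context, client *streamingjobs.StreamingJobsClient, id streamingjobs.StreamingJobId) error {
	mode := streamingjobs.OutputStartModeJobStartTime
	params := streamingjobs.StartStreamingJobParameters{
		OutputStartMode: &mode,
	}
	// StartThenPoll issues the Start request and blocks until the long-running
	// operation completes, replacing the separate future + WaitForCompletionRef dance.
	if err := client.StartThenPoll(ctx, id, params); err != nil {
		return fmt.Errorf("starting %s: %+v", id, err)
	}
	return nil
}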
+++ b/internal/services/streamanalytics/stream_analytics_job_schedule_resource_test.go @@ -3,6 +3,8 @@ package streamanalytics_test import ( "context" "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "testing" "time" @@ -74,14 +76,17 @@ func (r StreamAnalyticsJobScheduleResource) Exists(ctx context.Context, client * return nil, err } - resp, err := client.StreamAnalytics.JobsClient.Get(ctx, id.ResourceGroupName, id.JobName, "") + streamingJobId := streamingjobs.NewStreamingJobID(id.SubscriptionId, id.ResourceGroup, id.StreamingjobName) + + var opts streamingjobs.GetOperationOptions + resp, err := client.StreamAnalytics.JobsClient.Get(ctx, streamingJobId, opts) if err != nil { if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), err } return nil, fmt.Errorf("retrieving %s: %+v", *id, err) } - return utils.Bool(resp.StreamingJobProperties != nil && resp.StreamingJobProperties.OutputStartTime != nil), nil + return utils.Bool(resp.Model != nil && resp.Model.Properties.OutputStartTime != nil), nil } func (r StreamAnalyticsJobScheduleResource) basic(data acceptance.TestData) string { diff --git a/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource.go b/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource.go index 97a1eed38c87..ad3cad3a2693 100644 --- a/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource.go +++ b/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource.go @@ -3,14 +3,13 @@ package streamanalytics import ( "context" "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints" "strings" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/utils" @@ -35,7 +34,7 @@ func (r ManagedPrivateEndpointResource) ResourceType() string { } func (r ManagedPrivateEndpointResource) IDValidationFunc() pluginsdk.SchemaValidateFunc { - return validate.PrivateEndpointID + return privateendpoints.ValidatePrivateEndpointID } func (r ManagedPrivateEndpointResource) Arguments() map[string]*pluginsdk.Schema { @@ -88,9 +87,9 @@ func (r ManagedPrivateEndpointResource) Create() sdk.ResourceFunc { client := metadata.Client.StreamAnalytics.EndpointsClient subscriptionId := metadata.Client.Account.SubscriptionId - id := parse.NewPrivateEndpointID(subscriptionId, model.ResourceGroup, model.StreamAnalyticsCluster, model.Name) + id := privateendpoints.NewPrivateEndpointID(subscriptionId, model.ResourceGroup, model.StreamAnalyticsCluster, model.Name) - existing, err := client.Get(ctx, id.ResourceGroupName, id.ClusterName, id.Name) + existing, err := client.Get(ctx, id) if err != nil && !response.WasNotFound(existing.HttpResponse) { return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } @@ -98,12 
+97,12 @@ func (r ManagedPrivateEndpointResource) Create() sdk.ResourceFunc {
				return metadata.ResourceRequiresImport(r.ResourceType(), id)
			}

-			props := streamanalytics.PrivateEndpoint{
-				PrivateEndpointProperties: &streamanalytics.PrivateEndpointProperties{
-					ManualPrivateLinkServiceConnections: &[]streamanalytics.PrivateLinkServiceConnection{
+			props := privateendpoints.PrivateEndpoint{
+				Properties: &privateendpoints.PrivateEndpointProperties{
+					ManualPrivateLinkServiceConnections: &[]privateendpoints.PrivateLinkServiceConnection{
						{
-							PrivateLinkServiceConnectionProperties: &streamanalytics.PrivateLinkServiceConnectionProperties{
-								PrivateLinkServiceID: utils.String(model.TargetResourceId),
+							Properties: &privateendpoints.PrivateLinkServiceConnectionProperties{
+								PrivateLinkServiceId: utils.String(model.TargetResourceId),
								GroupIds:             &[]string{model.SubResourceName},
							},
						},
@@ -111,7 +110,8 @@ func (r ManagedPrivateEndpointResource) Create() sdk.ResourceFunc {
				},
			}

-			if _, err := client.CreateOrUpdate(ctx, props, id.ResourceGroupName, id.ClusterName, id.Name, "", ""); err != nil {
+			var opts privateendpoints.CreateOrUpdateOperationOptions
+			if _, err := client.CreateOrUpdate(ctx, id, props, opts); err != nil {
				return fmt.Errorf("creating %s: %+v", id, err)
			}

@@ -127,12 +127,12 @@ func (r ManagedPrivateEndpointResource) Read() sdk.ResourceFunc {
		Timeout: 5 * time.Minute,
		Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error {
			client := metadata.Client.StreamAnalytics.EndpointsClient
-			id, err := parse.PrivateEndpointID(metadata.ResourceData.Id())
+			id, err := privateendpoints.ParsePrivateEndpointID(metadata.ResourceData.Id())
			if err != nil {
				return err
			}

-			resp, err := client.Get(ctx, id.ResourceGroupName, id.ClusterName, id.Name)
+			resp, err := client.Get(ctx, *id)
			if err != nil {
				if response.WasNotFound(resp.HttpResponse) {
					return metadata.MarkAsGone(id)
@@ -140,21 +140,24 @@ func (r ManagedPrivateEndpointResource) Read() sdk.ResourceFunc {
				return fmt.Errorf("reading %s: %+v", *id, err)
			}

-			if resp.PrivateEndpointProperties.ManualPrivateLinkServiceConnections == nil {
-				return fmt.Errorf("TODO")
+			if resp.Model == nil || resp.Model.Properties == nil || resp.Model.Properties.ManualPrivateLinkServiceConnections == nil {
+				return fmt.Errorf("no private link service connections available")
			}

			state := ManagedPrivateEndpointModel{
-				Name:                   id.Name,
+				Name:                   id.PrivateEndpointName,
				ResourceGroup:          id.ResourceGroupName,
				StreamAnalyticsCluster: id.ClusterName,
			}

-			for _, mplsc := range *resp.PrivateEndpointProperties.ManualPrivateLinkServiceConnections {
-				state.TargetResourceId = *mplsc.PrivateLinkServiceID
-				state.SubResourceName = strings.Join(*mplsc.GroupIds, "")
+			if model := resp.Model; model != nil {
+				if props := model.Properties; props != nil {
+					for _, mplsc := range *props.ManualPrivateLinkServiceConnections {
+						state.TargetResourceId = *mplsc.Properties.PrivateLinkServiceId
+						state.SubResourceName = strings.Join(*mplsc.Properties.GroupIds, "")
+					}
+				}
			}
-
			return metadata.Encode(&state)
		},
	}
@@ -165,22 +168,17 @@ func (r ManagedPrivateEndpointResource) Delete() sdk.ResourceFunc {
		Timeout: 5 * time.Minute,
		Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error {
			client := metadata.Client.StreamAnalytics.EndpointsClient
-			id, err := parse.PrivateEndpointID(metadata.ResourceData.Id())
+			id, err := privateendpoints.ParsePrivateEndpointID(metadata.ResourceData.Id())
			if err != nil {
				return err
			}

			metadata.Logger.Infof("deleting %s", *id)

-			future, err := client.Delete(ctx, id.ResourceGroupName, id.ClusterName, id.Name)
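// The zero value of the generated *OperationOptions structs is valid: for
// CreateOrUpdate above it simply sends no ETag preconditions. A minimal sketch,
// assuming the privateendpoints package used above and the generated
// IfMatch/IfNoneMatch option fields (an assumption about this SDK version):
var exampleOpts privateendpoints.CreateOrUpdateOperationOptions // no If-Match / If-None-Match headers

// To fail when the resource already exists, an If-None-Match of "*" would be set:
//	star := "*"
//	exampleOpts.IfNoneMatch = &star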
if err != nil { + if err := client.DeleteThenPoll(ctx, *id); err != nil { return fmt.Errorf("deleting %s: %+v", *id, err) } - if err := future.WaitForCompletionRef(ctx, client.Client); err != nil { - return fmt.Errorf("waiting for deletion of %s: %+v", *id, err) - } - return nil }, } diff --git a/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource_test.go b/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource_test.go index 26ff6ca7fa9c..b86a9b1e1918 100644 --- a/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource_test.go @@ -3,12 +3,13 @@ package streamanalytics_test import ( "context" "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints" "testing" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/utils" ) @@ -46,12 +47,12 @@ func TestAccStreamAnalyticsManagedPrivateEndpoint_requiresImport(t *testing.T) { } func (r StreamAnalyticsManagedPrivateEndpointResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { - id, err := parse.PrivateEndpointID(state.ID) + id, err := privateendpoints.ParsePrivateEndpointID(state.ID) if err != nil { return nil, err } - resp, err := client.StreamAnalytics.EndpointsClient.Get(ctx, id.ResourceGroupName, id.ClusterName, id.Name) + resp, err := client.StreamAnalytics.EndpointsClient.Get(ctx, *id) if err != nil { if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil diff --git a/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go b/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go index 27f640115f0d..e4ff5cea3681 100644 --- a/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go @@ -11,7 +11,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" cosmosParse "github.com/hashicorp/terraform-provider-azurerm/internal/services/cosmos/parse" cosmosValidate "github.com/hashicorp/terraform-provider-azurerm/internal/services/cosmos/validate" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" @@ -105,7 +104,7 @@ func (r OutputCosmosDBResource) Create() sdk.ResourceFunc { client := metadata.Client.StreamAnalytics.OutputsClient subscriptionId := metadata.Client.Account.SubscriptionId - streamingJobId, err := parse.StreamingJobID(model.StreamAnalyticsJob) + streamingJobId, err := streamingjobs.ParseStreamingJobID(model.StreamAnalyticsJob) if err != nil { return err } @@ -180,7 +179,7 @@ func (r OutputCosmosDBResource) Read() sdk.ResourceFunc { return fmt.Errorf("converting to 
CosmosDb Output") } - streamingJobId := parse.NewStreamingJobID(id.SubscriptionId, id.ResourceGroupName, id.JobName) + streamingJobId := streamingjobs.NewStreamingJobID(id.SubscriptionId, id.ResourceGroupName, id.JobName) state := OutputCosmosDBResourceModel{ Name: id.OutputName, StreamAnalyticsJob: streamingJobId.ID(), diff --git a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go index 81d0e73bde2a..31c0ea352840 100644 --- a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go @@ -9,7 +9,6 @@ import ( "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" @@ -109,7 +108,7 @@ func (r OutputPowerBIResource) Create() sdk.ResourceFunc { client := metadata.Client.StreamAnalytics.OutputsClient subscriptionId := metadata.Client.Account.SubscriptionId - streamingJobId, err := parse.StreamingJobID(model.StreamAnalyticsJob) + streamingJobId, err := streamingjobs.ParseStreamingJobID(model.StreamAnalyticsJob) if err != nil { return err } @@ -260,7 +259,7 @@ func (r OutputPowerBIResource) Read() sdk.ResourceFunc { return fmt.Errorf("converting to PowerBI Output") } - streamingJobId := parse.NewStreamingJobID(id.SubscriptionId, id.ResourceGroupName, id.JobName) + streamingJobId := streamingjobs.NewStreamingJobID(id.SubscriptionId, id.ResourceGroupName, id.JobName) state := OutputPowerBIResourceModel{ Name: id.OutputName, diff --git a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go index b7bc5ea2c4f0..1a9570a94c14 100644 --- a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go @@ -6,7 +6,6 @@ import ( "log" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" @@ -100,10 +99,10 @@ func resourceStreamAnalyticsOutputServiceBusTopic() *pluginsdk.Resource { "authentication_mode": { Type: pluginsdk.TypeString, Optional: true, - Default: string(streamanalytics.AuthenticationModeConnectionString), + Default: string(outputs.AuthenticationModeConnectionString), ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.AuthenticationModeMsi), - string(streamanalytics.AuthenticationModeConnectionString), + string(outputs.AuthenticationModeMsi), + string(outputs.AuthenticationModeConnectionString), }, false), }, }, @@ -138,30 +137,33 @@ func resourceStreamAnalyticsOutputServiceBusTopicCreateUpdate(d *pluginsdk.Resou return fmt.Errorf("expanding `serialization`: %+v", err) } - props := streamanalytics.Output{ - Name: utils.String(id.Name), - Properties: 
&streamanalytics.OutputProperties{ - Datasource: &streamanalytics.ServiceBusTopicOutputDataSource{ - Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic, - ServiceBusTopicOutputDataSourceProperties: &streamanalytics.ServiceBusTopicOutputDataSourceProperties{ + systemPropertyColumns := d.Get("system_property_columns").(map[string]interface{}) + props := outputs.Output{ + Name: utils.String(id.OutputName), + Properties: &outputs.OutputProperties{ + Datasource: &outputs.ServiceBusTopicOutputDataSource{ + Properties: &outputs.ServiceBusTopicOutputDataSourceProperties{ TopicName: utils.String(d.Get("topic_name").(string)), ServiceBusNamespace: utils.String(d.Get("servicebus_namespace").(string)), SharedAccessPolicyKey: utils.String(d.Get("shared_access_policy_key").(string)), SharedAccessPolicyName: utils.String(d.Get("shared_access_policy_name").(string)), PropertyColumns: utils.ExpandStringSlice(d.Get("property_columns").([]interface{})), - SystemPropertyColumns: utils.ExpandMapStringPtrString(d.Get("system_property_columns").(map[string]interface{})), - AuthenticationMode: streamanalytics.AuthenticationMode(d.Get("authentication_mode").(string)), + SystemPropertyColumns: systemPropertyColumns, + //SystemPropertyColumns: utils.ExpandMapStringPtrString(d.Get("system_property_columns").(map[string]interface{})), + AuthenticationMode: utils.ToPtr(outputs.AuthenticationMode(d.Get("authentication_mode").(string))), }, }, Serialization: serialization, }, } + var createOpts outputs.CreateOrReplaceOperationOptions + var updateOpts outputs.UpdateOperationOptions if d.IsNewResource() { - if _, err := client.CreateOrReplace(ctx, id, props, opts); err != nil { + if _, err := client.CreateOrReplace(ctx, id, props, createOpts); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } - } else if _, err := client.Update(ctx, *id, props, opts); err != nil { + } else if _, err := client.Update(ctx, id, props, updateOpts); err != nil { return fmt.Errorf("updating %s: %+v", id, err) } @@ -180,7 +182,7 @@ func resourceStreamAnalyticsOutputServiceBusTopicRead(d *pluginsdk.ResourceData, return err } - resp, err := client.Get(ctx, id) + resp, err := client.Get(ctx, *id) if err != nil { if response.WasNotFound(resp.HttpResponse) { log.Printf("[DEBUG] %s was not found - removing from state!", *id) @@ -191,31 +193,56 @@ func resourceStreamAnalyticsOutputServiceBusTopicRead(d *pluginsdk.ResourceData, return fmt.Errorf("retrieving %s: %+v", *id, err) } - d.Set("name", id.Name) + d.Set("name", id.OutputName) d.Set("stream_analytics_job_name", id.JobName) d.Set("resource_group_name", id.ResourceGroupName) - if props := resp.OutputProperties; props != nil { - v, ok := props.Datasource.AsServiceBusTopicOutputDataSource() - if !ok { - return fmt.Errorf("converting Output Data Source to a ServiceBus Topic Output: %+v", err) - } + if model := resp.Model; model != nil { + if props := model.Properties; props != nil { + output, ok := props.Datasource.(outputs.ServiceBusTopicOutputDataSourceProperties) + if !ok { + return fmt.Errorf("converting to ServiceBus Topic Output") + } - d.Set("topic_name", v.TopicName) - d.Set("servicebus_namespace", v.ServiceBusNamespace) - d.Set("shared_access_policy_name", v.SharedAccessPolicyName) - d.Set("property_columns", v.PropertyColumns) - d.Set("authentication_mode", v.AuthenticationMode) + topicName := "" + if v := output.TopicName; v != nil { + topicName = *v + } + d.Set("topic_name", topicName) - if err = d.Set("system_property_columns", 
@@ -180,7 +182,7 @@ func resourceStreamAnalyticsOutputServiceBusTopicRead(d *pluginsdk.ResourceData,
 		return err
 	}
 
-	resp, err := client.Get(ctx, id)
+	resp, err := client.Get(ctx, *id)
 	if err != nil {
 		if response.WasNotFound(resp.HttpResponse) {
 			log.Printf("[DEBUG] %s was not found - removing from state!", *id)
@@ -191,31 +193,56 @@ func resourceStreamAnalyticsOutputServiceBusTopicRead(d *pluginsdk.ResourceData,
 		return fmt.Errorf("retrieving %s: %+v", *id, err)
 	}
 
-	d.Set("name", id.Name)
+	d.Set("name", id.OutputName)
 	d.Set("stream_analytics_job_name", id.JobName)
 	d.Set("resource_group_name", id.ResourceGroupName)
 
-	if props := resp.OutputProperties; props != nil {
-		v, ok := props.Datasource.AsServiceBusTopicOutputDataSource()
-		if !ok {
-			return fmt.Errorf("converting Output Data Source to a ServiceBus Topic Output: %+v", err)
-		}
+	if model := resp.Model; model != nil {
+		if props := model.Properties; props != nil {
+			output, ok := props.Datasource.(outputs.ServiceBusTopicOutputDataSourceProperties)
+			if !ok {
+				return fmt.Errorf("converting to ServiceBus Topic Output")
+			}
 
-		d.Set("topic_name", v.TopicName)
-		d.Set("servicebus_namespace", v.ServiceBusNamespace)
-		d.Set("shared_access_policy_name", v.SharedAccessPolicyName)
-		d.Set("property_columns", v.PropertyColumns)
-		d.Set("authentication_mode", v.AuthenticationMode)
+			topicName := ""
+			if v := output.TopicName; v != nil {
+				topicName = *v
+			}
+			d.Set("topic_name", topicName)
 
-		if err = d.Set("system_property_columns", utils.FlattenMapStringPtrString(v.SystemPropertyColumns)); err != nil {
-			return err
-		}
+			namespace := ""
+			if v := output.ServiceBusNamespace; v != nil {
+				namespace = *v
+			}
+			d.Set("servicebus_namespace", namespace)
+
+			accessPolicy := ""
+			if v := output.SharedAccessPolicyName; v != nil {
+				accessPolicy = *v
+			}
+			d.Set("shared_access_policy_name", accessPolicy)
 
-		if err := d.Set("serialization", flattenStreamAnalyticsOutputSerialization(props.Serialization)); err != nil {
-			return fmt.Errorf("setting `serialization`: %+v", err)
+			var propertyColumns []string
+			if v := output.PropertyColumns; v != nil {
+				propertyColumns = *v
+			}
+			d.Set("property_columns", propertyColumns)
+
+			authMode := ""
+			if v := output.AuthenticationMode; v != nil {
+				authMode = string(*v)
+			}
+			d.Set("authentication_mode", authMode)
+
+			systemPropertyColumns := make(map[string]string)
+			if v := output.SystemPropertyColumns; v != nil {
+				systemPropertyColumns = *v
+			}
+			if err = d.Set("system_property_columns", systemPropertyColumns); err != nil {
+				return err
+			}
+
+			if err := d.Set("serialization", flattenStreamAnalyticsOutputSerialization(props.Serialization)); err != nil {
+				return fmt.Errorf("setting `serialization`: %+v", err)
+			}
 		}
 	}
-
 	return nil
 }
diff --git a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource_test.go
index 9df99d3059ad..da2ea51568f4 100644
--- a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource_test.go
+++ b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource_test.go
@@ -3,6 +3,8 @@ package streamanalytics_test
 import (
 	"context"
 	"fmt"
+	"github.com/hashicorp/go-azure-helpers/lang/response"
+	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs"
 	"testing"
 
 	"github.com/hashicorp/terraform-provider-azurerm/internal/acceptance"
@@ -166,7 +168,7 @@ func TestAccStreamAnalyticsOutputServiceBusTopic_systemPropertyColumns(t *testin
 func (r StreamAnalyticsOutputServiceBusTopicResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) {
 	id, err := outputs.ParseOutputID(state.ID)
 	if err != nil {
-		return nil, err
+		return utils.Bool(false), err
 	}
 
 	resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, *id)
diff --git a/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go b/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go
index 2573b2e2f0de..4fedaae1a9bf 100644
--- a/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go
+++ b/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go
@@ -2,10 +2,10 @@ package streamanalytics
 
 import (
 	"fmt"
+	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs"
 	"log"
 	"time"
 
-	"github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics"
 	"github.com/hashicorp/go-azure-helpers/lang/response"
 	"github.com/hashicorp/terraform-provider-azurerm/helpers/tf"
 	"github.com/hashicorp/terraform-provider-azurerm/internal/clients"
@@ -24,7 +24,7 @@ func resourceStreamAnalyticsOutputSynapse() *pluginsdk.Resource {
 		Importer: pluginsdk.ImporterValidatingResourceIdThen(func(id string) error {
 			_, err := outputs.ParseOutputID(id)
 			return err
-		}, importStreamAnalyticsOutput(streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse)),
+		}, 
importStreamAnalyticsOutput(outputs.TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse)), Timeouts: &pluginsdk.ResourceTimeout{ Create: pluginsdk.DefaultTimeout(30 * time.Minute), @@ -112,12 +112,11 @@ func resourceStreamAnalyticsOutputSynapseCreateUpdate(d *pluginsdk.ResourceData, } } - props := streamanalytics.Output{ - Name: utils.String(id.Name), - Properties: &streamanalytics.OutputProperties{ - Datasource: &streamanalytics.AzureSynapseOutputDataSource{ - Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse, - AzureSynapseOutputDataSourceProperties: &streamanalytics.AzureSynapseOutputDataSourceProperties{ + props := outputs.Output{ + Name: utils.String(id.OutputName), + Properties: &outputs.OutputProperties{ + Datasource: &outputs.AzureSynapseOutputDataSource{ + Properties: &outputs.AzureSynapseDataSourceProperties{ Server: utils.String(d.Get("server").(string)), Database: utils.String(d.Get("database").(string)), User: utils.String(d.Get("user").(string)), @@ -128,13 +127,16 @@ func resourceStreamAnalyticsOutputSynapseCreateUpdate(d *pluginsdk.ResourceData, }, } + var createOpts outputs.CreateOrReplaceOperationOptions + var updateOpts outputs.UpdateOperationOptions + if d.IsNewResource() { - if _, err := client.CreateOrReplace(ctx, id, props, opts); err != nil { + if _, err := client.CreateOrReplace(ctx, id, props, createOpts); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } d.SetId(id.ID()) - } else if _, err := client.Update(ctx, *id, props, opts); err != nil { + } else if _, err := client.Update(ctx, id, props, updateOpts); err != nil { return fmt.Errorf("updating %s: %+v", id, err) } @@ -151,7 +153,7 @@ func resourceStreamAnalyticsOutputSynapseRead(d *pluginsdk.ResourceData, meta in return err } - resp, err := client.Get(ctx, id) + resp, err := client.Get(ctx, *id) if err != nil { if response.WasNotFound(resp.HttpResponse) { log.Printf("[DEBUG] %s was not found - removing from state!", *id) @@ -162,22 +164,43 @@ func resourceStreamAnalyticsOutputSynapseRead(d *pluginsdk.ResourceData, meta in return fmt.Errorf("retreving %s: %+v", *id, err) } - d.Set("name", id.Name) + d.Set("name", id.OutputName) d.Set("stream_analytics_job_name", id.JobName) d.Set("resource_group_name", id.ResourceGroupName) - if props := resp.OutputProperties; props != nil { - v, ok := props.Datasource.AsAzureSynapseOutputDataSource() - if !ok { - return fmt.Errorf("converting Output Data Source to Synapse Output: %+v", err) - } + if model := resp.Model; model != nil { + if props := model.Properties; props != nil { + output, ok := props.Datasource.(outputs.AzureSynapseDataSourceProperties) + if !ok { + return fmt.Errorf("converting to Synapse Output") + } + + server := "" + if v := output.Server; v != nil { + server = *v + } + d.Set("server", server) + + database := "" + if v := output.Database; v != nil { + database = *v + } + d.Set("database", database) + + table := "" + if v := output.Table; v != nil { + table = *v + } + d.Set("table", table) + + user := "" + if v := output.User; v != nil { + user = *v + } + d.Set("user", user) - d.Set("server", v.Server) - d.Set("database", v.Database) - d.Set("table", v.Table) - d.Set("user", v.User) + } } - return nil } diff --git a/internal/services/streamanalytics/stream_analytics_output_synapse_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_synapse_resource_test.go index fb336be0a31f..60098e70a364 100644 --- 
a/internal/services/streamanalytics/stream_analytics_output_synapse_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_synapse_resource_test.go @@ -3,6 +3,8 @@ package streamanalytics_test import ( "context" "fmt" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" diff --git a/internal/services/streamanalytics/stream_analytics_output_table_resource.go b/internal/services/streamanalytics/stream_analytics_output_table_resource.go index 6e8a0965c737..61f44796d761 100644 --- a/internal/services/streamanalytics/stream_analytics_output_table_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_table_resource.go @@ -3,9 +3,9 @@ package streamanalytics import ( "context" "fmt" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" @@ -27,7 +27,7 @@ type OutputTableResourceModel struct { Table string `tfschema:"table"` PartitionKey string `tfschema:"partition_key"` RowKey string `tfschema:"row_key"` - BatchSize int32 `tfschema:"batch_size"` + BatchSize int64 `tfschema:"batch_size"` ColumnsToRemove []string `tfschema:"columns_to_remove"` } @@ -136,29 +136,29 @@ func (r OutputTableResource) Create() sdk.ResourceFunc { return metadata.ResourceRequiresImport(r.ResourceType(), id) } - tableOutputProps := &streamanalytics.AzureTableOutputDataSourceProperties{ + tableOutputProps := &outputs.AzureTableOutputDataSourceProperties{ AccountName: utils.String(model.StorageAccount), AccountKey: utils.String(model.StorageAccountKey), Table: utils.String(model.Table), PartitionKey: utils.String(model.PartitionKey), RowKey: utils.String(model.RowKey), - BatchSize: utils.Int32(model.BatchSize), + BatchSize: utils.Int64(model.BatchSize), } if v := model.ColumnsToRemove; v != nil && len(v) > 0 { tableOutputProps.ColumnsToRemove = &v } - props := streamanalytics.Output{ + props := outputs.Output{ Name: utils.String(model.Name), - Properties: &streamanalytics.OutputProperties{ - Datasource: &streamanalytics.AzureTableOutputDataSource{ - Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftStorageTable, - AzureTableOutputDataSourceProperties: tableOutputProps, + Properties: &outputs.OutputProperties{ + Datasource: &outputs.AzureTableOutputDataSource{ + Properties: tableOutputProps, }, }, } + var opts outputs.CreateOrReplaceOperationOptions if _, err = client.CreateOrReplace(ctx, id, props, opts); err != nil { return fmt.Errorf("creating %s: %+v", id, err) } @@ -180,7 +180,7 @@ func (r OutputTableResource) Read() sdk.ResourceFunc { return err } - resp, err := client.Get(ctx, id) + resp, err := client.Get(ctx, *id) if err != nil { if response.WasNotFound(resp.HttpResponse) { return metadata.MarkAsGone(id) @@ -188,35 +188,66 @@ func (r OutputTableResource) Read() sdk.ResourceFunc { return fmt.Errorf("reading %s: %+v", *id, err) } - if props := resp.OutputProperties; props != nil && props.Datasource != nil { - v, ok := props.Datasource.AsAzureTableOutputDataSource() - if !ok { - return fmt.Errorf("converting output data source to a blob output: %+v", err) - } - - if 
v.AccountName == nil || v.Table == nil || v.PartitionKey == nil || v.RowKey == nil || v.BatchSize == nil {
-				return nil
-			}
-
-			state := OutputTableResourceModel{
-				Name:               id.Name,
-				ResourceGroup:      id.ResourceGroupName,
-				StreamAnalyticsJob: id.JobName,
-				StorageAccount:     *v.AccountName,
-				StorageAccountKey:  metadata.ResourceData.Get("storage_account_key").(string),
-				Table:              *v.Table,
-				PartitionKey:       *v.PartitionKey,
-				RowKey:             *v.RowKey,
-				BatchSize:          *v.BatchSize,
-			}
-
-			var columnsToRemove []string
-			if columns := v.ColumnsToRemove; columns != nil && len(*columns) > 0 {
-				columnsToRemove = *columns
+			if model := resp.Model; model != nil {
+				if props := model.Properties; props != nil {
+					output, ok := props.Datasource.(outputs.AzureTableOutputDataSourceProperties)
+					if !ok {
+						return fmt.Errorf("converting to Table Output")
+					}
+
+					if output.AccountName == nil || output.Table == nil || output.PartitionKey == nil || output.RowKey == nil || output.BatchSize == nil {
+						return nil
+					}
+
+					state := OutputTableResourceModel{
+						Name:               id.OutputName,
+						ResourceGroup:      id.ResourceGroupName,
+						StreamAnalyticsJob: id.JobName,
+						StorageAccountKey:  metadata.ResourceData.Get("storage_account_key").(string),
+					}
+
+					accountName := ""
+					if v := output.AccountName; v != nil {
+						accountName = *v
+					}
+					state.StorageAccount = accountName
+
+					table := ""
+					if v := output.Table; v != nil {
+						table = *v
+					}
+					state.Table = table
+
+					partitionKey := ""
+					if v := output.PartitionKey; v != nil {
+						partitionKey = *v
+					}
+					state.PartitionKey = partitionKey
+
+					rowKey := ""
+					if v := output.RowKey; v != nil {
+						rowKey = *v
+					}
+					state.RowKey = rowKey
+
+					var batchSize int64
+					if v := output.BatchSize; v != nil {
+						batchSize = *v
+					}
+					state.BatchSize = batchSize
+
+					var columnsToRemove []string
+					if columns := output.ColumnsToRemove; columns != nil && len(*columns) > 0 {
+						columnsToRemove = *columns
+					}
+					state.ColumnsToRemove = columnsToRemove
+
+					return metadata.Encode(&state)
+				}
 			}
-			state.ColumnsToRemove = columnsToRemove
-
-			return metadata.Encode(&state)
-		}
 			return nil
 		},
@@ -238,31 +269,31 @@ func (r OutputTableResource) Update() sdk.ResourceFunc {
 				return fmt.Errorf("decoding: %+v", err)
 			}
 
-			props := streamanalytics.Output{
+			props := outputs.Output{
 				Name: utils.String(state.Name),
-				Properties: &streamanalytics.OutputProperties{
-					Datasource: &streamanalytics.AzureTableOutputDataSource{
-						Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftStorageTable,
-						AzureTableOutputDataSourceProperties: &streamanalytics.AzureTableOutputDataSourceProperties{
+				Properties: &outputs.OutputProperties{
+					Datasource: &outputs.AzureTableOutputDataSource{
+						Properties: &outputs.AzureTableOutputDataSourceProperties{
 							AccountName:  utils.String(state.StorageAccount),
 							AccountKey:   utils.String(state.StorageAccountKey),
 							Table:        utils.String(state.Table),
 							PartitionKey: utils.String(state.PartitionKey),
 							RowKey:       utils.String(state.RowKey),
-							BatchSize:    utils.Int32(state.BatchSize),
+							BatchSize:    utils.Int64(state.BatchSize),
 						},
 					},
 				},
 			}
 
 			if metadata.ResourceData.HasChange("columns_to_remove") {
-				tableOutput, ok := props.OutputProperties.Datasource.AsAzureTableOutputDataSource()
+				tableOutput, ok := props.Properties.Datasource.(*outputs.AzureTableOutputDataSource)
 				if !ok {
 					return fmt.Errorf("converting output data source to a table output: %+v", err)
 				}
-				tableOutput.ColumnsToRemove = &state.ColumnsToRemove
+				tableOutput.Properties.ColumnsToRemove = &state.ColumnsToRemove
 			}
 
+			var opts outputs.UpdateOperationOptions
 			if _, err = client.Update(ctx, *id, props, opts); err != nil {
 				return fmt.Errorf("updating %s: %+v", *id, err)
 			}
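The assertions against the bare *Properties structs (e.g. outputs.AzureTableOutputDataSourceProperties above) are what the later "fix correct type assertions" patch reworks: when a response is decoded, the Datasource union is assumed to hold the wrapper struct (outputs.AzureTableOutputDataSource), which carries the values in its Properties field, so the wrapper is what a read path should assert. A minimal sketch of that pattern under those assumptions (tableBatchSize is a hypothetical helper, not part of this patch):

package streamanalytics

import (
	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs"
)

// tableBatchSize asserts the wrapper type on the OutputDataSource union and
// nil-guards its way down to the BatchSize field, returning false when the
// datasource is not an Azure Table output.
func tableBatchSize(ds outputs.OutputDataSource) (int64, bool) {
	table, ok := ds.(outputs.AzureTableOutputDataSource)
	if !ok || table.Properties == nil || table.Properties.BatchSize == nil {
		return 0, false
	}
	return *table.Properties.BatchSize, true
}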
@@ -302,14 +333,15 @@ func (r OutputTableResource) CustomImporter() sdk.ResourceRunFunc {
 			}
 
 			client := metadata.Client.StreamAnalytics.OutputsClient
-			resp, err := client.Get(ctx, id)
+			resp, err := client.Get(ctx, *id)
 			if err != nil || resp.Model == nil || resp.Model.Properties == nil {
 				return fmt.Errorf("reading %s: %+v", *id, err)
 			}
 
-			props := resp.OutputProperties
-			if _, ok := props.Datasource.AsAzureTableOutputDataSource(); !ok {
-				return fmt.Errorf("specified output is not of type %s", streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftStorageTable)
+			props := resp.Model.Properties
+			if _, ok := props.Datasource.(outputs.AzureTableOutputDataSourceProperties); !ok {
+				// TODO should these types exist in pandora?
+				return fmt.Errorf("specified output is not of type %s", outputs.TypeBasicOutputDataSourceTypeMicrosoftStorageTable)
 			}
 			return nil
 		}
diff --git a/internal/services/streamanalytics/stream_analytics_output_table_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_table_resource_test.go
index 50d6b9e0d6fe..fcbb6b2b3a05 100644
--- a/internal/services/streamanalytics/stream_analytics_output_table_resource_test.go
+++ b/internal/services/streamanalytics/stream_analytics_output_table_resource_test.go
@@ -3,6 +3,8 @@ package streamanalytics_test
 import (
 	"context"
 	"fmt"
+	"github.com/hashicorp/go-azure-helpers/lang/response"
+	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs"
 	"strings"
 	"testing"
 
diff --git a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go
index c9d34e1540a4..c718bab895af 100644
--- a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go
+++ b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go
@@ -238,7 +238,7 @@ func resourceStreamAnalyticsReferenceInputBlobRead(d *pluginsdk.ResourceData, me
 
 	d.Set("name", id.InputName)
 	d.Set("stream_analytics_job_name", id.JobName)
-	d.Set("resource_group_name", id.ResourceGroupNameName)
+	d.Set("resource_group_name", id.ResourceGroupName)
 
 	if model := resp.Model; model != nil {
 		if props := model.Properties; props != nil {
diff --git a/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go b/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go
index 52106c7e4d48..ca405ea9285b 100644
--- a/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go
+++ b/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go
@@ -212,7 +212,7 @@ func resourceStreamAnalyticsReferenceInputMsSqlRead(d *pluginsdk.ResourceData, m
 	d.SetId(id.ID())
 	d.Set("name", id.InputName)
 	d.Set("stream_analytics_job_name", id.JobName)
-	d.Set("resource_group_name", id.ResourceGroupNameName)
+	d.Set("resource_group_name", id.ResourceGroupName)
 
 	if model := resp.Model; model != nil {
 		if props := model.Properties; props != nil {
diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource.go
index 783fe8aa1206..9dff5f4011a2 100644
--- a/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource.go
+++ b/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource.go
@@ -188,7 +188,7 @@ func resourceStreamAnalyticsStreamInputBlobRead(d *pluginsdk.ResourceData, meta
 
 	d.Set("name", id.InputName)
 	d.Set("stream_analytics_job_name", id.JobName)
-	d.Set("resource_group_name", id.ResourceGroupNameName)
+	d.Set("resource_group_name", id.ResourceGroupName)
 
 	if model := resp.Model; model != nil {
 		if props := model.Properties; props != nil {
diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go
index 36f5b1868369..21a1a3413285 100644
--- a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go
+++ b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go
@@ -197,7 +197,7 @@ func resourceStreamAnalyticsStreamInputEventHubRead(d *pluginsdk.ResourceData, m
 
 	d.Set("name", id.InputName)
 	d.Set("stream_analytics_job_name", id.JobName)
-	d.Set("resource_group_name", id.ResourceGroupNameName)
+	d.Set("resource_group_name", id.ResourceGroupName)
 
 	if model := resp.Model; model != nil {
 		if props := model.Properties; props != nil {
diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go
index cc161851d099..8dd996bab0d3 100644
--- a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go
+++ b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go
@@ -10,7 +10,7 @@ import (
 	"github.com/hashicorp/go-azure-helpers/lang/response"
+	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
 	"github.com/hashicorp/terraform-provider-azurerm/internal/sdk"
-	"github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse"
 	"github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate"
 	"github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk"
 	"github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation"
@@ -132,7 +131,7 @@ func (r StreamInputEventHubV2Resource) Create() sdk.ResourceFunc {
 			client := metadata.Client.StreamAnalytics.InputsClient
 			subscriptionId := metadata.Client.Account.SubscriptionId
 
-			streamingJobStruct, err := parse.StreamingJobID(model.StreamAnalyticsJobId)
+			streamingJobStruct, err := streamingjobs.ParseStreamingJobID(model.StreamAnalyticsJobId)
 			if err != nil {
 				return err
 			}
@@ -262,7 +261,7 @@ func (r StreamInputEventHubV2Resource) Read() sdk.ResourceFunc {
 				return fmt.Errorf("reading %s: %+v", *id, err)
 			}
 
-			streamingJobId := parse.NewStreamingJobID(id.SubscriptionId, id.ResourceGroupNameName, id.JobName)
+			streamingJobId := streamingjobs.NewStreamingJobID(id.SubscriptionId, id.ResourceGroupName, id.JobName)
 
 			state := StreamInputEventHubV2ResourceModel{
 				Name: id.InputName,
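All of the parse.* ID helpers above are being replaced by the typed IDs generated into each go-azure-sdk package. Roughly, they round-trip like this (a sketch with placeholder values, not part of the patch):

package streamanalytics

import (
	"fmt"

	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs"
)

func exampleStreamingJobID() error {
	// Build a typed ID from its components (placeholder values).
	id := streamingjobs.NewStreamingJobID("00000000-0000-0000-0000-000000000000", "example-resources", "example-job")

	// id.ID() renders the full Azure resource ID; ParseStreamingJobID
	// recovers the components, which is what the resources above store
	// into and read back from state.
	parsed, err := streamingjobs.ParseStreamingJobID(id.ID())
	if err != nil {
		return err
	}

	fmt.Printf("job %q in resource group %q\n", parsed.JobName, parsed.ResourceGroupName)
	return nil
}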
diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource.go
index 6bedf72ba286..6c78894d7bdc 100644
--- a/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource.go
+++ b/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource.go
@@ -178,7 +178,7 @@ func resourceStreamAnalyticsStreamInputIoTHubRead(d *pluginsdk.ResourceData, met
 
 	d.Set("name", id.InputName)
 	d.Set("stream_analytics_job_name", id.JobName)
-	d.Set("resource_group_name", id.ResourceGroupNameName)
+	d.Set("resource_group_name", id.ResourceGroupName)
 
 	if model := resp.Model; model != nil {
 		if props := model.Properties; props != nil {
diff --git a/internal/services/streamanalytics/validate/function_id.go b/internal/services/streamanalytics/validate/function_id.go
index a45e2f60497d..b174a36fe802 100644
--- a/internal/services/streamanalytics/validate/function_id.go
+++ b/internal/services/streamanalytics/validate/function_id.go
@@ -4,8 +4,8 @@ package validate
 
 import (
 	"fmt"
-
-	"github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse"
+
+	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions"
 )
 
 func FunctionID(input interface{}, key string) (warnings []string, errors []error) {
@@ -15,7 +15,7 @@ func FunctionID(input interface{}, key string) (warnings []string, errors []erro
 		return
 	}
 
-	if _, err := parse.FunctionID(v); err != nil {
+	if _, err := functions.ParseFunctionID(v); err != nil {
 		errors = append(errors, err)
 	}
diff --git a/internal/services/streamanalytics/validate/streaming_job_id.go b/internal/services/streamanalytics/validate/streaming_job_id.go
index aea3ee143145..e78e8fc45bf4 100644
--- a/internal/services/streamanalytics/validate/streaming_job_id.go
+++ b/internal/services/streamanalytics/validate/streaming_job_id.go
@@ -4,8 +4,8 @@ package validate
 
 import (
 	"fmt"
-
-	"github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse"
+
+	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs"
 )
 
 func StreamingJobID(input interface{}, key string) (warnings []string, errors []error) {
@@ -15,7 +15,7 @@ func StreamingJobID(input interface{}, key string) (warnings []string, errors []
 		return
 	}
 
-	if _, err := parse.StreamingJobID(v); err != nil {
+	if _, err := streamingjobs.ParseStreamingJobID(v); err != nil {
 		errors = append(errors, err)
 	}

From 7196e9cf674fedceff9c9dfad4bb7b57351a2828 Mon Sep 17 00:00:00 2001
From: Steph
Date: Mon, 21 Nov 2022 16:57:18 +0100
Subject: [PATCH 03/14] modules

---
 vendor/modules.txt | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/vendor/modules.txt b/vendor/modules.txt
index 02c76c616995..72be104f0aa2 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -394,6 +394,13 @@ github.com/hashicorp/go-azure-sdk/resource-manager/storage/2022-05-01/tableservi
 github.com/hashicorp/go-azure-sdk/resource-manager/storage/2022-05-01/tableserviceproperties
 github.com/hashicorp/go-azure-sdk/resource-manager/storagepool/2021-08-01/diskpools
 github.com/hashicorp/go-azure-sdk/resource-manager/storagepool/2021-08-01/iscsitargets
+github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters
+github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions
+github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs
+github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs
+github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints
+github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs
+github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations
 github.com/hashicorp/go-azure-sdk/resource-manager/trafficmanager/2018-08-01/endpoints
 github.com/hashicorp/go-azure-sdk/resource-manager/trafficmanager/2018-08-01/geographichierarchies
 github.com/hashicorp/go-azure-sdk/resource-manager/trafficmanager/2018-08-01/profiles

From 6d1171ba82eafbce373870878d8c069628ca84b9 Mon Sep 17 00:00:00 2001
From: Steph
Date: Tue, 22 Nov 2022 18:05:41 +0100
Subject: 
[PATCH 04/14] fix correct type assertions --- .../source_control_token_resource_test.go | 2 +- ...l_network_rule_collection_resource_test.go | 2 +- .../services/streamanalytics/helpers_input.go | 121 ++---- .../streamanalytics/helpers_output.go | 58 ++- .../services/streamanalytics/parse/cluster.go | 69 ---- .../streamanalytics/parse/cluster_test.go | 112 ----- .../streamanalytics/parse/function.go | 75 ---- .../streamanalytics/parse/function_test.go | 128 ------ .../services/streamanalytics/parse/output.go | 75 ---- .../streamanalytics/parse/output_test.go | 128 ------ .../streamanalytics/parse/private_endpoint.go | 75 ---- .../parse/private_endpoint_test.go | 128 ------ .../streamanalytics/parse/stream_input.go | 75 ---- .../parse/stream_input_test.go | 128 ------ .../streamanalytics/parse/streaming_job.go | 69 ---- .../parse/streaming_job_schedule.go | 12 +- .../parse/streaming_job_test.go | 112 ----- .../services/streamanalytics/registration.go | 12 +- .../services/streamanalytics/resourceids.go | 6 - .../stream_analytics_cluster_resource.go | 5 +- .../stream_analytics_cluster_resource_test.go | 4 +- ...lytics_function_javascript_uda_resource.go | 6 +- ...s_function_javascript_uda_resource_test.go | 4 +- ...lytics_function_javascript_udf_resource.go | 2 +- ...s_function_javascript_udf_resource_test.go | 4 +- .../stream_analytics_job_data_source.go | 4 +- .../stream_analytics_job_resource.go | 26 +- .../stream_analytics_job_resource_test.go | 7 +- .../stream_analytics_job_schedule_resource.go | 8 +- ...am_analytics_job_schedule_resource_test.go | 6 +- ...ytics_managed_private_endpoint_resource.go | 4 +- ..._managed_private_endpoint_resource_test.go | 4 +- .../stream_analytics_output.go | 64 ++- .../stream_analytics_output_blob_resource.go | 18 +- ...eam_analytics_output_blob_resource_test.go | 6 +- ...ream_analytics_output_cosmosdb_resource.go | 18 +- ...analytics_output_cosmosdb_resource_test.go | 4 +- ...ream_analytics_output_eventhub_resource.go | 14 +- ...analytics_output_eventhub_resource_test.go | 6 +- ...ream_analytics_output_function_resource.go | 18 +- ...analytics_output_function_resource_test.go | 4 +- .../stream_analytics_output_mssql_resource.go | 18 +- ...am_analytics_output_mssql_resource_test.go | 5 +- ...tream_analytics_output_powerbi_resource.go | 20 +- ..._analytics_output_powerbi_resource_test.go | 4 +- ...lytics_output_servicebus_queue_resource.go | 18 +- ...s_output_servicebus_queue_resource_test.go | 5 +- ...lytics_output_servicebus_topic_resource.go | 30 +- ...s_output_servicebus_topic_resource_test.go | 6 +- ...tream_analytics_output_synapse_resource.go | 14 +- ..._analytics_output_synapse_resource_test.go | 4 +- .../stream_analytics_output_table_resource.go | 27 +- ...am_analytics_output_table_resource_test.go | 4 +- .../stream_analytics_reference_input.go | 8 +- ...analytics_reference_input_blob_resource.go | 37 +- ...tics_reference_input_blob_resource_test.go | 4 +- ...nalytics_reference_input_mssql_resource.go | 33 +- ...ics_reference_input_mssql_resource_test.go | 4 +- ...am_analytics_stream_input_blob_resource.go | 4 +- ...alytics_stream_input_blob_resource_test.go | 6 +- ...nalytics_stream_input_eventhub_resource.go | 17 +- ...ics_stream_input_eventhub_resource_test.go | 4 +- ...ytics_stream_input_eventhub_v2_resource.go | 21 +- ..._stream_input_eventhub_v2_resource_test.go | 4 +- ..._analytics_stream_input_iothub_resource.go | 6 +- ...ytics_stream_input_iothub_resource_test.go | 6 +- .../streamanalytics/validate/cluster_id.go | 23 -- 
.../validate/cluster_id_test.go | 76 ---- .../streamanalytics/validate/function_id.go | 21 - .../validate/function_id_test.go | 88 ---- .../streamanalytics/validate/output_id.go | 21 - .../validate/output_id_test.go | 88 ---- .../validate/private_endpoint_id.go | 23 -- .../validate/private_endpoint_id_test.go | 88 ---- .../validate/stream_input_id.go | 23 -- .../validate/stream_input_id_test.go | 88 ---- .../validate/streaming_job_id.go | 21 - .../validate/streaming_job_id_test.go | 76 ---- .../2020-03-01/clusters/README.md | 133 ++++++ .../2020-03-01/clusters/client.go | 18 + .../2020-03-01/clusters/constants.go | 117 ++++++ .../2020-03-01/clusters/id_cluster.go | 124 ++++++ .../method_createorupdate_autorest.go | 113 ++++++ .../clusters/method_delete_autorest.go | 78 ++++ .../clusters/method_get_autorest.go | 68 ++++ .../method_listbyresourcegroup_autorest.go | 187 +++++++++ .../method_listbysubscription_autorest.go | 187 +++++++++ .../method_liststreamingjobs_autorest.go | 186 +++++++++ .../clusters/method_update_autorest.go | 108 +++++ .../2020-03-01/clusters/model_cluster.go | 15 + .../2020-03-01/clusters/model_clusterjob.go | 10 + .../clusters/model_clusterproperties.go | 30 ++ .../2020-03-01/clusters/model_clustersku.go | 9 + .../2020-03-01/clusters/predicates.go | 52 +++ .../2020-03-01/clusters/version.go | 12 + .../2020-03-01/functions/README.md | 149 +++++++ .../2020-03-01/functions/client.go | 18 + .../2020-03-01/functions/constants.go | 31 ++ .../2020-03-01/functions/id_function.go | 137 +++++++ .../2020-03-01/functions/id_streamingjob.go | 124 ++++++ .../method_createorreplace_autorest.go | 103 +++++ .../functions/method_delete_autorest.go | 66 +++ .../functions/method_get_autorest.go | 68 ++++ .../method_listbystreamingjob_autorest.go | 215 ++++++++++ ...thod_retrievedefaultdefinition_autorest.go | 70 ++++ .../functions/method_test_autorest.go | 79 ++++ .../functions/method_update_autorest.go | 98 +++++ .../model_aggregatefunctionproperties.go | 42 ++ ...achinelearningwebservicefunctionbinding.go | 41 ++ ...ningwebservicefunctionbindingproperties.go | 12 + ...rvicefunctionbindingretrievalproperties.go | 9 + ...tionretrievedefaultdefinitionparameters.go | 41 ++ ...uremachinelearningwebserviceinputcolumn.go | 10 + ...el_azuremachinelearningwebserviceinputs.go | 9 + ...remachinelearningwebserviceoutputcolumn.go | 9 + .../functions/model_errorresponse.go | 9 + .../2020-03-01/functions/model_function.go | 44 ++ .../functions/model_functionbinding.go | 56 +++ .../functions/model_functionconfiguration.go | 42 ++ .../functions/model_functioninput.go | 9 + .../functions/model_functionoutput.go | 8 + .../functions/model_functionproperties.go | 56 +++ ...tionretrievedefaultdefinitionparameters.go | 56 +++ .../model_javascriptfunctionbinding.go | 41 ++ ...del_javascriptfunctionbindingproperties.go | 8 + ...criptfunctionbindingretrievalproperties.go | 9 + ...tionretrievedefaultdefinitionparameters.go | 41 ++ .../functions/model_resourceteststatus.go | 9 + .../model_scalarfunctionproperties.go | 42 ++ .../2020-03-01/functions/predicates.go | 24 ++ .../2020-03-01/functions/version.go | 12 + .../2020-03-01/inputs/README.md | 128 ++++++ .../2020-03-01/inputs/client.go | 18 + .../2020-03-01/inputs/constants.go | 186 +++++++++ .../2020-03-01/inputs/id_input.go | 137 +++++++ .../2020-03-01/inputs/id_streamingjob.go | 124 ++++++ .../inputs/method_createorreplace_autorest.go | 103 +++++ .../inputs/method_delete_autorest.go | 66 +++ .../2020-03-01/inputs/method_get_autorest.go | 68 ++++ 
.../method_listbystreamingjob_autorest.go | 215 ++++++++++ .../2020-03-01/inputs/method_test_autorest.go | 79 ++++ .../inputs/method_update_autorest.go | 98 +++++ .../inputs/model_avroserialization.go | 41 ++ .../model_azuresqlreferenceinputdatasource.go | 41 ++ ...resqlreferenceinputdatasourceproperties.go | 16 + .../inputs/model_blobdatasourceproperties.go | 13 + .../model_blobreferenceinputdatasource.go | 41 ++ .../inputs/model_blobstreaminputdatasource.go | 41 ++ ...del_blobstreaminputdatasourceproperties.go | 14 + .../2020-03-01/inputs/model_compression.go | 8 + .../inputs/model_csvserialization.go | 41 ++ .../model_csvserializationproperties.go | 9 + .../inputs/model_diagnosticcondition.go | 10 + .../2020-03-01/inputs/model_diagnostics.go | 8 + .../2020-03-01/inputs/model_errorresponse.go | 9 + .../model_eventhubstreaminputdatasource.go | 41 ++ ...eventhubstreaminputdatasourceproperties.go | 13 + .../model_eventhubv2streaminputdatasource.go | 41 ++ .../model_filereferenceinputdatasource.go | 41 ++ ..._filereferenceinputdatasourceproperties.go | 8 + ...model_gatewaymessagebussourceproperties.go | 8 + ..._gatewaymessagebusstreaminputdatasource.go | 41 ++ .../2020-03-01/inputs/model_input.go | 44 ++ .../inputs/model_inputproperties.go | 56 +++ .../model_iothubstreaminputdatasource.go | 41 ++ ...l_iothubstreaminputdatasourceproperties.go | 12 + .../inputs/model_jsonserialization.go | 41 ++ .../model_jsonserializationproperties.go | 9 + .../inputs/model_parquetserialization.go | 41 ++ .../inputs/model_referenceinputdatasource.go | 64 +++ .../inputs/model_referenceinputproperties.go | 83 ++++ .../inputs/model_resourceteststatus.go | 9 + .../2020-03-01/inputs/model_serialization.go | 72 ++++ .../2020-03-01/inputs/model_storageaccount.go | 9 + .../inputs/model_streaminputdatasource.go | 80 ++++ .../inputs/model_streaminputproperties.go | 83 ++++ .../2020-03-01/inputs/predicates.go | 24 ++ .../2020-03-01/inputs/version.go | 12 + .../2020-03-01/outputs/README.md | 128 ++++++ .../2020-03-01/outputs/client.go | 18 + .../2020-03-01/outputs/constants.go | 124 ++++++ .../2020-03-01/outputs/id_output.go | 137 +++++++ .../2020-03-01/outputs/id_streamingjob.go | 124 ++++++ .../method_createorreplace_autorest.go | 103 +++++ .../outputs/method_delete_autorest.go | 66 +++ .../2020-03-01/outputs/method_get_autorest.go | 68 ++++ .../method_listbystreamingjob_autorest.go | 215 ++++++++++ .../outputs/method_test_autorest.go | 79 ++++ .../outputs/method_update_autorest.go | 98 +++++ .../outputs/model_avroserialization.go | 41 ++ ...odel_azuredatalakestoreoutputdatasource.go | 41 ++ ...datalakestoreoutputdatasourceproperties.go | 16 + .../model_azurefunctionoutputdatasource.go | 41 ++ ...azurefunctionoutputdatasourceproperties.go | 12 + ...el_azuresqldatabasedatasourceproperties.go | 15 + .../model_azuresqldatabaseoutputdatasource.go | 41 ++ .../model_azuresynapsedatasourceproperties.go | 12 + .../model_azuresynapseoutputdatasource.go | 41 ++ .../model_azuretableoutputdatasource.go | 41 ++ ...el_azuretableoutputdatasourceproperties.go | 14 + .../outputs/model_bloboutputdatasource.go | 41 ++ .../model_bloboutputdatasourceproperties.go | 14 + .../outputs/model_csvserialization.go | 41 ++ .../model_csvserializationproperties.go | 9 + .../outputs/model_diagnosticcondition.go | 10 + .../2020-03-01/outputs/model_diagnostics.go | 8 + .../model_documentdboutputdatasource.go | 41 ++ ...el_documentdboutputdatasourceproperties.go | 13 + .../2020-03-01/outputs/model_errorresponse.go | 9 + 
.../outputs/model_eventhuboutputdatasource.go | 41 ++ ...odel_eventhuboutputdatasourceproperties.go | 14 + .../model_eventhubv2outputdatasource.go | 41 ++ ...model_gatewaymessagebusoutputdatasource.go | 41 ++ ...model_gatewaymessagebussourceproperties.go | 8 + .../outputs/model_jsonserialization.go | 41 ++ .../model_jsonserializationproperties.go | 9 + .../2020-03-01/outputs/model_output.go | 11 + .../outputs/model_outputdatasource.go | 144 +++++++ .../outputs/model_outputproperties.go | 55 +++ .../outputs/model_parquetserialization.go | 41 ++ .../outputs/model_powerbioutputdatasource.go | 41 ++ ...model_powerbioutputdatasourceproperties.go | 15 + .../outputs/model_resourceteststatus.go | 9 + .../2020-03-01/outputs/model_serialization.go | 72 ++++ .../model_servicebusqueueoutputdatasource.go | 41 ++ ...rvicebusqueueoutputdatasourceproperties.go | 14 + .../model_servicebustopicoutputdatasource.go | 41 ++ ...rvicebustopicoutputdatasourceproperties.go | 14 + .../outputs/model_storageaccount.go | 9 + .../2020-03-01/outputs/predicates.go | 24 ++ .../2020-03-01/outputs/version.go | 12 + .../2020-03-01/privateendpoints/README.md | 86 ++++ .../2020-03-01/privateendpoints/client.go | 18 + .../2020-03-01/privateendpoints/id_cluster.go | 124 ++++++ .../privateendpoints/id_privateendpoint.go | 137 +++++++ .../method_createorupdate_autorest.go | 103 +++++ .../method_delete_autorest.go | 78 ++++ .../privateendpoints/method_get_autorest.go | 68 ++++ .../method_listbycluster_autorest.go | 186 +++++++++ .../privateendpoints/model_privateendpoint.go | 12 + .../model_privateendpointproperties.go | 9 + .../model_privatelinkconnectionstate.go | 10 + .../model_privatelinkserviceconnection.go | 8 + ..._privatelinkserviceconnectionproperties.go | 11 + .../2020-03-01/privateendpoints/predicates.go | 29 ++ .../2020-03-01/privateendpoints/version.go | 12 + .../2020-03-01/streamingjobs/README.md | 166 ++++++++ .../2020-03-01/streamingjobs/client.go | 18 + .../2020-03-01/streamingjobs/constants.go | 382 ++++++++++++++++++ .../streamingjobs/id_streamingjob.go | 124 ++++++ .../method_createorreplace_autorest.go | 113 ++++++ .../streamingjobs/method_delete_autorest.go | 78 ++++ .../streamingjobs/method_get_autorest.go | 97 +++++ .../streamingjobs/method_list_autorest.go | 216 ++++++++++ .../method_listbyresourcegroup_autorest.go | 216 ++++++++++ .../streamingjobs/method_scale_autorest.go | 79 ++++ .../streamingjobs/method_start_autorest.go | 79 ++++ .../streamingjobs/method_stop_autorest.go | 78 ++++ .../streamingjobs/method_update_autorest.go | 98 +++++ .../model_aggregatefunctionproperties.go | 42 ++ .../streamingjobs/model_avroserialization.go | 41 ++ ...odel_azuredatalakestoreoutputdatasource.go | 41 ++ ...datalakestoreoutputdatasourceproperties.go | 16 + .../model_azurefunctionoutputdatasource.go | 41 ++ ...azurefunctionoutputdatasourceproperties.go | 12 + ...achinelearningwebservicefunctionbinding.go | 41 ++ ...ningwebservicefunctionbindingproperties.go | 12 + ...uremachinelearningwebserviceinputcolumn.go | 10 + ...el_azuremachinelearningwebserviceinputs.go | 9 + ...remachinelearningwebserviceoutputcolumn.go | 9 + ...el_azuresqldatabasedatasourceproperties.go | 15 + .../model_azuresqldatabaseoutputdatasource.go | 41 ++ .../model_azuresqlreferenceinputdatasource.go | 41 ++ ...resqlreferenceinputdatasourceproperties.go | 16 + .../model_azuresynapsedatasourceproperties.go | 12 + .../model_azuresynapseoutputdatasource.go | 41 ++ .../model_azuretableoutputdatasource.go | 41 ++ 
...el_azuretableoutputdatasourceproperties.go | 14 + .../model_blobdatasourceproperties.go | 13 + .../model_bloboutputdatasource.go | 41 ++ .../model_bloboutputdatasourceproperties.go | 14 + .../model_blobreferenceinputdatasource.go | 41 ++ .../model_blobstreaminputdatasource.go | 41 ++ ...del_blobstreaminputdatasourceproperties.go | 14 + .../streamingjobs/model_clusterinfo.go | 8 + .../streamingjobs/model_compression.go | 8 + .../streamingjobs/model_csvserialization.go | 41 ++ .../model_csvserializationproperties.go | 9 + .../model_diagnosticcondition.go | 10 + .../streamingjobs/model_diagnostics.go | 8 + .../model_documentdboutputdatasource.go | 41 ++ ...el_documentdboutputdatasourceproperties.go | 13 + .../model_eventhuboutputdatasource.go | 41 ++ ...odel_eventhuboutputdatasourceproperties.go | 14 + .../model_eventhubstreaminputdatasource.go | 41 ++ ...eventhubstreaminputdatasourceproperties.go | 13 + .../model_eventhubv2outputdatasource.go | 41 ++ .../model_eventhubv2streaminputdatasource.go | 41 ++ .../model_filereferenceinputdatasource.go | 41 ++ ..._filereferenceinputdatasourceproperties.go | 8 + .../streamingjobs/model_function.go | 44 ++ .../streamingjobs/model_functionbinding.go | 56 +++ .../model_functionconfiguration.go | 42 ++ .../streamingjobs/model_functioninput.go | 9 + .../streamingjobs/model_functionoutput.go | 8 + .../streamingjobs/model_functionproperties.go | 56 +++ ...model_gatewaymessagebusoutputdatasource.go | 41 ++ ...model_gatewaymessagebussourceproperties.go | 8 + ..._gatewaymessagebusstreaminputdatasource.go | 41 ++ .../streamingjobs/model_identity.go | 10 + .../2020-03-01/streamingjobs/model_input.go | 44 ++ .../streamingjobs/model_inputproperties.go | 56 +++ .../model_iothubstreaminputdatasource.go | 41 ++ ...l_iothubstreaminputdatasourceproperties.go | 12 + .../model_javascriptfunctionbinding.go | 41 ++ ...del_javascriptfunctionbindingproperties.go | 8 + .../streamingjobs/model_jobstorageaccount.go | 10 + .../streamingjobs/model_jsonserialization.go | 41 ++ .../model_jsonserializationproperties.go | 9 + .../2020-03-01/streamingjobs/model_output.go | 11 + .../streamingjobs/model_outputdatasource.go | 144 +++++++ .../streamingjobs/model_outputproperties.go | 55 +++ .../model_parquetserialization.go | 41 ++ .../model_powerbioutputdatasource.go | 41 ++ ...model_powerbioutputdatasourceproperties.go | 15 + .../model_referenceinputdatasource.go | 64 +++ .../model_referenceinputproperties.go | 83 ++++ .../model_scalarfunctionproperties.go | 42 ++ .../model_scalestreamingjobparameters.go | 8 + .../streamingjobs/model_serialization.go | 72 ++++ .../model_servicebusqueueoutputdatasource.go | 41 ++ ...rvicebusqueueoutputdatasourceproperties.go | 14 + .../model_servicebustopicoutputdatasource.go | 41 ++ ...rvicebustopicoutputdatasourceproperties.go | 14 + .../2020-03-01/streamingjobs/model_sku.go | 8 + .../model_startstreamingjobparameters.go | 27 ++ .../streamingjobs/model_storageaccount.go | 9 + .../streamingjobs/model_streamingjob.go | 14 + .../model_streamingjobproperties.go | 72 ++++ .../model_streaminputdatasource.go | 80 ++++ .../model_streaminputproperties.go | 83 ++++ .../streamingjobs/model_transformation.go | 11 + .../model_transformationproperties.go | 11 + .../2020-03-01/streamingjobs/predicates.go | 29 ++ .../2020-03-01/streamingjobs/version.go | 12 + .../2020-03-01/transformations/README.md | 78 ++++ .../2020-03-01/transformations/client.go | 18 + .../transformations/id_transformation.go | 137 +++++++ .../method_createorreplace_autorest.go | 103 +++++ 
.../transformations/method_get_autorest.go | 68 ++++ .../transformations/method_update_autorest.go | 98 +++++ .../transformations/model_transformation.go | 11 + .../model_transformationproperties.go | 11 + .../2020-03-01/transformations/version.go | 12 + 354 files changed, 14202 insertions(+), 2223 deletions(-) delete mode 100644 internal/services/streamanalytics/parse/cluster.go delete mode 100644 internal/services/streamanalytics/parse/cluster_test.go delete mode 100644 internal/services/streamanalytics/parse/function.go delete mode 100644 internal/services/streamanalytics/parse/function_test.go delete mode 100644 internal/services/streamanalytics/parse/output.go delete mode 100644 internal/services/streamanalytics/parse/output_test.go delete mode 100644 internal/services/streamanalytics/parse/private_endpoint.go delete mode 100644 internal/services/streamanalytics/parse/private_endpoint_test.go delete mode 100644 internal/services/streamanalytics/parse/stream_input.go delete mode 100644 internal/services/streamanalytics/parse/stream_input_test.go delete mode 100644 internal/services/streamanalytics/parse/streaming_job.go delete mode 100644 internal/services/streamanalytics/parse/streaming_job_test.go delete mode 100644 internal/services/streamanalytics/validate/cluster_id.go delete mode 100644 internal/services/streamanalytics/validate/cluster_id_test.go delete mode 100644 internal/services/streamanalytics/validate/function_id.go delete mode 100644 internal/services/streamanalytics/validate/function_id_test.go delete mode 100644 internal/services/streamanalytics/validate/output_id.go delete mode 100644 internal/services/streamanalytics/validate/output_id_test.go delete mode 100644 internal/services/streamanalytics/validate/private_endpoint_id.go delete mode 100644 internal/services/streamanalytics/validate/private_endpoint_id_test.go delete mode 100644 internal/services/streamanalytics/validate/stream_input_id.go delete mode 100644 internal/services/streamanalytics/validate/stream_input_id_test.go delete mode 100644 internal/services/streamanalytics/validate/streaming_job_id.go delete mode 100644 internal/services/streamanalytics/validate/streaming_job_id_test.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/README.md create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/client.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/constants.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/id_cluster.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_createorupdate_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_delete_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_get_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_listbyresourcegroup_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_listbysubscription_autorest.go create mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_liststreamingjobs_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_update_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_cluster.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_clusterjob.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_clusterproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_clustersku.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/predicates.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/version.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/README.md create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/client.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/constants.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/id_function.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/id_streamingjob.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_createorreplace_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_delete_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_get_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_listbystreamingjob_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_retrievedefaultdefinition_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_test_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_update_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_aggregatefunctionproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionbinding.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionbindingproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionbindingretrievalproperties.go create mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionretrievedefaultdefinitionparameters.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebserviceinputcolumn.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebserviceinputs.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebserviceoutputcolumn.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_errorresponse.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_function.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionbinding.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionconfiguration.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functioninput.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionoutput.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionretrievedefaultdefinitionparameters.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionbinding.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionbindingproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionbindingretrievalproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionretrievedefaultdefinitionparameters.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_resourceteststatus.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_scalarfunctionproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/predicates.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/version.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/README.md create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/client.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/constants.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/id_input.go create mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/id_streamingjob.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_createorreplace_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_delete_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_get_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_listbystreamingjob_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_test_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_update_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_avroserialization.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_azuresqlreferenceinputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_azuresqlreferenceinputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobreferenceinputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobstreaminputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobstreaminputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_compression.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_csvserialization.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_csvserializationproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_diagnosticcondition.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_diagnostics.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_errorresponse.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_eventhubstreaminputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_eventhubstreaminputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_eventhubv2streaminputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_filereferenceinputdatasource.go create mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_filereferenceinputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_gatewaymessagebussourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_gatewaymessagebusstreaminputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_input.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_inputproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_iothubstreaminputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_iothubstreaminputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_jsonserialization.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_jsonserializationproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_parquetserialization.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_referenceinputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_referenceinputproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_resourceteststatus.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_serialization.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_storageaccount.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_streaminputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_streaminputproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/predicates.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/version.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/README.md create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/client.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/constants.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/id_output.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/id_streamingjob.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_createorreplace_autorest.go create mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_delete_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_get_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_listbystreamingjob_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_test_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_update_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_avroserialization.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuredatalakestoreoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuredatalakestoreoutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azurefunctionoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azurefunctionoutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresqldatabasedatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresqldatabaseoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresynapsedatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresynapseoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuretableoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuretableoutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_bloboutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_bloboutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_csvserialization.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_csvserializationproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_diagnosticcondition.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_diagnostics.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_documentdboutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_documentdboutputdatasourceproperties.go create mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_errorresponse.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_eventhuboutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_eventhuboutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_eventhubv2outputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_gatewaymessagebusoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_gatewaymessagebussourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_jsonserialization.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_jsonserializationproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_output.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_outputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_outputproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_parquetserialization.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_powerbioutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_powerbioutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_resourceteststatus.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_serialization.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebusqueueoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebusqueueoutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebustopicoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebustopicoutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_storageaccount.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/predicates.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/version.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/README.md create mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/client.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/id_cluster.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/id_privateendpoint.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_createorupdate_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_delete_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_get_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_listbycluster_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privateendpoint.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privateendpointproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privatelinkconnectionstate.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privatelinkserviceconnection.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privatelinkserviceconnectionproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/predicates.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/version.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/README.md create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/client.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/constants.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/id_streamingjob.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_createorreplace_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_delete_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_get_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_list_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_listbyresourcegroup_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_scale_autorest.go create mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_start_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_stop_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_update_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_aggregatefunctionproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_avroserialization.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuredatalakestoreoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuredatalakestoreoutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azurefunctionoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azurefunctionoutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebservicefunctionbinding.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebservicefunctionbindingproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebserviceinputcolumn.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebserviceinputs.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebserviceoutputcolumn.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqldatabasedatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqldatabaseoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqlreferenceinputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqlreferenceinputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresynapsedatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresynapseoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuretableoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuretableoutputdatasourceproperties.go create mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_bloboutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_bloboutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobreferenceinputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobstreaminputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobstreaminputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_clusterinfo.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_compression.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_csvserialization.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_csvserializationproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_diagnosticcondition.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_diagnostics.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_documentdboutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_documentdboutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhuboutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhuboutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubstreaminputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubstreaminputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubv2outputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubv2streaminputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_filereferenceinputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_filereferenceinputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_function.go create mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionbinding.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionconfiguration.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functioninput.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionoutput.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_gatewaymessagebusoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_gatewaymessagebussourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_gatewaymessagebusstreaminputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_identity.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_input.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_inputproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_iothubstreaminputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_iothubstreaminputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_javascriptfunctionbinding.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_javascriptfunctionbindingproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_jobstorageaccount.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_jsonserialization.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_jsonserializationproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_output.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_outputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_outputproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_parquetserialization.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_powerbioutputdatasource.go create mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_powerbioutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_referenceinputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_referenceinputproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_scalarfunctionproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_scalestreamingjobparameters.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_serialization.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebusqueueoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebusqueueoutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebustopicoutputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebustopicoutputdatasourceproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_sku.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_startstreamingjobparameters.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_storageaccount.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streamingjob.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streamingjobproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streaminputdatasource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streaminputproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_transformation.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_transformationproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/predicates.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/version.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/README.md create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/client.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/id_transformation.go create mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/method_createorreplace_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/method_get_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/method_update_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/model_transformation.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/model_transformationproperties.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/version.go diff --git a/internal/services/appservice/source_control_token_resource_test.go b/internal/services/appservice/source_control_token_resource_test.go index 19b6643101bd..f7133fafc33d 100644 --- a/internal/services/appservice/source_control_token_resource_test.go +++ b/internal/services/appservice/source_control_token_resource_test.go @@ -65,7 +65,7 @@ func (r AppServiceGitHubTokenResource) Exists(ctx context.Context, client *clien resp, err := client.AppService.BaseClient.GetSourceControl(ctx, "GitHub") if err != nil { if utils.ResponseWasNotFound(resp.Response) { - return utils.Bool(false), err + return nil, err } return nil, fmt.Errorf("retrieving Source Control GitHub Token") } diff --git a/internal/services/firewall/firewall_network_rule_collection_resource_test.go b/internal/services/firewall/firewall_network_rule_collection_resource_test.go index 1a32bfc91d69..456b5517833f 100644 --- a/internal/services/firewall/firewall_network_rule_collection_resource_test.go +++ b/internal/services/firewall/firewall_network_rule_collection_resource_test.go @@ -395,7 +395,7 @@ func (FirewallNetworkRuleCollectionResource) Destroy(ctx context.Context, client read, err := clients.Firewall.AzureFirewallsClient.Get(ctx, resourceGroup, firewallName) if err != nil { - return utils.Bool(false), err + return nil, err } rules := make([]network.AzureFirewallNetworkRuleCollection, 0) diff --git a/internal/services/streamanalytics/helpers_input.go b/internal/services/streamanalytics/helpers_input.go index 05728c305bd7..0180a29d8890 100644 --- a/internal/services/streamanalytics/helpers_input.go +++ b/internal/services/streamanalytics/helpers_input.go @@ -2,9 +2,8 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/utils" @@ -21,9 +20,9 @@ func schemaStreamAnalyticsStreamInputSerialization() *pluginsdk.Schema { Type: pluginsdk.TypeString, Required: true, ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.TypeAvro), - string(streamanalytics.TypeCsv), - string(streamanalytics.TypeJSON), + string(inputs.EventSerializationTypeAvro), + string(inputs.EventSerializationTypeCsv), + string(inputs.EventSerializationTypeJson), }, false), }, @@ -43,7 +42,7 @@ func schemaStreamAnalyticsStreamInputSerialization() 
*pluginsdk.Schema { Type: pluginsdk.TypeString, Optional: true, ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.EncodingUTF8), + string(inputs.EncodingUTFEight), }, false), }, }, @@ -51,44 +50,43 @@ func schemaStreamAnalyticsStreamInputSerialization() *pluginsdk.Schema { } } -func expandStreamAnalyticsStreamInputSerialization(input []interface{}) (streamanalytics.BasicSerialization, error) { +func expandStreamAnalyticsStreamInputSerialization(input []interface{}) (inputs.Serialization, error) { v := input[0].(map[string]interface{}) - inputType := streamanalytics.Type(v["type"].(string)) + inputType := v["type"].(string) encoding := v["encoding"].(string) fieldDelimiter := v["field_delimiter"].(string) + var props interface{} + switch inputType { - case streamanalytics.TypeAvro: - return streamanalytics.AvroSerialization{ - Type: streamanalytics.TypeAvro, - Properties: map[string]interface{}{}, + case string(inputs.EventSerializationTypeAvro): + return inputs.AvroSerialization{ + Properties: &props, }, nil - case streamanalytics.TypeCsv: + case string(inputs.EventSerializationTypeCsv): if encoding == "" { return nil, fmt.Errorf("`encoding` must be specified when `type` is set to `Csv`") } if fieldDelimiter == "" { return nil, fmt.Errorf("`field_delimiter` must be set when `type` is set to `Csv`") } - return streamanalytics.CsvSerialization{ - Type: streamanalytics.TypeCsv, - CsvSerializationProperties: &streamanalytics.CsvSerializationProperties{ - Encoding: streamanalytics.Encoding(encoding), + return inputs.CsvSerialization{ + Properties: &inputs.CsvSerializationProperties{ + Encoding: utils.ToPtr(inputs.Encoding(encoding)), FieldDelimiter: utils.String(fieldDelimiter), }, }, nil - case streamanalytics.TypeJSON: + case string(inputs.EventSerializationTypeJson): if encoding == "" { return nil, fmt.Errorf("`encoding` must be specified when `type` is set to `Json`") } - return streamanalytics.JSONSerialization{ - Type: streamanalytics.TypeJSON, - JSONSerializationProperties: &streamanalytics.JSONSerializationProperties{ - Encoding: streamanalytics.Encoding(encoding), + return inputs.JsonSerialization{ + Properties: &inputs.JsonSerializationProperties{ + Encoding: utils.ToPtr(inputs.Encoding(encoding)), }, }, nil } @@ -96,44 +94,43 @@ func expandStreamAnalyticsStreamInputSerialization(input []interface{}) (streama return nil, fmt.Errorf("Unsupported Input Type %q", inputType) } -func expandStreamAnalyticsStreamInputSerializationTyped(serialization []Serialization) (streamanalytics.BasicSerialization, error) { +func expandStreamAnalyticsStreamInputSerializationTyped(serialization []Serialization) (inputs.Serialization, error) { v := serialization[0] - inputType := streamanalytics.Type(v.Type) + inputType := v.Type encoding := v.Encoding fieldDelimiter := v.FieldDelimiter + var props interface{} + switch inputType { - case streamanalytics.TypeAvro: - return streamanalytics.AvroSerialization{ - Type: streamanalytics.TypeAvro, - Properties: map[string]interface{}{}, + case string(inputs.EventSerializationTypeAvro): + return inputs.AvroSerialization{ + Properties: &props, }, nil - case streamanalytics.TypeCsv: + case string(inputs.EventSerializationTypeCsv): if encoding == "" { return nil, fmt.Errorf("`encoding` must be specified when `type` is set to `Csv`") } if fieldDelimiter == "" { return nil, fmt.Errorf("`field_delimiter` must be set when `type` is set to `Csv`") } - return streamanalytics.CsvSerialization{ - Type: streamanalytics.TypeCsv, - 
CsvSerializationProperties: &streamanalytics.CsvSerializationProperties{ - Encoding: streamanalytics.Encoding(encoding), + return inputs.CsvSerialization{ + Properties: &inputs.CsvSerializationProperties{ + Encoding: utils.ToPtr(inputs.Encoding(encoding)), FieldDelimiter: utils.String(fieldDelimiter), }, }, nil - case streamanalytics.TypeJSON: + case string(inputs.EventSerializationTypeJson): if encoding == "" { return nil, fmt.Errorf("`encoding` must be specified when `type` is set to `Json`") } - return streamanalytics.JSONSerialization{ - Type: streamanalytics.TypeJSON, - JSONSerializationProperties: &streamanalytics.JSONSerializationProperties{ - Encoding: streamanalytics.Encoding(encoding), + return inputs.JsonSerialization{ + Properties: &inputs.JsonSerializationProperties{ + Encoding: utils.ToPtr(inputs.Encoding(encoding)), }, }, nil } @@ -141,51 +138,13 @@ func expandStreamAnalyticsStreamInputSerializationTyped(serialization []Serializ return nil, fmt.Errorf("Unsupported Input Type %q", inputType) } -func flattenStreamAnalyticsStreamInputSerialization(input streamanalytics.BasicSerialization) []interface{} { - var encoding string - var fieldDelimiter string - var inputType string - - if _, ok := input.AsAvroSerialization(); ok { - inputType = string(streamanalytics.TypeAvro) - } - - if v, ok := input.AsCsvSerialization(); ok { - if props := v.CsvSerializationProperties; props != nil { - encoding = string(props.Encoding) - - if props.FieldDelimiter != nil { - fieldDelimiter = *props.FieldDelimiter - } - } - - inputType = string(streamanalytics.TypeCsv) - } - - if v, ok := input.AsJSONSerialization(); ok { - if props := v.JSONSerializationProperties; props != nil { - encoding = string(props.Encoding) - } - - inputType = string(streamanalytics.TypeJSON) - } - - return []interface{}{ - map[string]interface{}{ - "encoding": encoding, - "type": inputType, - "field_delimiter": fieldDelimiter, - }, - } -} - -func flattenStreamAnalyticsStreamInputSerialization2(input inputs.Serialization) []interface{} { +func flattenStreamAnalyticsStreamInputSerialization(input inputs.Serialization) []interface{} { var encoding string var fieldDelimiter string var inputType string if _, ok := input.(inputs.AvroSerialization); ok { - inputType = string(streamanalytics.TypeAvro) + inputType = string(inputs.EventSerializationTypeAvro) } if csv, ok := input.(inputs.CsvSerialization); ok { @@ -195,7 +154,7 @@ func flattenStreamAnalyticsStreamInputSerialization2(input inputs.Serialization) } if v := props.FieldDelimiter; v != nil { - fieldDelimiter = string(*v) + fieldDelimiter = *v } } @@ -227,7 +186,7 @@ func flattenStreamAnalyticsStreamInputSerializationTyped(input inputs.Serializat var inputType string if _, ok := input.(inputs.AvroSerialization); ok { - inputType = string(streamanalytics.TypeAvro) + inputType = string(inputs.EventSerializationTypeAvro) } if csv, ok := input.(inputs.CsvSerialization); ok { @@ -237,7 +196,7 @@ func flattenStreamAnalyticsStreamInputSerializationTyped(input inputs.Serializat } if v := props.FieldDelimiter; v != nil { - fieldDelimiter = string(*v) + fieldDelimiter = *v } } diff --git a/internal/services/streamanalytics/helpers_output.go b/internal/services/streamanalytics/helpers_output.go index 86384f3dfdc3..61ae3c69474f 100644 --- a/internal/services/streamanalytics/helpers_output.go +++ b/internal/services/streamanalytics/helpers_output.go @@ -2,9 +2,8 @@ package streamanalytics import ( "fmt" - 
"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/utils" @@ -21,10 +20,10 @@ func schemaStreamAnalyticsOutputSerialization() *pluginsdk.Schema { Type: pluginsdk.TypeString, Required: true, ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.TypeAvro), - string(streamanalytics.TypeCsv), - string(streamanalytics.TypeJSON), - string(streamanalytics.TypeParquet), + string(outputs.EventSerializationTypeAvro), + string(outputs.EventSerializationTypeCsv), + string(outputs.EventSerializationTypeJson), + string(outputs.EventSerializationTypeParquet), }, false), }, @@ -44,7 +43,7 @@ func schemaStreamAnalyticsOutputSerialization() *pluginsdk.Schema { Type: pluginsdk.TypeString, Optional: true, ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.EncodingUTF8), + string(outputs.EncodingUTFEight), }, false), }, @@ -52,8 +51,8 @@ func schemaStreamAnalyticsOutputSerialization() *pluginsdk.Schema { Type: pluginsdk.TypeString, Optional: true, ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.JSONOutputSerializationFormatArray), - string(streamanalytics.JSONOutputSerializationFormatLineSeparated), + string(outputs.JsonOutputSerializationFormatArray), + string(outputs.JsonOutputSerializationFormatLineSeparated), }, false), }, }, @@ -61,16 +60,16 @@ func schemaStreamAnalyticsOutputSerialization() *pluginsdk.Schema { } } -func expandStreamAnalyticsOutputSerialization(input []interface{}) (streamanalytics.BasicSerialization, error) { +func expandStreamAnalyticsOutputSerialization(input []interface{}) (outputs.Serialization, error) { v := input[0].(map[string]interface{}) - outputType := streamanalytics.Type(v["type"].(string)) + outputType := v["type"].(string) encoding := v["encoding"].(string) fieldDelimiter := v["field_delimiter"].(string) format := v["format"].(string) switch outputType { - case streamanalytics.TypeAvro: + case string(outputs.EventSerializationTypeAvro): if encoding != "" { return nil, fmt.Errorf("`encoding` cannot be set when `type` is set to `Avro`") } @@ -80,12 +79,12 @@ func expandStreamAnalyticsOutputSerialization(input []interface{}) (streamanalyt if format != "" { return nil, fmt.Errorf("`format` cannot be set when `type` is set to `Avro`") } - return streamanalytics.AvroSerialization{ - Type: streamanalytics.TypeAvro, - Properties: map[string]interface{}{}, + var props interface{} + return outputs.AvroSerialization{ + Properties: &props, }, nil - case streamanalytics.TypeCsv: + case string(outputs.EventSerializationTypeCsv): if encoding == "" { return nil, fmt.Errorf("`encoding` must be specified when `type` is set to `Csv`") } @@ -95,15 +94,14 @@ func expandStreamAnalyticsOutputSerialization(input []interface{}) (streamanalyt if format != "" { return nil, fmt.Errorf("`format` cannot be set when `type` is set to `Csv`") } - return streamanalytics.CsvSerialization{ - Type: streamanalytics.TypeCsv, - CsvSerializationProperties: &streamanalytics.CsvSerializationProperties{ - Encoding: streamanalytics.Encoding(encoding), + return outputs.CsvSerialization{ + Properties: &outputs.CsvSerializationProperties{ + 
Encoding: utils.ToPtr(outputs.Encoding(encoding)), FieldDelimiter: utils.String(fieldDelimiter), }, }, nil - case streamanalytics.TypeJSON: + case string(outputs.EventSerializationTypeJson): if encoding == "" { return nil, fmt.Errorf("`encoding` must be specified when `type` is set to `Json`") } @@ -114,15 +112,14 @@ func expandStreamAnalyticsOutputSerialization(input []interface{}) (streamanalyt return nil, fmt.Errorf("`field_delimiter` cannot be set when `type` is set to `Json`") } - return streamanalytics.JSONSerialization{ - Type: streamanalytics.TypeJSON, - JSONSerializationProperties: &streamanalytics.JSONSerializationProperties{ - Encoding: streamanalytics.Encoding(encoding), - Format: streamanalytics.JSONOutputSerializationFormat(format), + return outputs.JsonSerialization{ + Properties: &outputs.JsonSerializationProperties{ + Encoding: utils.ToPtr(outputs.Encoding(encoding)), + Format: utils.ToPtr(outputs.JsonOutputSerializationFormat(format)), }, }, nil - case streamanalytics.TypeParquet: + case string(outputs.EventSerializationTypeParquet): if encoding != "" { return nil, fmt.Errorf("`encoding` cannot be set when `type` is set to `Parquet`") } @@ -132,9 +129,10 @@ func expandStreamAnalyticsOutputSerialization(input []interface{}) (streamanalyt if format != "" { return nil, fmt.Errorf("`format` cannot be set when `type` is set to `Parquet`") } - return streamanalytics.ParquetSerialization{ - Type: streamanalytics.TypeParquet, - Properties: map[string]interface{}{}, + + var props interface{} + return outputs.ParquetSerialization{ + Properties: &props, }, nil } diff --git a/internal/services/streamanalytics/parse/cluster.go b/internal/services/streamanalytics/parse/cluster.go deleted file mode 100644 index 941ed4c44428..000000000000 --- a/internal/services/streamanalytics/parse/cluster.go +++ /dev/null @@ -1,69 +0,0 @@ -package parse - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "fmt" - "strings" - - "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" -) - -type ClusterId struct { - SubscriptionId string - ResourceGroup string - Name string -} - -func NewClusterID(subscriptionId, resourceGroup, name string) ClusterId { - return ClusterId{ - SubscriptionId: subscriptionId, - ResourceGroup: resourceGroup, - Name: name, - } -} - -func (id ClusterId) String() string { - segments := []string{ - fmt.Sprintf("Name %q", id.Name), - fmt.Sprintf("Resource Group %q", id.ResourceGroupName), - } - segmentsStr := strings.Join(segments, " / ") - return fmt.Sprintf("%s: (%s)", "Cluster", segmentsStr) -} - -func (id ClusterId) ID() string { - fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/clusters/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.Name) -} - -// ClusterID parses a Cluster ID into an ClusterId struct -func ClusterID(input string) (*ClusterId, error) { - id, err := resourceids.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - resourceId := ClusterId{ - SubscriptionId: id.SubscriptionID, - ResourceGroup: id.ResourceGroupName, - } - - if resourceId.SubscriptionId == "" { - return nil, fmt.Errorf("ID was missing the 'subscriptions' element") - } - - if resourceid.ResourceGroupName == "" { - return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") - } - - if resourceId.Name, err = id.PopSegment("clusters"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return 
nil, err - } - - return &resourceId, nil -} diff --git a/internal/services/streamanalytics/parse/cluster_test.go b/internal/services/streamanalytics/parse/cluster_test.go deleted file mode 100644 index 3894e799f976..000000000000 --- a/internal/services/streamanalytics/parse/cluster_test.go +++ /dev/null @@ -1,112 +0,0 @@ -package parse - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "testing" - - "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" -) - -var _ resourceids.Id = ClusterId{} - -func TestClusterIDFormatter(t *testing.T) { - actual := NewClusterID("12345678-1234-9876-4563-123456789012", "resGroup1", "cluster1").ID() - expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/cluster1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestClusterID(t *testing.T) { - testData := []struct { - Input string - Error bool - Expected *ClusterId - }{ - - { - // empty - Input: "", - Error: true, - }, - - { - // missing SubscriptionId - Input: "/", - Error: true, - }, - - { - // missing value for SubscriptionId - Input: "/subscriptions/", - Error: true, - }, - - { - // missing ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", - Error: true, - }, - - { - // missing value for ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", - Error: true, - }, - - { - // missing Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/", - Error: true, - }, - - { - // missing value for Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/", - Error: true, - }, - - { - // valid - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/cluster1", - Expected: &ClusterId{ - SubscriptionId: "12345678-1234-9876-4563-123456789012", - ResourceGroup: "resGroup1", - Name: "cluster1", - }, - }, - - { - // upper-cased - Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.STREAMANALYTICS/CLUSTERS/CLUSTER1", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Input) - - actual, err := ClusterID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expect a value but got an error: %s", err) - } - if v.Error { - t.Fatal("Expect an error but didn't get one") - } - - if actual.SubscriptionId != v.Expected.SubscriptionId { - t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - } -} diff --git a/internal/services/streamanalytics/parse/function.go b/internal/services/streamanalytics/parse/function.go deleted file mode 100644 index 1dad541213e4..000000000000 --- a/internal/services/streamanalytics/parse/function.go +++ /dev/null @@ -1,75 +0,0 @@ -package parse - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "fmt" - "strings" - - 
"github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" -) - -type FunctionId struct { - SubscriptionId string - ResourceGroup string - StreamingjobName string - Name string -} - -func NewFunctionID(subscriptionId, resourceGroup, streamingjobName, name string) FunctionId { - return FunctionId{ - SubscriptionId: subscriptionId, - ResourceGroup: resourceGroup, - StreamingjobName: streamingjobName, - Name: name, - } -} - -func (id FunctionId) String() string { - segments := []string{ - fmt.Sprintf("Name %q", id.Name), - fmt.Sprintf("Streamingjob Name %q", id.JobName), - fmt.Sprintf("Resource Group %q", id.ResourceGroupName), - } - segmentsStr := strings.Join(segments, " / ") - return fmt.Sprintf("%s: (%s)", "Function", segmentsStr) -} - -func (id FunctionId) ID() string { - fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingjobs/%s/functions/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.JobName, id.Name) -} - -// FunctionID parses a Function ID into an FunctionId struct -func FunctionID(input string) (*FunctionId, error) { - id, err := resourceids.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - resourceId := FunctionId{ - SubscriptionId: id.SubscriptionID, - ResourceGroup: id.ResourceGroupName, - } - - if resourceId.SubscriptionId == "" { - return nil, fmt.Errorf("ID was missing the 'subscriptions' element") - } - - if resourceid.ResourceGroupName == "" { - return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") - } - - if resourceid.JobName, err = id.PopSegment("streamingjobs"); err != nil { - return nil, err - } - if resourceId.Name, err = id.PopSegment("functions"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &resourceId, nil -} diff --git a/internal/services/streamanalytics/parse/function_test.go b/internal/services/streamanalytics/parse/function_test.go deleted file mode 100644 index 7770dd5c2f3e..000000000000 --- a/internal/services/streamanalytics/parse/function_test.go +++ /dev/null @@ -1,128 +0,0 @@ -package parse - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "testing" - - "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" -) - -var _ resourceids.Id = FunctionId{} - -func TestFunctionIDFormatter(t *testing.T) { - actual := NewFunctionID("12345678-1234-9876-4563-123456789012", "resGroup1", "streamingJob1", "function1").ID() - expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/functions/function1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestFunctionID(t *testing.T) { - testData := []struct { - Input string - Error bool - Expected *FunctionId - }{ - - { - // empty - Input: "", - Error: true, - }, - - { - // missing SubscriptionId - Input: "/", - Error: true, - }, - - { - // missing value for SubscriptionId - Input: "/subscriptions/", - Error: true, - }, - - { - // missing ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", - Error: true, - }, - - { - // missing value for ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", - Error: true, - }, - - { - // missing StreamingjobName - Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/", - Error: true, - }, - - { - // missing value for StreamingjobName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/", - Error: true, - }, - - { - // missing Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/", - Error: true, - }, - - { - // missing value for Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/functions/", - Error: true, - }, - - { - // valid - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/functions/function1", - Expected: &FunctionId{ - SubscriptionId: "12345678-1234-9876-4563-123456789012", - ResourceGroup: "resGroup1", - StreamingjobName: "streamingJob1", - Name: "function1", - }, - }, - - { - // upper-cased - Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.STREAMANALYTICS/STREAMINGJOBS/STREAMINGJOB1/FUNCTIONS/FUNCTION1", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Input) - - actual, err := FunctionID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expect a value but got an error: %s", err) - } - if v.Error { - t.Fatal("Expect an error but didn't get one") - } - - if actual.SubscriptionId != v.Expected.SubscriptionId { - t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - if actual.StreamingjobName != v.Expected.StreamingjobName { - t.Fatalf("Expected %q but got %q for StreamingjobName", v.Expected.StreamingjobName, actual.StreamingjobName) - } - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - } -} diff --git a/internal/services/streamanalytics/parse/output.go b/internal/services/streamanalytics/parse/output.go deleted file mode 100644 index bf0e9e28e987..000000000000 --- a/internal/services/streamanalytics/parse/output.go +++ /dev/null @@ -1,75 +0,0 @@ -package parse - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "fmt" - "strings" - - "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" -) - -type OutputId struct { - SubscriptionId string - ResourceGroup string - StreamingjobName string - Name string -} - -func NewOutputID(subscriptionId, resourceGroup, streamingjobName, name string) OutputId { - return OutputId{ - SubscriptionId: subscriptionId, - ResourceGroup: resourceGroup, - StreamingjobName: streamingjobName, - Name: name, - } -} - -func (id OutputId) String() string { - segments := []string{ - fmt.Sprintf("Name %q", id.Name), - fmt.Sprintf("Streamingjob Name %q", id.JobName), - fmt.Sprintf("Resource Group %q", id.ResourceGroupName), - } - segmentsStr := strings.Join(segments, " / ") - return fmt.Sprintf("%s: (%s)", "Output", segmentsStr) -} - -func (id OutputId) ID() string { - fmtString := 
"/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingjobs/%s/outputs/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.JobName, id.Name) -} - -// OutputID parses a Output ID into an OutputId struct -func OutputID(input string) (*OutputId, error) { - id, err := resourceids.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - resourceId := OutputId{ - SubscriptionId: id.SubscriptionID, - ResourceGroup: id.ResourceGroupName, - } - - if resourceId.SubscriptionId == "" { - return nil, fmt.Errorf("ID was missing the 'subscriptions' element") - } - - if resourceid.ResourceGroupName == "" { - return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") - } - - if resourceid.JobName, err = id.PopSegment("streamingjobs"); err != nil { - return nil, err - } - if resourceId.Name, err = id.PopSegment("outputs"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &resourceId, nil -} diff --git a/internal/services/streamanalytics/parse/output_test.go b/internal/services/streamanalytics/parse/output_test.go deleted file mode 100644 index 7fca887997ce..000000000000 --- a/internal/services/streamanalytics/parse/output_test.go +++ /dev/null @@ -1,128 +0,0 @@ -package parse - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "testing" - - "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" -) - -var _ resourceids.Id = OutputId{} - -func TestOutputIDFormatter(t *testing.T) { - actual := NewOutputID("12345678-1234-9876-4563-123456789012", "resGroup1", "streamingJob1", "output1").ID() - expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/outputs/output1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestOutputID(t *testing.T) { - testData := []struct { - Input string - Error bool - Expected *OutputId - }{ - - { - // empty - Input: "", - Error: true, - }, - - { - // missing SubscriptionId - Input: "/", - Error: true, - }, - - { - // missing value for SubscriptionId - Input: "/subscriptions/", - Error: true, - }, - - { - // missing ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", - Error: true, - }, - - { - // missing value for ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", - Error: true, - }, - - { - // missing StreamingjobName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/", - Error: true, - }, - - { - // missing value for StreamingjobName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/", - Error: true, - }, - - { - // missing Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/", - Error: true, - }, - - { - // missing value for Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/outputs/", - Error: true, - }, - - { - // valid - Input: 
"/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/outputs/output1", - Expected: &OutputId{ - SubscriptionId: "12345678-1234-9876-4563-123456789012", - ResourceGroup: "resGroup1", - StreamingjobName: "streamingJob1", - Name: "output1", - }, - }, - - { - // upper-cased - Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.STREAMANALYTICS/STREAMINGJOBS/STREAMINGJOB1/OUTPUTS/OUTPUT1", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Input) - - actual, err := OutputID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expect a value but got an error: %s", err) - } - if v.Error { - t.Fatal("Expect an error but didn't get one") - } - - if actual.SubscriptionId != v.Expected.SubscriptionId { - t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - if actual.StreamingjobName != v.Expected.StreamingjobName { - t.Fatalf("Expected %q but got %q for StreamingjobName", v.Expected.StreamingjobName, actual.StreamingjobName) - } - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - } -} diff --git a/internal/services/streamanalytics/parse/private_endpoint.go b/internal/services/streamanalytics/parse/private_endpoint.go deleted file mode 100644 index 80bdad2dbdb9..000000000000 --- a/internal/services/streamanalytics/parse/private_endpoint.go +++ /dev/null @@ -1,75 +0,0 @@ -package parse - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "fmt" - "strings" - - "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" -) - -type PrivateEndpointId struct { - SubscriptionId string - ResourceGroup string - ClusterName string - Name string -} - -func NewPrivateEndpointID(subscriptionId, resourceGroup, clusterName, name string) PrivateEndpointId { - return PrivateEndpointId{ - SubscriptionId: subscriptionId, - ResourceGroup: resourceGroup, - ClusterName: clusterName, - Name: name, - } -} - -func (id PrivateEndpointId) String() string { - segments := []string{ - fmt.Sprintf("Name %q", id.Name), - fmt.Sprintf("Cluster Name %q", id.ClusterName), - fmt.Sprintf("Resource Group %q", id.ResourceGroupName), - } - segmentsStr := strings.Join(segments, " / ") - return fmt.Sprintf("%s: (%s)", "Private Endpoint", segmentsStr) -} - -func (id PrivateEndpointId) ID() string { - fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/clusters/%s/privateEndpoints/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.ClusterName, id.Name) -} - -// PrivateEndpointID parses a PrivateEndpoint ID into an PrivateEndpointId struct -func PrivateEndpointID(input string) (*PrivateEndpointId, error) { - id, err := resourceids.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - resourceId := PrivateEndpointId{ - SubscriptionId: id.SubscriptionID, - ResourceGroup: id.ResourceGroupName, - } - - if resourceId.SubscriptionId == "" { - return nil, fmt.Errorf("ID was missing the 'subscriptions' element") - } - - if resourceid.ResourceGroupName == "" { - return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") - } - - if 
resourceId.ClusterName, err = id.PopSegment("clusters"); err != nil { - return nil, err - } - if resourceId.Name, err = id.PopSegment("privateEndpoints"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &resourceId, nil -} diff --git a/internal/services/streamanalytics/parse/private_endpoint_test.go b/internal/services/streamanalytics/parse/private_endpoint_test.go deleted file mode 100644 index 24315e140c34..000000000000 --- a/internal/services/streamanalytics/parse/private_endpoint_test.go +++ /dev/null @@ -1,128 +0,0 @@ -package parse - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "testing" - - "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" -) - -var _ resourceids.Id = PrivateEndpointId{} - -func TestPrivateEndpointIDFormatter(t *testing.T) { - actual := NewPrivateEndpointID("12345678-1234-9876-4563-123456789012", "resGroup1", "cluster1", "endpoint1").ID() - expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/cluster1/privateEndpoints/endpoint1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestPrivateEndpointID(t *testing.T) { - testData := []struct { - Input string - Error bool - Expected *PrivateEndpointId - }{ - - { - // empty - Input: "", - Error: true, - }, - - { - // missing SubscriptionId - Input: "/", - Error: true, - }, - - { - // missing value for SubscriptionId - Input: "/subscriptions/", - Error: true, - }, - - { - // missing ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", - Error: true, - }, - - { - // missing value for ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", - Error: true, - }, - - { - // missing ClusterName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/", - Error: true, - }, - - { - // missing value for ClusterName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/", - Error: true, - }, - - { - // missing Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/cluster1/", - Error: true, - }, - - { - // missing value for Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/cluster1/privateEndpoints/", - Error: true, - }, - - { - // valid - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/cluster1/privateEndpoints/endpoint1", - Expected: &PrivateEndpointId{ - SubscriptionId: "12345678-1234-9876-4563-123456789012", - ResourceGroup: "resGroup1", - ClusterName: "cluster1", - Name: "endpoint1", - }, - }, - - { - // upper-cased - Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.STREAMANALYTICS/CLUSTERS/CLUSTER1/PRIVATEENDPOINTS/ENDPOINT1", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Input) - - actual, err := PrivateEndpointID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expect a value but got an error: %s", err) - } - if v.Error { - t.Fatal("Expect an error but didn't get one") - } - 
- if actual.SubscriptionId != v.Expected.SubscriptionId { - t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - if actual.ClusterName != v.Expected.ClusterName { - t.Fatalf("Expected %q but got %q for ClusterName", v.Expected.ClusterName, actual.ClusterName) - } - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - } -} diff --git a/internal/services/streamanalytics/parse/stream_input.go b/internal/services/streamanalytics/parse/stream_input.go deleted file mode 100644 index dd5fd5b29df8..000000000000 --- a/internal/services/streamanalytics/parse/stream_input.go +++ /dev/null @@ -1,75 +0,0 @@ -package parse - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "fmt" - "strings" - - "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" -) - -type StreamInputId struct { - SubscriptionId string - ResourceGroup string - StreamingjobName string - InputName string -} - -func NewStreamInputID(subscriptionId, resourceGroup, streamingjobName, inputName string) StreamInputId { - return StreamInputId{ - SubscriptionId: subscriptionId, - ResourceGroup: resourceGroup, - StreamingjobName: streamingjobName, - InputName: inputName, - } -} - -func (id StreamInputId) String() string { - segments := []string{ - fmt.Sprintf("Input Name %q", id.InputName), - fmt.Sprintf("Streamingjob Name %q", id.StreamingjobName), - fmt.Sprintf("Resource Group %q", id.ResourceGroup), - } - segmentsStr := strings.Join(segments, " / ") - return fmt.Sprintf("%s: (%s)", "Stream Input", segmentsStr) -} - -func (id StreamInputId) ID() string { - fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingjobs/%s/inputs/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.StreamingjobName, id.InputName) -} - -// StreamInputID parses a StreamInput ID into an StreamInputId struct -func StreamInputID(input string) (*StreamInputId, error) { - id, err := resourceids.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - resourceId := StreamInputId{ - SubscriptionId: id.SubscriptionID, - ResourceGroup: id.ResourceGroup, - } - - if resourceId.SubscriptionId == "" { - return nil, fmt.Errorf("ID was missing the 'subscriptions' element") - } - - if resourceId.ResourceGroup == "" { - return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") - } - - if resourceId.StreamingjobName, err = id.PopSegment("streamingjobs"); err != nil { - return nil, err - } - if resourceId.InputName, err = id.PopSegment("inputs"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &resourceId, nil -} diff --git a/internal/services/streamanalytics/parse/stream_input_test.go b/internal/services/streamanalytics/parse/stream_input_test.go deleted file mode 100644 index 2e608059198f..000000000000 --- a/internal/services/streamanalytics/parse/stream_input_test.go +++ /dev/null @@ -1,128 +0,0 @@ -package parse - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "testing" - - "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" -) - -var _ resourceids.Id = StreamInputId{} - -func TestStreamInputIDFormatter(t 
*testing.T) { - actual := NewStreamInputID("12345678-1234-9876-4563-123456789012", "resGroup1", "streamingJob1", "streamInput1").ID() - expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/inputs/streamInput1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestStreamInputID(t *testing.T) { - testData := []struct { - Input string - Error bool - Expected *StreamInputId - }{ - - { - // empty - Input: "", - Error: true, - }, - - { - // missing SubscriptionId - Input: "/", - Error: true, - }, - - { - // missing value for SubscriptionId - Input: "/subscriptions/", - Error: true, - }, - - { - // missing ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", - Error: true, - }, - - { - // missing value for ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", - Error: true, - }, - - { - // missing StreamingjobName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/", - Error: true, - }, - - { - // missing value for StreamingjobName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/", - Error: true, - }, - - { - // missing InputName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/", - Error: true, - }, - - { - // missing value for InputName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/inputs/", - Error: true, - }, - - { - // valid - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/inputs/streamInput1", - Expected: &StreamInputId{ - SubscriptionId: "12345678-1234-9876-4563-123456789012", - ResourceGroup: "resGroup1", - StreamingjobName: "streamingJob1", - InputName: "streamInput1", - }, - }, - - { - // upper-cased - Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.STREAMANALYTICS/STREAMINGJOBS/STREAMINGJOB1/INPUTS/STREAMINPUT1", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Input) - - actual, err := StreamInputID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expect a value but got an error: %s", err) - } - if v.Error { - t.Fatal("Expect an error but didn't get one") - } - - if actual.SubscriptionId != v.Expected.SubscriptionId { - t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) - } - if actual.StreamingjobName != v.Expected.StreamingjobName { - t.Fatalf("Expected %q but got %q for StreamingjobName", v.Expected.StreamingjobName, actual.StreamingjobName) - } - if actual.InputName != v.Expected.InputName { - t.Fatalf("Expected %q but got %q for InputName", v.Expected.InputName, actual.InputName) - } - } -} diff --git a/internal/services/streamanalytics/parse/streaming_job.go b/internal/services/streamanalytics/parse/streaming_job.go deleted file mode 100644 index 9ec8cb8a98d8..000000000000 --- 
a/internal/services/streamanalytics/parse/streaming_job.go +++ /dev/null @@ -1,69 +0,0 @@ -package parse - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "fmt" - "strings" - - "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" -) - -type StreamingJobId struct { - SubscriptionId string - ResourceGroup string - Name string -} - -func NewStreamingJobID(subscriptionId, resourceGroup, name string) StreamingJobId { - return StreamingJobId{ - SubscriptionId: subscriptionId, - ResourceGroup: resourceGroup, - Name: name, - } -} - -func (id StreamingJobId) String() string { - segments := []string{ - fmt.Sprintf("Name %q", id.Name), - fmt.Sprintf("Resource Group %q", id.ResourceGroup), - } - segmentsStr := strings.Join(segments, " / ") - return fmt.Sprintf("%s: (%s)", "Streaming Job", segmentsStr) -} - -func (id StreamingJobId) ID() string { - fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingjobs/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.Name) -} - -// StreamingJobID parses a StreamingJob ID into an StreamingJobId struct -func StreamingJobID(input string) (*StreamingJobId, error) { - id, err := resourceids.ParseAzureResourceID(input) - if err != nil { - return nil, err - } - - resourceId := StreamingJobId{ - SubscriptionId: id.SubscriptionID, - ResourceGroup: id.ResourceGroup, - } - - if resourceId.SubscriptionId == "" { - return nil, fmt.Errorf("ID was missing the 'subscriptions' element") - } - - if resourceId.ResourceGroup == "" { - return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") - } - - if resourceId.Name, err = id.PopSegment("streamingjobs"); err != nil { - return nil, err - } - - if err := id.ValidateNoEmptySegments(input); err != nil { - return nil, err - } - - return &resourceId, nil -} diff --git a/internal/services/streamanalytics/parse/streaming_job_schedule.go b/internal/services/streamanalytics/parse/streaming_job_schedule.go index 808783096c6d..754ab8e9d6a3 100644 --- a/internal/services/streamanalytics/parse/streaming_job_schedule.go +++ b/internal/services/streamanalytics/parse/streaming_job_schedule.go @@ -28,8 +28,8 @@ func NewStreamingJobScheduleID(subscriptionId, resourceGroup, streamingjobName, func (id StreamingJobScheduleId) String() string { segments := []string{ fmt.Sprintf("Schedule Name %q", id.ScheduleName), - fmt.Sprintf("Streamingjob Name %q", id.JobName), - fmt.Sprintf("Resource Group %q", id.ResourceGroupName), + fmt.Sprintf("Streamingjob Name %q", id.StreamingjobName), + fmt.Sprintf("Resource Group %q", id.ResourceGroup), } segmentsStr := strings.Join(segments, " / ") return fmt.Sprintf("%s: (%s)", "Streaming Job Schedule", segmentsStr) @@ -37,7 +37,7 @@ func (id StreamingJobScheduleId) String() string { func (id StreamingJobScheduleId) ID() string { fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingjobs/%s/schedule/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.JobName, id.ScheduleName) + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroup, id.StreamingjobName, id.ScheduleName) } // StreamingJobScheduleID parses a StreamingJobSchedule ID into an StreamingJobScheduleId struct @@ -49,18 +49,18 @@ func StreamingJobScheduleID(input string) (*StreamingJobScheduleId, error) { resourceId := StreamingJobScheduleId{ SubscriptionId: id.SubscriptionID, - ResourceGroup: id.ResourceGroupName, + 
ResourceGroup: id.ResourceGroup, } if resourceId.SubscriptionId == "" { return nil, fmt.Errorf("ID was missing the 'subscriptions' element") } - if resourceid.ResourceGroupName == "" { + if resourceId.ResourceGroup == "" { return nil, fmt.Errorf("ID was missing the 'resourceGroups' element") } - if resourceid.JobName, err = id.PopSegment("streamingjobs"); err != nil { + if resourceId.StreamingjobName, err = id.PopSegment("streamingjobs"); err != nil { return nil, err } if resourceId.ScheduleName, err = id.PopSegment("schedule"); err != nil { diff --git a/internal/services/streamanalytics/parse/streaming_job_test.go b/internal/services/streamanalytics/parse/streaming_job_test.go deleted file mode 100644 index 513a01c034d4..000000000000 --- a/internal/services/streamanalytics/parse/streaming_job_test.go +++ /dev/null @@ -1,112 +0,0 @@ -package parse - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "testing" - - "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" -) - -var _ resourceids.Id = StreamingJobId{} - -func TestStreamingJobIDFormatter(t *testing.T) { - actual := NewStreamingJobID("12345678-1234-9876-4563-123456789012", "resGroup1", "streamingJob1").ID() - expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1" - if actual != expected { - t.Fatalf("Expected %q but got %q", expected, actual) - } -} - -func TestStreamingJobID(t *testing.T) { - testData := []struct { - Input string - Error bool - Expected *StreamingJobId - }{ - - { - // empty - Input: "", - Error: true, - }, - - { - // missing SubscriptionId - Input: "/", - Error: true, - }, - - { - // missing value for SubscriptionId - Input: "/subscriptions/", - Error: true, - }, - - { - // missing ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", - Error: true, - }, - - { - // missing value for ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", - Error: true, - }, - - { - // missing Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/", - Error: true, - }, - - { - // missing value for Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/", - Error: true, - }, - - { - // valid - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1", - Expected: &StreamingJobId{ - SubscriptionId: "12345678-1234-9876-4563-123456789012", - ResourceGroup: "resGroup1", - Name: "streamingJob1", - }, - }, - - { - // upper-cased - Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.STREAMANALYTICS/STREAMINGJOBS/STREAMINGJOB1", - Error: true, - }, - } - - for _, v := range testData { - t.Logf("[DEBUG] Testing %q", v.Input) - - actual, err := StreamingJobID(v.Input) - if err != nil { - if v.Error { - continue - } - - t.Fatalf("Expect a value but got an error: %s", err) - } - if v.Error { - t.Fatal("Expect an error but didn't get one") - } - - if actual.SubscriptionId != v.Expected.SubscriptionId { - t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId) - } - if actual.ResourceGroup != v.Expected.ResourceGroup { - t.Fatalf("Expected %q but got %q for ResourceGroup", 
v.Expected.ResourceGroup, actual.ResourceGroup) - } - if actual.Name != v.Expected.Name { - t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) - } - } -} diff --git a/internal/services/streamanalytics/registration.go b/internal/services/streamanalytics/registration.go index 591174c2ee22..9b200e71f712 100644 --- a/internal/services/streamanalytics/registration.go +++ b/internal/services/streamanalytics/registration.go @@ -29,7 +29,7 @@ func (r Registration) Resources() []sdk.Resource { OutputTableResource{}, OutputPowerBIResource{}, OutputCosmosDBResource{}, - //StreamInputEventHubV2Resource{}, + StreamInputEventHubV2Resource{}, } } @@ -64,10 +64,10 @@ func (r Registration) SupportedResources() map[string]*pluginsdk.Resource { "azurerm_stream_analytics_output_servicebus_queue": resourceStreamAnalyticsOutputServiceBusQueue(), "azurerm_stream_analytics_output_servicebus_topic": resourceStreamAnalyticsOutputServiceBusTopic(), "azurerm_stream_analytics_output_synapse": resourceStreamAnalyticsOutputSynapse(), - //"azurerm_stream_analytics_reference_input_blob": resourceStreamAnalyticsReferenceInputBlob(), - //"azurerm_stream_analytics_reference_input_mssql": resourceStreamAnalyticsReferenceMsSql(), - "azurerm_stream_analytics_stream_input_blob": resourceStreamAnalyticsStreamInputBlob(), - //"azurerm_stream_analytics_stream_input_eventhub": resourceStreamAnalyticsStreamInputEventHub(), - //"azurerm_stream_analytics_stream_input_iothub": resourceStreamAnalyticsStreamInputIoTHub(), + "azurerm_stream_analytics_reference_input_blob": resourceStreamAnalyticsReferenceInputBlob(), + "azurerm_stream_analytics_reference_input_mssql": resourceStreamAnalyticsReferenceMsSql(), + "azurerm_stream_analytics_stream_input_blob": resourceStreamAnalyticsStreamInputBlob(), + "azurerm_stream_analytics_stream_input_eventhub": resourceStreamAnalyticsStreamInputEventHub(), + "azurerm_stream_analytics_stream_input_iothub": resourceStreamAnalyticsStreamInputIoTHub(), } } diff --git a/internal/services/streamanalytics/resourceids.go b/internal/services/streamanalytics/resourceids.go index 82d9f3b0e882..545dafbb8de3 100644 --- a/internal/services/streamanalytics/resourceids.go +++ b/internal/services/streamanalytics/resourceids.go @@ -1,9 +1,3 @@ package streamanalytics -//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Function -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/functions/function1 -//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=StreamingJob -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1 //go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=StreamingJobSchedule -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/schedule/default -//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=StreamInput -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/inputs/streamInput1 -//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Output -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/outputs/output1 
-//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=Cluster -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/cluster1 -//go:generate go run ../../tools/generator-resource-id/main.go -path=./ -name=PrivateEndpoint -id=/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/cluster1/privateEndpoints/endpoint1 diff --git a/internal/services/streamanalytics/stream_analytics_cluster_resource.go b/internal/services/streamanalytics/stream_analytics_cluster_resource.go index f5955eafa8c4..eac2ba33f7ab 100644 --- a/internal/services/streamanalytics/stream_analytics_cluster_resource.go +++ b/internal/services/streamanalytics/stream_analytics_cluster_resource.go @@ -3,14 +3,13 @@ package streamanalytics import ( "context" "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters" "time" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-helpers/resourcemanager/tags" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/utils" @@ -37,7 +36,7 @@ func (r ClusterResource) ResourceType() string { } func (r ClusterResource) IDValidationFunc() pluginsdk.SchemaValidateFunc { - return validate.ClusterID + return clusters.ValidateClusterID } func (r ClusterResource) Arguments() map[string]*pluginsdk.Schema { diff --git a/internal/services/streamanalytics/stream_analytics_cluster_resource_test.go b/internal/services/streamanalytics/stream_analytics_cluster_resource_test.go index a5383bba5ac4..4431ca397372 100644 --- a/internal/services/streamanalytics/stream_analytics_cluster_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_cluster_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" diff --git a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go index 410b5ebdd89e..a53bbb2776d2 100644 --- a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go +++ b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go @@ -2,12 +2,12 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "log" "time" 
"github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" @@ -48,7 +48,7 @@ func resourceStreamAnalyticsFunctionUDA() *pluginsdk.Resource { Type: pluginsdk.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.StreamingJobID, + ValidateFunc: streamingjobs.ValidateStreamingJobID, }, "input": { diff --git a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource_test.go b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource_test.go index 3ee37f911af2..423d76441e19 100644 --- a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" diff --git a/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go b/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go index 5637ee5691d7..e09a4f2c4e11 100644 --- a/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go +++ b/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go @@ -2,12 +2,12 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions" "log" "time" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" diff --git a/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource_test.go b/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource_test.go index c661039a0963..03a7d94b54de 100644 --- a/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource_test.go @@ -5,6 +5,8 @@ import ( "fmt" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" 
"github.com/hashicorp/terraform-provider-azurerm/internal/clients" @@ -94,7 +96,7 @@ func (r StreamAnalyticsFunctionJavaScriptUDFResource) Exists(ctx context.Context return nil, err } - resp, err := client.StreamAnalytics.Functionsclient.Get(ctx, id) + resp, err := client.StreamAnalytics.FunctionsClient.Get(ctx, *id) if err != nil { if response.WasNotFound(resp.HttpResponse) { return utils.Bool(false), nil diff --git a/internal/services/streamanalytics/stream_analytics_job_data_source.go b/internal/services/streamanalytics/stream_analytics_job_data_source.go index bfb9575ad915..513ac760c394 100644 --- a/internal/services/streamanalytics/stream_analytics_job_data_source.go +++ b/internal/services/streamanalytics/stream_analytics_job_data_source.go @@ -2,12 +2,12 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "time" + "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-helpers/resourcemanager/location" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" diff --git a/internal/services/streamanalytics/stream_analytics_job_resource.go b/internal/services/streamanalytics/stream_analytics_job_resource.go index a5c9bc3d8643..d76a531fa1fe 100644 --- a/internal/services/streamanalytics/stream_analytics_job_resource.go +++ b/internal/services/streamanalytics/stream_analytics_job_resource.go @@ -2,15 +2,15 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations" "log" "time" + "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-helpers/resourcemanager/identity" "github.com/hashicorp/go-azure-helpers/resourcemanager/tags" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-azurerm/helpers/azure" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" @@ -386,7 +386,7 @@ func resourceStreamAnalyticsJobRead(d *pluginsdk.ResourceData, meta interface{}) d.Set("events_late_arrival_max_delay_in_seconds", lateArrival) var maxDelay int64 - if v := props.EventsLateArrivalMaxDelayInSeconds; v != nil { + if v := props.EventsOutOfOrderMaxDelayInSeconds; v != nil { maxDelay = *v } d.Set("events_out_of_order_max_delay_in_seconds", maxDelay) @@ -403,24 +403,6 @@ func resourceStreamAnalyticsJobRead(d *pluginsdk.ResourceData, meta interface{}) } d.Set("output_error_policy", outputPolicy) - lastOutputTime := "" - if v := props.LastOutputEventTime; v != nil { - lastOutputTime = *v - } - d.Set("last_output_time", lastOutputTime) - - startTime := "" - if v := props.OutputStartTime; v != nil { - startTime = *v - } - d.Set("start_time", startTime) - - 
startMode := "" - if v := props.OutputStartMode; v != nil { - startMode = string(*v) - } - d.Set("start_mode", startMode) - cluster := "" if props.Cluster != nil && props.Cluster.Id != nil { cluster = *props.Cluster.Id diff --git a/internal/services/streamanalytics/stream_analytics_job_resource_test.go b/internal/services/streamanalytics/stream_analytics_job_resource_test.go index 0ee008d2ef03..131dcbe597c3 100644 --- a/internal/services/streamanalytics/stream_analytics_job_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_job_resource_test.go @@ -5,6 +5,8 @@ import ( "fmt" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" @@ -153,10 +155,11 @@ func (r StreamAnalyticsJobResource) Exists(ctx context.Context, client *clients. return nil, err } - resp, err := client.StreamAnalytics.JobsClient.Get(ctx, id.ResourceGroupName, id.Name, "") + var opts streamingjobs.GetOperationOptions + resp, err := client.StreamAnalytics.JobsClient.Get(ctx, *id, opts) if err != nil { if response.WasNotFound(resp.HttpResponse) { - return utils.Bool(false), err + return nil, err } return nil, fmt.Errorf("retrieving %s: %+v", *id, err) } diff --git a/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go b/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go index 4119c22a460d..931b83f803d3 100644 --- a/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go +++ b/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go @@ -3,12 +3,11 @@ package streamanalytics import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" - "github.com/hashicorp/terraform-provider-azurerm/utils" "time" "github.com/Azure/go-autorest/autorest/date" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "github.com/hashicorp/terraform-provider-azurerm/helpers/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/locks" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" @@ -16,6 +15,7 @@ import ( streamAnalyticsValidate "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" + "github.com/hashicorp/terraform-provider-azurerm/utils" ) type JobScheduleResource struct{} @@ -33,7 +33,7 @@ func (r JobScheduleResource) Arguments() map[string]*pluginsdk.Schema { Type: pluginsdk.TypeString, Required: true, ForceNew: true, - ValidateFunc: streamAnalyticsValidate.StreamingJobID, + ValidateFunc: streamingjobs.ValidateStreamingJobID, }, "start_mode": { diff --git a/internal/services/streamanalytics/stream_analytics_job_schedule_resource_test.go b/internal/services/streamanalytics/stream_analytics_job_schedule_resource_test.go index 500a1030583b..9658fde87dce 100644 --- a/internal/services/streamanalytics/stream_analytics_job_schedule_resource_test.go +++ 
b/internal/services/streamanalytics/stream_analytics_job_schedule_resource_test.go @@ -3,11 +3,11 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "testing" "time" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" @@ -82,7 +82,7 @@ func (r StreamAnalyticsJobScheduleResource) Exists(ctx context.Context, client * resp, err := client.StreamAnalytics.JobsClient.Get(ctx, streamingJobId, opts) if err != nil { if response.WasNotFound(resp.HttpResponse) { - return utils.Bool(false), err + return nil, err } return nil, fmt.Errorf("retrieving %s: %+v", *id, err) } diff --git a/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource.go b/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource.go index ad3cad3a2693..6334c1d4c9a6 100644 --- a/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource.go +++ b/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource.go @@ -3,12 +3,12 @@ package streamanalytics import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints" "strings" "time" + "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" diff --git a/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource_test.go b/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource_test.go index b86a9b1e1918..8849ded8b478 100644 --- a/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_managed_private_endpoint_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" diff --git a/internal/services/streamanalytics/stream_analytics_output.go b/internal/services/streamanalytics/stream_analytics_output.go index 3aa4dc0a613f..9656c632008d 100644 --- a/internal/services/streamanalytics/stream_analytics_output.go +++ b/internal/services/streamanalytics/stream_analytics_output.go @@ -3,14 +3,14 @@ package streamanalytics import ( "context" "fmt" - 
"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" + "reflect" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" ) -func importStreamAnalyticsOutput(expectType streamanalytics.TypeBasicOutputDataSource) pluginsdk.ImporterFunc { +func importStreamAnalyticsOutput(expectType outputs.OutputDataSource) pluginsdk.ImporterFunc { return func(ctx context.Context, d *pluginsdk.ResourceData, meta interface{}) (data []*pluginsdk.ResourceData, err error) { id, err := outputs.ParseOutputID(d.Id()) if err != nil { @@ -18,45 +18,43 @@ func importStreamAnalyticsOutput(expectType streamanalytics.TypeBasicOutputDataS } client := meta.(*clients.Client).StreamAnalytics.OutputsClient - resp, err := client.Get(ctx, id) + resp, err := client.Get(ctx, *id) if err != nil { return nil, fmt.Errorf("retrieving %s: %+v", *id, err) } - if props := resp.OutputProperties; props != nil { - var actualType streamanalytics.TypeBasicOutputDataSource + if props := resp.Model.Properties; props != nil { + var actualType outputs.OutputDataSource - if datasource, ok := props.Datasource.AsBlobOutputDataSource(); ok { - actualType = datasource.Type - } else if datasource, ok := props.Datasource.AsAzureTableOutputDataSource(); ok { - actualType = datasource.Type - } else if datasource, ok := props.Datasource.AsEventHubOutputDataSource(); ok { - actualType = datasource.Type - } else if datasource, ok := props.Datasource.AsEventHubV2OutputDataSource(); ok { - actualType = datasource.Type - } else if datasource, ok := props.Datasource.AsAzureSQLDatabaseOutputDataSource(); ok { - actualType = datasource.Type - } else if datasource, ok := props.Datasource.AsAzureSynapseOutputDataSource(); ok { - actualType = datasource.Type - } else if datasource, ok := props.Datasource.AsDocumentDbOutputDataSource(); ok { - actualType = datasource.Type - } else if datasource, ok := props.Datasource.AsAzureFunctionOutputDataSource(); ok { - actualType = datasource.Type - } else if datasource, ok := props.Datasource.AsServiceBusQueueOutputDataSource(); ok { - actualType = datasource.Type - } else if datasource, ok := props.Datasource.AsServiceBusTopicOutputDataSource(); ok { - actualType = datasource.Type - } else if datasource, ok := props.Datasource.AsPowerBIOutputDataSource(); ok { - actualType = datasource.Type - } else if datasource, ok := props.Datasource.AsAzureDataLakeStoreOutputDataSource(); ok { - actualType = datasource.Type - } else if datasource, ok := props.Datasource.AsOutputDataSource(); ok { - actualType = datasource.Type + if datasource, ok := props.Datasource.(outputs.BlobOutputDataSource); ok { + actualType = datasource + } else if datasource, ok := props.Datasource.(outputs.AzureTableOutputDataSource); ok { + actualType = datasource + } else if datasource, ok := props.Datasource.(outputs.EventHubOutputDataSource); ok { + actualType = datasource + } else if datasource, ok := props.Datasource.(outputs.EventHubV2OutputDataSource); ok { + actualType = datasource + } else if datasource, ok := props.Datasource.(outputs.AzureSqlDatabaseOutputDataSource); ok { + actualType = datasource + } else if datasource, ok := props.Datasource.(outputs.AzureSynapseOutputDataSource); ok { + actualType = datasource + } else if datasource, ok := 
props.Datasource.(outputs.DocumentDbOutputDataSource); ok { + actualType = datasource + } else if datasource, ok := props.Datasource.(outputs.AzureFunctionOutputDataSource); ok { + actualType = datasource + } else if datasource, ok := props.Datasource.(outputs.ServiceBusQueueOutputDataSource); ok { + actualType = datasource + } else if datasource, ok := props.Datasource.(outputs.ServiceBusTopicOutputDataSource); ok { + actualType = datasource + } else if datasource, ok := props.Datasource.(outputs.PowerBIOutputDataSource); ok { + actualType = datasource + } else if datasource, ok := props.Datasource.(outputs.AzureDataLakeStoreOutputDataSource); ok { + actualType = datasource } else { return nil, fmt.Errorf("unable to convert output data source: %+v", props.Datasource) } - if actualType != expectType { + if reflect.TypeOf(actualType) != reflect.TypeOf(expectType) { return nil, fmt.Errorf("stream analytics output has mismatched type, expected: %q, got %q", expectType, actualType) } } diff --git a/internal/services/streamanalytics/stream_analytics_output_blob_resource.go b/internal/services/streamanalytics/stream_analytics_output_blob_resource.go index ad47e912cc2e..7d183425aa88 100644 --- a/internal/services/streamanalytics/stream_analytics_output_blob_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_blob_resource.go @@ -2,12 +2,12 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "log" "time" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" @@ -178,7 +178,7 @@ func resourceStreamAnalyticsOutputBlobCreateUpdate(d *pluginsdk.ResourceData, me } // timeWindow and sizeWindow must be set for Parquet serialization - _, isParquet := serialization.AsParquetSerialization() + _, isParquet := serialization.(outputs.ParquetSerialization) if isParquet && (props.Properties.TimeWindow == nil || props.Properties.SizeWindow == nil) { return fmt.Errorf("cannot create %s: batch_min_rows and batch_time_window must be set for Parquet serialization", id) } @@ -225,42 +225,42 @@ func resourceStreamAnalyticsOutputBlobRead(d *pluginsdk.ResourceData, meta inter if model := resp.Model; model != nil { if props := model.Properties; props != nil { - output, ok := props.Datasource.(outputs.BlobOutputDataSourceProperties) + output, ok := props.Datasource.(outputs.BlobOutputDataSource) if !ok { return fmt.Errorf("converting to Blob Output") } dateFormat := "" - if v := output.DateFormat; v != nil { + if v := output.Properties.DateFormat; v != nil { dateFormat = *v } d.Set("date_format", dateFormat) pathPattern := "" - if v := output.PathPattern; v != nil { + if v := output.Properties.PathPattern; v != nil { pathPattern = *v } d.Set("path_pattern", pathPattern) containerName := "" - if v := output.Container; v != nil { + if v := output.Properties.Container; v != nil { containerName = *v } d.Set("storage_container_name", containerName) timeFormat := "" - if v := output.TimeFormat; v != nil { + if v := output.Properties.TimeFormat; v != nil { timeFormat = *v } d.Set("time_format", timeFormat) authenticationMode := "" - if v := 
output.AuthenticationMode; v != nil { + if v := output.Properties.AuthenticationMode; v != nil { authenticationMode = string(*v) } d.Set("authentication_mode", authenticationMode) - if accounts := output.StorageAccounts; accounts != nil && len(*accounts) > 0 { + if accounts := output.Properties.StorageAccounts; accounts != nil && len(*accounts) > 0 { account := (*accounts)[0] d.Set("storage_account_name", account.AccountName) } diff --git a/internal/services/streamanalytics/stream_analytics_output_blob_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_blob_resource_test.go index 8a9821d34b4f..764e778914b5 100644 --- a/internal/services/streamanalytics/stream_analytics_output_blob_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_blob_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" @@ -138,7 +138,7 @@ func TestAccStreamAnalyticsOutputBlob_authenticationMode(t *testing.T) { func (r StreamAnalyticsOutputBlobResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { id, err := outputs.ParseOutputID(state.ID) if err != nil { - return utils.Bool(false), err + return nil, err } resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, *id) diff --git a/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go b/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go index e4ff5cea3681..1b761d8fec68 100644 --- a/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go @@ -3,15 +3,15 @@ package streamanalytics import ( "context" "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "time" "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" cosmosParse "github.com/hashicorp/terraform-provider-azurerm/internal/services/cosmos/parse" cosmosValidate "github.com/hashicorp/terraform-provider-azurerm/internal/services/cosmos/validate" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/utils" @@ -44,7 +44,7 @@ func (r OutputCosmosDBResource) Arguments() map[string]*pluginsdk.Schema { Type: pluginsdk.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.StreamingJobID, + ValidateFunc: streamingjobs.ValidateStreamingJobID, }, "cosmosdb_account_key": { @@ -108,7 +108,7 @@ func (r OutputCosmosDBResource) Create() 
sdk.ResourceFunc { if err != nil { return err } - id := outputs.NewOutputID(subscriptionId, streamingJobId.ResourceGroup, streamingJobId.Name, model.Name) + id := outputs.NewOutputID(subscriptionId, streamingJobId.ResourceGroupName, streamingJobId.JobName, model.Name) existing, err := client.Get(ctx, id) if err != nil && !response.WasNotFound(existing.HttpResponse) { @@ -174,7 +174,7 @@ func (r OutputCosmosDBResource) Read() sdk.ResourceFunc { if model := resp.Model; model != nil { if props := model.Properties; props != nil { - output, ok := props.Datasource.(outputs.DocumentDbOutputDataSourceProperties) + output, ok := props.Datasource.(outputs.DocumentDbOutputDataSource) if !ok { return fmt.Errorf("converting to CosmosDb Output") } @@ -187,23 +187,23 @@ func (r OutputCosmosDBResource) Read() sdk.ResourceFunc { state.AccountKey = metadata.ResourceData.Get("cosmosdb_account_key").(string) - databaseId := cosmosParse.NewSqlDatabaseID(id.SubscriptionId, id.ResourceGroupName, *output.AccountId, *output.Database) + databaseId := cosmosParse.NewSqlDatabaseID(id.SubscriptionId, id.ResourceGroupName, *output.Properties.AccountId, *output.Properties.Database) state.Database = databaseId.ID() collectionName := "" - if v := output.CollectionNamePattern; v != nil { + if v := output.Properties.CollectionNamePattern; v != nil { collectionName = *v } state.ContainerName = collectionName document := "" - if v := output.DocumentId; v != nil { + if v := output.Properties.DocumentId; v != nil { document = *v } state.DocumentID = document partitionKey := "" - if v := output.PartitionKey; v != nil { + if v := output.Properties.PartitionKey; v != nil { partitionKey = *v } state.PartitionKey = partitionKey diff --git a/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource_test.go index 9cdc3fe18259..218f0a7dd679 100644 --- a/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" diff --git a/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go b/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go index faf9ca6420b6..dfc68fe37323 100644 --- a/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go @@ -2,12 +2,12 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "log" "time" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" 
"github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" @@ -198,37 +198,37 @@ func resourceStreamAnalyticsOutputEventHubRead(d *pluginsdk.ResourceData, meta i if model := resp.Model; model != nil { if props := model.Properties; props != nil { - output, ok := props.Datasource.(outputs.EventHubOutputDataSourceProperties) + output, ok := props.Datasource.(outputs.EventHubOutputDataSource) if !ok { return fmt.Errorf("converting to EventHub Output") } eventHubName := "" - if v := output.EventHubName; v != nil { + if v := output.Properties.EventHubName; v != nil { eventHubName = *v } d.Set("eventhub_name", eventHubName) serviceBusNamespace := "" - if v := output.ServiceBusNamespace; v != nil { + if v := output.Properties.ServiceBusNamespace; v != nil { serviceBusNamespace = *v } d.Set("servicebus_namespace", serviceBusNamespace) sharedAccessPolicyName := "" - if v := output.SharedAccessPolicyName; v != nil { + if v := output.Properties.SharedAccessPolicyName; v != nil { sharedAccessPolicyName = *v } d.Set("shared_access_policy_name", sharedAccessPolicyName) partitionKey := "" - if v := output.PartitionKey; v != nil { + if v := output.Properties.PartitionKey; v != nil { partitionKey = *v } d.Set("partition_key", partitionKey) authMode := "" - if v := output.AuthenticationMode; v != nil { + if v := output.Properties.AuthenticationMode; v != nil { authMode = string(*v) } d.Set("authentication_mode", authMode) diff --git a/internal/services/streamanalytics/stream_analytics_output_eventhub_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_eventhub_resource_test.go index 60471418bcdb..19fe2c601648 100644 --- a/internal/services/streamanalytics/stream_analytics_output_eventhub_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_eventhub_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" @@ -172,7 +172,7 @@ func TestAccStreamAnalyticsOutputEventHub_authenticationMode(t *testing.T) { func (r StreamAnalyticsOutputEventhubResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { id, err := outputs.ParseOutputID(state.ID) if err != nil { - return utils.Bool(false), err + return nil, err } resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, *id) diff --git a/internal/services/streamanalytics/stream_analytics_output_function_resource.go b/internal/services/streamanalytics/stream_analytics_output_function_resource.go index 6297cb80316e..9562570899f5 100644 --- a/internal/services/streamanalytics/stream_analytics_output_function_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_function_resource.go @@ -3,11 +3,11 @@ package streamanalytics import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "time" + 
"github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" @@ -167,12 +167,12 @@ func (r OutputFunctionResource) Read() sdk.ResourceFunc { if model := resp.Model; model != nil { if props := model.Properties; props != nil { - output, ok := props.Datasource.(outputs.AzureFunctionOutputDataSourceProperties) + output, ok := props.Datasource.(outputs.AzureFunctionOutputDataSource) if !ok { return fmt.Errorf("converting to Function Output") } - if output.FunctionAppName == nil || output.FunctionName == nil || output.MaxBatchCount == nil || output.MaxBatchSize == nil { + if output.Properties.FunctionAppName == nil || output.Properties.FunctionName == nil || output.Properties.MaxBatchCount == nil || output.Properties.MaxBatchSize == nil { return nil } @@ -184,25 +184,25 @@ func (r OutputFunctionResource) Read() sdk.ResourceFunc { } functionApp := "" - if v := output.FunctionAppName; v != nil { + if v := output.Properties.FunctionAppName; v != nil { functionApp = *v } state.FunctionApp = functionApp functionName := "" - if v := output.FunctionName; v != nil { + if v := output.Properties.FunctionName; v != nil { functionName = *v } state.FunctionName = functionName batchMaxInBytes := 0 - if v := output.MaxBatchSize; v != nil { + if v := output.Properties.MaxBatchSize; v != nil { batchMaxInBytes = int(*v) } state.BatchMaxInBytes = batchMaxInBytes batchMaxCount := 0 - if v := output.MaxBatchCount; v != nil { + if v := output.Properties.MaxBatchCount; v != nil { batchMaxCount = int(*v) } state.BatchMaxCount = batchMaxCount @@ -289,7 +289,7 @@ func (r OutputFunctionResource) CustomImporter() sdk.ResourceRunFunc { } props := resp.Model.Properties - if _, ok := props.Datasource.(outputs.AzureFunctionOutputDataSourceProperties); !ok { + if _, ok := props.Datasource.(outputs.AzureFunctionOutputDataSource); !ok { return fmt.Errorf("specified output is not of type") } return nil diff --git a/internal/services/streamanalytics/stream_analytics_output_function_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_function_resource_test.go index 9a3c9be472ec..77d89881c16d 100644 --- a/internal/services/streamanalytics/stream_analytics_output_function_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_function_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" diff --git a/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go b/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go index 5032ed2c8cf2..9ef6b4918df9 100644 --- a/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go +++ 
diff --git a/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go b/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go index 5032ed2c8cf2..9ef6b4918df9 100644 --- a/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go @@ -2,11 +2,11 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "log" "time" "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" @@ -195,49 +195,49 @@ func resourceStreamAnalyticsOutputSqlRead(d *pluginsdk.ResourceData, meta interf if model := resp.Model; model != nil { if props := model.Properties; props != nil { - output, ok := props.Datasource.(outputs.AzureSqlDatabaseDataSourceProperties) + output, ok := props.Datasource.(outputs.AzureSqlDatabaseOutputDataSource) if !ok { return fmt.Errorf("converting to SQL Output") } server := "" - if v := output.Server; v != nil { + if v := output.Properties.Server; v != nil { server = *v } d.Set("server", server) database := "" - if v := output.Database; v != nil { + if v := output.Properties.Database; v != nil { database = *v } d.Set("database", database) table := "" - if v := output.Table; v != nil { + if v := output.Properties.Table; v != nil { table = *v } d.Set("table", table) user := "" - if v := output.User; v != nil { + if v := output.Properties.User; v != nil { user = *v } d.Set("user", user) authMode := "" - if v := output.AuthenticationMode; v != nil { + if v := output.Properties.AuthenticationMode; v != nil { authMode = string(*v) } d.Set("authentication_mode", authMode) maxBatchCount := float64(10000) - if v := output.MaxBatchCount; v != nil { + if v := output.Properties.MaxBatchCount; v != nil { maxBatchCount = *v } d.Set("max_batch_count", maxBatchCount) maxWriterCount := float64(1) - if v := output.MaxWriterCount; v != nil { + if v := output.Properties.MaxWriterCount; v != nil { maxWriterCount = *v } d.Set("max_writer_count", maxWriterCount) diff --git a/internal/services/streamanalytics/stream_analytics_output_mssql_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_mssql_resource_test.go index 1d49a7358321..dbfe3c7ae956 100644 --- a/internal/services/streamanalytics/stream_analytics_output_mssql_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_mssql_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" @@ -121,6 +121,7 @@ func TestAccStreamAnalyticsOutputSql_maxBatchCountAndMaxWriterCount(t *testing.T func (r StreamAnalyticsOutputSqlResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { id, err := outputs.ParseOutputID(state.ID) if err != nil { + return nil, err } resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, *id)
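For reference, a short sketch (again illustrative, not provider code) of the default-then-overwrite guard used in the SQL output read above, assuming MaxBatchCount and MaxWriterCount are *float64 as the hunks imply:

package sketch

import "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs"

// sqlBatchSettings starts from the schema defaults (10000 and 1) and only
// overwrites them when the service returned a value, so an omitted field
// never zeroes the Terraform state.
func sqlBatchSettings(output outputs.AzureSqlDatabaseOutputDataSource) (maxBatchCount, maxWriterCount float64) {
	maxBatchCount = 10000
	if v := output.Properties.MaxBatchCount; v != nil {
		maxBatchCount = *v
	}
	maxWriterCount = 1
	if v := output.Properties.MaxWriterCount; v != nil {
		maxWriterCount = *v
	}
	return maxBatchCount, maxWriterCount
}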
diff --git a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go index 31c0ea352840..ba72546f39e0 100644 --- a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go @@ -3,13 +3,13 @@ package streamanalytics import ( "context" "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "time" "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/utils" @@ -43,7 +43,7 @@ func (r OutputPowerBIResource) Arguments() map[string]*pluginsdk.Schema { Type: pluginsdk.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.StreamingJobID, + ValidateFunc: streamingjobs.ValidateStreamingJobID, }, "dataset": { @@ -112,7 +112,7 @@ func (r OutputPowerBIResource) Create() sdk.ResourceFunc { if err != nil { return err } - id := outputs.NewOutputID(subscriptionId, streamingJobId.ResourceGroup, streamingJobId.Name, model.Name) + id := outputs.NewOutputID(subscriptionId, streamingJobId.ResourceGroupName, streamingJobId.JobName, model.Name) existing, err := client.Get(ctx, id) if err != nil && !response.WasNotFound(existing.HttpResponse) { @@ -254,7 +254,7 @@ func (r OutputPowerBIResource) Read() sdk.ResourceFunc { if model := resp.Model; model != nil { if props := model.Properties; props != nil { - output, ok := props.Datasource.(outputs.PowerBIOutputDataSourceProperties) + output, ok := props.Datasource.(outputs.PowerBIOutputDataSource) if !ok { return fmt.Errorf("converting to PowerBI Output") } @@ -267,25 +267,25 @@ func (r OutputPowerBIResource) Read() sdk.ResourceFunc { } dataset := "" - if v := output.Dataset; v != nil { + if v := output.Properties.Dataset; v != nil { dataset = *v } state.DataSet = dataset table := "" - if v := output.Table; v != nil { + if v := output.Properties.Table; v != nil { table = *v } state.Table = table groupId := "" - if v := output.GroupId; v != nil { + if v := output.Properties.GroupId; v != nil { groupId = *v } state.GroupID = groupId groupName := "" - if v := output.GroupName; v != nil { + if v := output.Properties.GroupName; v != nil { groupName = *v } state.GroupName = groupName @@ -341,7 +341,7 @@ func (r OutputPowerBIResource) CustomImporter() sdk.ResourceRunFunc { } props := resp.Model.Properties - if _, ok := props.Datasource.(outputs.PowerBIOutputDataSourceProperties); !ok { + if _, ok := props.Datasource.(outputs.PowerBIOutputDataSource); !ok { return fmt.Errorf("specified output is not of type") } return nil diff --git a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource_test.go index 801d9d7a4b3a..f5ad3ccc5b64 100644 --- a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource_test.go @@ -3,10
+3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" diff --git a/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go b/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go index 0a7814eb0c42..d47d6119fa43 100644 --- a/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go @@ -2,12 +2,12 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "log" "time" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" @@ -140,7 +140,7 @@ func resourceStreamAnalyticsOutputServiceBusQueueCreateUpdate(d *pluginsdk.Resou return fmt.Errorf("expanding `serialization`: %+v", err) } - systemPropertyColumns := d.Get("system_property_columns").(interface{}) + systemPropertyColumns := d.Get("system_property_columns") props := outputs.Output{ Name: utils.String(id.OutputName), Properties: &outputs.OutputProperties{ @@ -202,43 +202,43 @@ func resourceStreamAnalyticsOutputServiceBusQueueRead(d *pluginsdk.ResourceData, if model := resp.Model; model != nil { if props := model.Properties; props != nil { - output, ok := props.Datasource.(outputs.ServiceBusQueueOutputDataSourceProperties) + output, ok := props.Datasource.(outputs.ServiceBusQueueOutputDataSource) if !ok { return fmt.Errorf("converting to ServiceBus Queue Output") } queue := "" - if v := output.QueueName; v != nil { + if v := output.Properties.QueueName; v != nil { queue = *v } d.Set("queue_name", queue) namespace := "" - if v := output.ServiceBusNamespace; v != nil { + if v := output.Properties.ServiceBusNamespace; v != nil { namespace = *v } d.Set("servicebus_namespace", namespace) policyName := "" - if v := output.SharedAccessPolicyName; v != nil { + if v := output.Properties.SharedAccessPolicyName; v != nil { policyName = *v } d.Set("shared_access_policy_name", policyName) var columns []string - if v := output.PropertyColumns; v != nil { + if v := output.Properties.PropertyColumns; v != nil { columns = *v } d.Set("property_columns", columns) var systemColumns interface{} - if v := output.SystemPropertyColumns; v != nil { + if v := output.Properties.SystemPropertyColumns; v != nil { systemColumns = *v } d.Set("system_property_columns", systemColumns) authMode := "" - if v := output.AuthenticationMode; v != nil { + if v := output.Properties.AuthenticationMode; v != nil { authMode = string(*v) } d.Set("authentication_mode", authMode) diff --git 
a/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource_test.go index a6dca9e1bfd3..f2072e4de7db 100644 --- a/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" @@ -188,6 +188,7 @@ func TestAccStreamAnalyticsOutputServiceBusQueue_systemPropertyColumns(t *testin func (r StreamAnalyticsOutputServiceBusQueueResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { id, err := outputs.ParseOutputID(state.ID) if err != nil { + return nil, err } resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, *id) diff --git a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go index 1a9570a94c14..885b1ea1939c 100644 --- a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go @@ -2,12 +2,12 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "log" "time" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" @@ -148,7 +148,7 @@ func resourceStreamAnalyticsOutputServiceBusTopicCreateUpdate(d *pluginsdk.Resou SharedAccessPolicyKey: utils.String(d.Get("shared_access_policy_key").(string)), SharedAccessPolicyName: utils.String(d.Get("shared_access_policy_name").(string)), PropertyColumns: utils.ExpandStringSlice(d.Get("property_columns").([]interface{})), - SystemPropertyColumns: systemPropertyColumns, + SystemPropertyColumns: expandSystemPropertyColumns(systemPropertyColumns), //SystemPropertyColumns: utils.ExpandMapStringPtrString(d.Get("system_property_columns").(map[string]interface{})), AuthenticationMode: utils.ToPtr(outputs.AuthenticationMode(d.Get("authentication_mode").(string))), }, @@ -199,42 +199,42 @@ func resourceStreamAnalyticsOutputServiceBusTopicRead(d *pluginsdk.ResourceData, if model := resp.Model; model != nil { if props := model.Properties; props != nil { - output, ok := props.Datasource.(outputs.ServiceBusTopicOutputDataSourceProperties) + output, ok := props.Datasource.(outputs.ServiceBusTopicOutputDataSource) if !ok { return fmt.Errorf("converting to ServiceBus Topic Output") } topicName := "" - if v := output.TopicName; v 
!= nil { + if v := output.Properties.TopicName; v != nil { topicName = *v } d.Set("topic_name", topicName) namespace := "" - if v := output.ServiceBusNamespace; v != nil { + if v := output.Properties.ServiceBusNamespace; v != nil { namespace = *v } d.Set("servicebus_namespace", namespace) accessPolicy := "" - if v := output.SharedAccessPolicyName; v != nil { + if v := output.Properties.SharedAccessPolicyName; v != nil { accessPolicy = *v } - d.Set("shared_access_policy_name", v.accessPolicy) + d.Set("shared_access_policy_name", accessPolicy) - propertyColumns := "" - if v := output.PropertyColumns; v != nil { + var propertyColumns []string + if v := output.Properties.PropertyColumns; v != nil { propertyColumns = *v } d.Set("property_columns", propertyColumns) authMode := "" - if v := output.AuthenticationMode; v != nil { + if v := output.Properties.AuthenticationMode; v != nil { authMode = string(*v) } d.Set("authentication_mode", authMode) - if err = d.Set("system_property_columns", utils.FlattenMapStringPtrString(output.SystemPropertyColumns)); err != nil { + if err = d.Set("system_property_columns", output.Properties.SystemPropertyColumns); err != nil { return err } @@ -264,3 +264,11 @@ func resourceStreamAnalyticsOutputServiceBusTopicDelete(d *pluginsdk.ResourceDat return nil } + +func expandSystemPropertyColumns(input map[string]interface{}) *map[string]string { + output := make(map[string]string) + for k, v := range input { + output[k] = v.(string) + } + return &output +} diff --git a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource_test.go index da2ea51568f4..5f22d3781fc3 100644 --- a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" @@ -168,7 +168,7 @@ func TestAccStreamAnalyticsOutputServiceBusTopic_systemPropertyColumns(t *testin func (r StreamAnalyticsOutputServiceBusTopicResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { id, err := outputs.ParseOutputID(state.ID) if err != nil { - return utils.Bool(false), err + return nil, err } resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, *id) diff --git a/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go b/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go index 4fedaae1a9bf..57381830c17a 100644 --- a/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go @@ -2,11 +2,11 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "log" "time" "github.com/hashicorp/go-azure-helpers/lang/response" + 
"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" @@ -24,7 +24,7 @@ func resourceStreamAnalyticsOutputSynapse() *pluginsdk.Resource { Importer: pluginsdk.ImporterValidatingResourceIdThen(func(id string) error { _, err := outputs.ParseOutputID(id) return err - }, importStreamAnalyticsOutput(outputs.TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse)), + }, importStreamAnalyticsOutput(outputs.AzureSynapseOutputDataSource{})), Timeouts: &pluginsdk.ResourceTimeout{ Create: pluginsdk.DefaultTimeout(30 * time.Minute), @@ -170,31 +170,31 @@ func resourceStreamAnalyticsOutputSynapseRead(d *pluginsdk.ResourceData, meta in if model := resp.Model; model != nil { if props := model.Properties; props != nil { - output, ok := props.Datasource.(outputs.AzureSynapseDataSourceProperties) + output, ok := props.Datasource.(outputs.AzureSynapseOutputDataSource) if !ok { return fmt.Errorf("converting to Synapse Output") } server := "" - if v := output.Server; v != nil { + if v := output.Properties.Server; v != nil { server = *v } d.Set("server", server) database := "" - if v := output.Database; v != nil { + if v := output.Properties.Database; v != nil { database = *v } d.Set("database", database) table := "" - if v := output.Table; v != nil { + if v := output.Properties.Table; v != nil { table = *v } d.Set("table", table) user := "" - if v := output.User; v != nil { + if v := output.Properties.User; v != nil { user = *v } d.Set("user", user) diff --git a/internal/services/streamanalytics/stream_analytics_output_synapse_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_synapse_resource_test.go index 60098e70a364..274b334c3b59 100644 --- a/internal/services/streamanalytics/stream_analytics_output_synapse_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_synapse_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" diff --git a/internal/services/streamanalytics/stream_analytics_output_table_resource.go b/internal/services/streamanalytics/stream_analytics_output_table_resource.go index 61f44796d761..f761bee360e4 100644 --- a/internal/services/streamanalytics/stream_analytics_output_table_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_table_resource.go @@ -3,11 +3,11 @@ package streamanalytics import ( "context" "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "time" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" 
"github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" @@ -190,12 +190,12 @@ func (r OutputTableResource) Read() sdk.ResourceFunc { if model := resp.Model; model != nil { if props := model.Properties; props != nil { - output, ok := props.Datasource.(outputs.AzureTableOutputDataSourceProperties) + output, ok := props.Datasource.(outputs.AzureTableOutputDataSource) if !ok { return fmt.Errorf("converting to Table Output") } - if output.AccountName == nil || output.Table == nil || output.PartitionKey == nil || output.RowKey == nil || output.BatchSize == nil { + if output.Properties.AccountName == nil || output.Properties.Table == nil || output.Properties.PartitionKey == nil || output.Properties.RowKey == nil || output.Properties.BatchSize == nil { return nil } @@ -204,44 +204,40 @@ func (r OutputTableResource) Read() sdk.ResourceFunc { ResourceGroup: id.ResourceGroupName, StreamAnalyticsJob: id.JobName, StorageAccountKey: metadata.ResourceData.Get("storage_account_key").(string), - Table: *v.Table, - PartitionKey: *v.PartitionKey, - RowKey: *v.RowKey, - BatchSize: *v.BatchSize, } accountName := "" - if v := output.AccountName; v != nil { + if v := output.Properties.AccountName; v != nil { accountName = *v } state.StorageAccount = accountName table := "" - if v := output.Table; v != nil { + if v := output.Properties.Table; v != nil { table = *v } state.Table = table partitonKey := "" - if v := output.PartitionKey; v != nil { + if v := output.Properties.PartitionKey; v != nil { partitonKey = *v } state.PartitionKey = partitonKey rowKey := "" - if v := output.RowKey; v != nil { + if v := output.Properties.RowKey; v != nil { rowKey = *v } state.RowKey = rowKey var batchSize int64 - if v := output.BatchSize; v != nil { + if v := output.Properties.BatchSize; v != nil { batchSize = *v } state.BatchSize = batchSize var columnsToRemove []string - if columns := output.ColumnsToRemove; columns != nil && len(*columns) > 0 { + if columns := output.Properties.ColumnsToRemove; columns != nil && len(*columns) > 0 { columnsToRemove = *columns } state.ColumnsToRemove = columnsToRemove @@ -339,9 +335,8 @@ func (r OutputTableResource) CustomImporter() sdk.ResourceRunFunc { } props := resp.Model.Properties - if _, ok := props.Datasource.(outputs.AzureTableOutputDataSourceProperties); !ok { - // TODO should these types exist in pandora? 
- return fmt.Errorf("specified output is not of type %s", outputs.TypeBasicOutputDataSourceTypeMicrosoftStorageTable) + if _, ok := props.Datasource.(outputs.AzureTableOutputDataSource); !ok { + return fmt.Errorf("specified output is not of type") } return nil } diff --git a/internal/services/streamanalytics/stream_analytics_output_table_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_table_resource_test.go index fcbb6b2b3a05..0fa4eaeb26fb 100644 --- a/internal/services/streamanalytics/stream_analytics_output_table_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_output_table_resource_test.go @@ -3,11 +3,11 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "strings" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" diff --git a/internal/services/streamanalytics/stream_analytics_reference_input.go b/internal/services/streamanalytics/stream_analytics_reference_input.go index 52a4b6a1bc7c..a523c27285de 100644 --- a/internal/services/streamanalytics/stream_analytics_reference_input.go +++ b/internal/services/streamanalytics/stream_analytics_reference_input.go @@ -24,22 +24,22 @@ func importStreamAnalyticsReferenceInput(expectType string) pluginsdk.ImporterFu if model := resp.Model; model != nil { if props := model.Properties; props != nil { - input, ok := props.(inputs.Input) + input, ok := props.(inputs.InputProperties) if !ok { return nil, fmt.Errorf("failed to convert to Input") } - reference, ok := input.Properties.(inputs.ReferenceInputProperties) + reference, ok := input.(inputs.ReferenceInputProperties) if !ok { return nil, fmt.Errorf("failed to convert to Reference Input") } var actualType string - if _, ok := reference.Datasource.(inputs.BlobDataSourceProperties); ok { + if _, ok := reference.Datasource.(inputs.BlobReferenceInputDataSource); ok { actualType = "Microsoft.Storage/Blob" } if _, ok := reference.Datasource.(inputs.AzureSqlReferenceInputDataSource); ok { - actualType = "Microsoft.Storage/Blob" + actualType = "Microsoft.Sql/Server/Database" } if actualType != expectType { diff --git a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go index c718bab895af..c8d5847b9e53 100644 --- a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go +++ b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go @@ -2,13 +2,12 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "log" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" 
"github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" @@ -95,10 +94,10 @@ func resourceStreamAnalyticsReferenceInputBlob() *pluginsdk.Resource { "authentication_mode": { Type: pluginsdk.TypeString, Optional: true, - Default: string(streamanalytics.AuthenticationModeConnectionString), + Default: string(inputs.AuthenticationModeConnectionString), ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.AuthenticationModeConnectionString), - string(streamanalytics.AuthenticationModeMsi), + string(inputs.AuthenticationModeConnectionString), + string(inputs.AuthenticationModeMsi), }, false), }, }, @@ -129,15 +128,13 @@ func resourceStreamAnalyticsReferenceInputBlobCreate(d *pluginsdk.ResourceData, serializationRaw := d.Get("serialization").([]interface{}) serialization, err := expandStreamAnalyticsStreamInputSerialization(serializationRaw) if err != nil { - fmt.Errorf("expanding `serialization`: %+v", err) + return fmt.Errorf("expanding `serialization`: %+v", err) } props := inputs.Input{ Name: utils.String(id.InputName), Properties: &inputs.ReferenceInputProperties{ - //Type: streamanalytics.TypeBasicInputPropertiesTypeReference, Datasource: &inputs.BlobReferenceInputDataSource{ - //Type: streamanalytics.TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob, Properties: &inputs.BlobDataSourceProperties{ Container: utils.String(d.Get("storage_container_name").(string)), DateFormat: utils.String(d.Get("date_format").(string)), @@ -179,16 +176,14 @@ func resourceStreamAnalyticsReferenceInputBlobUpdate(d *pluginsdk.ResourceData, serializationRaw := d.Get("serialization").([]interface{}) serialization, err := expandStreamAnalyticsStreamInputSerialization(serializationRaw) if err != nil { - fmt.Errorf("expanding `serialization`: %+v", err) + return fmt.Errorf("expanding `serialization`: %+v", err) } // TODO d.HasChanges() props := inputs.Input{ Name: utils.String(id.InputName), Properties: &inputs.ReferenceInputProperties{ - //Type: streamanalytics.TypeBasicInputPropertiesTypeReference, Datasource: &inputs.BlobReferenceInputDataSource{ - //Type: streamanalytics.TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob, Properties: &inputs.BlobDataSourceProperties{ Container: utils.String(d.Get("storage_container_name").(string)), DateFormat: utils.String(d.Get("date_format").(string)), @@ -242,57 +237,57 @@ func resourceStreamAnalyticsReferenceInputBlobRead(d *pluginsdk.ResourceData, me if model := resp.Model; model != nil { if props := model.Properties; props != nil { - input, ok := props.(inputs.Input) + input, ok := props.(inputs.InputProperties) if !ok { return fmt.Errorf("blah") } - dataSource, ok := input.Properties.(inputs.ReferenceInputProperties) + dataSource, ok := input.(inputs.ReferenceInputProperties) if !ok { return fmt.Errorf("blah2") } - referenceInputBlob, ok := dataSource.Datasource.(inputs.BlobDataSourceProperties) + referenceInputBlob, ok := dataSource.Datasource.(inputs.BlobReferenceInputDataSource) if !ok { return fmt.Errorf("blah3") } dateFormat := "" - if v := referenceInputBlob.DateFormat; v != nil { + if v := referenceInputBlob.Properties.DateFormat; v != nil { dateFormat = *v } d.Set("date_format", dateFormat) pathPattern := "" - if v := referenceInputBlob.PathPattern; v != nil { + if v := referenceInputBlob.Properties.PathPattern; v != nil { pathPattern = *v } d.Set("path_pattern", pathPattern) containerName := "" - if v := referenceInputBlob.Container; v != nil { + if v := referenceInputBlob.Properties.Container; v != nil { 
containerName = *v } d.Set("storage_container_name", containerName) timeFormat := "" - if v := referenceInputBlob.TimeFormat; v != nil { + if v := referenceInputBlob.Properties.TimeFormat; v != nil { timeFormat = *v } d.Set("time_format", timeFormat) authMode := "" - if v := referenceInputBlob.AuthenticationMode; v != nil { + if v := referenceInputBlob.Properties.AuthenticationMode; v != nil { authMode = string(*v) } d.Set("authentication_mode", authMode) - if accounts := referenceInputBlob.StorageAccounts; accounts != nil && len(*accounts) > 0 { + if accounts := referenceInputBlob.Properties.StorageAccounts; accounts != nil && len(*accounts) > 0 { account := (*accounts)[0] d.Set("storage_account_name", account.AccountName) } - if err := d.Set("serialization", flattenStreamAnalyticsStreamInputSerialization(dataSource.Serialization)); err != nil { + if err := d.Set("serialization", flattenStreamAnalyticsStreamInputSerialization2(dataSource.Serialization)); err != nil { return fmt.Errorf("setting `serialization`: %+v", err) } diff --git a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource_test.go b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource_test.go index 5deb74a7e70f..f8a7dd7e6f6b 100644 --- a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" diff --git a/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go b/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go index ca405ea9285b..34ddd360a46e 100644 --- a/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go +++ b/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go @@ -2,15 +2,14 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "log" "time" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" @@ -25,7 +24,7 @@ func resourceStreamAnalyticsReferenceMsSql() *pluginsdk.Resource { Update: resourceStreamAnalyticsReferenceInputMsSqlCreateUpdate, Delete: resourceStreamAnalyticsReferenceInputMsSqlDelete, Importer: pluginsdk.ImporterValidatingResourceIdThen(func(id
string) error { - _, err := parse.StreamInputID(id) + _, err := inputs.ParseInputID(id) return err }, importStreamAnalyticsReferenceInput("Microsoft.Sql/Server/Database")), @@ -171,9 +170,7 @@ func resourceStreamAnalyticsReferenceInputMsSqlCreateUpdate(d *pluginsdk.Resourc props := inputs.Input{ Name: utils.String(id.InputName), Properties: &inputs.ReferenceInputProperties{ - //Type: streamanalytics.TypeBasicInputPropertiesTypeReference, Datasource: &inputs.AzureSqlReferenceInputDataSource{ - //Type: streamanalytics.TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase, Properties: properties, }, }, @@ -216,59 +213,65 @@ func resourceStreamAnalyticsReferenceInputMsSqlRead(d *pluginsdk.ResourceData, m if model := resp.Model; model != nil { if props := model.Properties; props != nil { - input, ok := props.(inputs.Input) + input, ok := props.(inputs.InputProperties) if !ok { return fmt.Errorf("failed to convert to Input") } - reference, ok := input.Properties.(inputs.ReferenceInputProperties) + reference, ok := input.(inputs.ReferenceInputProperties) if !ok { return fmt.Errorf("failed to convert to Reference Input") } - referenceInputAzureSql, ok := reference.Datasource.(inputs.AzureSqlReferenceInputDataSourceProperties) + referenceInputAzureSql, ok := reference.Datasource.(inputs.AzureSqlReferenceInputDataSource) if !ok { return fmt.Errorf("failed to convert to Azure Sql Reference Input") } server := "" - if v := referenceInputAzureSql.Server; v != nil { + if v := referenceInputAzureSql.Properties.Server; v != nil { server = *v } d.Set("server", server) database := "" - if v := referenceInputAzureSql.Database; v != nil { + if v := referenceInputAzureSql.Properties.Database; v != nil { database = *v } d.Set("database", database) username := "" - if v := referenceInputAzureSql.User; v != nil { + if v := referenceInputAzureSql.Properties.User; v != nil { username = *v } d.Set("username", username) refreshType := "" - if v := referenceInputAzureSql.RefreshType; v != nil { + if v := referenceInputAzureSql.Properties.RefreshType; v != nil { refreshType = string(*v) } d.Set("refresh_type", refreshType) + intervalDuration := "" + if v := referenceInputAzureSql.Properties.RefreshRate; v != nil { + intervalDuration = *v + } + d.Set("refresh_interval_duration", intervalDuration) + fullSnapshotQuery := "" - if v := referenceInputAzureSql.FullSnapshotQuery; v != nil { + if v := referenceInputAzureSql.Properties.FullSnapshotQuery; v != nil { fullSnapshotQuery = *v } d.Set("full_snapshot_query", fullSnapshotQuery) deltaSnapshotQuery := "" - if v := referenceInputAzureSql.DeltaSnapshotQuery; v != nil { + if v := referenceInputAzureSql.Properties.DeltaSnapshotQuery; v != nil { deltaSnapshotQuery = *v } d.Set("delta_snapshot_query", deltaSnapshotQuery) table := "" - if v := referenceInputAzureSql.Table; v != nil { + if v := referenceInputAzureSql.Properties.Table; v != nil { table = *v } d.Set("table", table) diff --git a/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource_test.go b/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource_test.go index 07c9375a70b7..eac3d4474994 100644 --- a/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - 
"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource.go index 9dff5f4011a2..55b4fa8e8281 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource.go @@ -2,12 +2,12 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "log" "time" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" @@ -237,7 +237,7 @@ func resourceStreamAnalyticsStreamInputBlobRead(d *pluginsdk.ResourceData, meta d.Set("storage_account_name", account.AccountName) } - if err := d.Set("serialization", flattenStreamAnalyticsStreamInputSerialization2(streamInput.Serialization)); err != nil { + if err := d.Set("serialization", flattenStreamAnalyticsStreamInputSerialization(streamInput.Serialization)); err != nil { return fmt.Errorf("setting `serialization`: %+v", err) } } diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource_test.go b/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource_test.go index ff2f88479dc3..724e8fd6f051 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" @@ -100,7 +100,7 @@ func TestAccStreamAnalyticsStreamInputBlob_requiresImport(t *testing.T) { func (r StreamAnalyticsStreamInputBlobResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { id, err := inputs.ParseInputID(state.ID) if err != nil { - return utils.Bool(false), err + return nil, err } resp, err := client.StreamAnalytics.InputsClient.Get(ctx, *id) diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go index 21a1a3413285..67799d59750f 100644 --- 
diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go index 21a1a3413285..67799d59750f 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go @@ -2,13 +2,12 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "log" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" @@ -92,10 +91,10 @@ func resourceStreamAnalyticsStreamInputEventHub() *pluginsdk.Resource { "authentication_mode": { Type: pluginsdk.TypeString, Optional: true, - Default: string(streamanalytics.AuthenticationModeConnectionString), + Default: string(inputs.AuthenticationModeConnectionString), ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.AuthenticationModeMsi), - string(streamanalytics.AuthenticationModeConnectionString), + string(inputs.AuthenticationModeMsi), + string(inputs.AuthenticationModeConnectionString), }, false), }, @@ -149,9 +148,7 @@ func resourceStreamAnalyticsStreamInputEventHubCreateUpdate(d *pluginsdk.Resourc props := inputs.Input{ Name: utils.String(id.InputName), Properties: &inputs.StreamInputProperties{ - //Type: streamanalytics.TypeBasicInputPropertiesTypeStream, Datasource: &inputs.EventHubStreamInputDataSource{ - //Type: streamanalytics.TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub, Properties: eventHubDataSourceProps, }, Serialization: serialization, @@ -233,7 +230,7 @@ func resourceStreamAnalyticsStreamInputEventHubRead(d *pluginsdk.ResourceData, m if v := streamEventHubInputProps.AuthenticationMode; v != nil { authMode = string(*v) } - d.Set("eventhub_name", authMode) + d.Set("authentication_mode", authMode) consumerGroupName := "" if v := streamEventHubInputProps.ConsumerGroupName; v != nil { @@ -243,7 +240,7 @@ func resourceStreamAnalyticsStreamInputEventHubRead(d *pluginsdk.ResourceData, m sharedAccessPolicyName := "" if v := streamEventHubInputProps.SharedAccessPolicyName; v != nil { - sharedAccessPolicyName = string(*v) + sharedAccessPolicyName = *v } d.Set("shared_access_policy_name", sharedAccessPolicyName) @@ -253,7 +250,7 @@ func resourceStreamAnalyticsStreamInputEventHubRead(d *pluginsdk.ResourceData, m } d.Set("partition_key", partitionKey) - if err := d.Set("serialization", flattenStreamAnalyticsStreamInputSerialization(streamInput.Serialization)); err != nil { + if err := d.Set("serialization", flattenStreamAnalyticsStreamInputSerialization2(streamInput.Serialization)); err != nil { return fmt.Errorf("setting `serialization`: %+v", err) } } diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource_test.go b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource_test.go index 80fe913aff26..3c3592f8688e 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" -
"github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go index 8dd996bab0d3..75c2767087a9 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go @@ -3,14 +3,13 @@ package streamanalytics import ( "context" "fmt" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "time" - "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/utils" @@ -53,7 +52,7 @@ func (r StreamInputEventHubV2Resource) Arguments() map[string]*pluginsdk.Schema Type: pluginsdk.TypeString, Required: true, ForceNew: true, - ValidateFunc: validate.StreamingJobID, + ValidateFunc: streamingjobs.ValidateStreamingJobID, }, "servicebus_namespace": { @@ -96,10 +95,10 @@ func (r StreamInputEventHubV2Resource) Arguments() map[string]*pluginsdk.Schema "authentication_mode": { Type: pluginsdk.TypeString, Optional: true, - Default: string(streamanalytics.AuthenticationModeConnectionString), + Default: string(inputs.AuthenticationModeConnectionString), ValidateFunc: validation.StringInSlice([]string{ - string(streamanalytics.AuthenticationModeMsi), - string(streamanalytics.AuthenticationModeConnectionString), + string(inputs.AuthenticationModeMsi), + string(inputs.AuthenticationModeConnectionString), }, false), }, @@ -135,7 +134,7 @@ func (r StreamInputEventHubV2Resource) Create() sdk.ResourceFunc { if err != nil { return err } - id := inputs.NewInputID(subscriptionId, streamingJobStruct.ResourceGroup, streamingJobStruct.Name, model.Name) + id := inputs.NewInputID(subscriptionId, streamingJobStruct.ResourceGroupName, streamingJobStruct.JobName, model.Name) existing, err := client.Get(ctx, id) if err != nil && !response.WasNotFound(existing.HttpResponse) { @@ -170,7 +169,6 @@ func (r StreamInputEventHubV2Resource) Create() sdk.ResourceFunc { Name: utils.String(model.Name), Properties: &inputs.StreamInputProperties{ Datasource: &inputs.EventHubV2StreamInputDataSource{ - //Type: streamanalytics.TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub, Properties: props, }, Serialization: serialization, @@ -224,7 +222,6 @@ func (r 
StreamInputEventHubV2Resource) Update() sdk.ResourceFunc { Name: utils.String(state.Name), Properties: &inputs.StreamInputProperties{ Datasource: &inputs.EventHubV2StreamInputDataSource{ - //Type: streamanalytics.TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub, Properties: props, }, Serialization: serialization, @@ -270,12 +267,12 @@ func (r StreamInputEventHubV2Resource) Read() sdk.ResourceFunc { if model := resp.Model; model != nil { if props := model.Properties; props != nil { - input, ok := props.(inputs.Input) + input, ok := props.(inputs.InputProperties) if !ok { return fmt.Errorf("converting to an Input") } - streamInput, ok := input.Properties.(inputs.StreamInputProperties) + streamInput, ok := input.(inputs.StreamInputProperties) if !ok { return fmt.Errorf("converting to a Stream Input") } diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource_test.go b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource_test.go index b922222b4813..54ec0b9f4b57 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource.go index 6c78894d7bdc..eff54bb78ac3 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource.go @@ -2,12 +2,12 @@ package streamanalytics import ( "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "log" "time" + "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" @@ -222,7 +222,7 @@ func resourceStreamAnalyticsStreamInputIoTHubRead(d *pluginsdk.ResourceData, met } d.Set("shared_access_policy_name", sharedAccessPolicyName) - if err := d.Set("serialization", flattenStreamAnalyticsStreamInputSerialization(streamInput.Serialization)); err != nil { + if err := d.Set("serialization", flattenStreamAnalyticsStreamInputSerialization2(streamInput.Serialization)); err != nil { return fmt.Errorf("setting `serialization`: %+v", err) } } diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource_test.go b/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource_test.go index
b61c198be387..aba97bf16b66 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource_test.go @@ -3,10 +3,10 @@ package streamanalytics_test import ( "context" "fmt" - "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "testing" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" @@ -100,7 +100,7 @@ func TestAccStreamAnalyticsStreamInputIoTHub_requiresImport(t *testing.T) { func (r StreamAnalyticsStreamInputIoTHubResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { id, err := inputs.ParseInputID(state.ID) if err != nil { - return utils.Bool(false), err + return nil, err } resp, err := client.StreamAnalytics.InputsClient.Get(ctx, *id) diff --git a/internal/services/streamanalytics/validate/cluster_id.go b/internal/services/streamanalytics/validate/cluster_id.go deleted file mode 100644 index 5ae0bc34eb43..000000000000 --- a/internal/services/streamanalytics/validate/cluster_id.go +++ /dev/null @@ -1,23 +0,0 @@ -package validate - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "fmt" - - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" -) - -func ClusterID(input interface{}, key string) (warnings []string, errors []error) { - v, ok := input.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected %q to be a string", key)) - return - } - - if _, err := parse.ClusterID(v); err != nil { - errors = append(errors, err) - } - - return -} diff --git a/internal/services/streamanalytics/validate/cluster_id_test.go b/internal/services/streamanalytics/validate/cluster_id_test.go deleted file mode 100644 index cfde63948720..000000000000 --- a/internal/services/streamanalytics/validate/cluster_id_test.go +++ /dev/null @@ -1,76 +0,0 @@ -package validate - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import "testing" - -func TestClusterID(t *testing.T) { - cases := []struct { - Input string - Valid bool - }{ - - { - // empty - Input: "", - Valid: false, - }, - - { - // missing SubscriptionId - Input: "/", - Valid: false, - }, - - { - // missing value for SubscriptionId - Input: "/subscriptions/", - Valid: false, - }, - - { - // missing ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", - Valid: false, - }, - - { - // missing value for ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", - Valid: false, - }, - - { - // missing Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/", - Valid: false, - }, - - { - // missing value for Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/", - Valid: false, - }, - - { - // valid - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/cluster1", 
- Valid: true, - }, - - { - // upper-cased - Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.STREAMANALYTICS/CLUSTERS/CLUSTER1", - Valid: false, - }, - } - for _, tc := range cases { - t.Logf("[DEBUG] Testing Value %s", tc.Input) - _, errors := ClusterID(tc.Input, "test") - valid := len(errors) == 0 - - if tc.Valid != valid { - t.Fatalf("Expected %t but got %t", tc.Valid, valid) - } - } -} diff --git a/internal/services/streamanalytics/validate/function_id.go b/internal/services/streamanalytics/validate/function_id.go deleted file mode 100644 index b174a36fe802..000000000000 --- a/internal/services/streamanalytics/validate/function_id.go +++ /dev/null @@ -1,21 +0,0 @@ -package validate - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "fmt" -) - -func FunctionID(input interface{}, key string) (warnings []string, errors []error) { - v, ok := input.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected %q to be a string", key)) - return - } - - if _, err := functions.ParseFunctionID(v); err != nil { - errors = append(errors, err) - } - - return -} diff --git a/internal/services/streamanalytics/validate/function_id_test.go b/internal/services/streamanalytics/validate/function_id_test.go deleted file mode 100644 index 40ac977dee90..000000000000 --- a/internal/services/streamanalytics/validate/function_id_test.go +++ /dev/null @@ -1,88 +0,0 @@ -package validate - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import "testing" - -func TestFunctionID(t *testing.T) { - cases := []struct { - Input string - Valid bool - }{ - - { - // empty - Input: "", - Valid: false, - }, - - { - // missing SubscriptionId - Input: "/", - Valid: false, - }, - - { - // missing value for SubscriptionId - Input: "/subscriptions/", - Valid: false, - }, - - { - // missing ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", - Valid: false, - }, - - { - // missing value for ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", - Valid: false, - }, - - { - // missing StreamingjobName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/", - Valid: false, - }, - - { - // missing value for StreamingjobName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/", - Valid: false, - }, - - { - // missing Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/", - Valid: false, - }, - - { - // missing value for Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/functions/", - Valid: false, - }, - - { - // valid - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/functions/function1", - Valid: true, - }, - - { - // upper-cased - Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.STREAMANALYTICS/STREAMINGJOBS/STREAMINGJOB1/FUNCTIONS/FUNCTION1", - Valid: false, - }, - } - for _, tc := range cases { - t.Logf("[DEBUG] Testing Value %s", tc.Input) - _, errors := FunctionID(tc.Input, "test") - valid := 
len(errors) == 0 - - if tc.Valid != valid { - t.Fatalf("Expected %t but got %t", tc.Valid, valid) - } - } -} diff --git a/internal/services/streamanalytics/validate/output_id.go b/internal/services/streamanalytics/validate/output_id.go deleted file mode 100644 index c0ddb0cea900..000000000000 --- a/internal/services/streamanalytics/validate/output_id.go +++ /dev/null @@ -1,21 +0,0 @@ -package validate - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "fmt" -) - -func OutputID(input interface{}, key string) (warnings []string, errors []error) { - v, ok := input.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected %q to be a string", key)) - return - } - - if _, err := outputs.ParseOutputID(v); err != nil { - errors = append(errors, err) - } - - return -} diff --git a/internal/services/streamanalytics/validate/output_id_test.go b/internal/services/streamanalytics/validate/output_id_test.go deleted file mode 100644 index 0c7edfeb5b73..000000000000 --- a/internal/services/streamanalytics/validate/output_id_test.go +++ /dev/null @@ -1,88 +0,0 @@ -package validate - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import "testing" - -func TestOutputID(t *testing.T) { - cases := []struct { - Input string - Valid bool - }{ - - { - // empty - Input: "", - Valid: false, - }, - - { - // missing SubscriptionId - Input: "/", - Valid: false, - }, - - { - // missing value for SubscriptionId - Input: "/subscriptions/", - Valid: false, - }, - - { - // missing ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", - Valid: false, - }, - - { - // missing value for ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", - Valid: false, - }, - - { - // missing StreamingjobName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/", - Valid: false, - }, - - { - // missing value for StreamingjobName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/", - Valid: false, - }, - - { - // missing Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/", - Valid: false, - }, - - { - // missing value for Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/outputs/", - Valid: false, - }, - - { - // valid - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/outputs/output1", - Valid: true, - }, - - { - // upper-cased - Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.STREAMANALYTICS/STREAMINGJOBS/STREAMINGJOB1/OUTPUTS/OUTPUT1", - Valid: false, - }, - } - for _, tc := range cases { - t.Logf("[DEBUG] Testing Value %s", tc.Input) - _, errors := OutputID(tc.Input, "test") - valid := len(errors) == 0 - - if tc.Valid != valid { - t.Fatalf("Expected %t but got %t", tc.Valid, valid) - } - } -} diff --git a/internal/services/streamanalytics/validate/private_endpoint_id.go b/internal/services/streamanalytics/validate/private_endpoint_id.go deleted file mode 100644 index 765000d76e43..000000000000 --- 
a/internal/services/streamanalytics/validate/private_endpoint_id.go +++ /dev/null @@ -1,23 +0,0 @@ -package validate - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "fmt" - - "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" -) - -func PrivateEndpointID(input interface{}, key string) (warnings []string, errors []error) { - v, ok := input.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected %q to be a string", key)) - return - } - - if _, err := parse.PrivateEndpointID(v); err != nil { - errors = append(errors, err) - } - - return -} diff --git a/internal/services/streamanalytics/validate/private_endpoint_id_test.go b/internal/services/streamanalytics/validate/private_endpoint_id_test.go deleted file mode 100644 index c57a59d68454..000000000000 --- a/internal/services/streamanalytics/validate/private_endpoint_id_test.go +++ /dev/null @@ -1,88 +0,0 @@ -package validate - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import "testing" - -func TestPrivateEndpointID(t *testing.T) { - cases := []struct { - Input string - Valid bool - }{ - - { - // empty - Input: "", - Valid: false, - }, - - { - // missing SubscriptionId - Input: "/", - Valid: false, - }, - - { - // missing value for SubscriptionId - Input: "/subscriptions/", - Valid: false, - }, - - { - // missing ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", - Valid: false, - }, - - { - // missing value for ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", - Valid: false, - }, - - { - // missing ClusterName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/", - Valid: false, - }, - - { - // missing value for ClusterName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/", - Valid: false, - }, - - { - // missing Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/cluster1/", - Valid: false, - }, - - { - // missing value for Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/cluster1/privateEndpoints/", - Valid: false, - }, - - { - // valid - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/clusters/cluster1/privateEndpoints/endpoint1", - Valid: true, - }, - - { - // upper-cased - Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.STREAMANALYTICS/CLUSTERS/CLUSTER1/PRIVATEENDPOINTS/ENDPOINT1", - Valid: false, - }, - } - for _, tc := range cases { - t.Logf("[DEBUG] Testing Value %s", tc.Input) - _, errors := PrivateEndpointID(tc.Input, "test") - valid := len(errors) == 0 - - if tc.Valid != valid { - t.Fatalf("Expected %t but got %t", tc.Valid, valid) - } - } -} diff --git a/internal/services/streamanalytics/validate/stream_input_id.go b/internal/services/streamanalytics/validate/stream_input_id.go deleted file mode 100644 index 09434b279217..000000000000 --- a/internal/services/streamanalytics/validate/stream_input_id.go +++ /dev/null @@ -1,23 +0,0 @@ -package validate - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "fmt" - 
- "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse" -) - -func StreamInputID(input interface{}, key string) (warnings []string, errors []error) { - v, ok := input.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected %q to be a string", key)) - return - } - - if _, err := parse.StreamInputID(v); err != nil { - errors = append(errors, err) - } - - return -} diff --git a/internal/services/streamanalytics/validate/stream_input_id_test.go b/internal/services/streamanalytics/validate/stream_input_id_test.go deleted file mode 100644 index b98b6bacd98c..000000000000 --- a/internal/services/streamanalytics/validate/stream_input_id_test.go +++ /dev/null @@ -1,88 +0,0 @@ -package validate - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import "testing" - -func TestStreamInputID(t *testing.T) { - cases := []struct { - Input string - Valid bool - }{ - - { - // empty - Input: "", - Valid: false, - }, - - { - // missing SubscriptionId - Input: "/", - Valid: false, - }, - - { - // missing value for SubscriptionId - Input: "/subscriptions/", - Valid: false, - }, - - { - // missing ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", - Valid: false, - }, - - { - // missing value for ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", - Valid: false, - }, - - { - // missing StreamingjobName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/", - Valid: false, - }, - - { - // missing value for StreamingjobName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/", - Valid: false, - }, - - { - // missing InputName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/", - Valid: false, - }, - - { - // missing value for InputName - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/inputs/", - Valid: false, - }, - - { - // valid - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1/inputs/streamInput1", - Valid: true, - }, - - { - // upper-cased - Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.STREAMANALYTICS/STREAMINGJOBS/STREAMINGJOB1/INPUTS/STREAMINPUT1", - Valid: false, - }, - } - for _, tc := range cases { - t.Logf("[DEBUG] Testing Value %s", tc.Input) - _, errors := StreamInputID(tc.Input, "test") - valid := len(errors) == 0 - - if tc.Valid != valid { - t.Fatalf("Expected %t but got %t", tc.Valid, valid) - } - } -} diff --git a/internal/services/streamanalytics/validate/streaming_job_id.go b/internal/services/streamanalytics/validate/streaming_job_id.go deleted file mode 100644 index e78e8fc45bf4..000000000000 --- a/internal/services/streamanalytics/validate/streaming_job_id.go +++ /dev/null @@ -1,21 +0,0 @@ -package validate - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import ( - "fmt" -) - -func StreamingJobID(input interface{}, key string) (warnings []string, errors []error) { - v, ok := input.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected %q to be a string", key)) - return 
- } - - if _, err := streamingjobs.ParseStreamingJobID(v); err != nil { - errors = append(errors, err) - } - - return -} diff --git a/internal/services/streamanalytics/validate/streaming_job_id_test.go b/internal/services/streamanalytics/validate/streaming_job_id_test.go deleted file mode 100644 index 791179560f7f..000000000000 --- a/internal/services/streamanalytics/validate/streaming_job_id_test.go +++ /dev/null @@ -1,76 +0,0 @@ -package validate - -// NOTE: this file is generated via 'go:generate' - manual changes will be overwritten - -import "testing" - -func TestStreamingJobID(t *testing.T) { - cases := []struct { - Input string - Valid bool - }{ - - { - // empty - Input: "", - Valid: false, - }, - - { - // missing SubscriptionId - Input: "/", - Valid: false, - }, - - { - // missing value for SubscriptionId - Input: "/subscriptions/", - Valid: false, - }, - - { - // missing ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", - Valid: false, - }, - - { - // missing value for ResourceGroup - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", - Valid: false, - }, - - { - // missing Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/", - Valid: false, - }, - - { - // missing value for Name - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/", - Valid: false, - }, - - { - // valid - Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.StreamAnalytics/streamingjobs/streamingJob1", - Valid: true, - }, - - { - // upper-cased - Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.STREAMANALYTICS/STREAMINGJOBS/STREAMINGJOB1", - Valid: false, - }, - } - for _, tc := range cases { - t.Logf("[DEBUG] Testing Value %s", tc.Input) - _, errors := StreamingJobID(tc.Input, "test") - valid := len(errors) == 0 - - if tc.Valid != valid { - t.Fatalf("Expected %t but got %t", tc.Valid, valid) - } - } -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/README.md b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/README.md new file mode 100644 index 000000000000..30893f526bf0 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/README.md @@ -0,0 +1,133 @@ + +## `github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters` Documentation + +The `clusters` SDK allows for interaction with the Azure Resource Manager Service `streamanalytics` (API Version `2020-03-01`). + +This readme covers example usages, but further information on [using this SDK can be found in the project root](https://github.com/hashicorp/go-azure-sdk/tree/main/docs). + +### Import Path + +```go +import "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters" +``` + + +### Client Initialization + +```go +client := clusters.NewClustersClientWithBaseURI("https://management.azure.com") +client.Client.Authorizer = authorizer +``` + + +### Example Usage: `ClustersClient.CreateOrUpdate` + +```go +ctx := context.TODO() +id := clusters.NewClusterID("12345678-1234-9876-4563-123456789012", "example-resource-group", "clusterValue") + +payload := clusters.Cluster{ + // ... 
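+	// (the assignable fields, e.g. Location, Sku and Tags, are the optional
+	// pointer fields defined on the Cluster model in model_cluster.go)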
+} + + +if err := client.CreateOrUpdateThenPoll(ctx, id, payload, clusters.DefaultCreateOrUpdateOperationOptions()); err != nil { + // handle the error +} +``` + + +### Example Usage: `ClustersClient.Delete` + +```go +ctx := context.TODO() +id := clusters.NewClusterID("12345678-1234-9876-4563-123456789012", "example-resource-group", "clusterValue") + +if err := client.DeleteThenPoll(ctx, id); err != nil { + // handle the error +} +``` + + +### Example Usage: `ClustersClient.Get` + +```go +ctx := context.TODO() +id := clusters.NewClusterID("12345678-1234-9876-4563-123456789012", "example-resource-group", "clusterValue") + +read, err := client.Get(ctx, id) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` + + +### Example Usage: `ClustersClient.ListByResourceGroup` + +```go +ctx := context.TODO() +id := clusters.NewResourceGroupID("12345678-1234-9876-4563-123456789012", "example-resource-group") + +// alternatively `client.ListByResourceGroup(ctx, id)` can be used to do batched pagination +items, err := client.ListByResourceGroupComplete(ctx, id) +if err != nil { + // handle the error +} +for _, item := range items { + // do something +} +``` + + +### Example Usage: `ClustersClient.ListBySubscription` + +```go +ctx := context.TODO() +id := clusters.NewSubscriptionID("12345678-1234-9876-4563-123456789012") + +// alternatively `client.ListBySubscription(ctx, id)` can be used to do batched pagination +items, err := client.ListBySubscriptionComplete(ctx, id) +if err != nil { + // handle the error +} +for _, item := range items { + // do something +} +``` + + +### Example Usage: `ClustersClient.ListStreamingJobs` + +```go +ctx := context.TODO() +id := clusters.NewClusterID("12345678-1234-9876-4563-123456789012", "example-resource-group", "clusterValue") + +// alternatively `client.ListStreamingJobs(ctx, id)` can be used to do batched pagination +items, err := client.ListStreamingJobsComplete(ctx, id) +if err != nil { + // handle the error +} +for _, item := range items { + // do something +} +``` + + +### Example Usage: `ClustersClient.Update` + +```go +ctx := context.TODO() +id := clusters.NewClusterID("12345678-1234-9876-4563-123456789012", "example-resource-group", "clusterValue") + +payload := clusters.Cluster{ + // ... +} + + +if err := client.UpdateThenPoll(ctx, id, payload, clusters.DefaultUpdateOperationOptions()); err != nil { + // handle the error +} +``` diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/client.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/client.go new file mode 100644 index 000000000000..1e6fabf8cbaa --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/client.go @@ -0,0 +1,18 @@ +package clusters + +import "github.com/Azure/go-autorest/autorest" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
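+
+// ClustersClient wraps an autorest.Client; baseUri is the Resource Manager
+// endpoint passed to NewClustersClientWithBaseURI.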
+ +type ClustersClient struct { + Client autorest.Client + baseUri string +} + +func NewClustersClientWithBaseURI(endpoint string) ClustersClient { + return ClustersClient{ + Client: autorest.NewClientWithUserAgent(userAgent()), + baseUri: endpoint, + } +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/constants.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/constants.go new file mode 100644 index 000000000000..b2f24dc720b1 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/constants.go @@ -0,0 +1,117 @@ +package clusters + +import "strings" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ClusterProvisioningState string + +const ( + ClusterProvisioningStateCanceled ClusterProvisioningState = "Canceled" + ClusterProvisioningStateFailed ClusterProvisioningState = "Failed" + ClusterProvisioningStateInProgress ClusterProvisioningState = "InProgress" + ClusterProvisioningStateSucceeded ClusterProvisioningState = "Succeeded" +) + +func PossibleValuesForClusterProvisioningState() []string { + return []string{ + string(ClusterProvisioningStateCanceled), + string(ClusterProvisioningStateFailed), + string(ClusterProvisioningStateInProgress), + string(ClusterProvisioningStateSucceeded), + } +} + +func parseClusterProvisioningState(input string) (*ClusterProvisioningState, error) { + vals := map[string]ClusterProvisioningState{ + "canceled": ClusterProvisioningStateCanceled, + "failed": ClusterProvisioningStateFailed, + "inprogress": ClusterProvisioningStateInProgress, + "succeeded": ClusterProvisioningStateSucceeded, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := ClusterProvisioningState(input) + return &out, nil +} + +type ClusterSkuName string + +const ( + ClusterSkuNameDefault ClusterSkuName = "Default" +) + +func PossibleValuesForClusterSkuName() []string { + return []string{ + string(ClusterSkuNameDefault), + } +} + +func parseClusterSkuName(input string) (*ClusterSkuName, error) { + vals := map[string]ClusterSkuName{ + "default": ClusterSkuNameDefault, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := ClusterSkuName(input) + return &out, nil +} + +type JobState string + +const ( + JobStateCreated JobState = "Created" + JobStateDegraded JobState = "Degraded" + JobStateDeleting JobState = "Deleting" + JobStateFailed JobState = "Failed" + JobStateRestarting JobState = "Restarting" + JobStateRunning JobState = "Running" + JobStateScaling JobState = "Scaling" + JobStateStarting JobState = "Starting" + JobStateStopped JobState = "Stopped" + JobStateStopping JobState = "Stopping" +) + +func PossibleValuesForJobState() []string { + return []string{ + string(JobStateCreated), + string(JobStateDegraded), + string(JobStateDeleting), + string(JobStateFailed), + string(JobStateRestarting), + string(JobStateRunning), + string(JobStateScaling), + string(JobStateStarting), + string(JobStateStopped), + string(JobStateStopping), + } +} + +func parseJobState(input string) (*JobState, error) { + vals := map[string]JobState{ + "created": JobStateCreated, + "degraded": JobStateDegraded, + "deleting": JobStateDeleting, + 
"failed": JobStateFailed, + "restarting": JobStateRestarting, + "running": JobStateRunning, + "scaling": JobStateScaling, + "starting": JobStateStarting, + "stopped": JobStateStopped, + "stopping": JobStateStopping, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := JobState(input) + return &out, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/id_cluster.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/id_cluster.go new file mode 100644 index 000000000000..8609331d7324 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/id_cluster.go @@ -0,0 +1,124 @@ +package clusters + +import ( + "fmt" + "strings" + + "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" +) + +var _ resourceids.ResourceId = ClusterId{} + +// ClusterId is a struct representing the Resource ID for a Cluster +type ClusterId struct { + SubscriptionId string + ResourceGroupName string + ClusterName string +} + +// NewClusterID returns a new ClusterId struct +func NewClusterID(subscriptionId string, resourceGroupName string, clusterName string) ClusterId { + return ClusterId{ + SubscriptionId: subscriptionId, + ResourceGroupName: resourceGroupName, + ClusterName: clusterName, + } +} + +// ParseClusterID parses 'input' into a ClusterId +func ParseClusterID(input string) (*ClusterId, error) { + parser := resourceids.NewParserFromResourceIdType(ClusterId{}) + parsed, err := parser.Parse(input, false) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := ClusterId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.ClusterName, ok = parsed.Parsed["clusterName"]; !ok { + return nil, fmt.Errorf("the segment 'clusterName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ParseClusterIDInsensitively parses 'input' case-insensitively into a ClusterId +// note: this method should only be used for API response data and not user input +func ParseClusterIDInsensitively(input string) (*ClusterId, error) { + parser := resourceids.NewParserFromResourceIdType(ClusterId{}) + parsed, err := parser.Parse(input, true) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := ClusterId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.ClusterName, ok = parsed.Parsed["clusterName"]; !ok { + return nil, fmt.Errorf("the segment 'clusterName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ValidateClusterID checks that 'input' can be parsed as a Cluster ID +func ValidateClusterID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = 
append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := ParseClusterID(v); err != nil { + errors = append(errors, err) + } + + return +} + +// ID returns the formatted Cluster ID +func (id ClusterId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/clusters/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.ClusterName) +} + +// Segments returns a slice of Resource ID Segments which comprise this Cluster ID +func (id ClusterId) Segments() []resourceids.Segment { + return []resourceids.Segment{ + resourceids.StaticSegment("staticSubscriptions", "subscriptions", "subscriptions"), + resourceids.SubscriptionIdSegment("subscriptionId", "12345678-1234-9876-4563-123456789012"), + resourceids.StaticSegment("staticResourceGroups", "resourceGroups", "resourceGroups"), + resourceids.ResourceGroupSegment("resourceGroupName", "example-resource-group"), + resourceids.StaticSegment("staticProviders", "providers", "providers"), + resourceids.ResourceProviderSegment("staticMicrosoftStreamAnalytics", "Microsoft.StreamAnalytics", "Microsoft.StreamAnalytics"), + resourceids.StaticSegment("staticClusters", "clusters", "clusters"), + resourceids.UserSpecifiedSegment("clusterName", "clusterValue"), + } +} + +// String returns a human-readable description of this Cluster ID +func (id ClusterId) String() string { + components := []string{ + fmt.Sprintf("Subscription: %q", id.SubscriptionId), + fmt.Sprintf("Resource Group Name: %q", id.ResourceGroupName), + fmt.Sprintf("Cluster Name: %q", id.ClusterName), + } + return fmt.Sprintf("Cluster (%s)", strings.Join(components, "\n")) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_createorupdate_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_createorupdate_autorest.go new file mode 100644 index 000000000000..5b138fdffab7 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_createorupdate_autorest.go @@ -0,0 +1,113 @@ +package clusters + +import ( + "context" + "fmt" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/hashicorp/go-azure-helpers/polling" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type CreateOrUpdateOperationResponse struct { + Poller polling.LongRunningPoller + HttpResponse *http.Response +} + +type CreateOrUpdateOperationOptions struct { + IfMatch *string + IfNoneMatch *string +} + +func DefaultCreateOrUpdateOperationOptions() CreateOrUpdateOperationOptions { + return CreateOrUpdateOperationOptions{} +} + +func (o CreateOrUpdateOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + if o.IfMatch != nil { + out["If-Match"] = *o.IfMatch + } + + if o.IfNoneMatch != nil { + out["If-None-Match"] = *o.IfNoneMatch + } + + return out +} + +func (o CreateOrUpdateOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +// CreateOrUpdate ... 
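+// It sends the PUT and returns a poller without waiting; callers typically
+// use CreateOrUpdateThenPoll below to block until the operation completes.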
+func (c ClustersClient) CreateOrUpdate(ctx context.Context, id ClusterId, input Cluster, options CreateOrUpdateOperationOptions) (result CreateOrUpdateOperationResponse, err error) { + req, err := c.preparerForCreateOrUpdate(ctx, id, input, options) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "CreateOrUpdate", nil, "Failure preparing request") + return + } + + result, err = c.senderForCreateOrUpdate(ctx, req) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "CreateOrUpdate", result.HttpResponse, "Failure sending request") + return + } + + return +} + +// CreateOrUpdateThenPoll performs CreateOrUpdate then polls until it's completed +func (c ClustersClient) CreateOrUpdateThenPoll(ctx context.Context, id ClusterId, input Cluster, options CreateOrUpdateOperationOptions) error { + result, err := c.CreateOrUpdate(ctx, id, input, options) + if err != nil { + return fmt.Errorf("performing CreateOrUpdate: %+v", err) + } + + if err := result.Poller.PollUntilDone(); err != nil { + return fmt.Errorf("polling after CreateOrUpdate: %+v", err) + } + + return nil +} + +// preparerForCreateOrUpdate prepares the CreateOrUpdate request. +func (c ClustersClient) preparerForCreateOrUpdate(ctx context.Context, id ClusterId, input Cluster, options CreateOrUpdateOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(id.ID()), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// senderForCreateOrUpdate sends the CreateOrUpdate request. The method will close the +// http.Response Body if it receives an error. +func (c ClustersClient) senderForCreateOrUpdate(ctx context.Context, req *http.Request) (future CreateOrUpdateOperationResponse, err error) { + var resp *http.Response + resp, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + return + } + + future.Poller, err = polling.NewPollerFromResponse(ctx, resp, c.Client, req.Method) + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_delete_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_delete_autorest.go new file mode 100644 index 000000000000..285ecf817e5b --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_delete_autorest.go @@ -0,0 +1,78 @@ +package clusters + +import ( + "context" + "fmt" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/hashicorp/go-azure-helpers/polling" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type DeleteOperationResponse struct { + Poller polling.LongRunningPoller + HttpResponse *http.Response +} + +// Delete ... 
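+// It sends the DELETE and returns a poller without waiting; callers typically
+// use DeleteThenPoll below to block until the operation completes.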
+func (c ClustersClient) Delete(ctx context.Context, id ClusterId) (result DeleteOperationResponse, err error) { + req, err := c.preparerForDelete(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "Delete", nil, "Failure preparing request") + return + } + + result, err = c.senderForDelete(ctx, req) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "Delete", result.HttpResponse, "Failure sending request") + return + } + + return +} + +// DeleteThenPoll performs Delete then polls until it's completed +func (c ClustersClient) DeleteThenPoll(ctx context.Context, id ClusterId) error { + result, err := c.Delete(ctx, id) + if err != nil { + return fmt.Errorf("performing Delete: %+v", err) + } + + if err := result.Poller.PollUntilDone(); err != nil { + return fmt.Errorf("polling after Delete: %+v", err) + } + + return nil +} + +// preparerForDelete prepares the Delete request. +func (c ClustersClient) preparerForDelete(ctx context.Context, id ClusterId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsDelete(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(id.ID()), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// senderForDelete sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (c ClustersClient) senderForDelete(ctx context.Context, req *http.Request) (future DeleteOperationResponse, err error) { + var resp *http.Response + resp, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + return + } + + future.Poller, err = polling.NewPollerFromResponse(ctx, resp, c.Client, req.Method) + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_get_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_get_autorest.go new file mode 100644 index 000000000000..2957d3dd8de8 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_get_autorest.go @@ -0,0 +1,68 @@ +package clusters + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type GetOperationResponse struct { + HttpResponse *http.Response + Model *Cluster +} + +// Get ... 
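+// Get is synchronous; on a 200 response the Model field of the result holds
+// the unmarshalled Cluster.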
+func (c ClustersClient) Get(ctx context.Context, id ClusterId) (result GetOperationResponse, err error) { + req, err := c.preparerForGet(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "Get", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "Get", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForGet(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "Get", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForGet prepares the Get request. +func (c ClustersClient) preparerForGet(ctx context.Context, id ClusterId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(id.ID()), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForGet handles the response to the Get request. The method always +// closes the http.Response Body. +func (c ClustersClient) responderForGet(resp *http.Response) (result GetOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_listbyresourcegroup_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_listbyresourcegroup_autorest.go new file mode 100644 index 000000000000..e6a046991d7a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_listbyresourcegroup_autorest.go @@ -0,0 +1,187 @@ +package clusters + +import ( + "context" + "fmt" + "net/http" + "net/url" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/hashicorp/go-azure-helpers/resourcemanager/commonids" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ListByResourceGroupOperationResponse struct { + HttpResponse *http.Response + Model *[]Cluster + + nextLink *string + nextPageFunc func(ctx context.Context, nextLink string) (ListByResourceGroupOperationResponse, error) +} + +type ListByResourceGroupCompleteResult struct { + Items []Cluster +} + +func (r ListByResourceGroupOperationResponse) HasMore() bool { + return r.nextLink != nil +} + +func (r ListByResourceGroupOperationResponse) LoadMore(ctx context.Context) (resp ListByResourceGroupOperationResponse, err error) { + if !r.HasMore() { + err = fmt.Errorf("no more pages returned") + return + } + return r.nextPageFunc(ctx, *r.nextLink) +} + +// ListByResourceGroup ... 
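+// ListByResourceGroup fetches a single page of results; later pages are
+// retrieved lazily via the nextLink wired up in the responder below.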
+func (c ClustersClient) ListByResourceGroup(ctx context.Context, id commonids.ResourceGroupId) (resp ListByResourceGroupOperationResponse, err error) { + req, err := c.preparerForListByResourceGroup(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListByResourceGroup", nil, "Failure preparing request") + return + } + + resp.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListByResourceGroup", resp.HttpResponse, "Failure sending request") + return + } + + resp, err = c.responderForListByResourceGroup(resp.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListByResourceGroup", resp.HttpResponse, "Failure responding to request") + return + } + return +} + +// preparerForListByResourceGroup prepares the ListByResourceGroup request. +func (c ClustersClient) preparerForListByResourceGroup(ctx context.Context, id commonids.ResourceGroupId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(fmt.Sprintf("%s/providers/Microsoft.StreamAnalytics/clusters", id.ID())), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// preparerForListByResourceGroupWithNextLink prepares the ListByResourceGroup request with the given nextLink token. +func (c ClustersClient) preparerForListByResourceGroupWithNextLink(ctx context.Context, nextLink string) (*http.Request, error) { + uri, err := url.Parse(nextLink) + if err != nil { + return nil, fmt.Errorf("parsing nextLink %q: %+v", nextLink, err) + } + queryParameters := map[string]interface{}{} + for k, v := range uri.Query() { + if len(v) == 0 { + continue + } + val := v[0] + val = autorest.Encode("query", val) + queryParameters[k] = val + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(uri.Path), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForListByResourceGroup handles the response to the ListByResourceGroup request. The method always +// closes the http.Response Body. 
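+// Each page is decoded into an envelope of `value` and `nextLink`; while a
+// nextLink is present a nextPageFunc closure is attached for LoadMore.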
+func (c ClustersClient) responderForListByResourceGroup(resp *http.Response) (result ListByResourceGroupOperationResponse, err error) { + type page struct { + Values []Cluster `json:"value"` + NextLink *string `json:"nextLink"` + } + var respObj page + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&respObj), + autorest.ByClosing()) + result.HttpResponse = resp + result.Model = &respObj.Values + result.nextLink = respObj.NextLink + if respObj.NextLink != nil { + result.nextPageFunc = func(ctx context.Context, nextLink string) (result ListByResourceGroupOperationResponse, err error) { + req, err := c.preparerForListByResourceGroupWithNextLink(ctx, nextLink) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListByResourceGroup", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListByResourceGroup", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForListByResourceGroup(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListByResourceGroup", result.HttpResponse, "Failure responding to request") + return + } + + return + } + } + return +} + +// ListByResourceGroupComplete retrieves all of the results into a single object +func (c ClustersClient) ListByResourceGroupComplete(ctx context.Context, id commonids.ResourceGroupId) (ListByResourceGroupCompleteResult, error) { + return c.ListByResourceGroupCompleteMatchingPredicate(ctx, id, ClusterOperationPredicate{}) +} + +// ListByResourceGroupCompleteMatchingPredicate retrieves all of the results and then applied the predicate +func (c ClustersClient) ListByResourceGroupCompleteMatchingPredicate(ctx context.Context, id commonids.ResourceGroupId, predicate ClusterOperationPredicate) (resp ListByResourceGroupCompleteResult, err error) { + items := make([]Cluster, 0) + + page, err := c.ListByResourceGroup(ctx, id) + if err != nil { + err = fmt.Errorf("loading the initial page: %+v", err) + return + } + if page.Model != nil { + for _, v := range *page.Model { + if predicate.Matches(v) { + items = append(items, v) + } + } + } + + for page.HasMore() { + page, err = page.LoadMore(ctx) + if err != nil { + err = fmt.Errorf("loading the next page: %+v", err) + return + } + + if page.Model != nil { + for _, v := range *page.Model { + if predicate.Matches(v) { + items = append(items, v) + } + } + } + } + + out := ListByResourceGroupCompleteResult{ + Items: items, + } + return out, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_listbysubscription_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_listbysubscription_autorest.go new file mode 100644 index 000000000000..1601cef41e9a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_listbysubscription_autorest.go @@ -0,0 +1,187 @@ +package clusters + +import ( + "context" + "fmt" + "net/http" + "net/url" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/hashicorp/go-azure-helpers/resourcemanager/commonids" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ListBySubscriptionOperationResponse struct { + HttpResponse *http.Response + Model *[]Cluster + + nextLink *string + nextPageFunc func(ctx context.Context, nextLink string) (ListBySubscriptionOperationResponse, error) +} + +type ListBySubscriptionCompleteResult struct { + Items []Cluster +} + +func (r ListBySubscriptionOperationResponse) HasMore() bool { + return r.nextLink != nil +} + +func (r ListBySubscriptionOperationResponse) LoadMore(ctx context.Context) (resp ListBySubscriptionOperationResponse, err error) { + if !r.HasMore() { + err = fmt.Errorf("no more pages returned") + return + } + return r.nextPageFunc(ctx, *r.nextLink) +} + +// ListBySubscription ... +func (c ClustersClient) ListBySubscription(ctx context.Context, id commonids.SubscriptionId) (resp ListBySubscriptionOperationResponse, err error) { + req, err := c.preparerForListBySubscription(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListBySubscription", nil, "Failure preparing request") + return + } + + resp.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListBySubscription", resp.HttpResponse, "Failure sending request") + return + } + + resp, err = c.responderForListBySubscription(resp.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListBySubscription", resp.HttpResponse, "Failure responding to request") + return + } + return +} + +// preparerForListBySubscription prepares the ListBySubscription request. +func (c ClustersClient) preparerForListBySubscription(ctx context.Context, id commonids.SubscriptionId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(fmt.Sprintf("%s/providers/Microsoft.StreamAnalytics/clusters", id.ID())), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// preparerForListBySubscriptionWithNextLink prepares the ListBySubscription request with the given nextLink token. +func (c ClustersClient) preparerForListBySubscriptionWithNextLink(ctx context.Context, nextLink string) (*http.Request, error) { + uri, err := url.Parse(nextLink) + if err != nil { + return nil, fmt.Errorf("parsing nextLink %q: %+v", nextLink, err) + } + queryParameters := map[string]interface{}{} + for k, v := range uri.Query() { + if len(v) == 0 { + continue + } + val := v[0] + val = autorest.Encode("query", val) + queryParameters[k] = val + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(uri.Path), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForListBySubscription handles the response to the ListBySubscription request. The method always +// closes the http.Response Body. 
+func (c ClustersClient) responderForListBySubscription(resp *http.Response) (result ListBySubscriptionOperationResponse, err error) { + type page struct { + Values []Cluster `json:"value"` + NextLink *string `json:"nextLink"` + } + var respObj page + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&respObj), + autorest.ByClosing()) + result.HttpResponse = resp + result.Model = &respObj.Values + result.nextLink = respObj.NextLink + if respObj.NextLink != nil { + result.nextPageFunc = func(ctx context.Context, nextLink string) (result ListBySubscriptionOperationResponse, err error) { + req, err := c.preparerForListBySubscriptionWithNextLink(ctx, nextLink) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListBySubscription", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListBySubscription", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForListBySubscription(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListBySubscription", result.HttpResponse, "Failure responding to request") + return + } + + return + } + } + return +} + +// ListBySubscriptionComplete retrieves all of the results into a single object +func (c ClustersClient) ListBySubscriptionComplete(ctx context.Context, id commonids.SubscriptionId) (ListBySubscriptionCompleteResult, error) { + return c.ListBySubscriptionCompleteMatchingPredicate(ctx, id, ClusterOperationPredicate{}) +} + +// ListBySubscriptionCompleteMatchingPredicate retrieves all of the results and then applied the predicate +func (c ClustersClient) ListBySubscriptionCompleteMatchingPredicate(ctx context.Context, id commonids.SubscriptionId, predicate ClusterOperationPredicate) (resp ListBySubscriptionCompleteResult, err error) { + items := make([]Cluster, 0) + + page, err := c.ListBySubscription(ctx, id) + if err != nil { + err = fmt.Errorf("loading the initial page: %+v", err) + return + } + if page.Model != nil { + for _, v := range *page.Model { + if predicate.Matches(v) { + items = append(items, v) + } + } + } + + for page.HasMore() { + page, err = page.LoadMore(ctx) + if err != nil { + err = fmt.Errorf("loading the next page: %+v", err) + return + } + + if page.Model != nil { + for _, v := range *page.Model { + if predicate.Matches(v) { + items = append(items, v) + } + } + } + } + + out := ListBySubscriptionCompleteResult{ + Items: items, + } + return out, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_liststreamingjobs_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_liststreamingjobs_autorest.go new file mode 100644 index 000000000000..09d1fdaa06d2 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_liststreamingjobs_autorest.go @@ -0,0 +1,186 @@ +package clusters + +import ( + "context" + "fmt" + "net/http" + "net/url" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
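+
+// ListStreamingJobs POSTs to the cluster's listStreamingJobs endpoint and
+// pages through the ClusterJob entries (job id, state and streaming units).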
+ +type ListStreamingJobsOperationResponse struct { + HttpResponse *http.Response + Model *[]ClusterJob + + nextLink *string + nextPageFunc func(ctx context.Context, nextLink string) (ListStreamingJobsOperationResponse, error) +} + +type ListStreamingJobsCompleteResult struct { + Items []ClusterJob +} + +func (r ListStreamingJobsOperationResponse) HasMore() bool { + return r.nextLink != nil +} + +func (r ListStreamingJobsOperationResponse) LoadMore(ctx context.Context) (resp ListStreamingJobsOperationResponse, err error) { + if !r.HasMore() { + err = fmt.Errorf("no more pages returned") + return + } + return r.nextPageFunc(ctx, *r.nextLink) +} + +// ListStreamingJobs ... +func (c ClustersClient) ListStreamingJobs(ctx context.Context, id ClusterId) (resp ListStreamingJobsOperationResponse, err error) { + req, err := c.preparerForListStreamingJobs(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListStreamingJobs", nil, "Failure preparing request") + return + } + + resp.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListStreamingJobs", resp.HttpResponse, "Failure sending request") + return + } + + resp, err = c.responderForListStreamingJobs(resp.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListStreamingJobs", resp.HttpResponse, "Failure responding to request") + return + } + return +} + +// preparerForListStreamingJobs prepares the ListStreamingJobs request. +func (c ClustersClient) preparerForListStreamingJobs(ctx context.Context, id ClusterId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(fmt.Sprintf("%s/listStreamingJobs", id.ID())), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// preparerForListStreamingJobsWithNextLink prepares the ListStreamingJobs request with the given nextLink token. +func (c ClustersClient) preparerForListStreamingJobsWithNextLink(ctx context.Context, nextLink string) (*http.Request, error) { + uri, err := url.Parse(nextLink) + if err != nil { + return nil, fmt.Errorf("parsing nextLink %q: %+v", nextLink, err) + } + queryParameters := map[string]interface{}{} + for k, v := range uri.Query() { + if len(v) == 0 { + continue + } + val := v[0] + val = autorest.Encode("query", val) + queryParameters[k] = val + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(uri.Path), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForListStreamingJobs handles the response to the ListStreamingJobs request. The method always +// closes the http.Response Body. 
+func (c ClustersClient) responderForListStreamingJobs(resp *http.Response) (result ListStreamingJobsOperationResponse, err error) { + type page struct { + Values []ClusterJob `json:"value"` + NextLink *string `json:"nextLink"` + } + var respObj page + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&respObj), + autorest.ByClosing()) + result.HttpResponse = resp + result.Model = &respObj.Values + result.nextLink = respObj.NextLink + if respObj.NextLink != nil { + result.nextPageFunc = func(ctx context.Context, nextLink string) (result ListStreamingJobsOperationResponse, err error) { + req, err := c.preparerForListStreamingJobsWithNextLink(ctx, nextLink) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListStreamingJobs", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListStreamingJobs", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForListStreamingJobs(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "ListStreamingJobs", result.HttpResponse, "Failure responding to request") + return + } + + return + } + } + return +} + +// ListStreamingJobsComplete retrieves all of the results into a single object +func (c ClustersClient) ListStreamingJobsComplete(ctx context.Context, id ClusterId) (ListStreamingJobsCompleteResult, error) { + return c.ListStreamingJobsCompleteMatchingPredicate(ctx, id, ClusterJobOperationPredicate{}) +} + +// ListStreamingJobsCompleteMatchingPredicate retrieves all of the results and then applied the predicate +func (c ClustersClient) ListStreamingJobsCompleteMatchingPredicate(ctx context.Context, id ClusterId, predicate ClusterJobOperationPredicate) (resp ListStreamingJobsCompleteResult, err error) { + items := make([]ClusterJob, 0) + + page, err := c.ListStreamingJobs(ctx, id) + if err != nil { + err = fmt.Errorf("loading the initial page: %+v", err) + return + } + if page.Model != nil { + for _, v := range *page.Model { + if predicate.Matches(v) { + items = append(items, v) + } + } + } + + for page.HasMore() { + page, err = page.LoadMore(ctx) + if err != nil { + err = fmt.Errorf("loading the next page: %+v", err) + return + } + + if page.Model != nil { + for _, v := range *page.Model { + if predicate.Matches(v) { + items = append(items, v) + } + } + } + } + + out := ListStreamingJobsCompleteResult{ + Items: items, + } + return out, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_update_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_update_autorest.go new file mode 100644 index 000000000000..360afc5594e3 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/method_update_autorest.go @@ -0,0 +1,108 @@ +package clusters + +import ( + "context" + "fmt" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/hashicorp/go-azure-helpers/polling" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
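+
+// Update is a PATCH-based long-running operation; an optional If-Match header
+// can be supplied via UpdateOperationOptions for optimistic concurrency.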
+ +type UpdateOperationResponse struct { + Poller polling.LongRunningPoller + HttpResponse *http.Response +} + +type UpdateOperationOptions struct { + IfMatch *string +} + +func DefaultUpdateOperationOptions() UpdateOperationOptions { + return UpdateOperationOptions{} +} + +func (o UpdateOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + if o.IfMatch != nil { + out["If-Match"] = *o.IfMatch + } + + return out +} + +func (o UpdateOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +// Update ... +func (c ClustersClient) Update(ctx context.Context, id ClusterId, input Cluster, options UpdateOperationOptions) (result UpdateOperationResponse, err error) { + req, err := c.preparerForUpdate(ctx, id, input, options) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "Update", nil, "Failure preparing request") + return + } + + result, err = c.senderForUpdate(ctx, req) + if err != nil { + err = autorest.NewErrorWithError(err, "clusters.ClustersClient", "Update", result.HttpResponse, "Failure sending request") + return + } + + return +} + +// UpdateThenPoll performs Update then polls until it's completed +func (c ClustersClient) UpdateThenPoll(ctx context.Context, id ClusterId, input Cluster, options UpdateOperationOptions) error { + result, err := c.Update(ctx, id, input, options) + if err != nil { + return fmt.Errorf("performing Update: %+v", err) + } + + if err := result.Poller.PollUntilDone(); err != nil { + return fmt.Errorf("polling after Update: %+v", err) + } + + return nil +} + +// preparerForUpdate prepares the Update request. +func (c ClustersClient) preparerForUpdate(ctx context.Context, id ClusterId, input Cluster, options UpdateOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(id.ID()), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// senderForUpdate sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (c ClustersClient) senderForUpdate(ctx context.Context, req *http.Request) (future UpdateOperationResponse, err error) { + var resp *http.Response + resp, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + return + } + + future.Poller, err = polling.NewPollerFromResponse(ctx, resp, c.Client, req.Method) + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_cluster.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_cluster.go new file mode 100644 index 000000000000..aefc9217e75a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_cluster.go @@ -0,0 +1,15 @@ +package clusters + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
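+
+// All fields are optional pointers so absent values are omitted from the
+// JSON payload (note the omitempty tags below).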
+ +type Cluster struct { + Etag *string `json:"etag,omitempty"` + Id *string `json:"id,omitempty"` + Location *string `json:"location,omitempty"` + Name *string `json:"name,omitempty"` + Properties *ClusterProperties `json:"properties,omitempty"` + Sku *ClusterSku `json:"sku,omitempty"` + Tags *map[string]string `json:"tags,omitempty"` + Type *string `json:"type,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_clusterjob.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_clusterjob.go new file mode 100644 index 000000000000..c65d6b75c79e --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_clusterjob.go @@ -0,0 +1,10 @@ +package clusters + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ClusterJob struct { + Id *string `json:"id,omitempty"` + JobState *JobState `json:"jobState,omitempty"` + StreamingUnits *int64 `json:"streamingUnits,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_clusterproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_clusterproperties.go new file mode 100644 index 000000000000..efa294b2912d --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_clusterproperties.go @@ -0,0 +1,30 @@ +package clusters + +import ( + "time" + + "github.com/hashicorp/go-azure-helpers/lang/dates" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ClusterProperties struct { + CapacityAllocated *int64 `json:"capacityAllocated,omitempty"` + CapacityAssigned *int64 `json:"capacityAssigned,omitempty"` + ClusterId *string `json:"clusterId,omitempty"` + CreatedDate *string `json:"createdDate,omitempty"` + ProvisioningState *ClusterProvisioningState `json:"provisioningState,omitempty"` +} + +func (o *ClusterProperties) GetCreatedDateAsTime() (*time.Time, error) { + if o.CreatedDate == nil { + return nil, nil + } + return dates.ParseAsFormat(o.CreatedDate, "2006-01-02T15:04:05Z07:00") +} + +func (o *ClusterProperties) SetCreatedDateAsTime(input time.Time) { + formatted := input.Format("2006-01-02T15:04:05Z07:00") + o.CreatedDate = &formatted +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_clustersku.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_clustersku.go new file mode 100644 index 000000000000..c37bade67463 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/model_clustersku.go @@ -0,0 +1,9 @@ +package clusters + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
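The layout string `"2006-01-02T15:04:05Z07:00"` used by the `ClusterProperties` date helpers above is Go's reference layout for RFC 3339, so `createdDate` round-trips cleanly through `time.Time`. A small sketch:

```go
props := clusters.ClusterProperties{}
props.SetCreatedDateAsTime(time.Date(2022, 11, 18, 16, 42, 0, 0, time.UTC))

if created, err := props.GetCreatedDateAsTime(); err == nil && created != nil {
	fmt.Println(created.Format(time.RFC3339)) // 2022-11-18T16:42:00Z
}
```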
+
+type ClusterSku struct {
+	Capacity *int64          `json:"capacity,omitempty"`
+	Name     *ClusterSkuName `json:"name,omitempty"`
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/predicates.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/predicates.go
new file mode 100644
index 000000000000..92da922b7ff1
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/predicates.go
@@ -0,0 +1,52 @@
+package clusters
+
+type ClusterOperationPredicate struct {
+	Etag     *string
+	Id       *string
+	Location *string
+	Name     *string
+	Type     *string
+}
+
+func (p ClusterOperationPredicate) Matches(input Cluster) bool {
+
+	if p.Etag != nil && (input.Etag == nil || *p.Etag != *input.Etag) {
+		return false
+	}
+
+	if p.Id != nil && (input.Id == nil || *p.Id != *input.Id) {
+		return false
+	}
+
+	if p.Location != nil && (input.Location == nil || *p.Location != *input.Location) {
+		return false
+	}
+
+	if p.Name != nil && (input.Name == nil || *p.Name != *input.Name) {
+		return false
+	}
+
+	if p.Type != nil && (input.Type == nil || *p.Type != *input.Type) {
+		return false
+	}
+
+	return true
+}
+
+type ClusterJobOperationPredicate struct {
+	Id             *string
+	StreamingUnits *int64
+}
+
+func (p ClusterJobOperationPredicate) Matches(input ClusterJob) bool {
+
+	if p.Id != nil && (input.Id == nil || *p.Id != *input.Id) {
+		return false
+	}
+
+	if p.StreamingUnits != nil && (input.StreamingUnits == nil || *p.StreamingUnits != *input.StreamingUnits) {
+		return false
+	}
+
+	return true
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/version.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/version.go
new file mode 100644
index 000000000000..f01857557675
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/clusters/version.go
@@ -0,0 +1,12 @@
+package clusters
+
+import "fmt"
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See NOTICE.txt in the project root for license information.
+
+const defaultApiVersion = "2020-03-01"
+
+func userAgent() string {
+	return fmt.Sprintf("hashicorp/go-azure-sdk/clusters/%s", defaultApiVersion)
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/README.md b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/README.md
new file mode 100644
index 000000000000..ef877301eaf4
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/README.md
@@ -0,0 +1,149 @@
+
+## `github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions` Documentation
+
+The `functions` SDK allows for interaction with the Azure Resource Manager Service `streamanalytics` (API Version `2020-03-01`).
+
+This readme covers example usages, but further information on [using this SDK can be found in the project root](https://github.com/hashicorp/go-azure-sdk/tree/main/docs).
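Before the `functions` examples below, a quick note on the predicate types from `predicates.go` above: `Matches` returns false as soon as any populated predicate field disagrees with (or is missing from) the input, so a zero-value predicate matches everything. A sketch of that behaviour:

```go
name := "example-cluster"
predicate := clusters.ClusterOperationPredicate{Name: &name}

match := clusters.Cluster{Name: &name}
other := clusters.Cluster{}

fmt.Println(predicate.Matches(match)) // true
fmt.Println(predicate.Matches(other)) // false; Name is set on the predicate but absent on the input
```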
+ +### Import Path + +```go +import "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions" +``` + + +### Client Initialization + +```go +client := functions.NewFunctionsClientWithBaseURI("https://management.azure.com") +client.Client.Authorizer = authorizer +``` + + +### Example Usage: `FunctionsClient.CreateOrReplace` + +```go +ctx := context.TODO() +id := functions.NewFunctionID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "functionValue") + +payload := functions.Function{ + // ... +} + + +read, err := client.CreateOrReplace(ctx, id, payload, functions.DefaultCreateOrReplaceOperationOptions()) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` + + +### Example Usage: `FunctionsClient.Delete` + +```go +ctx := context.TODO() +id := functions.NewFunctionID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "functionValue") + +read, err := client.Delete(ctx, id) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` + + +### Example Usage: `FunctionsClient.Get` + +```go +ctx := context.TODO() +id := functions.NewFunctionID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "functionValue") + +read, err := client.Get(ctx, id) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` + + +### Example Usage: `FunctionsClient.ListByStreamingJob` + +```go +ctx := context.TODO() +id := functions.NewStreamingJobID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue") + +// alternatively `client.ListByStreamingJob(ctx, id, functions.DefaultListByStreamingJobOperationOptions())` can be used to do batched pagination +items, err := client.ListByStreamingJobComplete(ctx, id, functions.DefaultListByStreamingJobOperationOptions()) +if err != nil { + // handle the error +} +for _, item := range items { + // do something +} +``` + + +### Example Usage: `FunctionsClient.RetrieveDefaultDefinition` + +```go +ctx := context.TODO() +id := functions.NewFunctionID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "functionValue") + +payload := functions.FunctionRetrieveDefaultDefinitionParameters{ + // ... +} + + +read, err := client.RetrieveDefaultDefinition(ctx, id, payload) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` + + +### Example Usage: `FunctionsClient.Test` + +```go +ctx := context.TODO() +id := functions.NewFunctionID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "functionValue") + +payload := functions.Function{ + // ... +} + + +if err := client.TestThenPoll(ctx, id, payload); err != nil { + // handle the error +} +``` + + +### Example Usage: `FunctionsClient.Update` + +```go +ctx := context.TODO() +id := functions.NewFunctionID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "functionValue") + +payload := functions.Function{ + // ... 
+} + + +read, err := client.Update(ctx, id, payload, functions.DefaultUpdateOperationOptions()) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/client.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/client.go new file mode 100644 index 000000000000..d8478486d96a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/client.go @@ -0,0 +1,18 @@ +package functions + +import "github.com/Azure/go-autorest/autorest" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type FunctionsClient struct { + Client autorest.Client + baseUri string +} + +func NewFunctionsClientWithBaseURI(endpoint string) FunctionsClient { + return FunctionsClient{ + Client: autorest.NewClientWithUserAgent(userAgent()), + baseUri: endpoint, + } +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/constants.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/constants.go new file mode 100644 index 000000000000..48971992b3db --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/constants.go @@ -0,0 +1,31 @@ +package functions + +import "strings" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type UdfType string + +const ( + UdfTypeScalar UdfType = "Scalar" +) + +func PossibleValuesForUdfType() []string { + return []string{ + string(UdfTypeScalar), + } +} + +func parseUdfType(input string) (*UdfType, error) { + vals := map[string]UdfType{ + "scalar": UdfTypeScalar, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := UdfType(input) + return &out, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/id_function.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/id_function.go new file mode 100644 index 000000000000..815a51ac48e7 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/id_function.go @@ -0,0 +1,137 @@ +package functions + +import ( + "fmt" + "strings" + + "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" +) + +var _ resourceids.ResourceId = FunctionId{} + +// FunctionId is a struct representing the Resource ID for a Function +type FunctionId struct { + SubscriptionId string + ResourceGroupName string + JobName string + FunctionName string +} + +// NewFunctionID returns a new FunctionId struct +func NewFunctionID(subscriptionId string, resourceGroupName string, jobName string, functionName string) FunctionId { + return FunctionId{ + SubscriptionId: subscriptionId, + ResourceGroupName: resourceGroupName, + JobName: jobName, + FunctionName: functionName, + } +} + +// ParseFunctionID parses 'input' into a FunctionId +func ParseFunctionID(input string) (*FunctionId, error) { + parser := resourceids.NewParserFromResourceIdType(FunctionId{}) + 
parsed, err := parser.Parse(input, false) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := FunctionId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + if id.FunctionName, ok = parsed.Parsed["functionName"]; !ok { + return nil, fmt.Errorf("the segment 'functionName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ParseFunctionIDInsensitively parses 'input' case-insensitively into a FunctionId +// note: this method should only be used for API response data and not user input +func ParseFunctionIDInsensitively(input string) (*FunctionId, error) { + parser := resourceids.NewParserFromResourceIdType(FunctionId{}) + parsed, err := parser.Parse(input, true) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := FunctionId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + if id.FunctionName, ok = parsed.Parsed["functionName"]; !ok { + return nil, fmt.Errorf("the segment 'functionName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ValidateFunctionID checks that 'input' can be parsed as a Function ID +func ValidateFunctionID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := ParseFunctionID(v); err != nil { + errors = append(errors, err) + } + + return +} + +// ID returns the formatted Function ID +func (id FunctionId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingJobs/%s/functions/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.JobName, id.FunctionName) +} + +// Segments returns a slice of Resource ID Segments which comprise this Function ID +func (id FunctionId) Segments() []resourceids.Segment { + return []resourceids.Segment{ + resourceids.StaticSegment("staticSubscriptions", "subscriptions", "subscriptions"), + resourceids.SubscriptionIdSegment("subscriptionId", "12345678-1234-9876-4563-123456789012"), + resourceids.StaticSegment("staticResourceGroups", "resourceGroups", "resourceGroups"), + resourceids.ResourceGroupSegment("resourceGroupName", "example-resource-group"), + resourceids.StaticSegment("staticProviders", "providers", "providers"), + resourceids.ResourceProviderSegment("staticMicrosoftStreamAnalytics", "Microsoft.StreamAnalytics", "Microsoft.StreamAnalytics"), + resourceids.StaticSegment("staticStreamingJobs", "streamingJobs", "streamingJobs"), + 
resourceids.UserSpecifiedSegment("jobName", "jobValue"), + resourceids.StaticSegment("staticFunctions", "functions", "functions"), + resourceids.UserSpecifiedSegment("functionName", "functionValue"), + } +} + +// String returns a human-readable description of this Function ID +func (id FunctionId) String() string { + components := []string{ + fmt.Sprintf("Subscription: %q", id.SubscriptionId), + fmt.Sprintf("Resource Group Name: %q", id.ResourceGroupName), + fmt.Sprintf("Job Name: %q", id.JobName), + fmt.Sprintf("Function Name: %q", id.FunctionName), + } + return fmt.Sprintf("Function (%s)", strings.Join(components, "\n")) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/id_streamingjob.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/id_streamingjob.go new file mode 100644 index 000000000000..fa26324129b1 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/id_streamingjob.go @@ -0,0 +1,124 @@ +package functions + +import ( + "fmt" + "strings" + + "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" +) + +var _ resourceids.ResourceId = StreamingJobId{} + +// StreamingJobId is a struct representing the Resource ID for a Streaming Job +type StreamingJobId struct { + SubscriptionId string + ResourceGroupName string + JobName string +} + +// NewStreamingJobID returns a new StreamingJobId struct +func NewStreamingJobID(subscriptionId string, resourceGroupName string, jobName string) StreamingJobId { + return StreamingJobId{ + SubscriptionId: subscriptionId, + ResourceGroupName: resourceGroupName, + JobName: jobName, + } +} + +// ParseStreamingJobID parses 'input' into a StreamingJobId +func ParseStreamingJobID(input string) (*StreamingJobId, error) { + parser := resourceids.NewParserFromResourceIdType(StreamingJobId{}) + parsed, err := parser.Parse(input, false) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := StreamingJobId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ParseStreamingJobIDInsensitively parses 'input' case-insensitively into a StreamingJobId +// note: this method should only be used for API response data and not user input +func ParseStreamingJobIDInsensitively(input string) (*StreamingJobId, error) { + parser := resourceids.NewParserFromResourceIdType(StreamingJobId{}) + parsed, err := parser.Parse(input, true) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := StreamingJobId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, 
fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ValidateStreamingJobID checks that 'input' can be parsed as a Streaming Job ID +func ValidateStreamingJobID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := ParseStreamingJobID(v); err != nil { + errors = append(errors, err) + } + + return +} + +// ID returns the formatted Streaming Job ID +func (id StreamingJobId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingJobs/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.JobName) +} + +// Segments returns a slice of Resource ID Segments which comprise this Streaming Job ID +func (id StreamingJobId) Segments() []resourceids.Segment { + return []resourceids.Segment{ + resourceids.StaticSegment("staticSubscriptions", "subscriptions", "subscriptions"), + resourceids.SubscriptionIdSegment("subscriptionId", "12345678-1234-9876-4563-123456789012"), + resourceids.StaticSegment("staticResourceGroups", "resourceGroups", "resourceGroups"), + resourceids.ResourceGroupSegment("resourceGroupName", "example-resource-group"), + resourceids.StaticSegment("staticProviders", "providers", "providers"), + resourceids.ResourceProviderSegment("staticMicrosoftStreamAnalytics", "Microsoft.StreamAnalytics", "Microsoft.StreamAnalytics"), + resourceids.StaticSegment("staticStreamingJobs", "streamingJobs", "streamingJobs"), + resourceids.UserSpecifiedSegment("jobName", "jobValue"), + } +} + +// String returns a human-readable description of this Streaming Job ID +func (id StreamingJobId) String() string { + components := []string{ + fmt.Sprintf("Subscription: %q", id.SubscriptionId), + fmt.Sprintf("Resource Group Name: %q", id.ResourceGroupName), + fmt.Sprintf("Job Name: %q", id.JobName), + } + return fmt.Sprintf("Streaming Job (%s)", strings.Join(components, "\n")) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_createorreplace_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_createorreplace_autorest.go new file mode 100644 index 000000000000..96bd230c74e5 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_createorreplace_autorest.go @@ -0,0 +1,103 @@ +package functions + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type CreateOrReplaceOperationResponse struct { + HttpResponse *http.Response + Model *Function +} + +type CreateOrReplaceOperationOptions struct { + IfMatch *string + IfNoneMatch *string +} + +func DefaultCreateOrReplaceOperationOptions() CreateOrReplaceOperationOptions { + return CreateOrReplaceOperationOptions{} +} + +func (o CreateOrReplaceOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + if o.IfMatch != nil { + out["If-Match"] = *o.IfMatch + } + + if o.IfNoneMatch != nil { + out["If-None-Match"] = *o.IfNoneMatch + } + + return out +} + +func (o CreateOrReplaceOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +// CreateOrReplace ... +func (c FunctionsClient) CreateOrReplace(ctx context.Context, id FunctionId, input Function, options CreateOrReplaceOperationOptions) (result CreateOrReplaceOperationResponse, err error) { + req, err := c.preparerForCreateOrReplace(ctx, id, input, options) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "CreateOrReplace", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "CreateOrReplace", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForCreateOrReplace(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "CreateOrReplace", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForCreateOrReplace prepares the CreateOrReplace request. +func (c FunctionsClient) preparerForCreateOrReplace(ctx context.Context, id FunctionId, input Function, options CreateOrReplaceOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(id.ID()), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForCreateOrReplace handles the response to the CreateOrReplace request. The method always +// closes the http.Response Body. 
+func (c FunctionsClient) responderForCreateOrReplace(resp *http.Response) (result CreateOrReplaceOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusCreated, http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_delete_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_delete_autorest.go new file mode 100644 index 000000000000..460f6ac3da91 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_delete_autorest.go @@ -0,0 +1,66 @@ +package functions + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type DeleteOperationResponse struct { + HttpResponse *http.Response +} + +// Delete ... +func (c FunctionsClient) Delete(ctx context.Context, id FunctionId) (result DeleteOperationResponse, err error) { + req, err := c.preparerForDelete(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "Delete", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "Delete", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForDelete(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "Delete", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForDelete prepares the Delete request. +func (c FunctionsClient) preparerForDelete(ctx context.Context, id FunctionId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsDelete(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(id.ID()), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForDelete handles the response to the Delete request. The method always +// closes the http.Response Body. 
+func (c FunctionsClient) responderForDelete(resp *http.Response) (result DeleteOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusNoContent, http.StatusOK), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_get_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_get_autorest.go new file mode 100644 index 000000000000..b531eff94b5a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_get_autorest.go @@ -0,0 +1,68 @@ +package functions + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type GetOperationResponse struct { + HttpResponse *http.Response + Model *Function +} + +// Get ... +func (c FunctionsClient) Get(ctx context.Context, id FunctionId) (result GetOperationResponse, err error) { + req, err := c.preparerForGet(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "Get", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "Get", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForGet(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "Get", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForGet prepares the Get request. +func (c FunctionsClient) preparerForGet(ctx context.Context, id FunctionId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(id.ID()), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForGet handles the response to the Get request. The method always +// closes the http.Response Body. 
+func (c FunctionsClient) responderForGet(resp *http.Response) (result GetOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_listbystreamingjob_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_listbystreamingjob_autorest.go new file mode 100644 index 000000000000..2f3271a3c780 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_listbystreamingjob_autorest.go @@ -0,0 +1,215 @@ +package functions + +import ( + "context" + "fmt" + "net/http" + "net/url" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ListByStreamingJobOperationResponse struct { + HttpResponse *http.Response + Model *[]Function + + nextLink *string + nextPageFunc func(ctx context.Context, nextLink string) (ListByStreamingJobOperationResponse, error) +} + +type ListByStreamingJobCompleteResult struct { + Items []Function +} + +func (r ListByStreamingJobOperationResponse) HasMore() bool { + return r.nextLink != nil +} + +func (r ListByStreamingJobOperationResponse) LoadMore(ctx context.Context) (resp ListByStreamingJobOperationResponse, err error) { + if !r.HasMore() { + err = fmt.Errorf("no more pages returned") + return + } + return r.nextPageFunc(ctx, *r.nextLink) +} + +type ListByStreamingJobOperationOptions struct { + Select *string +} + +func DefaultListByStreamingJobOperationOptions() ListByStreamingJobOperationOptions { + return ListByStreamingJobOperationOptions{} +} + +func (o ListByStreamingJobOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +func (o ListByStreamingJobOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + if o.Select != nil { + out["$select"] = *o.Select + } + + return out +} + +// ListByStreamingJob ... +func (c FunctionsClient) ListByStreamingJob(ctx context.Context, id StreamingJobId, options ListByStreamingJobOperationOptions) (resp ListByStreamingJobOperationResponse, err error) { + req, err := c.preparerForListByStreamingJob(ctx, id, options) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "ListByStreamingJob", nil, "Failure preparing request") + return + } + + resp.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "ListByStreamingJob", resp.HttpResponse, "Failure sending request") + return + } + + resp, err = c.responderForListByStreamingJob(resp.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "ListByStreamingJob", resp.HttpResponse, "Failure responding to request") + return + } + return +} + +// preparerForListByStreamingJob prepares the ListByStreamingJob request. 
+func (c FunctionsClient) preparerForListByStreamingJob(ctx context.Context, id StreamingJobId, options ListByStreamingJobOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(fmt.Sprintf("%s/functions", id.ID())), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// preparerForListByStreamingJobWithNextLink prepares the ListByStreamingJob request with the given nextLink token. +func (c FunctionsClient) preparerForListByStreamingJobWithNextLink(ctx context.Context, nextLink string) (*http.Request, error) { + uri, err := url.Parse(nextLink) + if err != nil { + return nil, fmt.Errorf("parsing nextLink %q: %+v", nextLink, err) + } + queryParameters := map[string]interface{}{} + for k, v := range uri.Query() { + if len(v) == 0 { + continue + } + val := v[0] + val = autorest.Encode("query", val) + queryParameters[k] = val + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(uri.Path), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForListByStreamingJob handles the response to the ListByStreamingJob request. The method always +// closes the http.Response Body. +func (c FunctionsClient) responderForListByStreamingJob(resp *http.Response) (result ListByStreamingJobOperationResponse, err error) { + type page struct { + Values []Function `json:"value"` + NextLink *string `json:"nextLink"` + } + var respObj page + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&respObj), + autorest.ByClosing()) + result.HttpResponse = resp + result.Model = &respObj.Values + result.nextLink = respObj.NextLink + if respObj.NextLink != nil { + result.nextPageFunc = func(ctx context.Context, nextLink string) (result ListByStreamingJobOperationResponse, err error) { + req, err := c.preparerForListByStreamingJobWithNextLink(ctx, nextLink) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "ListByStreamingJob", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "ListByStreamingJob", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForListByStreamingJob(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "ListByStreamingJob", result.HttpResponse, "Failure responding to request") + return + } + + return + } + } + return +} + +// ListByStreamingJobComplete retrieves all of the results into a single object +func (c FunctionsClient) ListByStreamingJobComplete(ctx context.Context, id StreamingJobId, options ListByStreamingJobOperationOptions) (ListByStreamingJobCompleteResult, error) { + return c.ListByStreamingJobCompleteMatchingPredicate(ctx, id, options, FunctionOperationPredicate{}) +} + +// 
ListByStreamingJobCompleteMatchingPredicate retrieves all of the results and then applies the predicate
+func (c FunctionsClient) ListByStreamingJobCompleteMatchingPredicate(ctx context.Context, id StreamingJobId, options ListByStreamingJobOperationOptions, predicate FunctionOperationPredicate) (resp ListByStreamingJobCompleteResult, err error) {
+	items := make([]Function, 0)
+
+	page, err := c.ListByStreamingJob(ctx, id, options)
+	if err != nil {
+		err = fmt.Errorf("loading the initial page: %+v", err)
+		return
+	}
+	if page.Model != nil {
+		for _, v := range *page.Model {
+			if predicate.Matches(v) {
+				items = append(items, v)
+			}
+		}
+	}
+
+	for page.HasMore() {
+		page, err = page.LoadMore(ctx)
+		if err != nil {
+			err = fmt.Errorf("loading the next page: %+v", err)
+			return
+		}
+
+		if page.Model != nil {
+			for _, v := range *page.Model {
+				if predicate.Matches(v) {
+					items = append(items, v)
+				}
+			}
+		}
+	}
+
+	out := ListByStreamingJobCompleteResult{
+		Items: items,
+	}
+	return out, nil
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_retrievedefaultdefinition_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_retrievedefaultdefinition_autorest.go
new file mode 100644
index 000000000000..3be1fb89bd62
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_retrievedefaultdefinition_autorest.go
@@ -0,0 +1,70 @@
+package functions
+
+import (
+	"context"
+	"fmt"
+	"net/http"
+
+	"github.com/Azure/go-autorest/autorest"
+	"github.com/Azure/go-autorest/autorest/azure"
+)
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See NOTICE.txt in the project root for license information.
+
+type RetrieveDefaultDefinitionOperationResponse struct {
+	HttpResponse *http.Response
+	Model        *Function
+}
+
+// RetrieveDefaultDefinition ...
+func (c FunctionsClient) RetrieveDefaultDefinition(ctx context.Context, id FunctionId, input FunctionRetrieveDefaultDefinitionParameters) (result RetrieveDefaultDefinitionOperationResponse, err error) {
+	req, err := c.preparerForRetrieveDefaultDefinition(ctx, id, input)
+	if err != nil {
+		err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "RetrieveDefaultDefinition", nil, "Failure preparing request")
+		return
+	}
+
+	result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client))
+	if err != nil {
+		err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "RetrieveDefaultDefinition", result.HttpResponse, "Failure sending request")
+		return
+	}
+
+	result, err = c.responderForRetrieveDefaultDefinition(result.HttpResponse)
+	if err != nil {
+		err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "RetrieveDefaultDefinition", result.HttpResponse, "Failure responding to request")
+		return
+	}
+
+	return
+}
+
+// preparerForRetrieveDefaultDefinition prepares the RetrieveDefaultDefinition request.
+func (c FunctionsClient) preparerForRetrieveDefaultDefinition(ctx context.Context, id FunctionId, input FunctionRetrieveDefaultDefinitionParameters) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(fmt.Sprintf("%s/retrieveDefaultDefinition", id.ID())), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForRetrieveDefaultDefinition handles the response to the RetrieveDefaultDefinition request. The method always +// closes the http.Response Body. +func (c FunctionsClient) responderForRetrieveDefaultDefinition(resp *http.Response) (result RetrieveDefaultDefinitionOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_test_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_test_autorest.go new file mode 100644 index 000000000000..60b4169fa011 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_test_autorest.go @@ -0,0 +1,79 @@ +package functions + +import ( + "context" + "fmt" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/hashicorp/go-azure-helpers/polling" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type TestOperationResponse struct { + Poller polling.LongRunningPoller + HttpResponse *http.Response +} + +// Test ... +func (c FunctionsClient) Test(ctx context.Context, id FunctionId, input Function) (result TestOperationResponse, err error) { + req, err := c.preparerForTest(ctx, id, input) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "Test", nil, "Failure preparing request") + return + } + + result, err = c.senderForTest(ctx, req) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "Test", result.HttpResponse, "Failure sending request") + return + } + + return +} + +// TestThenPoll performs Test then polls until it's completed +func (c FunctionsClient) TestThenPoll(ctx context.Context, id FunctionId, input Function) error { + result, err := c.Test(ctx, id, input) + if err != nil { + return fmt.Errorf("performing Test: %+v", err) + } + + if err := result.Poller.PollUntilDone(); err != nil { + return fmt.Errorf("polling after Test: %+v", err) + } + + return nil +} + +// preparerForTest prepares the Test request. 
+func (c FunctionsClient) preparerForTest(ctx context.Context, id FunctionId, input Function) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(fmt.Sprintf("%s/test", id.ID())), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// senderForTest sends the Test request. The method will close the +// http.Response Body if it receives an error. +func (c FunctionsClient) senderForTest(ctx context.Context, req *http.Request) (future TestOperationResponse, err error) { + var resp *http.Response + resp, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + return + } + + future.Poller, err = polling.NewPollerFromResponse(ctx, resp, c.Client, req.Method) + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_update_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_update_autorest.go new file mode 100644 index 000000000000..07b0dd63c19a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/method_update_autorest.go @@ -0,0 +1,98 @@ +package functions + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type UpdateOperationResponse struct { + HttpResponse *http.Response + Model *Function +} + +type UpdateOperationOptions struct { + IfMatch *string +} + +func DefaultUpdateOperationOptions() UpdateOperationOptions { + return UpdateOperationOptions{} +} + +func (o UpdateOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + if o.IfMatch != nil { + out["If-Match"] = *o.IfMatch + } + + return out +} + +func (o UpdateOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +// Update ... +func (c FunctionsClient) Update(ctx context.Context, id FunctionId, input Function, options UpdateOperationOptions) (result UpdateOperationResponse, err error) { + req, err := c.preparerForUpdate(ctx, id, input, options) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "Update", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "Update", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForUpdate(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "functions.FunctionsClient", "Update", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForUpdate prepares the Update request. 
+func (c FunctionsClient) preparerForUpdate(ctx context.Context, id FunctionId, input Function, options UpdateOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(id.ID()), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForUpdate handles the response to the Update request. The method always +// closes the http.Response Body. +func (c FunctionsClient) responderForUpdate(resp *http.Response) (result UpdateOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_aggregatefunctionproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_aggregatefunctionproperties.go new file mode 100644 index 000000000000..86a1998c8f9e --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_aggregatefunctionproperties.go @@ -0,0 +1,42 @@ +package functions + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ FunctionProperties = AggregateFunctionProperties{} + +type AggregateFunctionProperties struct { + + // Fields inherited from FunctionProperties + Etag *string `json:"etag,omitempty"` + Properties *FunctionConfiguration `json:"properties,omitempty"` +} + +var _ json.Marshaler = AggregateFunctionProperties{} + +func (s AggregateFunctionProperties) MarshalJSON() ([]byte, error) { + type wrapper AggregateFunctionProperties + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AggregateFunctionProperties: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AggregateFunctionProperties: %+v", err) + } + decoded["type"] = "Aggregate" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AggregateFunctionProperties: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionbinding.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionbinding.go new file mode 100644 index 000000000000..8581f5be64c3 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionbinding.go @@ -0,0 +1,41 @@ +package functions + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ FunctionBinding = AzureMachineLearningWebServiceFunctionBinding{} + +type AzureMachineLearningWebServiceFunctionBinding struct { + Properties *AzureMachineLearningWebServiceFunctionBindingProperties `json:"properties,omitempty"` + + // Fields inherited from FunctionBinding +} + +var _ json.Marshaler = AzureMachineLearningWebServiceFunctionBinding{} + +func (s AzureMachineLearningWebServiceFunctionBinding) MarshalJSON() ([]byte, error) { + type wrapper AzureMachineLearningWebServiceFunctionBinding + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureMachineLearningWebServiceFunctionBinding: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureMachineLearningWebServiceFunctionBinding: %+v", err) + } + decoded["type"] = "Microsoft.MachineLearning/WebService" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureMachineLearningWebServiceFunctionBinding: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionbindingproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionbindingproperties.go new file mode 100644 index 000000000000..938715b12344 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionbindingproperties.go @@ -0,0 +1,12 @@ +package functions + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type AzureMachineLearningWebServiceFunctionBindingProperties struct { + ApiKey *string `json:"apiKey,omitempty"` + BatchSize *int64 `json:"batchSize,omitempty"` + Endpoint *string `json:"endpoint,omitempty"` + Inputs *AzureMachineLearningWebServiceInputs `json:"inputs,omitempty"` + Outputs *[]AzureMachineLearningWebServiceOutputColumn `json:"outputs,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionbindingretrievalproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionbindingretrievalproperties.go new file mode 100644 index 000000000000..d486a71f96b3 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionbindingretrievalproperties.go @@ -0,0 +1,9 @@ +package functions + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
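The wrapper-marshal pattern above exists solely to inject the `type` discriminator the service expects on polymorphic payloads. A sketch of the resulting JSON; the endpoint value is a placeholder:

```go
endpoint := "https://example.azureml.net/execute"
binding := functions.AzureMachineLearningWebServiceFunctionBinding{
	Properties: &functions.AzureMachineLearningWebServiceFunctionBindingProperties{
		Endpoint: &endpoint,
	},
}

encoded, err := json.Marshal(binding)
if err != nil {
	// handle the error
}
fmt.Println(string(encoded))
// {"properties":{"endpoint":"https://example.azureml.net/execute"},"type":"Microsoft.MachineLearning/WebService"}
```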
+ +type AzureMachineLearningWebServiceFunctionBindingRetrievalProperties struct { + ExecuteEndpoint *string `json:"executeEndpoint,omitempty"` + UdfType *UdfType `json:"udfType,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionretrievedefaultdefinitionparameters.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionretrievedefaultdefinitionparameters.go new file mode 100644 index 000000000000..f64e7b2570ba --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebservicefunctionretrievedefaultdefinitionparameters.go @@ -0,0 +1,41 @@ +package functions + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ FunctionRetrieveDefaultDefinitionParameters = AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters{} + +type AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters struct { + BindingRetrievalProperties *AzureMachineLearningWebServiceFunctionBindingRetrievalProperties `json:"bindingRetrievalProperties,omitempty"` + + // Fields inherited from FunctionRetrieveDefaultDefinitionParameters +} + +var _ json.Marshaler = AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters{} + +func (s AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters) MarshalJSON() ([]byte, error) { + type wrapper AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters: %+v", err) + } + decoded["bindingType"] = "Microsoft.MachineLearning/WebService" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebserviceinputcolumn.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebserviceinputcolumn.go new file mode 100644 index 000000000000..50a2884187ce --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebserviceinputcolumn.go @@ -0,0 +1,10 @@ +package functions + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type AzureMachineLearningWebServiceInputColumn struct { + DataType *string `json:"dataType,omitempty"` + MapTo *int64 `json:"mapTo,omitempty"` + Name *string `json:"name,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebserviceinputs.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebserviceinputs.go new file mode 100644 index 000000000000..86fd7fa0a244 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebserviceinputs.go @@ -0,0 +1,9 @@ +package functions + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type AzureMachineLearningWebServiceInputs struct { + ColumnNames *[]AzureMachineLearningWebServiceInputColumn `json:"columnNames,omitempty"` + Name *string `json:"name,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebserviceoutputcolumn.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebserviceoutputcolumn.go new file mode 100644 index 000000000000..db028f52b90c --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_azuremachinelearningwebserviceoutputcolumn.go @@ -0,0 +1,9 @@ +package functions + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type AzureMachineLearningWebServiceOutputColumn struct { + DataType *string `json:"dataType,omitempty"` + Name *string `json:"name,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_errorresponse.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_errorresponse.go new file mode 100644 index 000000000000..000f9387b50a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_errorresponse.go @@ -0,0 +1,9 @@ +package functions + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ErrorResponse struct { + Code *string `json:"code,omitempty"` + Message *string `json:"message,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_function.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_function.go new file mode 100644 index 000000000000..2abf7cfc9d39 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_function.go @@ -0,0 +1,44 @@ +package functions + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
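
// A hand-written sketch using only the models above (the field values are
// hypothetical): each input column maps a field of the streaming input onto a
// column of the web service's input table via its zero-based MapTo index.
func exampleWebServiceInputs() AzureMachineLearningWebServiceInputs {
	tableName := "input1"
	colName := "temperature"
	dataType := "double"
	mapTo := int64(0)
	return AzureMachineLearningWebServiceInputs{
		Name: &tableName,
		ColumnNames: &[]AzureMachineLearningWebServiceInputColumn{
			{Name: &colName, DataType: &dataType, MapTo: &mapTo},
		},
	}
}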
+ +type Function struct { + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Properties FunctionProperties `json:"properties"` + Type *string `json:"type,omitempty"` +} + +var _ json.Unmarshaler = &Function{} + +func (s *Function) UnmarshalJSON(bytes []byte) error { + type alias Function + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into Function: %+v", err) + } + + s.Id = decoded.Id + s.Name = decoded.Name + s.Type = decoded.Type + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling Function into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["properties"]; ok { + impl, err := unmarshalFunctionPropertiesImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Properties' for 'Function': %+v", err) + } + s.Properties = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionbinding.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionbinding.go new file mode 100644 index 000000000000..9b174ef02e68 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionbinding.go @@ -0,0 +1,56 @@ +package functions + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type FunctionBinding interface { +} + +func unmarshalFunctionBindingImplementation(input []byte) (FunctionBinding, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling FunctionBinding into map[string]interface: %+v", err) + } + + value, ok := temp["type"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Microsoft.MachineLearning/WebService") { + var out AzureMachineLearningWebServiceFunctionBinding + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureMachineLearningWebServiceFunctionBinding: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.StreamAnalytics/JavascriptUdf") { + var out JavaScriptFunctionBinding + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into JavaScriptFunctionBinding: %+v", err) + } + return out, nil + } + + type RawFunctionBindingImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawFunctionBindingImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionconfiguration.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionconfiguration.go new file mode 100644 index 000000000000..d6bfb45f5224 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionconfiguration.go @@ -0,0 +1,42 @@ +package functions + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
See NOTICE.txt in the project root for license information. + +type FunctionConfiguration struct { + Binding FunctionBinding `json:"binding"` + Inputs *[]FunctionInput `json:"inputs,omitempty"` + Output *FunctionOutput `json:"output,omitempty"` +} + +var _ json.Unmarshaler = &FunctionConfiguration{} + +func (s *FunctionConfiguration) UnmarshalJSON(bytes []byte) error { + type alias FunctionConfiguration + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into FunctionConfiguration: %+v", err) + } + + s.Inputs = decoded.Inputs + s.Output = decoded.Output + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling FunctionConfiguration into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["binding"]; ok { + impl, err := unmarshalFunctionBindingImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Binding' for 'FunctionConfiguration': %+v", err) + } + s.Binding = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functioninput.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functioninput.go new file mode 100644 index 000000000000..d62d6932e853 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functioninput.go @@ -0,0 +1,9 @@ +package functions + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type FunctionInput struct { + DataType *string `json:"dataType,omitempty"` + IsConfigurationParameter *bool `json:"isConfigurationParameter,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionoutput.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionoutput.go new file mode 100644 index 000000000000..858a36775553 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionoutput.go @@ -0,0 +1,8 @@ +package functions + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type FunctionOutput struct { + DataType *string `json:"dataType,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionproperties.go new file mode 100644 index 000000000000..f5dd40940012 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionproperties.go @@ -0,0 +1,56 @@ +package functions + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
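
// A hand-written sketch of the two-pass decode above: the alias type strips
// the custom UnmarshalJSON so the plain fields decode normally, then the raw
// "binding" payload is re-read and dispatched on its "type" discriminator.
func exampleDecodeFunctionConfiguration() error {
	raw := []byte(`{"binding": {"type": "Microsoft.StreamAnalytics/JavascriptUdf"}}`)
	var cfg FunctionConfiguration
	if err := json.Unmarshal(raw, &cfg); err != nil {
		return err
	}
	// cfg.Binding now holds the concrete implementation, not a raw map.
	if _, ok := cfg.Binding.(JavaScriptFunctionBinding); !ok {
		return fmt.Errorf("expected JavaScriptFunctionBinding, got %T", cfg.Binding)
	}
	return nil
}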
+ +type FunctionProperties interface { +} + +func unmarshalFunctionPropertiesImplementation(input []byte) (FunctionProperties, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling FunctionProperties into map[string]interface: %+v", err) + } + + value, ok := temp["type"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Aggregate") { + var out AggregateFunctionProperties + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AggregateFunctionProperties: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Scalar") { + var out ScalarFunctionProperties + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into ScalarFunctionProperties: %+v", err) + } + return out, nil + } + + type RawFunctionPropertiesImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawFunctionPropertiesImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionretrievedefaultdefinitionparameters.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionretrievedefaultdefinitionparameters.go new file mode 100644 index 000000000000..fbba8811f7bd --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_functionretrievedefaultdefinitionparameters.go @@ -0,0 +1,56 @@ +package functions + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
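
// A hand-written sketch of the dispatch above: a recognised "type" yields the
// matching concrete struct, while anything else is preserved best-effort in
// the raw fallback rather than failing, so unknown future values still decode.
func exampleDispatchFunctionProperties() {
	props, _ := unmarshalFunctionPropertiesImplementation([]byte(`{"type": "Scalar", "etag": "abc"}`))
	if scalar, ok := props.(ScalarFunctionProperties); ok {
		_ = scalar.Etag // *scalar.Etag == "abc"
	}
}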
+ +type FunctionRetrieveDefaultDefinitionParameters interface { +} + +func unmarshalFunctionRetrieveDefaultDefinitionParametersImplementation(input []byte) (FunctionRetrieveDefaultDefinitionParameters, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling FunctionRetrieveDefaultDefinitionParameters into map[string]interface: %+v", err) + } + + value, ok := temp["bindingType"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Microsoft.MachineLearning/WebService") { + var out AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.StreamAnalytics/JavascriptUdf") { + var out JavaScriptFunctionRetrieveDefaultDefinitionParameters + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into JavaScriptFunctionRetrieveDefaultDefinitionParameters: %+v", err) + } + return out, nil + } + + type RawFunctionRetrieveDefaultDefinitionParametersImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawFunctionRetrieveDefaultDefinitionParametersImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionbinding.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionbinding.go new file mode 100644 index 000000000000..8ba9643dac73 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionbinding.go @@ -0,0 +1,41 @@ +package functions + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
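
// A hand-written sketch: note that, unlike FunctionBinding (dispatched on
// "type"), retrieve-default-definition payloads carry their discriminator in
// "bindingType", matching what the marshalers in this package write out.
func exampleDispatchRetrieveParameters() {
	raw := []byte(`{"bindingType": "Microsoft.StreamAnalytics/JavascriptUdf"}`)
	params, _ := unmarshalFunctionRetrieveDefaultDefinitionParametersImplementation(raw)
	if _, ok := params.(JavaScriptFunctionRetrieveDefaultDefinitionParameters); ok {
		// the concrete JavaScript parameter type was recovered
	}
}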
+ +var _ FunctionBinding = JavaScriptFunctionBinding{} + +type JavaScriptFunctionBinding struct { + Properties *JavaScriptFunctionBindingProperties `json:"properties,omitempty"` + + // Fields inherited from FunctionBinding +} + +var _ json.Marshaler = JavaScriptFunctionBinding{} + +func (s JavaScriptFunctionBinding) MarshalJSON() ([]byte, error) { + type wrapper JavaScriptFunctionBinding + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling JavaScriptFunctionBinding: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling JavaScriptFunctionBinding: %+v", err) + } + decoded["type"] = "Microsoft.StreamAnalytics/JavascriptUdf" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling JavaScriptFunctionBinding: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionbindingproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionbindingproperties.go new file mode 100644 index 000000000000..cb03787f70b2 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionbindingproperties.go @@ -0,0 +1,8 @@ +package functions + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type JavaScriptFunctionBindingProperties struct { + Script *string `json:"script,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionbindingretrievalproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionbindingretrievalproperties.go new file mode 100644 index 000000000000..e300eb2b2e62 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionbindingretrievalproperties.go @@ -0,0 +1,9 @@ +package functions + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type JavaScriptFunctionBindingRetrievalProperties struct { + Script *string `json:"script,omitempty"` + UdfType *UdfType `json:"udfType,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionretrievedefaultdefinitionparameters.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionretrievedefaultdefinitionparameters.go new file mode 100644 index 000000000000..e1c65cb6e12e --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_javascriptfunctionretrievedefaultdefinitionparameters.go @@ -0,0 +1,41 @@ +package functions + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ FunctionRetrieveDefaultDefinitionParameters = JavaScriptFunctionRetrieveDefaultDefinitionParameters{} + +type JavaScriptFunctionRetrieveDefaultDefinitionParameters struct { + BindingRetrievalProperties *JavaScriptFunctionBindingRetrievalProperties `json:"bindingRetrievalProperties,omitempty"` + + // Fields inherited from FunctionRetrieveDefaultDefinitionParameters +} + +var _ json.Marshaler = JavaScriptFunctionRetrieveDefaultDefinitionParameters{} + +func (s JavaScriptFunctionRetrieveDefaultDefinitionParameters) MarshalJSON() ([]byte, error) { + type wrapper JavaScriptFunctionRetrieveDefaultDefinitionParameters + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling JavaScriptFunctionRetrieveDefaultDefinitionParameters: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling JavaScriptFunctionRetrieveDefaultDefinitionParameters: %+v", err) + } + decoded["bindingType"] = "Microsoft.StreamAnalytics/JavascriptUdf" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling JavaScriptFunctionRetrieveDefaultDefinitionParameters: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_resourceteststatus.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_resourceteststatus.go new file mode 100644 index 000000000000..c4b7bf5cba23 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_resourceteststatus.go @@ -0,0 +1,9 @@ +package functions + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ResourceTestStatus struct { + Error *ErrorResponse `json:"error,omitempty"` + Status *string `json:"status,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_scalarfunctionproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_scalarfunctionproperties.go new file mode 100644 index 000000000000..bf0d985730be --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/model_scalarfunctionproperties.go @@ -0,0 +1,42 @@ +package functions + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
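
// A hand-written sketch assuming a trivial UDF body: composing the JavaScript
// models above into a binding; its MarshalJSON stamps
// "Microsoft.StreamAnalytics/JavascriptUdf" onto the wire format.
func exampleJavaScriptBinding() FunctionBinding {
	script := "function (x) { return x; }" // hypothetical UDF body
	return JavaScriptFunctionBinding{
		Properties: &JavaScriptFunctionBindingProperties{
			Script: &script,
		},
	}
}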
+ +var _ FunctionProperties = ScalarFunctionProperties{} + +type ScalarFunctionProperties struct { + + // Fields inherited from FunctionProperties + Etag *string `json:"etag,omitempty"` + Properties *FunctionConfiguration `json:"properties,omitempty"` +} + +var _ json.Marshaler = ScalarFunctionProperties{} + +func (s ScalarFunctionProperties) MarshalJSON() ([]byte, error) { + type wrapper ScalarFunctionProperties + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling ScalarFunctionProperties: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling ScalarFunctionProperties: %+v", err) + } + decoded["type"] = "Scalar" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling ScalarFunctionProperties: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/predicates.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/predicates.go new file mode 100644 index 000000000000..9b9f2f4c0dfd --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/predicates.go @@ -0,0 +1,24 @@ +package functions + +type FunctionOperationPredicate struct { + Id *string + Name *string + Type *string +} + +func (p FunctionOperationPredicate) Matches(input Function) bool { + + if p.Id != nil && (input.Id == nil || *p.Id != *input.Id) { + return false + } + + if p.Name != nil && (input.Name == nil || *p.Name != *input.Name) { + return false + } + + if p.Type != nil && (input.Type == nil || *p.Type != *input.Type) { + return false + } + + return true +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/version.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/version.go new file mode 100644 index 000000000000..072d1a764e5f --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/functions/version.go @@ -0,0 +1,12 @@ +package functions + +import "fmt" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +const defaultApiVersion = "2020-03-01" + +func userAgent() string { + return fmt.Sprintf("hashicorp/go-azure-sdk/functions/%s", defaultApiVersion) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/README.md b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/README.md new file mode 100644 index 000000000000..0fa04f027e9d --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/README.md @@ -0,0 +1,128 @@ + +## `github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs` Documentation + +The `inputs` SDK allows for interaction with the Azure Resource Manager Service `streamanalytics` (API Version `2020-03-01`). + +This readme covers example usages, but further information on [using this SDK can be found in the project root](https://github.com/hashicorp/go-azure-sdk/tree/main/docs).
+ +### Import Path + +```go +import "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" +``` + + +### Client Initialization + +```go +client := inputs.NewInputsClientWithBaseURI("https://management.azure.com") +client.Client.Authorizer = authorizer +``` + + +### Example Usage: `InputsClient.CreateOrReplace` + +```go +ctx := context.TODO() +id := inputs.NewInputID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "inputValue") + +payload := inputs.Input{ + // ... +} + + +read, err := client.CreateOrReplace(ctx, id, payload, inputs.DefaultCreateOrReplaceOperationOptions()) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` + + +### Example Usage: `InputsClient.Delete` + +```go +ctx := context.TODO() +id := inputs.NewInputID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "inputValue") + +read, err := client.Delete(ctx, id) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` + + +### Example Usage: `InputsClient.Get` + +```go +ctx := context.TODO() +id := inputs.NewInputID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "inputValue") + +read, err := client.Get(ctx, id) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` + + +### Example Usage: `InputsClient.ListByStreamingJob` + +```go +ctx := context.TODO() +id := inputs.NewStreamingJobID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue") + +// alternatively `client.ListByStreamingJob(ctx, id, inputs.DefaultListByStreamingJobOperationOptions())` can be used to do batched pagination +items, err := client.ListByStreamingJobComplete(ctx, id, inputs.DefaultListByStreamingJobOperationOptions()) +if err != nil { + // handle the error +} +for _, item := range items { + // do something +} +``` + + +### Example Usage: `InputsClient.Test` + +```go +ctx := context.TODO() +id := inputs.NewInputID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "inputValue") + +payload := inputs.Input{ + // ... +} + + +if err := client.TestThenPoll(ctx, id, payload); err != nil { + // handle the error +} +``` + + +### Example Usage: `InputsClient.Update` + +```go +ctx := context.TODO() +id := inputs.NewInputID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "inputValue") + +payload := inputs.Input{ + // ... +} + + +read, err := client.Update(ctx, id, payload, inputs.DefaultUpdateOperationOptions()) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/client.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/client.go new file mode 100644 index 000000000000..859c2802c38a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/client.go @@ -0,0 +1,18 @@ +package inputs + +import "github.com/Azure/go-autorest/autorest" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type InputsClient struct { + Client autorest.Client + baseUri string +} + +func NewInputsClientWithBaseURI(endpoint string) InputsClient { + return InputsClient{ + Client: autorest.NewClientWithUserAgent(userAgent()), + baseUri: endpoint, + } +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/constants.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/constants.go new file mode 100644 index 000000000000..6ba7df2d4180 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/constants.go @@ -0,0 +1,186 @@ +package inputs + +import "strings" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type AuthenticationMode string + +const ( + AuthenticationModeConnectionString AuthenticationMode = "ConnectionString" + AuthenticationModeMsi AuthenticationMode = "Msi" + AuthenticationModeUserToken AuthenticationMode = "UserToken" +) + +func PossibleValuesForAuthenticationMode() []string { + return []string{ + string(AuthenticationModeConnectionString), + string(AuthenticationModeMsi), + string(AuthenticationModeUserToken), + } +} + +func parseAuthenticationMode(input string) (*AuthenticationMode, error) { + vals := map[string]AuthenticationMode{ + "connectionstring": AuthenticationModeConnectionString, + "msi": AuthenticationModeMsi, + "usertoken": AuthenticationModeUserToken, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := AuthenticationMode(input) + return &out, nil +} + +type CompressionType string + +const ( + CompressionTypeDeflate CompressionType = "Deflate" + CompressionTypeGZip CompressionType = "GZip" + CompressionTypeNone CompressionType = "None" +) + +func PossibleValuesForCompressionType() []string { + return []string{ + string(CompressionTypeDeflate), + string(CompressionTypeGZip), + string(CompressionTypeNone), + } +} + +func parseCompressionType(input string) (*CompressionType, error) { + vals := map[string]CompressionType{ + "deflate": CompressionTypeDeflate, + "gzip": CompressionTypeGZip, + "none": CompressionTypeNone, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := CompressionType(input) + return &out, nil +} + +type Encoding string + +const ( + EncodingUTFEight Encoding = "UTF8" +) + +func PossibleValuesForEncoding() []string { + return []string{ + string(EncodingUTFEight), + } +} + +func parseEncoding(input string) (*Encoding, error) { + vals := map[string]Encoding{ + "utf8": EncodingUTFEight, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := Encoding(input) + return &out, nil +} + +type EventSerializationType string + +const ( + EventSerializationTypeAvro EventSerializationType = "Avro" + EventSerializationTypeCsv EventSerializationType = "Csv" + EventSerializationTypeJson EventSerializationType = "Json" + EventSerializationTypeParquet EventSerializationType = "Parquet" +) + +func PossibleValuesForEventSerializationType() []string { + return []string{ + string(EventSerializationTypeAvro), + string(EventSerializationTypeCsv), + string(EventSerializationTypeJson), + string(EventSerializationTypeParquet), + } +} + 
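
// A hand-written sketch of how the parse helpers in this file behave: matching
// is case-insensitive, and an unrecognised value is passed through best-effort
// rather than rejected, so values added to the API later still round-trip.
func exampleParseEnums() {
	mode, _ := parseAuthenticationMode("MSI")  // *mode == AuthenticationModeMsi
	novel, _ := parseCompressionType("Brotli") // *novel == CompressionType("Brotli"), no error
	_, _ = mode, novel
}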
+func parseEventSerializationType(input string) (*EventSerializationType, error) { + vals := map[string]EventSerializationType{ + "avro": EventSerializationTypeAvro, + "csv": EventSerializationTypeCsv, + "json": EventSerializationTypeJson, + "parquet": EventSerializationTypeParquet, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := EventSerializationType(input) + return &out, nil +} + +type JsonOutputSerializationFormat string + +const ( + JsonOutputSerializationFormatArray JsonOutputSerializationFormat = "Array" + JsonOutputSerializationFormatLineSeparated JsonOutputSerializationFormat = "LineSeparated" +) + +func PossibleValuesForJsonOutputSerializationFormat() []string { + return []string{ + string(JsonOutputSerializationFormatArray), + string(JsonOutputSerializationFormatLineSeparated), + } +} + +func parseJsonOutputSerializationFormat(input string) (*JsonOutputSerializationFormat, error) { + vals := map[string]JsonOutputSerializationFormat{ + "array": JsonOutputSerializationFormatArray, + "lineseparated": JsonOutputSerializationFormatLineSeparated, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := JsonOutputSerializationFormat(input) + return &out, nil +} + +type RefreshType string + +const ( + RefreshTypeRefreshPeriodicallyWithDelta RefreshType = "RefreshPeriodicallyWithDelta" + RefreshTypeRefreshPeriodicallyWithFull RefreshType = "RefreshPeriodicallyWithFull" + RefreshTypeStatic RefreshType = "Static" +) + +func PossibleValuesForRefreshType() []string { + return []string{ + string(RefreshTypeRefreshPeriodicallyWithDelta), + string(RefreshTypeRefreshPeriodicallyWithFull), + string(RefreshTypeStatic), + } +} + +func parseRefreshType(input string) (*RefreshType, error) { + vals := map[string]RefreshType{ + "refreshperiodicallywithdelta": RefreshTypeRefreshPeriodicallyWithDelta, + "refreshperiodicallywithfull": RefreshTypeRefreshPeriodicallyWithFull, + "static": RefreshTypeStatic, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := RefreshType(input) + return &out, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/id_input.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/id_input.go new file mode 100644 index 000000000000..fef184ab6cd9 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/id_input.go @@ -0,0 +1,137 @@ +package inputs + +import ( + "fmt" + "strings" + + "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" +) + +var _ resourceids.ResourceId = InputId{} + +// InputId is a struct representing the Resource ID for a Input +type InputId struct { + SubscriptionId string + ResourceGroupName string + JobName string + InputName string +} + +// NewInputID returns a new InputId struct +func NewInputID(subscriptionId string, resourceGroupName string, jobName string, inputName string) InputId { + return InputId{ + SubscriptionId: subscriptionId, + ResourceGroupName: resourceGroupName, + JobName: jobName, + InputName: inputName, + } +} + +// ParseInputID parses 'input' into a InputId +func ParseInputID(input string) (*InputId, error) { + parser := resourceids.NewParserFromResourceIdType(InputId{}) + parsed, err 
:= parser.Parse(input, false) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := InputId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + if id.InputName, ok = parsed.Parsed["inputName"]; !ok { + return nil, fmt.Errorf("the segment 'inputName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ParseInputIDInsensitively parses 'input' case-insensitively into a InputId +// note: this method should only be used for API response data and not user input +func ParseInputIDInsensitively(input string) (*InputId, error) { + parser := resourceids.NewParserFromResourceIdType(InputId{}) + parsed, err := parser.Parse(input, true) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := InputId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + if id.InputName, ok = parsed.Parsed["inputName"]; !ok { + return nil, fmt.Errorf("the segment 'inputName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ValidateInputID checks that 'input' can be parsed as a Input ID +func ValidateInputID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := ParseInputID(v); err != nil { + errors = append(errors, err) + } + + return +} + +// ID returns the formatted Input ID +func (id InputId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingJobs/%s/inputs/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.JobName, id.InputName) +} + +// Segments returns a slice of Resource ID Segments which comprise this Input ID +func (id InputId) Segments() []resourceids.Segment { + return []resourceids.Segment{ + resourceids.StaticSegment("staticSubscriptions", "subscriptions", "subscriptions"), + resourceids.SubscriptionIdSegment("subscriptionId", "12345678-1234-9876-4563-123456789012"), + resourceids.StaticSegment("staticResourceGroups", "resourceGroups", "resourceGroups"), + resourceids.ResourceGroupSegment("resourceGroupName", "example-resource-group"), + resourceids.StaticSegment("staticProviders", "providers", "providers"), + resourceids.ResourceProviderSegment("staticMicrosoftStreamAnalytics", "Microsoft.StreamAnalytics", "Microsoft.StreamAnalytics"), + resourceids.StaticSegment("staticStreamingJobs", "streamingJobs", "streamingJobs"), + resourceids.UserSpecifiedSegment("jobName", "jobValue"), + 
resourceids.StaticSegment("staticInputs", "inputs", "inputs"), + resourceids.UserSpecifiedSegment("inputName", "inputValue"), + } +} + +// String returns a human-readable description of this Input ID +func (id InputId) String() string { + components := []string{ + fmt.Sprintf("Subscription: %q", id.SubscriptionId), + fmt.Sprintf("Resource Group Name: %q", id.ResourceGroupName), + fmt.Sprintf("Job Name: %q", id.JobName), + fmt.Sprintf("Input Name: %q", id.InputName), + } + return fmt.Sprintf("Input (%s)", strings.Join(components, "\n")) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/id_streamingjob.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/id_streamingjob.go new file mode 100644 index 000000000000..0d727b6a32fb --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/id_streamingjob.go @@ -0,0 +1,124 @@ +package inputs + +import ( + "fmt" + "strings" + + "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" +) + +var _ resourceids.ResourceId = StreamingJobId{} + +// StreamingJobId is a struct representing the Resource ID for a Streaming Job +type StreamingJobId struct { + SubscriptionId string + ResourceGroupName string + JobName string +} + +// NewStreamingJobID returns a new StreamingJobId struct +func NewStreamingJobID(subscriptionId string, resourceGroupName string, jobName string) StreamingJobId { + return StreamingJobId{ + SubscriptionId: subscriptionId, + ResourceGroupName: resourceGroupName, + JobName: jobName, + } +} + +// ParseStreamingJobID parses 'input' into a StreamingJobId +func ParseStreamingJobID(input string) (*StreamingJobId, error) { + parser := resourceids.NewParserFromResourceIdType(StreamingJobId{}) + parsed, err := parser.Parse(input, false) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := StreamingJobId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ParseStreamingJobIDInsensitively parses 'input' case-insensitively into a StreamingJobId +// note: this method should only be used for API response data and not user input +func ParseStreamingJobIDInsensitively(input string) (*StreamingJobId, error) { + parser := resourceids.NewParserFromResourceIdType(StreamingJobId{}) + parsed, err := parser.Parse(input, true) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := StreamingJobId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + return &id, nil +} + 
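
// A hand-written sketch (the GUID and names mirror this package's docs): the
// typed IDs round-trip between struct form and the ARM resource ID string,
// with ParseInputID enforcing the exact segment casing shown in Segments().
func exampleInputIdRoundTrip() error {
	id := NewInputID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "inputValue")
	parsed, err := ParseInputID(id.ID())
	if err != nil {
		return err
	}
	if parsed.InputName != id.InputName {
		return fmt.Errorf("round-trip mismatch: %q != %q", parsed.InputName, id.InputName)
	}
	return nil
}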
+// ValidateStreamingJobID checks that 'input' can be parsed as a Streaming Job ID +func ValidateStreamingJobID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := ParseStreamingJobID(v); err != nil { + errors = append(errors, err) + } + + return +} + +// ID returns the formatted Streaming Job ID +func (id StreamingJobId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingJobs/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.JobName) +} + +// Segments returns a slice of Resource ID Segments which comprise this Streaming Job ID +func (id StreamingJobId) Segments() []resourceids.Segment { + return []resourceids.Segment{ + resourceids.StaticSegment("staticSubscriptions", "subscriptions", "subscriptions"), + resourceids.SubscriptionIdSegment("subscriptionId", "12345678-1234-9876-4563-123456789012"), + resourceids.StaticSegment("staticResourceGroups", "resourceGroups", "resourceGroups"), + resourceids.ResourceGroupSegment("resourceGroupName", "example-resource-group"), + resourceids.StaticSegment("staticProviders", "providers", "providers"), + resourceids.ResourceProviderSegment("staticMicrosoftStreamAnalytics", "Microsoft.StreamAnalytics", "Microsoft.StreamAnalytics"), + resourceids.StaticSegment("staticStreamingJobs", "streamingJobs", "streamingJobs"), + resourceids.UserSpecifiedSegment("jobName", "jobValue"), + } +} + +// String returns a human-readable description of this Streaming Job ID +func (id StreamingJobId) String() string { + components := []string{ + fmt.Sprintf("Subscription: %q", id.SubscriptionId), + fmt.Sprintf("Resource Group Name: %q", id.ResourceGroupName), + fmt.Sprintf("Job Name: %q", id.JobName), + } + return fmt.Sprintf("Streaming Job (%s)", strings.Join(components, "\n")) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_createorreplace_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_createorreplace_autorest.go new file mode 100644 index 000000000000..ea3a65c34267 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_createorreplace_autorest.go @@ -0,0 +1,103 @@ +package inputs + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type CreateOrReplaceOperationResponse struct { + HttpResponse *http.Response + Model *Input +} + +type CreateOrReplaceOperationOptions struct { + IfMatch *string + IfNoneMatch *string +} + +func DefaultCreateOrReplaceOperationOptions() CreateOrReplaceOperationOptions { + return CreateOrReplaceOperationOptions{} +} + +func (o CreateOrReplaceOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + if o.IfMatch != nil { + out["If-Match"] = *o.IfMatch + } + + if o.IfNoneMatch != nil { + out["If-None-Match"] = *o.IfNoneMatch + } + + return out +} + +func (o CreateOrReplaceOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +// CreateOrReplace ... 
+func (c InputsClient) CreateOrReplace(ctx context.Context, id InputId, input Input, options CreateOrReplaceOperationOptions) (result CreateOrReplaceOperationResponse, err error) { + req, err := c.preparerForCreateOrReplace(ctx, id, input, options) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "CreateOrReplace", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "CreateOrReplace", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForCreateOrReplace(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "CreateOrReplace", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForCreateOrReplace prepares the CreateOrReplace request. +func (c InputsClient) preparerForCreateOrReplace(ctx context.Context, id InputId, input Input, options CreateOrReplaceOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(id.ID()), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForCreateOrReplace handles the response to the CreateOrReplace request. The method always +// closes the http.Response Body. +func (c InputsClient) responderForCreateOrReplace(resp *http.Response) (result CreateOrReplaceOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusCreated, http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_delete_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_delete_autorest.go new file mode 100644 index 000000000000..0df6db9b503a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_delete_autorest.go @@ -0,0 +1,66 @@ +package inputs + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type DeleteOperationResponse struct { + HttpResponse *http.Response +} + +// Delete ... 
+func (c InputsClient) Delete(ctx context.Context, id InputId) (result DeleteOperationResponse, err error) { + req, err := c.preparerForDelete(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "Delete", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "Delete", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForDelete(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "Delete", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForDelete prepares the Delete request. +func (c InputsClient) preparerForDelete(ctx context.Context, id InputId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsDelete(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(id.ID()), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForDelete handles the response to the Delete request. The method always +// closes the http.Response Body. +func (c InputsClient) responderForDelete(resp *http.Response) (result DeleteOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusNoContent, http.StatusOK), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_get_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_get_autorest.go new file mode 100644 index 000000000000..bb6e886dfcba --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_get_autorest.go @@ -0,0 +1,68 @@ +package inputs + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type GetOperationResponse struct { + HttpResponse *http.Response + Model *Input +} + +// Get ... +func (c InputsClient) Get(ctx context.Context, id InputId) (result GetOperationResponse, err error) { + req, err := c.preparerForGet(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "Get", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "Get", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForGet(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "Get", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForGet prepares the Get request. 
+func (c InputsClient) preparerForGet(ctx context.Context, id InputId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(id.ID()), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForGet handles the response to the Get request. The method always +// closes the http.Response Body. +func (c InputsClient) responderForGet(resp *http.Response) (result GetOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_listbystreamingjob_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_listbystreamingjob_autorest.go new file mode 100644 index 000000000000..a86ec9237867 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_listbystreamingjob_autorest.go @@ -0,0 +1,215 @@ +package inputs + +import ( + "context" + "fmt" + "net/http" + "net/url" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ListByStreamingJobOperationResponse struct { + HttpResponse *http.Response + Model *[]Input + + nextLink *string + nextPageFunc func(ctx context.Context, nextLink string) (ListByStreamingJobOperationResponse, error) +} + +type ListByStreamingJobCompleteResult struct { + Items []Input +} + +func (r ListByStreamingJobOperationResponse) HasMore() bool { + return r.nextLink != nil +} + +func (r ListByStreamingJobOperationResponse) LoadMore(ctx context.Context) (resp ListByStreamingJobOperationResponse, err error) { + if !r.HasMore() { + err = fmt.Errorf("no more pages returned") + return + } + return r.nextPageFunc(ctx, *r.nextLink) +} + +type ListByStreamingJobOperationOptions struct { + Select *string +} + +func DefaultListByStreamingJobOperationOptions() ListByStreamingJobOperationOptions { + return ListByStreamingJobOperationOptions{} +} + +func (o ListByStreamingJobOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +func (o ListByStreamingJobOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + if o.Select != nil { + out["$select"] = *o.Select + } + + return out +} + +// ListByStreamingJob ... 
+func (c InputsClient) ListByStreamingJob(ctx context.Context, id StreamingJobId, options ListByStreamingJobOperationOptions) (resp ListByStreamingJobOperationResponse, err error) { + req, err := c.preparerForListByStreamingJob(ctx, id, options) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "ListByStreamingJob", nil, "Failure preparing request") + return + } + + resp.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "ListByStreamingJob", resp.HttpResponse, "Failure sending request") + return + } + + resp, err = c.responderForListByStreamingJob(resp.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "ListByStreamingJob", resp.HttpResponse, "Failure responding to request") + return + } + return +} + +// preparerForListByStreamingJob prepares the ListByStreamingJob request. +func (c InputsClient) preparerForListByStreamingJob(ctx context.Context, id StreamingJobId, options ListByStreamingJobOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(fmt.Sprintf("%s/inputs", id.ID())), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// preparerForListByStreamingJobWithNextLink prepares the ListByStreamingJob request with the given nextLink token. +func (c InputsClient) preparerForListByStreamingJobWithNextLink(ctx context.Context, nextLink string) (*http.Request, error) { + uri, err := url.Parse(nextLink) + if err != nil { + return nil, fmt.Errorf("parsing nextLink %q: %+v", nextLink, err) + } + queryParameters := map[string]interface{}{} + for k, v := range uri.Query() { + if len(v) == 0 { + continue + } + val := v[0] + val = autorest.Encode("query", val) + queryParameters[k] = val + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(uri.Path), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForListByStreamingJob handles the response to the ListByStreamingJob request. The method always +// closes the http.Response Body. 
+func (c InputsClient) responderForListByStreamingJob(resp *http.Response) (result ListByStreamingJobOperationResponse, err error) {
+	type page struct {
+		Values   []Input `json:"value"`
+		NextLink *string `json:"nextLink"`
+	}
+	var respObj page
+	err = autorest.Respond(
+		resp,
+		azure.WithErrorUnlessStatusCode(http.StatusOK),
+		autorest.ByUnmarshallingJSON(&respObj),
+		autorest.ByClosing())
+	result.HttpResponse = resp
+	result.Model = &respObj.Values
+	result.nextLink = respObj.NextLink
+	if respObj.NextLink != nil {
+		result.nextPageFunc = func(ctx context.Context, nextLink string) (result ListByStreamingJobOperationResponse, err error) {
+			req, err := c.preparerForListByStreamingJobWithNextLink(ctx, nextLink)
+			if err != nil {
+				err = autorest.NewErrorWithError(err, "inputs.InputsClient", "ListByStreamingJob", nil, "Failure preparing request")
+				return
+			}
+
+			result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client))
+			if err != nil {
+				err = autorest.NewErrorWithError(err, "inputs.InputsClient", "ListByStreamingJob", result.HttpResponse, "Failure sending request")
+				return
+			}
+
+			result, err = c.responderForListByStreamingJob(result.HttpResponse)
+			if err != nil {
+				err = autorest.NewErrorWithError(err, "inputs.InputsClient", "ListByStreamingJob", result.HttpResponse, "Failure responding to request")
+				return
+			}
+
+			return
+		}
+	}
+	return
+}
+
+// ListByStreamingJobComplete retrieves all of the results into a single object
+func (c InputsClient) ListByStreamingJobComplete(ctx context.Context, id StreamingJobId, options ListByStreamingJobOperationOptions) (ListByStreamingJobCompleteResult, error) {
+	return c.ListByStreamingJobCompleteMatchingPredicate(ctx, id, options, InputOperationPredicate{})
+}
+
+// ListByStreamingJobCompleteMatchingPredicate retrieves all of the results and then applies the predicate
+func (c InputsClient) ListByStreamingJobCompleteMatchingPredicate(ctx context.Context, id StreamingJobId, options ListByStreamingJobOperationOptions, predicate InputOperationPredicate) (resp ListByStreamingJobCompleteResult, err error) {
+	items := make([]Input, 0)
+
+	page, err := c.ListByStreamingJob(ctx, id, options)
+	if err != nil {
+		err = fmt.Errorf("loading the initial page: %+v", err)
+		return
+	}
+	if page.Model != nil {
+		for _, v := range *page.Model {
+			if predicate.Matches(v) {
+				items = append(items, v)
+			}
+		}
+	}
+
+	for page.HasMore() {
+		page, err = page.LoadMore(ctx)
+		if err != nil {
+			err = fmt.Errorf("loading the next page: %+v", err)
+			return
+		}
+
+		if page.Model != nil {
+			for _, v := range *page.Model {
+				if predicate.Matches(v) {
+					items = append(items, v)
+				}
+			}
+		}
+	}
+
+	out := ListByStreamingJobCompleteResult{
+		Items: items,
+	}
+	return out, nil
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_test_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_test_autorest.go new file mode 100644 index 000000000000..14eb744fd6bb --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_test_autorest.go @@ -0,0 +1,79 @@ +package inputs
+
+import (
+	"context"
+	"fmt"
+	"net/http"
+
+	"github.com/Azure/go-autorest/autorest"
+	"github.com/Azure/go-autorest/autorest/azure"
+	"github.com/hashicorp/go-azure-helpers/polling"
+)
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
See NOTICE.txt in the project root for license information. + +type TestOperationResponse struct { + Poller polling.LongRunningPoller + HttpResponse *http.Response +} + +// Test ... +func (c InputsClient) Test(ctx context.Context, id InputId, input Input) (result TestOperationResponse, err error) { + req, err := c.preparerForTest(ctx, id, input) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "Test", nil, "Failure preparing request") + return + } + + result, err = c.senderForTest(ctx, req) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "Test", result.HttpResponse, "Failure sending request") + return + } + + return +} + +// TestThenPoll performs Test then polls until it's completed +func (c InputsClient) TestThenPoll(ctx context.Context, id InputId, input Input) error { + result, err := c.Test(ctx, id, input) + if err != nil { + return fmt.Errorf("performing Test: %+v", err) + } + + if err := result.Poller.PollUntilDone(); err != nil { + return fmt.Errorf("polling after Test: %+v", err) + } + + return nil +} + +// preparerForTest prepares the Test request. +func (c InputsClient) preparerForTest(ctx context.Context, id InputId, input Input) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(fmt.Sprintf("%s/test", id.ID())), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// senderForTest sends the Test request. The method will close the +// http.Response Body if it receives an error. +func (c InputsClient) senderForTest(ctx context.Context, req *http.Request) (future TestOperationResponse, err error) { + var resp *http.Response + resp, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + return + } + + future.Poller, err = polling.NewPollerFromResponse(ctx, resp, c.Client, req.Method) + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_update_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_update_autorest.go new file mode 100644 index 000000000000..a6e3027501a9 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/method_update_autorest.go @@ -0,0 +1,98 @@ +package inputs + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type UpdateOperationResponse struct { + HttpResponse *http.Response + Model *Input +} + +type UpdateOperationOptions struct { + IfMatch *string +} + +func DefaultUpdateOperationOptions() UpdateOperationOptions { + return UpdateOperationOptions{} +} + +func (o UpdateOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + if o.IfMatch != nil { + out["If-Match"] = *o.IfMatch + } + + return out +} + +func (o UpdateOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +// Update ... 
+func (c InputsClient) Update(ctx context.Context, id InputId, input Input, options UpdateOperationOptions) (result UpdateOperationResponse, err error) { + req, err := c.preparerForUpdate(ctx, id, input, options) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "Update", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "Update", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForUpdate(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "inputs.InputsClient", "Update", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForUpdate prepares the Update request. +func (c InputsClient) preparerForUpdate(ctx context.Context, id InputId, input Input, options UpdateOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(id.ID()), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForUpdate handles the response to the Update request. The method always +// closes the http.Response Body. +func (c InputsClient) responderForUpdate(resp *http.Response) (result UpdateOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_avroserialization.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_avroserialization.go new file mode 100644 index 000000000000..9b5e4003b9bd --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_avroserialization.go @@ -0,0 +1,41 @@ +package inputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
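+
+// Editorial note, not generated SDK code: each concrete Serialization in this
+// package marshals through a local wrapper type (so MarshalJSON does not
+// recurse into itself), then injects the "type" discriminator into the decoded
+// map before re-marshaling. A minimal sketch of the resulting JSON, assuming
+// an empty value:
+//
+//	b, _ := json.Marshal(inputs.AvroSerialization{})
+//	// b == []byte(`{"type":"Avro"}`)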
+ +var _ Serialization = AvroSerialization{} + +type AvroSerialization struct { + Properties *interface{} `json:"properties,omitempty"` + + // Fields inherited from Serialization +} + +var _ json.Marshaler = AvroSerialization{} + +func (s AvroSerialization) MarshalJSON() ([]byte, error) { + type wrapper AvroSerialization + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AvroSerialization: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AvroSerialization: %+v", err) + } + decoded["type"] = "Avro" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AvroSerialization: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_azuresqlreferenceinputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_azuresqlreferenceinputdatasource.go new file mode 100644 index 000000000000..a79836d10691 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_azuresqlreferenceinputdatasource.go @@ -0,0 +1,41 @@ +package inputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ ReferenceInputDataSource = AzureSqlReferenceInputDataSource{} + +type AzureSqlReferenceInputDataSource struct { + Properties *AzureSqlReferenceInputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from ReferenceInputDataSource +} + +var _ json.Marshaler = AzureSqlReferenceInputDataSource{} + +func (s AzureSqlReferenceInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper AzureSqlReferenceInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureSqlReferenceInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureSqlReferenceInputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Sql/Server/Database" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureSqlReferenceInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_azuresqlreferenceinputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_azuresqlreferenceinputdatasourceproperties.go new file mode 100644 index 000000000000..bf05a2481be3 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_azuresqlreferenceinputdatasourceproperties.go @@ -0,0 +1,16 @@ +package inputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type AzureSqlReferenceInputDataSourceProperties struct { + Database *string `json:"database,omitempty"` + DeltaSnapshotQuery *string `json:"deltaSnapshotQuery,omitempty"` + FullSnapshotQuery *string `json:"fullSnapshotQuery,omitempty"` + Password *string `json:"password,omitempty"` + RefreshRate *string `json:"refreshRate,omitempty"` + RefreshType *RefreshType `json:"refreshType,omitempty"` + Server *string `json:"server,omitempty"` + Table *string `json:"table,omitempty"` + User *string `json:"user,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobdatasourceproperties.go new file mode 100644 index 000000000000..e59bef5cd996 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobdatasourceproperties.go @@ -0,0 +1,13 @@ +package inputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type BlobDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + Container *string `json:"container,omitempty"` + DateFormat *string `json:"dateFormat,omitempty"` + PathPattern *string `json:"pathPattern,omitempty"` + StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"` + TimeFormat *string `json:"timeFormat,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobreferenceinputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobreferenceinputdatasource.go new file mode 100644 index 000000000000..274d914fba87 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobreferenceinputdatasource.go @@ -0,0 +1,41 @@ +package inputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
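+
+// Editorial note, not generated SDK code: a minimal sketch of populating the
+// blob reference data source defined below from a consuming package; the
+// container, path and account values are hypothetical, and local variables
+// are used because every field is an optional pointer:
+//
+//	container := "example-container"
+//	pattern := "some/path/{date}/{time}"
+//	account := "examplestorageacct"
+//	ds := inputs.BlobReferenceInputDataSource{
+//		Properties: &inputs.BlobDataSourceProperties{
+//			Container:       &container,
+//			PathPattern:     &pattern,
+//			StorageAccounts: &[]inputs.StorageAccount{{AccountName: &account}},
+//		},
+//	}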
+ +var _ ReferenceInputDataSource = BlobReferenceInputDataSource{} + +type BlobReferenceInputDataSource struct { + Properties *BlobDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from ReferenceInputDataSource +} + +var _ json.Marshaler = BlobReferenceInputDataSource{} + +func (s BlobReferenceInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper BlobReferenceInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling BlobReferenceInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling BlobReferenceInputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Storage/Blob" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling BlobReferenceInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobstreaminputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobstreaminputdatasource.go new file mode 100644 index 000000000000..c619735bae5d --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobstreaminputdatasource.go @@ -0,0 +1,41 @@ +package inputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ StreamInputDataSource = BlobStreamInputDataSource{} + +type BlobStreamInputDataSource struct { + Properties *BlobStreamInputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from StreamInputDataSource +} + +var _ json.Marshaler = BlobStreamInputDataSource{} + +func (s BlobStreamInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper BlobStreamInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling BlobStreamInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling BlobStreamInputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Storage/Blob" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling BlobStreamInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobstreaminputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobstreaminputdatasourceproperties.go new file mode 100644 index 000000000000..58691c37b220 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_blobstreaminputdatasourceproperties.go @@ -0,0 +1,14 @@ +package inputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
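+
+// Editorial note, not generated SDK code: unlike the reference-input
+// BlobDataSourceProperties above, the stream-input variant below adds
+// SourcePartitionCount; the blob fields themselves (container, path/date/time
+// patterns, storage accounts, authentication mode) are otherwise identical.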
+ +type BlobStreamInputDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + Container *string `json:"container,omitempty"` + DateFormat *string `json:"dateFormat,omitempty"` + PathPattern *string `json:"pathPattern,omitempty"` + SourcePartitionCount *int64 `json:"sourcePartitionCount,omitempty"` + StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"` + TimeFormat *string `json:"timeFormat,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_compression.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_compression.go new file mode 100644 index 000000000000..198a65a99493 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_compression.go @@ -0,0 +1,8 @@ +package inputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type Compression struct { + Type CompressionType `json:"type"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_csvserialization.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_csvserialization.go new file mode 100644 index 000000000000..a1712b42d98d --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_csvserialization.go @@ -0,0 +1,41 @@ +package inputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ Serialization = CsvSerialization{} + +type CsvSerialization struct { + Properties *CsvSerializationProperties `json:"properties,omitempty"` + + // Fields inherited from Serialization +} + +var _ json.Marshaler = CsvSerialization{} + +func (s CsvSerialization) MarshalJSON() ([]byte, error) { + type wrapper CsvSerialization + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling CsvSerialization: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling CsvSerialization: %+v", err) + } + decoded["type"] = "Csv" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling CsvSerialization: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_csvserializationproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_csvserializationproperties.go new file mode 100644 index 000000000000..14a33c781470 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_csvserializationproperties.go @@ -0,0 +1,9 @@ +package inputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
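+
+// Editorial note, not generated SDK code: a minimal sketch of a CSV
+// serialization; the Encoding value is constructed by string conversion here
+// because the package's constants file is not part of this diff, so the exact
+// constant names are not shown:
+//
+//	enc := inputs.Encoding("UTF8")
+//	delim := ","
+//	s := inputs.CsvSerialization{
+//		Properties: &inputs.CsvSerializationProperties{
+//			Encoding:       &enc,
+//			FieldDelimiter: &delim,
+//		},
+//	}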
+ +type CsvSerializationProperties struct { + Encoding *Encoding `json:"encoding,omitempty"` + FieldDelimiter *string `json:"fieldDelimiter,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_diagnosticcondition.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_diagnosticcondition.go new file mode 100644 index 000000000000..a7d5b8bbad62 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_diagnosticcondition.go @@ -0,0 +1,10 @@ +package inputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type DiagnosticCondition struct { + Code *string `json:"code,omitempty"` + Message *string `json:"message,omitempty"` + Since *string `json:"since,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_diagnostics.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_diagnostics.go new file mode 100644 index 000000000000..ba76e45664ee --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_diagnostics.go @@ -0,0 +1,8 @@ +package inputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type Diagnostics struct { + Conditions *[]DiagnosticCondition `json:"conditions,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_errorresponse.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_errorresponse.go new file mode 100644 index 000000000000..bd05a7d15517 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_errorresponse.go @@ -0,0 +1,9 @@ +package inputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ErrorResponse struct { + Code *string `json:"code,omitempty"` + Message *string `json:"message,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_eventhubstreaminputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_eventhubstreaminputdatasource.go new file mode 100644 index 000000000000..da4a5db65500 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_eventhubstreaminputdatasource.go @@ -0,0 +1,41 @@ +package inputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ StreamInputDataSource = EventHubStreamInputDataSource{} + +type EventHubStreamInputDataSource struct { + Properties *EventHubStreamInputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from StreamInputDataSource +} + +var _ json.Marshaler = EventHubStreamInputDataSource{} + +func (s EventHubStreamInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper EventHubStreamInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling EventHubStreamInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling EventHubStreamInputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.ServiceBus/EventHub" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling EventHubStreamInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_eventhubstreaminputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_eventhubstreaminputdatasourceproperties.go new file mode 100644 index 000000000000..064789385f85 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_eventhubstreaminputdatasourceproperties.go @@ -0,0 +1,13 @@ +package inputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type EventHubStreamInputDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + ConsumerGroupName *string `json:"consumerGroupName,omitempty"` + EventHubName *string `json:"eventHubName,omitempty"` + ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_eventhubv2streaminputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_eventhubv2streaminputdatasource.go new file mode 100644 index 000000000000..1344f077e5ae --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_eventhubv2streaminputdatasource.go @@ -0,0 +1,41 @@ +package inputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
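+
+// Editorial note, not generated SDK code: EventHubV2StreamInputDataSource
+// below reuses EventHubStreamInputDataSourceProperties; the only difference
+// from EventHubStreamInputDataSource above is the "type" discriminator its
+// MarshalJSON writes ("Microsoft.EventHub/EventHub" rather than
+// "Microsoft.ServiceBus/EventHub"), reflecting the newer Event Hubs resource
+// provider namespace.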
+ +var _ StreamInputDataSource = EventHubV2StreamInputDataSource{} + +type EventHubV2StreamInputDataSource struct { + Properties *EventHubStreamInputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from StreamInputDataSource +} + +var _ json.Marshaler = EventHubV2StreamInputDataSource{} + +func (s EventHubV2StreamInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper EventHubV2StreamInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling EventHubV2StreamInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling EventHubV2StreamInputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.EventHub/EventHub" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling EventHubV2StreamInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_filereferenceinputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_filereferenceinputdatasource.go new file mode 100644 index 000000000000..bca612a4940f --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_filereferenceinputdatasource.go @@ -0,0 +1,41 @@ +package inputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ ReferenceInputDataSource = FileReferenceInputDataSource{} + +type FileReferenceInputDataSource struct { + Properties *FileReferenceInputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from ReferenceInputDataSource +} + +var _ json.Marshaler = FileReferenceInputDataSource{} + +func (s FileReferenceInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper FileReferenceInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling FileReferenceInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling FileReferenceInputDataSource: %+v", err) + } + decoded["type"] = "File" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling FileReferenceInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_filereferenceinputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_filereferenceinputdatasourceproperties.go new file mode 100644 index 000000000000..e3d4e2d70751 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_filereferenceinputdatasourceproperties.go @@ -0,0 +1,8 @@ +package inputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type FileReferenceInputDataSourceProperties struct { + Path *string `json:"path,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_gatewaymessagebussourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_gatewaymessagebussourceproperties.go new file mode 100644 index 000000000000..5b16a0493192 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_gatewaymessagebussourceproperties.go @@ -0,0 +1,8 @@ +package inputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type GatewayMessageBusSourceProperties struct { + Topic *string `json:"topic,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_gatewaymessagebusstreaminputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_gatewaymessagebusstreaminputdatasource.go new file mode 100644 index 000000000000..4f16a09b403b --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_gatewaymessagebusstreaminputdatasource.go @@ -0,0 +1,41 @@ +package inputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ StreamInputDataSource = GatewayMessageBusStreamInputDataSource{} + +type GatewayMessageBusStreamInputDataSource struct { + Properties *GatewayMessageBusSourceProperties `json:"properties,omitempty"` + + // Fields inherited from StreamInputDataSource +} + +var _ json.Marshaler = GatewayMessageBusStreamInputDataSource{} + +func (s GatewayMessageBusStreamInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper GatewayMessageBusStreamInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling GatewayMessageBusStreamInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling GatewayMessageBusStreamInputDataSource: %+v", err) + } + decoded["type"] = "GatewayMessageBus" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling GatewayMessageBusStreamInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_input.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_input.go new file mode 100644 index 000000000000..450ad8eb7389 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_input.go @@ -0,0 +1,44 @@ +package inputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
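+
+// Editorial note, not generated SDK code: Input.UnmarshalJSON below decodes
+// in two passes - once into a local alias type to capture the flat fields
+// without re-entering this method, and once into map[string]json.RawMessage
+// so the polymorphic "properties" payload can be routed through
+// unmarshalInputPropertiesImplementation. A consumer recovers the concrete
+// type with a type switch; a sketch:
+//
+//	switch props := input.Properties.(type) {
+//	case inputs.StreamInputProperties:
+//		_ = props.Datasource // stream input
+//	case inputs.ReferenceInputProperties:
+//		_ = props.Datasource // reference input
+//	}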
+ +type Input struct { + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Properties InputProperties `json:"properties"` + Type *string `json:"type,omitempty"` +} + +var _ json.Unmarshaler = &Input{} + +func (s *Input) UnmarshalJSON(bytes []byte) error { + type alias Input + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into Input: %+v", err) + } + + s.Id = decoded.Id + s.Name = decoded.Name + s.Type = decoded.Type + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling Input into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["properties"]; ok { + impl, err := unmarshalInputPropertiesImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Properties' for 'Input': %+v", err) + } + s.Properties = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_inputproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_inputproperties.go new file mode 100644 index 000000000000..f45189091e14 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_inputproperties.go @@ -0,0 +1,56 @@ +package inputs + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type InputProperties interface { +} + +func unmarshalInputPropertiesImplementation(input []byte) (InputProperties, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling InputProperties into map[string]interface: %+v", err) + } + + value, ok := temp["type"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Reference") { + var out ReferenceInputProperties + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into ReferenceInputProperties: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Stream") { + var out StreamInputProperties + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into StreamInputProperties: %+v", err) + } + return out, nil + } + + type RawInputPropertiesImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawInputPropertiesImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_iothubstreaminputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_iothubstreaminputdatasource.go new file mode 100644 index 000000000000..392aaf849dd1 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_iothubstreaminputdatasource.go @@ -0,0 +1,41 @@ +package inputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ StreamInputDataSource = IoTHubStreamInputDataSource{} + +type IoTHubStreamInputDataSource struct { + Properties *IoTHubStreamInputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from StreamInputDataSource +} + +var _ json.Marshaler = IoTHubStreamInputDataSource{} + +func (s IoTHubStreamInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper IoTHubStreamInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling IoTHubStreamInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling IoTHubStreamInputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Devices/IotHubs" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling IoTHubStreamInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_iothubstreaminputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_iothubstreaminputdatasourceproperties.go new file mode 100644 index 000000000000..02347bfcc8bc --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_iothubstreaminputdatasourceproperties.go @@ -0,0 +1,12 @@ +package inputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type IoTHubStreamInputDataSourceProperties struct { + ConsumerGroupName *string `json:"consumerGroupName,omitempty"` + Endpoint *string `json:"endpoint,omitempty"` + IotHubNamespace *string `json:"iotHubNamespace,omitempty"` + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_jsonserialization.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_jsonserialization.go new file mode 100644 index 000000000000..a6e3035718c0 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_jsonserialization.go @@ -0,0 +1,41 @@ +package inputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ Serialization = JsonSerialization{} + +type JsonSerialization struct { + Properties *JsonSerializationProperties `json:"properties,omitempty"` + + // Fields inherited from Serialization +} + +var _ json.Marshaler = JsonSerialization{} + +func (s JsonSerialization) MarshalJSON() ([]byte, error) { + type wrapper JsonSerialization + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling JsonSerialization: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling JsonSerialization: %+v", err) + } + decoded["type"] = "Json" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling JsonSerialization: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_jsonserializationproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_jsonserializationproperties.go new file mode 100644 index 000000000000..0e3472a2a2a2 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_jsonserializationproperties.go @@ -0,0 +1,9 @@ +package inputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type JsonSerializationProperties struct { + Encoding *Encoding `json:"encoding,omitempty"` + Format *JsonOutputSerializationFormat `json:"format,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_parquetserialization.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_parquetserialization.go new file mode 100644 index 000000000000..35fb8ddf4d58 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_parquetserialization.go @@ -0,0 +1,41 @@ +package inputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ Serialization = ParquetSerialization{} + +type ParquetSerialization struct { + Properties *interface{} `json:"properties,omitempty"` + + // Fields inherited from Serialization +} + +var _ json.Marshaler = ParquetSerialization{} + +func (s ParquetSerialization) MarshalJSON() ([]byte, error) { + type wrapper ParquetSerialization + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling ParquetSerialization: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling ParquetSerialization: %+v", err) + } + decoded["type"] = "Parquet" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling ParquetSerialization: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_referenceinputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_referenceinputdatasource.go new file mode 100644 index 000000000000..fb9d23fa4314 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_referenceinputdatasource.go @@ -0,0 +1,64 @@ +package inputs + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ReferenceInputDataSource interface { +} + +func unmarshalReferenceInputDataSourceImplementation(input []byte) (ReferenceInputDataSource, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling ReferenceInputDataSource into map[string]interface: %+v", err) + } + + value, ok := temp["type"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Microsoft.Sql/Server/Database") { + var out AzureSqlReferenceInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureSqlReferenceInputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.Storage/Blob") { + var out BlobReferenceInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into BlobReferenceInputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "File") { + var out FileReferenceInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into FileReferenceInputDataSource: %+v", err) + } + return out, nil + } + + type RawReferenceInputDataSourceImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawReferenceInputDataSourceImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_referenceinputproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_referenceinputproperties.go new file mode 100644 index 000000000000..d7e8ae7b225c --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_referenceinputproperties.go @@ -0,0 +1,83 @@ +package inputs + 
+import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ InputProperties = ReferenceInputProperties{} + +type ReferenceInputProperties struct { + Datasource ReferenceInputDataSource `json:"datasource"` + + // Fields inherited from InputProperties + Compression *Compression `json:"compression,omitempty"` + Diagnostics *Diagnostics `json:"diagnostics,omitempty"` + Etag *string `json:"etag,omitempty"` + PartitionKey *string `json:"partitionKey,omitempty"` + Serialization Serialization `json:"serialization"` +} + +var _ json.Marshaler = ReferenceInputProperties{} + +func (s ReferenceInputProperties) MarshalJSON() ([]byte, error) { + type wrapper ReferenceInputProperties + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling ReferenceInputProperties: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling ReferenceInputProperties: %+v", err) + } + decoded["type"] = "Reference" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling ReferenceInputProperties: %+v", err) + } + + return encoded, nil +} + +var _ json.Unmarshaler = &ReferenceInputProperties{} + +func (s *ReferenceInputProperties) UnmarshalJSON(bytes []byte) error { + type alias ReferenceInputProperties + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into ReferenceInputProperties: %+v", err) + } + + s.Compression = decoded.Compression + s.Diagnostics = decoded.Diagnostics + s.Etag = decoded.Etag + s.PartitionKey = decoded.PartitionKey + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling ReferenceInputProperties into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["datasource"]; ok { + impl, err := unmarshalReferenceInputDataSourceImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Datasource' for 'ReferenceInputProperties': %+v", err) + } + s.Datasource = impl + } + + if v, ok := temp["serialization"]; ok { + impl, err := unmarshalSerializationImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Serialization' for 'ReferenceInputProperties': %+v", err) + } + s.Serialization = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_resourceteststatus.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_resourceteststatus.go new file mode 100644 index 000000000000..c9634f34dd6d --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_resourceteststatus.go @@ -0,0 +1,9 @@ +package inputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
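+
+// Editorial note, not generated SDK code: ResourceTestStatus below models the
+// terminal body of the Test long-running operation (method_test_autorest.go
+// above) - a status string plus an optional error. The concrete status values
+// (for example "TestSucceeded" / "TestFailed") come from the Stream Analytics
+// REST API and are stated here as an assumption, not taken from this diff.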
+ +type ResourceTestStatus struct { + Error *ErrorResponse `json:"error,omitempty"` + Status *string `json:"status,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_serialization.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_serialization.go new file mode 100644 index 000000000000..a6977bcbdb06 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_serialization.go @@ -0,0 +1,72 @@ +package inputs + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type Serialization interface { +} + +func unmarshalSerializationImplementation(input []byte) (Serialization, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling Serialization into map[string]interface: %+v", err) + } + + value, ok := temp["type"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Avro") { + var out AvroSerialization + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AvroSerialization: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Csv") { + var out CsvSerialization + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into CsvSerialization: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Json") { + var out JsonSerialization + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into JsonSerialization: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Parquet") { + var out ParquetSerialization + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into ParquetSerialization: %+v", err) + } + return out, nil + } + + type RawSerializationImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawSerializationImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_storageaccount.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_storageaccount.go new file mode 100644 index 000000000000..5b9050916257 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_storageaccount.go @@ -0,0 +1,9 @@ +package inputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type StorageAccount struct { + AccountKey *string `json:"accountKey,omitempty"` + AccountName *string `json:"accountName,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_streaminputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_streaminputdatasource.go new file mode 100644 index 000000000000..b83fa28944c8 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_streaminputdatasource.go @@ -0,0 +1,80 @@ +package inputs + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type StreamInputDataSource interface { +} + +func unmarshalStreamInputDataSourceImplementation(input []byte) (StreamInputDataSource, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling StreamInputDataSource into map[string]interface: %+v", err) + } + + value, ok := temp["type"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Microsoft.Storage/Blob") { + var out BlobStreamInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into BlobStreamInputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.ServiceBus/EventHub") { + var out EventHubStreamInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into EventHubStreamInputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.EventHub/EventHub") { + var out EventHubV2StreamInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into EventHubV2StreamInputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "GatewayMessageBus") { + var out GatewayMessageBusStreamInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into GatewayMessageBusStreamInputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.Devices/IotHubs") { + var out IoTHubStreamInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into IoTHubStreamInputDataSource: %+v", err) + } + return out, nil + } + + type RawStreamInputDataSourceImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawStreamInputDataSourceImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_streaminputproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_streaminputproperties.go new file mode 100644 index 000000000000..cd1a278a50c7 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/model_streaminputproperties.go @@ -0,0 +1,83 @@ +package inputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
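+
+// Editorial note, not generated SDK code: a minimal sketch composing the
+// polymorphic pieces of this package into a stream input, with hypothetical
+// Event Hub values:
+//
+//	ns, hub, cg := "example-ns", "example-hub", "$Default"
+//	input := inputs.Input{
+//		Properties: inputs.StreamInputProperties{
+//			Datasource: inputs.EventHubStreamInputDataSource{
+//				Properties: &inputs.EventHubStreamInputDataSourceProperties{
+//					ServiceBusNamespace: &ns,
+//					EventHubName:        &hub,
+//					ConsumerGroupName:   &cg,
+//				},
+//			},
+//			Serialization: inputs.JsonSerialization{},
+//		},
+//	}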
+
+var _ InputProperties = StreamInputProperties{}
+
+type StreamInputProperties struct {
+	Datasource StreamInputDataSource `json:"datasource"`
+
+	// Fields inherited from InputProperties
+	Compression   *Compression  `json:"compression,omitempty"`
+	Diagnostics   *Diagnostics  `json:"diagnostics,omitempty"`
+	Etag          *string       `json:"etag,omitempty"`
+	PartitionKey  *string       `json:"partitionKey,omitempty"`
+	Serialization Serialization `json:"serialization"`
+}
+
+var _ json.Marshaler = StreamInputProperties{}
+
+func (s StreamInputProperties) MarshalJSON() ([]byte, error) {
+	type wrapper StreamInputProperties
+	wrapped := wrapper(s)
+	encoded, err := json.Marshal(wrapped)
+	if err != nil {
+		return nil, fmt.Errorf("marshaling StreamInputProperties: %+v", err)
+	}
+
+	var decoded map[string]interface{}
+	if err := json.Unmarshal(encoded, &decoded); err != nil {
+		return nil, fmt.Errorf("unmarshaling StreamInputProperties: %+v", err)
+	}
+	decoded["type"] = "Stream"
+
+	encoded, err = json.Marshal(decoded)
+	if err != nil {
+		return nil, fmt.Errorf("re-marshaling StreamInputProperties: %+v", err)
+	}
+
+	return encoded, nil
+}
+
+var _ json.Unmarshaler = &StreamInputProperties{}
+
+func (s *StreamInputProperties) UnmarshalJSON(bytes []byte) error {
+	type alias StreamInputProperties
+	var decoded alias
+	if err := json.Unmarshal(bytes, &decoded); err != nil {
+		return fmt.Errorf("unmarshaling into StreamInputProperties: %+v", err)
+	}
+
+	s.Compression = decoded.Compression
+	s.Diagnostics = decoded.Diagnostics
+	s.Etag = decoded.Etag
+	s.PartitionKey = decoded.PartitionKey
+
+	var temp map[string]json.RawMessage
+	if err := json.Unmarshal(bytes, &temp); err != nil {
+		return fmt.Errorf("unmarshaling StreamInputProperties into map[string]json.RawMessage: %+v", err)
+	}
+
+	if v, ok := temp["datasource"]; ok {
+		impl, err := unmarshalStreamInputDataSourceImplementation(v)
+		if err != nil {
+			return fmt.Errorf("unmarshaling field 'Datasource' for 'StreamInputProperties': %+v", err)
+		}
+		s.Datasource = impl
+	}
+
+	if v, ok := temp["serialization"]; ok {
+		impl, err := unmarshalSerializationImplementation(v)
+		if err != nil {
+			return fmt.Errorf("unmarshaling field 'Serialization' for 'StreamInputProperties': %+v", err)
+		}
+		s.Serialization = impl
+	}
+	return nil
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/predicates.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/predicates.go new file mode 100644 index 000000000000..2091914227f8 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/predicates.go @@ -0,0 +1,24 @@ +package inputs
+
+type InputOperationPredicate struct {
+	Id   *string
+	Name *string
+	Type *string
+}
+
+func (p InputOperationPredicate) Matches(input Input) bool {
+
+	if p.Id != nil && (input.Id == nil || *p.Id != *input.Id) {
+		return false
+	}
+
+	if p.Name != nil && (input.Name == nil || *p.Name != *input.Name) {
+		return false
+	}
+
+	if p.Type != nil && (input.Type == nil || *p.Type != *input.Type) {
+		return false
+	}
+
+	return true
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/version.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/version.go new file mode 100644 index 000000000000..7553750ba9f5 --- /dev/null +++
b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs/version.go @@ -0,0 +1,12 @@ +package inputs + +import "fmt" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +const defaultApiVersion = "2020-03-01" + +func userAgent() string { + return fmt.Sprintf("hashicorp/go-azure-sdk/inputs/%s", defaultApiVersion) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/README.md b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/README.md new file mode 100644 index 000000000000..4d6725ffc018 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/README.md @@ -0,0 +1,128 @@ + +## `github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs` Documentation + +The `outputs` SDK allows for interaction with the Azure Resource Manager Service `streamanalytics` (API Version `2020-03-01`). + +This readme covers example usages, but further information on [using this SDK can be found in the project root](https://github.com/hashicorp/go-azure-sdk/tree/main/docs). + +### Import Path + +```go +import "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs" +``` + + +### Client Initialization + +```go +client := outputs.NewOutputsClientWithBaseURI("https://management.azure.com") +client.Client.Authorizer = authorizer +``` + + +### Example Usage: `OutputsClient.CreateOrReplace` + +```go +ctx := context.TODO() +id := outputs.NewOutputID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "outputValue") + +payload := outputs.Output{ + // ... 
+}
+
+
+read, err := client.CreateOrReplace(ctx, id, payload, outputs.DefaultCreateOrReplaceOperationOptions())
+if err != nil {
+	// handle the error
+}
+if model := read.Model; model != nil {
+	// do something with the model/response object
+}
+```
+
+
+### Example Usage: `OutputsClient.Delete`
+
+```go
+ctx := context.TODO()
+id := outputs.NewOutputID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "outputValue")
+
+read, err := client.Delete(ctx, id)
+if err != nil {
+	// handle the error
+}
+if resp := read.HttpResponse; resp != nil {
+	// inspect the raw response if needed - DeleteOperationResponse has no Model
+}
+```
+
+
+### Example Usage: `OutputsClient.Get`
+
+```go
+ctx := context.TODO()
+id := outputs.NewOutputID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "outputValue")
+
+read, err := client.Get(ctx, id)
+if err != nil {
+	// handle the error
+}
+if model := read.Model; model != nil {
+	// do something with the model/response object
+}
+```
+
+
+### Example Usage: `OutputsClient.ListByStreamingJob`
+
+```go
+ctx := context.TODO()
+id := outputs.NewStreamingJobID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue")
+
+// alternatively `client.ListByStreamingJob(ctx, id, outputs.DefaultListByStreamingJobOperationOptions())` can be used to do batched pagination
+items, err := client.ListByStreamingJobComplete(ctx, id, outputs.DefaultListByStreamingJobOperationOptions())
+if err != nil {
+	// handle the error
+}
+for _, item := range items.Items {
+	// do something
+}
+```
+
+
+### Example Usage: `OutputsClient.Test`
+
+```go
+ctx := context.TODO()
+id := outputs.NewOutputID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "outputValue")
+
+payload := outputs.Output{
+	// ...
+}
+
+
+if err := client.TestThenPoll(ctx, id, payload); err != nil {
+	// handle the error
+}
+```
+
+
+### Example Usage: `OutputsClient.Update`
+
+```go
+ctx := context.TODO()
+id := outputs.NewOutputID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "outputValue")
+
+payload := outputs.Output{
+	// ...
+}
+
+
+read, err := client.Update(ctx, id, payload, outputs.DefaultUpdateOperationOptions())
+if err != nil {
+	// handle the error
+}
+if model := read.Model; model != nil {
+	// do something with the model/response object
+}
+```
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/client.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/client.go
new file mode 100644
index 000000000000..90cdaf9a2f33
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/client.go
@@ -0,0 +1,18 @@
+package outputs
+
+import "github.com/Azure/go-autorest/autorest"
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See NOTICE.txt in the project root for license information.
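
The README's `ListByStreamingJob` example pages through results client-side; the `Complete` helpers can also filter with the generated predicate type (defined later in this patch, and assumed here to expose `Id`/`Name`/`Type` fields like the inputs predicate shown earlier). A sketch of a hypothetical helper:

```go
package example

import (
	"context"
	"fmt"

	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs"
)

// outputsNamed drains every page of outputs for a streaming job and keeps
// only those whose name matches, via the generated predicate type.
func outputsNamed(ctx context.Context, client outputs.OutputsClient, id outputs.StreamingJobId, name string) ([]outputs.Output, error) {
	predicate := outputs.OutputOperationPredicate{Name: &name}
	result, err := client.ListByStreamingJobCompleteMatchingPredicate(ctx, id, outputs.DefaultListByStreamingJobOperationOptions(), predicate)
	if err != nil {
		return nil, fmt.Errorf("listing outputs for %s: %+v", id, err)
	}
	return result.Items, nil
}
```
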
+ +type OutputsClient struct { + Client autorest.Client + baseUri string +} + +func NewOutputsClientWithBaseURI(endpoint string) OutputsClient { + return OutputsClient{ + Client: autorest.NewClientWithUserAgent(userAgent()), + baseUri: endpoint, + } +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/constants.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/constants.go new file mode 100644 index 000000000000..d4d4105abc63 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/constants.go @@ -0,0 +1,124 @@ +package outputs + +import "strings" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type AuthenticationMode string + +const ( + AuthenticationModeConnectionString AuthenticationMode = "ConnectionString" + AuthenticationModeMsi AuthenticationMode = "Msi" + AuthenticationModeUserToken AuthenticationMode = "UserToken" +) + +func PossibleValuesForAuthenticationMode() []string { + return []string{ + string(AuthenticationModeConnectionString), + string(AuthenticationModeMsi), + string(AuthenticationModeUserToken), + } +} + +func parseAuthenticationMode(input string) (*AuthenticationMode, error) { + vals := map[string]AuthenticationMode{ + "connectionstring": AuthenticationModeConnectionString, + "msi": AuthenticationModeMsi, + "usertoken": AuthenticationModeUserToken, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := AuthenticationMode(input) + return &out, nil +} + +type Encoding string + +const ( + EncodingUTFEight Encoding = "UTF8" +) + +func PossibleValuesForEncoding() []string { + return []string{ + string(EncodingUTFEight), + } +} + +func parseEncoding(input string) (*Encoding, error) { + vals := map[string]Encoding{ + "utf8": EncodingUTFEight, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := Encoding(input) + return &out, nil +} + +type EventSerializationType string + +const ( + EventSerializationTypeAvro EventSerializationType = "Avro" + EventSerializationTypeCsv EventSerializationType = "Csv" + EventSerializationTypeJson EventSerializationType = "Json" + EventSerializationTypeParquet EventSerializationType = "Parquet" +) + +func PossibleValuesForEventSerializationType() []string { + return []string{ + string(EventSerializationTypeAvro), + string(EventSerializationTypeCsv), + string(EventSerializationTypeJson), + string(EventSerializationTypeParquet), + } +} + +func parseEventSerializationType(input string) (*EventSerializationType, error) { + vals := map[string]EventSerializationType{ + "avro": EventSerializationTypeAvro, + "csv": EventSerializationTypeCsv, + "json": EventSerializationTypeJson, + "parquet": EventSerializationTypeParquet, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := EventSerializationType(input) + return &out, nil +} + +type JsonOutputSerializationFormat string + +const ( + JsonOutputSerializationFormatArray JsonOutputSerializationFormat = "Array" + JsonOutputSerializationFormatLineSeparated JsonOutputSerializationFormat = "LineSeparated" +) + +func 
PossibleValuesForJsonOutputSerializationFormat() []string { + return []string{ + string(JsonOutputSerializationFormatArray), + string(JsonOutputSerializationFormatLineSeparated), + } +} + +func parseJsonOutputSerializationFormat(input string) (*JsonOutputSerializationFormat, error) { + vals := map[string]JsonOutputSerializationFormat{ + "array": JsonOutputSerializationFormatArray, + "lineseparated": JsonOutputSerializationFormatLineSeparated, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := JsonOutputSerializationFormat(input) + return &out, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/id_output.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/id_output.go new file mode 100644 index 000000000000..09dcca0920f9 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/id_output.go @@ -0,0 +1,137 @@ +package outputs + +import ( + "fmt" + "strings" + + "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" +) + +var _ resourceids.ResourceId = OutputId{} + +// OutputId is a struct representing the Resource ID for a Output +type OutputId struct { + SubscriptionId string + ResourceGroupName string + JobName string + OutputName string +} + +// NewOutputID returns a new OutputId struct +func NewOutputID(subscriptionId string, resourceGroupName string, jobName string, outputName string) OutputId { + return OutputId{ + SubscriptionId: subscriptionId, + ResourceGroupName: resourceGroupName, + JobName: jobName, + OutputName: outputName, + } +} + +// ParseOutputID parses 'input' into a OutputId +func ParseOutputID(input string) (*OutputId, error) { + parser := resourceids.NewParserFromResourceIdType(OutputId{}) + parsed, err := parser.Parse(input, false) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := OutputId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + if id.OutputName, ok = parsed.Parsed["outputName"]; !ok { + return nil, fmt.Errorf("the segment 'outputName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ParseOutputIDInsensitively parses 'input' case-insensitively into a OutputId +// note: this method should only be used for API response data and not user input +func ParseOutputIDInsensitively(input string) (*OutputId, error) { + parser := resourceids.NewParserFromResourceIdType(OutputId{}) + parsed, err := parser.Parse(input, true) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := OutputId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the 
resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + if id.OutputName, ok = parsed.Parsed["outputName"]; !ok { + return nil, fmt.Errorf("the segment 'outputName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ValidateOutputID checks that 'input' can be parsed as a Output ID +func ValidateOutputID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := ParseOutputID(v); err != nil { + errors = append(errors, err) + } + + return +} + +// ID returns the formatted Output ID +func (id OutputId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingJobs/%s/outputs/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.JobName, id.OutputName) +} + +// Segments returns a slice of Resource ID Segments which comprise this Output ID +func (id OutputId) Segments() []resourceids.Segment { + return []resourceids.Segment{ + resourceids.StaticSegment("staticSubscriptions", "subscriptions", "subscriptions"), + resourceids.SubscriptionIdSegment("subscriptionId", "12345678-1234-9876-4563-123456789012"), + resourceids.StaticSegment("staticResourceGroups", "resourceGroups", "resourceGroups"), + resourceids.ResourceGroupSegment("resourceGroupName", "example-resource-group"), + resourceids.StaticSegment("staticProviders", "providers", "providers"), + resourceids.ResourceProviderSegment("staticMicrosoftStreamAnalytics", "Microsoft.StreamAnalytics", "Microsoft.StreamAnalytics"), + resourceids.StaticSegment("staticStreamingJobs", "streamingJobs", "streamingJobs"), + resourceids.UserSpecifiedSegment("jobName", "jobValue"), + resourceids.StaticSegment("staticOutputs", "outputs", "outputs"), + resourceids.UserSpecifiedSegment("outputName", "outputValue"), + } +} + +// String returns a human-readable description of this Output ID +func (id OutputId) String() string { + components := []string{ + fmt.Sprintf("Subscription: %q", id.SubscriptionId), + fmt.Sprintf("Resource Group Name: %q", id.ResourceGroupName), + fmt.Sprintf("Job Name: %q", id.JobName), + fmt.Sprintf("Output Name: %q", id.OutputName), + } + return fmt.Sprintf("Output (%s)", strings.Join(components, "\n")) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/id_streamingjob.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/id_streamingjob.go new file mode 100644 index 000000000000..99698a809e45 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/id_streamingjob.go @@ -0,0 +1,124 @@ +package outputs + +import ( + "fmt" + "strings" + + "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" +) + +var _ resourceids.ResourceId = StreamingJobId{} + +// StreamingJobId is a struct representing the Resource ID for a Streaming Job +type StreamingJobId struct { + SubscriptionId string + ResourceGroupName string + JobName string +} + +// NewStreamingJobID returns a new StreamingJobId struct +func NewStreamingJobID(subscriptionId string, resourceGroupName string, jobName string) StreamingJobId { + return StreamingJobId{ + SubscriptionId: subscriptionId, + ResourceGroupName: resourceGroupName, + JobName: 
jobName, + } +} + +// ParseStreamingJobID parses 'input' into a StreamingJobId +func ParseStreamingJobID(input string) (*StreamingJobId, error) { + parser := resourceids.NewParserFromResourceIdType(StreamingJobId{}) + parsed, err := parser.Parse(input, false) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := StreamingJobId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ParseStreamingJobIDInsensitively parses 'input' case-insensitively into a StreamingJobId +// note: this method should only be used for API response data and not user input +func ParseStreamingJobIDInsensitively(input string) (*StreamingJobId, error) { + parser := resourceids.NewParserFromResourceIdType(StreamingJobId{}) + parsed, err := parser.Parse(input, true) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := StreamingJobId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ValidateStreamingJobID checks that 'input' can be parsed as a Streaming Job ID +func ValidateStreamingJobID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := ParseStreamingJobID(v); err != nil { + errors = append(errors, err) + } + + return +} + +// ID returns the formatted Streaming Job ID +func (id StreamingJobId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingJobs/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.JobName) +} + +// Segments returns a slice of Resource ID Segments which comprise this Streaming Job ID +func (id StreamingJobId) Segments() []resourceids.Segment { + return []resourceids.Segment{ + resourceids.StaticSegment("staticSubscriptions", "subscriptions", "subscriptions"), + resourceids.SubscriptionIdSegment("subscriptionId", "12345678-1234-9876-4563-123456789012"), + resourceids.StaticSegment("staticResourceGroups", "resourceGroups", "resourceGroups"), + resourceids.ResourceGroupSegment("resourceGroupName", "example-resource-group"), + resourceids.StaticSegment("staticProviders", "providers", "providers"), + resourceids.ResourceProviderSegment("staticMicrosoftStreamAnalytics", "Microsoft.StreamAnalytics", "Microsoft.StreamAnalytics"), + resourceids.StaticSegment("staticStreamingJobs", "streamingJobs", "streamingJobs"), + resourceids.UserSpecifiedSegment("jobName", "jobValue"), + } +} + +// String returns a 
human-readable description of this Streaming Job ID +func (id StreamingJobId) String() string { + components := []string{ + fmt.Sprintf("Subscription: %q", id.SubscriptionId), + fmt.Sprintf("Resource Group Name: %q", id.ResourceGroupName), + fmt.Sprintf("Job Name: %q", id.JobName), + } + return fmt.Sprintf("Streaming Job (%s)", strings.Join(components, "\n")) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_createorreplace_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_createorreplace_autorest.go new file mode 100644 index 000000000000..4d25b5c58779 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_createorreplace_autorest.go @@ -0,0 +1,103 @@ +package outputs + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type CreateOrReplaceOperationResponse struct { + HttpResponse *http.Response + Model *Output +} + +type CreateOrReplaceOperationOptions struct { + IfMatch *string + IfNoneMatch *string +} + +func DefaultCreateOrReplaceOperationOptions() CreateOrReplaceOperationOptions { + return CreateOrReplaceOperationOptions{} +} + +func (o CreateOrReplaceOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + if o.IfMatch != nil { + out["If-Match"] = *o.IfMatch + } + + if o.IfNoneMatch != nil { + out["If-None-Match"] = *o.IfNoneMatch + } + + return out +} + +func (o CreateOrReplaceOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +// CreateOrReplace ... +func (c OutputsClient) CreateOrReplace(ctx context.Context, id OutputId, input Output, options CreateOrReplaceOperationOptions) (result CreateOrReplaceOperationResponse, err error) { + req, err := c.preparerForCreateOrReplace(ctx, id, input, options) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "CreateOrReplace", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "CreateOrReplace", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForCreateOrReplace(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "CreateOrReplace", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForCreateOrReplace prepares the CreateOrReplace request. 
+func (c OutputsClient) preparerForCreateOrReplace(ctx context.Context, id OutputId, input Output, options CreateOrReplaceOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(id.ID()), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForCreateOrReplace handles the response to the CreateOrReplace request. The method always +// closes the http.Response Body. +func (c OutputsClient) responderForCreateOrReplace(resp *http.Response) (result CreateOrReplaceOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusCreated, http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_delete_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_delete_autorest.go new file mode 100644 index 000000000000..9ba461f884f9 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_delete_autorest.go @@ -0,0 +1,66 @@ +package outputs + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type DeleteOperationResponse struct { + HttpResponse *http.Response +} + +// Delete ... +func (c OutputsClient) Delete(ctx context.Context, id OutputId) (result DeleteOperationResponse, err error) { + req, err := c.preparerForDelete(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "Delete", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "Delete", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForDelete(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "Delete", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForDelete prepares the Delete request. +func (c OutputsClient) preparerForDelete(ctx context.Context, id OutputId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsDelete(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(id.ID()), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForDelete handles the response to the Delete request. The method always +// closes the http.Response Body. 
+func (c OutputsClient) responderForDelete(resp *http.Response) (result DeleteOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusNoContent, http.StatusOK), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_get_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_get_autorest.go new file mode 100644 index 000000000000..11f1639925b6 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_get_autorest.go @@ -0,0 +1,68 @@ +package outputs + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type GetOperationResponse struct { + HttpResponse *http.Response + Model *Output +} + +// Get ... +func (c OutputsClient) Get(ctx context.Context, id OutputId) (result GetOperationResponse, err error) { + req, err := c.preparerForGet(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "Get", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "Get", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForGet(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "Get", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForGet prepares the Get request. +func (c OutputsClient) preparerForGet(ctx context.Context, id OutputId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(id.ID()), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForGet handles the response to the Get request. The method always +// closes the http.Response Body. 
+func (c OutputsClient) responderForGet(resp *http.Response) (result GetOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_listbystreamingjob_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_listbystreamingjob_autorest.go new file mode 100644 index 000000000000..b0dae7269a00 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_listbystreamingjob_autorest.go @@ -0,0 +1,215 @@ +package outputs + +import ( + "context" + "fmt" + "net/http" + "net/url" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ListByStreamingJobOperationResponse struct { + HttpResponse *http.Response + Model *[]Output + + nextLink *string + nextPageFunc func(ctx context.Context, nextLink string) (ListByStreamingJobOperationResponse, error) +} + +type ListByStreamingJobCompleteResult struct { + Items []Output +} + +func (r ListByStreamingJobOperationResponse) HasMore() bool { + return r.nextLink != nil +} + +func (r ListByStreamingJobOperationResponse) LoadMore(ctx context.Context) (resp ListByStreamingJobOperationResponse, err error) { + if !r.HasMore() { + err = fmt.Errorf("no more pages returned") + return + } + return r.nextPageFunc(ctx, *r.nextLink) +} + +type ListByStreamingJobOperationOptions struct { + Select *string +} + +func DefaultListByStreamingJobOperationOptions() ListByStreamingJobOperationOptions { + return ListByStreamingJobOperationOptions{} +} + +func (o ListByStreamingJobOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +func (o ListByStreamingJobOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + if o.Select != nil { + out["$select"] = *o.Select + } + + return out +} + +// ListByStreamingJob ... +func (c OutputsClient) ListByStreamingJob(ctx context.Context, id StreamingJobId, options ListByStreamingJobOperationOptions) (resp ListByStreamingJobOperationResponse, err error) { + req, err := c.preparerForListByStreamingJob(ctx, id, options) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "ListByStreamingJob", nil, "Failure preparing request") + return + } + + resp.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "ListByStreamingJob", resp.HttpResponse, "Failure sending request") + return + } + + resp, err = c.responderForListByStreamingJob(resp.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "ListByStreamingJob", resp.HttpResponse, "Failure responding to request") + return + } + return +} + +// preparerForListByStreamingJob prepares the ListByStreamingJob request. 
+func (c OutputsClient) preparerForListByStreamingJob(ctx context.Context, id StreamingJobId, options ListByStreamingJobOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(fmt.Sprintf("%s/outputs", id.ID())), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// preparerForListByStreamingJobWithNextLink prepares the ListByStreamingJob request with the given nextLink token. +func (c OutputsClient) preparerForListByStreamingJobWithNextLink(ctx context.Context, nextLink string) (*http.Request, error) { + uri, err := url.Parse(nextLink) + if err != nil { + return nil, fmt.Errorf("parsing nextLink %q: %+v", nextLink, err) + } + queryParameters := map[string]interface{}{} + for k, v := range uri.Query() { + if len(v) == 0 { + continue + } + val := v[0] + val = autorest.Encode("query", val) + queryParameters[k] = val + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(uri.Path), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForListByStreamingJob handles the response to the ListByStreamingJob request. The method always +// closes the http.Response Body. +func (c OutputsClient) responderForListByStreamingJob(resp *http.Response) (result ListByStreamingJobOperationResponse, err error) { + type page struct { + Values []Output `json:"value"` + NextLink *string `json:"nextLink"` + } + var respObj page + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&respObj), + autorest.ByClosing()) + result.HttpResponse = resp + result.Model = &respObj.Values + result.nextLink = respObj.NextLink + if respObj.NextLink != nil { + result.nextPageFunc = func(ctx context.Context, nextLink string) (result ListByStreamingJobOperationResponse, err error) { + req, err := c.preparerForListByStreamingJobWithNextLink(ctx, nextLink) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "ListByStreamingJob", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "ListByStreamingJob", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForListByStreamingJob(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "ListByStreamingJob", result.HttpResponse, "Failure responding to request") + return + } + + return + } + } + return +} + +// ListByStreamingJobComplete retrieves all of the results into a single object +func (c OutputsClient) ListByStreamingJobComplete(ctx context.Context, id StreamingJobId, options ListByStreamingJobOperationOptions) (ListByStreamingJobCompleteResult, error) { + return c.ListByStreamingJobCompleteMatchingPredicate(ctx, id, options, OutputOperationPredicate{}) +} + +// 
ListByStreamingJobCompleteMatchingPredicate retrieves all of the results and then applies the predicate
+func (c OutputsClient) ListByStreamingJobCompleteMatchingPredicate(ctx context.Context, id StreamingJobId, options ListByStreamingJobOperationOptions, predicate OutputOperationPredicate) (resp ListByStreamingJobCompleteResult, err error) {
+	items := make([]Output, 0)
+
+	page, err := c.ListByStreamingJob(ctx, id, options)
+	if err != nil {
+		err = fmt.Errorf("loading the initial page: %+v", err)
+		return
+	}
+	if page.Model != nil {
+		for _, v := range *page.Model {
+			if predicate.Matches(v) {
+				items = append(items, v)
+			}
+		}
+	}
+
+	for page.HasMore() {
+		page, err = page.LoadMore(ctx)
+		if err != nil {
+			err = fmt.Errorf("loading the next page: %+v", err)
+			return
+		}
+
+		if page.Model != nil {
+			for _, v := range *page.Model {
+				if predicate.Matches(v) {
+					items = append(items, v)
+				}
+			}
+		}
+	}
+
+	out := ListByStreamingJobCompleteResult{
+		Items: items,
+	}
+	return out, nil
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_test_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_test_autorest.go
new file mode 100644
index 000000000000..b6aa6415a2cb
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_test_autorest.go
@@ -0,0 +1,79 @@
+package outputs
+
+import (
+	"context"
+	"fmt"
+	"net/http"
+
+	"github.com/Azure/go-autorest/autorest"
+	"github.com/Azure/go-autorest/autorest/azure"
+	"github.com/hashicorp/go-azure-helpers/polling"
+)
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See NOTICE.txt in the project root for license information.
+
+type TestOperationResponse struct {
+	Poller       polling.LongRunningPoller
+	HttpResponse *http.Response
+}
+
+// Test ...
+func (c OutputsClient) Test(ctx context.Context, id OutputId, input Output) (result TestOperationResponse, err error) {
+	req, err := c.preparerForTest(ctx, id, input)
+	if err != nil {
+		err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "Test", nil, "Failure preparing request")
+		return
+	}
+
+	result, err = c.senderForTest(ctx, req)
+	if err != nil {
+		err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "Test", result.HttpResponse, "Failure sending request")
+		return
+	}
+
+	return
+}
+
+// TestThenPoll performs Test then polls until it's completed
+func (c OutputsClient) TestThenPoll(ctx context.Context, id OutputId, input Output) error {
+	result, err := c.Test(ctx, id, input)
+	if err != nil {
+		return fmt.Errorf("performing Test: %+v", err)
+	}
+
+	if err := result.Poller.PollUntilDone(); err != nil {
+		return fmt.Errorf("polling after Test: %+v", err)
+	}
+
+	return nil
+}
+
+// preparerForTest prepares the Test request.
+func (c OutputsClient) preparerForTest(ctx context.Context, id OutputId, input Output) (*http.Request, error) {
+	queryParameters := map[string]interface{}{
+		"api-version": defaultApiVersion,
+	}
+
+	preparer := autorest.CreatePreparer(
+		autorest.AsContentType("application/json; charset=utf-8"),
+		autorest.AsPost(),
+		autorest.WithBaseURL(c.baseUri),
+		autorest.WithPath(fmt.Sprintf("%s/test", id.ID())),
+		autorest.WithJSON(input),
+		autorest.WithQueryParameters(queryParameters))
+	return preparer.Prepare((&http.Request{}).WithContext(ctx))
+}
+
+// senderForTest sends the Test request.
The method will close the +// http.Response Body if it receives an error. +func (c OutputsClient) senderForTest(ctx context.Context, req *http.Request) (future TestOperationResponse, err error) { + var resp *http.Response + resp, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + return + } + + future.Poller, err = polling.NewPollerFromResponse(ctx, resp, c.Client, req.Method) + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_update_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_update_autorest.go new file mode 100644 index 000000000000..6ca83b47aa91 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/method_update_autorest.go @@ -0,0 +1,98 @@ +package outputs + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type UpdateOperationResponse struct { + HttpResponse *http.Response + Model *Output +} + +type UpdateOperationOptions struct { + IfMatch *string +} + +func DefaultUpdateOperationOptions() UpdateOperationOptions { + return UpdateOperationOptions{} +} + +func (o UpdateOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + if o.IfMatch != nil { + out["If-Match"] = *o.IfMatch + } + + return out +} + +func (o UpdateOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +// Update ... +func (c OutputsClient) Update(ctx context.Context, id OutputId, input Output, options UpdateOperationOptions) (result UpdateOperationResponse, err error) { + req, err := c.preparerForUpdate(ctx, id, input, options) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "Update", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "Update", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForUpdate(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "outputs.OutputsClient", "Update", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForUpdate prepares the Update request. +func (c OutputsClient) preparerForUpdate(ctx context.Context, id OutputId, input Output, options UpdateOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(id.ID()), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForUpdate handles the response to the Update request. The method always +// closes the http.Response Body. 
+func (c OutputsClient) responderForUpdate(resp *http.Response) (result UpdateOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_avroserialization.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_avroserialization.go new file mode 100644 index 000000000000..8f3a707a598f --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_avroserialization.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ Serialization = AvroSerialization{} + +type AvroSerialization struct { + Properties *interface{} `json:"properties,omitempty"` + + // Fields inherited from Serialization +} + +var _ json.Marshaler = AvroSerialization{} + +func (s AvroSerialization) MarshalJSON() ([]byte, error) { + type wrapper AvroSerialization + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AvroSerialization: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AvroSerialization: %+v", err) + } + decoded["type"] = "Avro" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AvroSerialization: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuredatalakestoreoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuredatalakestoreoutputdatasource.go new file mode 100644 index 000000000000..58cad18e4d18 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuredatalakestoreoutputdatasource.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
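
Each serialization and datasource model in this package repeats the wrapper-marshal pattern above: marshal through a type alias (which drops the custom `MarshalJSON` and avoids infinite recursion), splice the discriminator into the decoded map, and re-marshal. A minimal, illustrative sketch of the resulting wire shape:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs"
)

func main() {
	// even an empty AvroSerialization carries its discriminator on the wire,
	// because MarshalJSON injects it after the initial marshal
	b, err := json.Marshal(outputs.AvroSerialization{})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // prints: {"type":"Avro"}
}
```
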
+ +var _ OutputDataSource = AzureDataLakeStoreOutputDataSource{} + +type AzureDataLakeStoreOutputDataSource struct { + Properties *AzureDataLakeStoreOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = AzureDataLakeStoreOutputDataSource{} + +func (s AzureDataLakeStoreOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper AzureDataLakeStoreOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureDataLakeStoreOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureDataLakeStoreOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.DataLake/Accounts" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureDataLakeStoreOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuredatalakestoreoutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuredatalakestoreoutputdatasourceproperties.go new file mode 100644 index 000000000000..137ffc5f7acf --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuredatalakestoreoutputdatasourceproperties.go @@ -0,0 +1,16 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type AzureDataLakeStoreOutputDataSourceProperties struct { + AccountName *string `json:"accountName,omitempty"` + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + DateFormat *string `json:"dateFormat,omitempty"` + FilePathPrefix *string `json:"filePathPrefix,omitempty"` + RefreshToken *string `json:"refreshToken,omitempty"` + TenantId *string `json:"tenantId,omitempty"` + TimeFormat *string `json:"timeFormat,omitempty"` + TokenUserDisplayName *string `json:"tokenUserDisplayName,omitempty"` + TokenUserPrincipalName *string `json:"tokenUserPrincipalName,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azurefunctionoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azurefunctionoutputdatasource.go new file mode 100644 index 000000000000..c1092f203073 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azurefunctionoutputdatasource.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ OutputDataSource = AzureFunctionOutputDataSource{} + +type AzureFunctionOutputDataSource struct { + Properties *AzureFunctionOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = AzureFunctionOutputDataSource{} + +func (s AzureFunctionOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper AzureFunctionOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureFunctionOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureFunctionOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.AzureFunction" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureFunctionOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azurefunctionoutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azurefunctionoutputdatasourceproperties.go new file mode 100644 index 000000000000..81ea50b88aa5 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azurefunctionoutputdatasourceproperties.go @@ -0,0 +1,12 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type AzureFunctionOutputDataSourceProperties struct { + ApiKey *string `json:"apiKey,omitempty"` + FunctionAppName *string `json:"functionAppName,omitempty"` + FunctionName *string `json:"functionName,omitempty"` + MaxBatchCount *float64 `json:"maxBatchCount,omitempty"` + MaxBatchSize *float64 `json:"maxBatchSize,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresqldatabasedatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresqldatabasedatasourceproperties.go new file mode 100644 index 000000000000..60d68996a096 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresqldatabasedatasourceproperties.go @@ -0,0 +1,15 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
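
All of the generated properties models use pointer fields so that unset values are omitted from the request body. A construction sketch for the Azure Function datasource above; the pointer helpers and all values are hypothetical placeholders:

```go
package example

import (
	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs"
)

// local helpers for taking addresses of literals (illustrative only)
func strPtr(s string) *string   { return &s }
func f64Ptr(f float64) *float64 { return &f }

// exampleFunctionOutput builds a datasource with placeholder values; every
// property is a pointer so unset fields stay out of the JSON payload.
func exampleFunctionOutput() outputs.AzureFunctionOutputDataSource {
	return outputs.AzureFunctionOutputDataSource{
		Properties: &outputs.AzureFunctionOutputDataSourceProperties{
			FunctionAppName: strPtr("example-function-app"),
			FunctionName:    strPtr("example-function"),
			ApiKey:          strPtr("example-api-key"),
			MaxBatchCount:   f64Ptr(100),
			MaxBatchSize:    f64Ptr(262144),
		},
	}
}
```
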
+ +type AzureSqlDatabaseDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + Database *string `json:"database,omitempty"` + MaxBatchCount *float64 `json:"maxBatchCount,omitempty"` + MaxWriterCount *float64 `json:"maxWriterCount,omitempty"` + Password *string `json:"password,omitempty"` + Server *string `json:"server,omitempty"` + Table *string `json:"table,omitempty"` + User *string `json:"user,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresqldatabaseoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresqldatabaseoutputdatasource.go new file mode 100644 index 000000000000..4918aa547ee1 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresqldatabaseoutputdatasource.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ OutputDataSource = AzureSqlDatabaseOutputDataSource{} + +type AzureSqlDatabaseOutputDataSource struct { + Properties *AzureSqlDatabaseDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = AzureSqlDatabaseOutputDataSource{} + +func (s AzureSqlDatabaseOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper AzureSqlDatabaseOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureSqlDatabaseOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureSqlDatabaseOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Sql/Server/Database" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureSqlDatabaseOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresynapsedatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresynapsedatasourceproperties.go new file mode 100644 index 000000000000..2a1d340c529f --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresynapsedatasourceproperties.go @@ -0,0 +1,12 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type AzureSynapseDataSourceProperties struct { + Database *string `json:"database,omitempty"` + Password *string `json:"password,omitempty"` + Server *string `json:"server,omitempty"` + Table *string `json:"table,omitempty"` + User *string `json:"user,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresynapseoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresynapseoutputdatasource.go new file mode 100644 index 000000000000..857db7d04816 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuresynapseoutputdatasource.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ OutputDataSource = AzureSynapseOutputDataSource{} + +type AzureSynapseOutputDataSource struct { + Properties *AzureSynapseDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = AzureSynapseOutputDataSource{} + +func (s AzureSynapseOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper AzureSynapseOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureSynapseOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureSynapseOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Sql/Server/DataWarehouse" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureSynapseOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuretableoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuretableoutputdatasource.go new file mode 100644 index 000000000000..865e2512bb6b --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuretableoutputdatasource.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ OutputDataSource = AzureTableOutputDataSource{} + +type AzureTableOutputDataSource struct { + Properties *AzureTableOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = AzureTableOutputDataSource{} + +func (s AzureTableOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper AzureTableOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureTableOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureTableOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Storage/Table" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureTableOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuretableoutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuretableoutputdatasourceproperties.go new file mode 100644 index 000000000000..41b4521e1ae4 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_azuretableoutputdatasourceproperties.go @@ -0,0 +1,14 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type AzureTableOutputDataSourceProperties struct { + AccountKey *string `json:"accountKey,omitempty"` + AccountName *string `json:"accountName,omitempty"` + BatchSize *int64 `json:"batchSize,omitempty"` + ColumnsToRemove *[]string `json:"columnsToRemove,omitempty"` + PartitionKey *string `json:"partitionKey,omitempty"` + RowKey *string `json:"rowKey,omitempty"` + Table *string `json:"table,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_bloboutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_bloboutputdatasource.go new file mode 100644 index 000000000000..73ede46ee3ee --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_bloboutputdatasource.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ OutputDataSource = BlobOutputDataSource{} + +type BlobOutputDataSource struct { + Properties *BlobOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = BlobOutputDataSource{} + +func (s BlobOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper BlobOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling BlobOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling BlobOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Storage/Blob" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling BlobOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_bloboutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_bloboutputdatasourceproperties.go new file mode 100644 index 000000000000..c1b92efb2385 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_bloboutputdatasourceproperties.go @@ -0,0 +1,14 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type BlobOutputDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + BlobPathPrefix *string `json:"blobPathPrefix,omitempty"` + Container *string `json:"container,omitempty"` + DateFormat *string `json:"dateFormat,omitempty"` + PathPattern *string `json:"pathPattern,omitempty"` + StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"` + TimeFormat *string `json:"timeFormat,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_csvserialization.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_csvserialization.go new file mode 100644 index 000000000000..8bd09587b21a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_csvserialization.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ Serialization = CsvSerialization{} + +type CsvSerialization struct { + Properties *CsvSerializationProperties `json:"properties,omitempty"` + + // Fields inherited from Serialization +} + +var _ json.Marshaler = CsvSerialization{} + +func (s CsvSerialization) MarshalJSON() ([]byte, error) { + type wrapper CsvSerialization + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling CsvSerialization: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling CsvSerialization: %+v", err) + } + decoded["type"] = "Csv" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling CsvSerialization: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_csvserializationproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_csvserializationproperties.go new file mode 100644 index 000000000000..13e3dab215cb --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_csvserializationproperties.go @@ -0,0 +1,9 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type CsvSerializationProperties struct { + Encoding *Encoding `json:"encoding,omitempty"` + FieldDelimiter *string `json:"fieldDelimiter,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_diagnosticcondition.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_diagnosticcondition.go new file mode 100644 index 000000000000..422e425f69aa --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_diagnosticcondition.go @@ -0,0 +1,10 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type DiagnosticCondition struct { + Code *string `json:"code,omitempty"` + Message *string `json:"message,omitempty"` + Since *string `json:"since,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_diagnostics.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_diagnostics.go new file mode 100644 index 000000000000..a963ca015f68 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_diagnostics.go @@ -0,0 +1,8 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
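+
+// Diagnostics carries the DiagnosticCondition entries (code, message, since)
+// that the service reports for an output; it is populated by the service
+// rather than set by callers.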
+ +type Diagnostics struct { + Conditions *[]DiagnosticCondition `json:"conditions,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_documentdboutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_documentdboutputdatasource.go new file mode 100644 index 000000000000..bb4868131b10 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_documentdboutputdatasource.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ OutputDataSource = DocumentDbOutputDataSource{} + +type DocumentDbOutputDataSource struct { + Properties *DocumentDbOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = DocumentDbOutputDataSource{} + +func (s DocumentDbOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper DocumentDbOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling DocumentDbOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling DocumentDbOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Storage/DocumentDB" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling DocumentDbOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_documentdboutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_documentdboutputdatasourceproperties.go new file mode 100644 index 000000000000..67b3ee033e80 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_documentdboutputdatasourceproperties.go @@ -0,0 +1,13 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type DocumentDbOutputDataSourceProperties struct { + AccountId *string `json:"accountId,omitempty"` + AccountKey *string `json:"accountKey,omitempty"` + CollectionNamePattern *string `json:"collectionNamePattern,omitempty"` + Database *string `json:"database,omitempty"` + DocumentId *string `json:"documentId,omitempty"` + PartitionKey *string `json:"partitionKey,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_errorresponse.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_errorresponse.go new file mode 100644 index 000000000000..3f07c9683d48 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_errorresponse.go @@ -0,0 +1,9 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type ErrorResponse struct { + Code *string `json:"code,omitempty"` + Message *string `json:"message,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_eventhuboutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_eventhuboutputdatasource.go new file mode 100644 index 000000000000..08bce058c236 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_eventhuboutputdatasource.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ OutputDataSource = EventHubOutputDataSource{} + +type EventHubOutputDataSource struct { + Properties *EventHubOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = EventHubOutputDataSource{} + +func (s EventHubOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper EventHubOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling EventHubOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling EventHubOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.ServiceBus/EventHub" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling EventHubOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_eventhuboutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_eventhuboutputdatasourceproperties.go new file mode 100644 index 000000000000..0e8342d7cd60 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_eventhuboutputdatasourceproperties.go @@ -0,0 +1,14 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
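+
+// EventHubOutputDataSourceProperties is shared by EventHubOutputDataSource
+// and EventHubV2OutputDataSource; the two differ only in the "type"
+// discriminator they marshal ("Microsoft.ServiceBus/EventHub" vs
+// "Microsoft.EventHub/EventHub").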
+ +type EventHubOutputDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + EventHubName *string `json:"eventHubName,omitempty"` + PartitionKey *string `json:"partitionKey,omitempty"` + PropertyColumns *[]string `json:"propertyColumns,omitempty"` + ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_eventhubv2outputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_eventhubv2outputdatasource.go new file mode 100644 index 000000000000..4f2a3a8e7fca --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_eventhubv2outputdatasource.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ OutputDataSource = EventHubV2OutputDataSource{} + +type EventHubV2OutputDataSource struct { + Properties *EventHubOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = EventHubV2OutputDataSource{} + +func (s EventHubV2OutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper EventHubV2OutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling EventHubV2OutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling EventHubV2OutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.EventHub/EventHub" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling EventHubV2OutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_gatewaymessagebusoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_gatewaymessagebusoutputdatasource.go new file mode 100644 index 000000000000..25ab36657286 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_gatewaymessagebusoutputdatasource.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ OutputDataSource = GatewayMessageBusOutputDataSource{} + +type GatewayMessageBusOutputDataSource struct { + Properties *GatewayMessageBusSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = GatewayMessageBusOutputDataSource{} + +func (s GatewayMessageBusOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper GatewayMessageBusOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling GatewayMessageBusOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling GatewayMessageBusOutputDataSource: %+v", err) + } + decoded["type"] = "GatewayMessageBus" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling GatewayMessageBusOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_gatewaymessagebussourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_gatewaymessagebussourceproperties.go new file mode 100644 index 000000000000..5b75b8c9943e --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_gatewaymessagebussourceproperties.go @@ -0,0 +1,8 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type GatewayMessageBusSourceProperties struct { + Topic *string `json:"topic,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_jsonserialization.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_jsonserialization.go new file mode 100644 index 000000000000..6c7efa499b37 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_jsonserialization.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ Serialization = JsonSerialization{} + +type JsonSerialization struct { + Properties *JsonSerializationProperties `json:"properties,omitempty"` + + // Fields inherited from Serialization +} + +var _ json.Marshaler = JsonSerialization{} + +func (s JsonSerialization) MarshalJSON() ([]byte, error) { + type wrapper JsonSerialization + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling JsonSerialization: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling JsonSerialization: %+v", err) + } + decoded["type"] = "Json" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling JsonSerialization: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_jsonserializationproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_jsonserializationproperties.go new file mode 100644 index 000000000000..7ec305f1821f --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_jsonserializationproperties.go @@ -0,0 +1,9 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type JsonSerializationProperties struct { + Encoding *Encoding `json:"encoding,omitempty"` + Format *JsonOutputSerializationFormat `json:"format,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_output.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_output.go new file mode 100644 index 000000000000..3acf975d1a0f --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_output.go @@ -0,0 +1,11 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type Output struct { + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Properties *OutputProperties `json:"properties,omitempty"` + Type *string `json:"type,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_outputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_outputdatasource.go new file mode 100644 index 000000000000..4c43de2e458a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_outputdatasource.go @@ -0,0 +1,144 @@ +package outputs + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
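+
+// OutputDataSource is a discriminated union: the wire format carries a "type"
+// field which selects the concrete implementation. Each implementation's
+// MarshalJSON re-injects its discriminator, and
+// unmarshalOutputDataSourceImplementation below performs the reverse mapping,
+// falling back to a raw wrapper for unrecognised discriminators.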
+ +type OutputDataSource interface { +} + +func unmarshalOutputDataSourceImplementation(input []byte) (OutputDataSource, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling OutputDataSource into map[string]interface: %+v", err) + } + + value, ok := temp["type"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Microsoft.DataLake/Accounts") { + var out AzureDataLakeStoreOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureDataLakeStoreOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.AzureFunction") { + var out AzureFunctionOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureFunctionOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.Sql/Server/Database") { + var out AzureSqlDatabaseOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureSqlDatabaseOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.Sql/Server/DataWarehouse") { + var out AzureSynapseOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureSynapseOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.Storage/Table") { + var out AzureTableOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureTableOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.Storage/Blob") { + var out BlobOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into BlobOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.Storage/DocumentDB") { + var out DocumentDbOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into DocumentDbOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.ServiceBus/EventHub") { + var out EventHubOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into EventHubOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.EventHub/EventHub") { + var out EventHubV2OutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into EventHubV2OutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "GatewayMessageBus") { + var out GatewayMessageBusOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into GatewayMessageBusOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "PowerBI") { + var out PowerBIOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into PowerBIOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.ServiceBus/Queue") { + var out ServiceBusQueueOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into 
ServiceBusQueueOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.ServiceBus/Topic") { + var out ServiceBusTopicOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into ServiceBusTopicOutputDataSource: %+v", err) + } + return out, nil + } + + type RawOutputDataSourceImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawOutputDataSourceImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_outputproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_outputproperties.go new file mode 100644 index 000000000000..0d32a2c62777 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_outputproperties.go @@ -0,0 +1,55 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type OutputProperties struct { + Datasource OutputDataSource `json:"datasource"` + Diagnostics *Diagnostics `json:"diagnostics,omitempty"` + Etag *string `json:"etag,omitempty"` + Serialization Serialization `json:"serialization"` + SizeWindow *float64 `json:"sizeWindow,omitempty"` + TimeWindow *string `json:"timeWindow,omitempty"` +} + +var _ json.Unmarshaler = &OutputProperties{} + +func (s *OutputProperties) UnmarshalJSON(bytes []byte) error { + type alias OutputProperties + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into OutputProperties: %+v", err) + } + + s.Diagnostics = decoded.Diagnostics + s.Etag = decoded.Etag + s.SizeWindow = decoded.SizeWindow + s.TimeWindow = decoded.TimeWindow + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling OutputProperties into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["datasource"]; ok { + impl, err := unmarshalOutputDataSourceImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Datasource' for 'OutputProperties': %+v", err) + } + s.Datasource = impl + } + + if v, ok := temp["serialization"]; ok { + impl, err := unmarshalSerializationImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Serialization' for 'OutputProperties': %+v", err) + } + s.Serialization = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_parquetserialization.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_parquetserialization.go new file mode 100644 index 000000000000..00891b61aa70 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_parquetserialization.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
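+
+// Note: unlike CsvSerialization and JsonSerialization, ParquetSerialization
+// carries its properties as an untyped *interface{} in this API version.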
+ +var _ Serialization = ParquetSerialization{} + +type ParquetSerialization struct { + Properties *interface{} `json:"properties,omitempty"` + + // Fields inherited from Serialization +} + +var _ json.Marshaler = ParquetSerialization{} + +func (s ParquetSerialization) MarshalJSON() ([]byte, error) { + type wrapper ParquetSerialization + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling ParquetSerialization: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling ParquetSerialization: %+v", err) + } + decoded["type"] = "Parquet" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling ParquetSerialization: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_powerbioutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_powerbioutputdatasource.go new file mode 100644 index 000000000000..adcbb6fe8a06 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_powerbioutputdatasource.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ OutputDataSource = PowerBIOutputDataSource{} + +type PowerBIOutputDataSource struct { + Properties *PowerBIOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = PowerBIOutputDataSource{} + +func (s PowerBIOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper PowerBIOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling PowerBIOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling PowerBIOutputDataSource: %+v", err) + } + decoded["type"] = "PowerBI" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling PowerBIOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_powerbioutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_powerbioutputdatasourceproperties.go new file mode 100644 index 000000000000..ac3859d230df --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_powerbioutputdatasourceproperties.go @@ -0,0 +1,15 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type PowerBIOutputDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + Dataset *string `json:"dataset,omitempty"` + GroupId *string `json:"groupId,omitempty"` + GroupName *string `json:"groupName,omitempty"` + RefreshToken *string `json:"refreshToken,omitempty"` + Table *string `json:"table,omitempty"` + TokenUserDisplayName *string `json:"tokenUserDisplayName,omitempty"` + TokenUserPrincipalName *string `json:"tokenUserPrincipalName,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_resourceteststatus.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_resourceteststatus.go new file mode 100644 index 000000000000..d94ed4cc8e39 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_resourceteststatus.go @@ -0,0 +1,9 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ResourceTestStatus struct { + Error *ErrorResponse `json:"error,omitempty"` + Status *string `json:"status,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_serialization.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_serialization.go new file mode 100644 index 000000000000..4c3a22bf0b30 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_serialization.go @@ -0,0 +1,72 @@ +package outputs + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
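+
+// Serialization is the discriminated union over the Avro, Csv, Json and
+// Parquet event serialization formats; unrecognised "type" values are
+// preserved in a raw wrapper by unmarshalSerializationImplementation.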
+ +type Serialization interface { +} + +func unmarshalSerializationImplementation(input []byte) (Serialization, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling Serialization into map[string]interface: %+v", err) + } + + value, ok := temp["type"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Avro") { + var out AvroSerialization + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AvroSerialization: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Csv") { + var out CsvSerialization + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into CsvSerialization: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Json") { + var out JsonSerialization + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into JsonSerialization: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Parquet") { + var out ParquetSerialization + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into ParquetSerialization: %+v", err) + } + return out, nil + } + + type RawSerializationImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawSerializationImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebusqueueoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebusqueueoutputdatasource.go new file mode 100644 index 000000000000..ce896a400231 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebusqueueoutputdatasource.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ OutputDataSource = ServiceBusQueueOutputDataSource{} + +type ServiceBusQueueOutputDataSource struct { + Properties *ServiceBusQueueOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = ServiceBusQueueOutputDataSource{} + +func (s ServiceBusQueueOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper ServiceBusQueueOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling ServiceBusQueueOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling ServiceBusQueueOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.ServiceBus/Queue" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling ServiceBusQueueOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebusqueueoutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebusqueueoutputdatasourceproperties.go new file mode 100644 index 000000000000..a8a617ce1ea3 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebusqueueoutputdatasourceproperties.go @@ -0,0 +1,14 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ServiceBusQueueOutputDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + PropertyColumns *[]string `json:"propertyColumns,omitempty"` + QueueName *string `json:"queueName,omitempty"` + ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` + SystemPropertyColumns *interface{} `json:"systemPropertyColumns,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebustopicoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebustopicoutputdatasource.go new file mode 100644 index 000000000000..6d62ab02c182 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebustopicoutputdatasource.go @@ -0,0 +1,41 @@ +package outputs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ OutputDataSource = ServiceBusTopicOutputDataSource{} + +type ServiceBusTopicOutputDataSource struct { + Properties *ServiceBusTopicOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = ServiceBusTopicOutputDataSource{} + +func (s ServiceBusTopicOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper ServiceBusTopicOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling ServiceBusTopicOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling ServiceBusTopicOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.ServiceBus/Topic" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling ServiceBusTopicOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebustopicoutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebustopicoutputdatasourceproperties.go new file mode 100644 index 000000000000..2d8cfa879414 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_servicebustopicoutputdatasourceproperties.go @@ -0,0 +1,14 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ServiceBusTopicOutputDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + PropertyColumns *[]string `json:"propertyColumns,omitempty"` + ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` + SystemPropertyColumns *map[string]string `json:"systemPropertyColumns,omitempty"` + TopicName *string `json:"topicName,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_storageaccount.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_storageaccount.go new file mode 100644 index 000000000000..b6a549b163fe --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/model_storageaccount.go @@ -0,0 +1,9 @@ +package outputs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+
+type StorageAccount struct {
+	AccountKey  *string `json:"accountKey,omitempty"`
+	AccountName *string `json:"accountName,omitempty"`
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/predicates.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/predicates.go
new file mode 100644
index 000000000000..4b9acfeac90d
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/predicates.go
@@ -0,0 +1,24 @@
+package outputs

+type OutputOperationPredicate struct {
+	Id   *string
+	Name *string
+	Type *string
+}
+
+func (p OutputOperationPredicate) Matches(input Output) bool {
+
+	if p.Id != nil && (input.Id == nil || *p.Id != *input.Id) {
+		return false
+	}
+
+	if p.Name != nil && (input.Name == nil || *p.Name != *input.Name) {
+		return false
+	}
+
+	if p.Type != nil && (input.Type == nil || *p.Type != *input.Type) {
+		return false
+	}
+
+	return true
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/version.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/version.go
new file mode 100644
index 000000000000..aab5a67daef9
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs/version.go
@@ -0,0 +1,12 @@
+package outputs
+
+import "fmt"
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See NOTICE.txt in the project root for license information.
+
+const defaultApiVersion = "2020-03-01"
+
+func userAgent() string {
+	return fmt.Sprintf("hashicorp/go-azure-sdk/outputs/%s", defaultApiVersion)
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/README.md b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/README.md
new file mode 100644
index 000000000000..e82d7839b47d
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/README.md
@@ -0,0 +1,86 @@
+
+## `github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints` Documentation
+
+The `privateendpoints` SDK allows for interaction with the Azure Resource Manager Service `streamanalytics` (API Version `2020-03-01`).
+
+This readme covers example usages, but further information on [using this SDK can be found in the project root](https://github.com/hashicorp/go-azure-sdk/tree/main/docs).
+
+### Import Path
+
+```go
+import "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints"
+```
+
+
+### Client Initialization
+
+```go
+client := privateendpoints.NewPrivateEndpointsClientWithBaseURI("https://management.azure.com")
+client.Client.Authorizer = authorizer
+```
+
+
+### Example Usage: `PrivateEndpointsClient.CreateOrUpdate`
+
+```go
+ctx := context.TODO()
+id := privateendpoints.NewPrivateEndpointID("12345678-1234-9876-4563-123456789012", "example-resource-group", "clusterValue", "privateEndpointValue")
+
+payload := privateendpoints.PrivateEndpoint{
+	// ...
+
+}
+
+
+read, err := client.CreateOrUpdate(ctx, id, payload, privateendpoints.DefaultCreateOrUpdateOperationOptions())
+if err != nil {
+	// handle the error
+}
+if model := read.Model; model != nil {
+	// do something with the model/response object
+}
+```
+
+
+### Example Usage: `PrivateEndpointsClient.Delete`
+
+```go
+ctx := context.TODO()
+id := privateendpoints.NewPrivateEndpointID("12345678-1234-9876-4563-123456789012", "example-resource-group", "clusterValue", "privateEndpointValue")
+
+if err := client.DeleteThenPoll(ctx, id); err != nil {
+	// handle the error
+}
+```
+
+
+### Example Usage: `PrivateEndpointsClient.Get`
+
+```go
+ctx := context.TODO()
+id := privateendpoints.NewPrivateEndpointID("12345678-1234-9876-4563-123456789012", "example-resource-group", "clusterValue", "privateEndpointValue")
+
+read, err := client.Get(ctx, id)
+if err != nil {
+	// handle the error
+}
+if model := read.Model; model != nil {
+	// do something with the model/response object
+}
+```
+
+
+### Example Usage: `PrivateEndpointsClient.ListByCluster`
+
+```go
+ctx := context.TODO()
+id := privateendpoints.NewClusterID("12345678-1234-9876-4563-123456789012", "example-resource-group", "clusterValue")
+
+// alternatively `client.ListByCluster(ctx, id)` can be used to do batched pagination
+items, err := client.ListByClusterComplete(ctx, id)
+if err != nil {
+	// handle the error
+}
+for _, item := range items.Items {
+	// do something
+}
+```
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/client.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/client.go
new file mode 100644
index 000000000000..fa0a5ad4e663
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/client.go
@@ -0,0 +1,18 @@
+package privateendpoints
+
+import "github.com/Azure/go-autorest/autorest"
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See NOTICE.txt in the project root for license information.
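+
+// PrivateEndpointsClient is a thin wrapper around an autorest.Client bound to
+// a single base URI; request paths are derived from the typed resource IDs
+// defined alongside it.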
+ +type PrivateEndpointsClient struct { + Client autorest.Client + baseUri string +} + +func NewPrivateEndpointsClientWithBaseURI(endpoint string) PrivateEndpointsClient { + return PrivateEndpointsClient{ + Client: autorest.NewClientWithUserAgent(userAgent()), + baseUri: endpoint, + } +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/id_cluster.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/id_cluster.go new file mode 100644 index 000000000000..b6d780a02962 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/id_cluster.go @@ -0,0 +1,124 @@ +package privateendpoints + +import ( + "fmt" + "strings" + + "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" +) + +var _ resourceids.ResourceId = ClusterId{} + +// ClusterId is a struct representing the Resource ID for a Cluster +type ClusterId struct { + SubscriptionId string + ResourceGroupName string + ClusterName string +} + +// NewClusterID returns a new ClusterId struct +func NewClusterID(subscriptionId string, resourceGroupName string, clusterName string) ClusterId { + return ClusterId{ + SubscriptionId: subscriptionId, + ResourceGroupName: resourceGroupName, + ClusterName: clusterName, + } +} + +// ParseClusterID parses 'input' into a ClusterId +func ParseClusterID(input string) (*ClusterId, error) { + parser := resourceids.NewParserFromResourceIdType(ClusterId{}) + parsed, err := parser.Parse(input, false) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := ClusterId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.ClusterName, ok = parsed.Parsed["clusterName"]; !ok { + return nil, fmt.Errorf("the segment 'clusterName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ParseClusterIDInsensitively parses 'input' case-insensitively into a ClusterId +// note: this method should only be used for API response data and not user input +func ParseClusterIDInsensitively(input string) (*ClusterId, error) { + parser := resourceids.NewParserFromResourceIdType(ClusterId{}) + parsed, err := parser.Parse(input, true) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := ClusterId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.ClusterName, ok = parsed.Parsed["clusterName"]; !ok { + return nil, fmt.Errorf("the segment 'clusterName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ValidateClusterID checks that 'input' can be parsed as a Cluster ID +func ValidateClusterID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if 
_, err := ParseClusterID(v); err != nil { + errors = append(errors, err) + } + + return +} + +// ID returns the formatted Cluster ID +func (id ClusterId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/clusters/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.ClusterName) +} + +// Segments returns a slice of Resource ID Segments which comprise this Cluster ID +func (id ClusterId) Segments() []resourceids.Segment { + return []resourceids.Segment{ + resourceids.StaticSegment("staticSubscriptions", "subscriptions", "subscriptions"), + resourceids.SubscriptionIdSegment("subscriptionId", "12345678-1234-9876-4563-123456789012"), + resourceids.StaticSegment("staticResourceGroups", "resourceGroups", "resourceGroups"), + resourceids.ResourceGroupSegment("resourceGroupName", "example-resource-group"), + resourceids.StaticSegment("staticProviders", "providers", "providers"), + resourceids.ResourceProviderSegment("staticMicrosoftStreamAnalytics", "Microsoft.StreamAnalytics", "Microsoft.StreamAnalytics"), + resourceids.StaticSegment("staticClusters", "clusters", "clusters"), + resourceids.UserSpecifiedSegment("clusterName", "clusterValue"), + } +} + +// String returns a human-readable description of this Cluster ID +func (id ClusterId) String() string { + components := []string{ + fmt.Sprintf("Subscription: %q", id.SubscriptionId), + fmt.Sprintf("Resource Group Name: %q", id.ResourceGroupName), + fmt.Sprintf("Cluster Name: %q", id.ClusterName), + } + return fmt.Sprintf("Cluster (%s)", strings.Join(components, "\n")) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/id_privateendpoint.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/id_privateendpoint.go new file mode 100644 index 000000000000..630e4aa240a4 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/id_privateendpoint.go @@ -0,0 +1,137 @@ +package privateendpoints + +import ( + "fmt" + "strings" + + "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" +) + +var _ resourceids.ResourceId = PrivateEndpointId{} + +// PrivateEndpointId is a struct representing the Resource ID for a Private Endpoint +type PrivateEndpointId struct { + SubscriptionId string + ResourceGroupName string + ClusterName string + PrivateEndpointName string +} + +// NewPrivateEndpointID returns a new PrivateEndpointId struct +func NewPrivateEndpointID(subscriptionId string, resourceGroupName string, clusterName string, privateEndpointName string) PrivateEndpointId { + return PrivateEndpointId{ + SubscriptionId: subscriptionId, + ResourceGroupName: resourceGroupName, + ClusterName: clusterName, + PrivateEndpointName: privateEndpointName, + } +} + +// ParsePrivateEndpointID parses 'input' into a PrivateEndpointId +func ParsePrivateEndpointID(input string) (*PrivateEndpointId, error) { + parser := resourceids.NewParserFromResourceIdType(PrivateEndpointId{}) + parsed, err := parser.Parse(input, false) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := PrivateEndpointId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, 
fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.ClusterName, ok = parsed.Parsed["clusterName"]; !ok { + return nil, fmt.Errorf("the segment 'clusterName' was not found in the resource id %q", input) + } + + if id.PrivateEndpointName, ok = parsed.Parsed["privateEndpointName"]; !ok { + return nil, fmt.Errorf("the segment 'privateEndpointName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ParsePrivateEndpointIDInsensitively parses 'input' case-insensitively into a PrivateEndpointId +// note: this method should only be used for API response data and not user input +func ParsePrivateEndpointIDInsensitively(input string) (*PrivateEndpointId, error) { + parser := resourceids.NewParserFromResourceIdType(PrivateEndpointId{}) + parsed, err := parser.Parse(input, true) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := PrivateEndpointId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.ClusterName, ok = parsed.Parsed["clusterName"]; !ok { + return nil, fmt.Errorf("the segment 'clusterName' was not found in the resource id %q", input) + } + + if id.PrivateEndpointName, ok = parsed.Parsed["privateEndpointName"]; !ok { + return nil, fmt.Errorf("the segment 'privateEndpointName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ValidatePrivateEndpointID checks that 'input' can be parsed as a Private Endpoint ID +func ValidatePrivateEndpointID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := ParsePrivateEndpointID(v); err != nil { + errors = append(errors, err) + } + + return +} + +// ID returns the formatted Private Endpoint ID +func (id PrivateEndpointId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/clusters/%s/privateEndpoints/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.ClusterName, id.PrivateEndpointName) +} + +// Segments returns a slice of Resource ID Segments which comprise this Private Endpoint ID +func (id PrivateEndpointId) Segments() []resourceids.Segment { + return []resourceids.Segment{ + resourceids.StaticSegment("staticSubscriptions", "subscriptions", "subscriptions"), + resourceids.SubscriptionIdSegment("subscriptionId", "12345678-1234-9876-4563-123456789012"), + resourceids.StaticSegment("staticResourceGroups", "resourceGroups", "resourceGroups"), + resourceids.ResourceGroupSegment("resourceGroupName", "example-resource-group"), + resourceids.StaticSegment("staticProviders", "providers", "providers"), + resourceids.ResourceProviderSegment("staticMicrosoftStreamAnalytics", "Microsoft.StreamAnalytics", "Microsoft.StreamAnalytics"), + resourceids.StaticSegment("staticClusters", "clusters", "clusters"), + resourceids.UserSpecifiedSegment("clusterName", "clusterValue"), + resourceids.StaticSegment("staticPrivateEndpoints", "privateEndpoints", "privateEndpoints"), + resourceids.UserSpecifiedSegment("privateEndpointName", "privateEndpointValue"), + } +} + +// String 
returns a human-readable description of this Private Endpoint ID +func (id PrivateEndpointId) String() string { + components := []string{ + fmt.Sprintf("Subscription: %q", id.SubscriptionId), + fmt.Sprintf("Resource Group Name: %q", id.ResourceGroupName), + fmt.Sprintf("Cluster Name: %q", id.ClusterName), + fmt.Sprintf("Private Endpoint Name: %q", id.PrivateEndpointName), + } + return fmt.Sprintf("Private Endpoint (%s)", strings.Join(components, "\n")) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_createorupdate_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_createorupdate_autorest.go new file mode 100644 index 000000000000..d9fd2fd7fd29 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_createorupdate_autorest.go @@ -0,0 +1,103 @@ +package privateendpoints + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type CreateOrUpdateOperationResponse struct { + HttpResponse *http.Response + Model *PrivateEndpoint +} + +type CreateOrUpdateOperationOptions struct { + IfMatch *string + IfNoneMatch *string +} + +func DefaultCreateOrUpdateOperationOptions() CreateOrUpdateOperationOptions { + return CreateOrUpdateOperationOptions{} +} + +func (o CreateOrUpdateOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + if o.IfMatch != nil { + out["If-Match"] = *o.IfMatch + } + + if o.IfNoneMatch != nil { + out["If-None-Match"] = *o.IfNoneMatch + } + + return out +} + +func (o CreateOrUpdateOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +// CreateOrUpdate ... +func (c PrivateEndpointsClient) CreateOrUpdate(ctx context.Context, id PrivateEndpointId, input PrivateEndpoint, options CreateOrUpdateOperationOptions) (result CreateOrUpdateOperationResponse, err error) { + req, err := c.preparerForCreateOrUpdate(ctx, id, input, options) + if err != nil { + err = autorest.NewErrorWithError(err, "privateendpoints.PrivateEndpointsClient", "CreateOrUpdate", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "privateendpoints.PrivateEndpointsClient", "CreateOrUpdate", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForCreateOrUpdate(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "privateendpoints.PrivateEndpointsClient", "CreateOrUpdate", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForCreateOrUpdate prepares the CreateOrUpdate request. 
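+// It issues a PUT to the resource ID's path with the JSON payload, the
+// api-version query parameter, and any If-Match/If-None-Match headers
+// supplied via the options.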
+func (c PrivateEndpointsClient) preparerForCreateOrUpdate(ctx context.Context, id PrivateEndpointId, input PrivateEndpoint, options CreateOrUpdateOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(id.ID()), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForCreateOrUpdate handles the response to the CreateOrUpdate request. The method always +// closes the http.Response Body. +func (c PrivateEndpointsClient) responderForCreateOrUpdate(resp *http.Response) (result CreateOrUpdateOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusCreated, http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_delete_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_delete_autorest.go new file mode 100644 index 000000000000..975fcaec88f7 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_delete_autorest.go @@ -0,0 +1,78 @@ +package privateendpoints + +import ( + "context" + "fmt" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/hashicorp/go-azure-helpers/polling" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type DeleteOperationResponse struct { + Poller polling.LongRunningPoller + HttpResponse *http.Response +} + +// Delete ... +func (c PrivateEndpointsClient) Delete(ctx context.Context, id PrivateEndpointId) (result DeleteOperationResponse, err error) { + req, err := c.preparerForDelete(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "privateendpoints.PrivateEndpointsClient", "Delete", nil, "Failure preparing request") + return + } + + result, err = c.senderForDelete(ctx, req) + if err != nil { + err = autorest.NewErrorWithError(err, "privateendpoints.PrivateEndpointsClient", "Delete", result.HttpResponse, "Failure sending request") + return + } + + return +} + +// DeleteThenPoll performs Delete then polls until it's completed +func (c PrivateEndpointsClient) DeleteThenPoll(ctx context.Context, id PrivateEndpointId) error { + result, err := c.Delete(ctx, id) + if err != nil { + return fmt.Errorf("performing Delete: %+v", err) + } + + if err := result.Poller.PollUntilDone(); err != nil { + return fmt.Errorf("polling after Delete: %+v", err) + } + + return nil +} + +// preparerForDelete prepares the Delete request. 
+func (c PrivateEndpointsClient) preparerForDelete(ctx context.Context, id PrivateEndpointId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsDelete(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(id.ID()), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// senderForDelete sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (c PrivateEndpointsClient) senderForDelete(ctx context.Context, req *http.Request) (future DeleteOperationResponse, err error) { + var resp *http.Response + resp, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + return + } + + future.Poller, err = polling.NewPollerFromResponse(ctx, resp, c.Client, req.Method) + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_get_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_get_autorest.go new file mode 100644 index 000000000000..df15ef43fdc2 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_get_autorest.go @@ -0,0 +1,68 @@ +package privateendpoints + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type GetOperationResponse struct { + HttpResponse *http.Response + Model *PrivateEndpoint +} + +// Get ... +func (c PrivateEndpointsClient) Get(ctx context.Context, id PrivateEndpointId) (result GetOperationResponse, err error) { + req, err := c.preparerForGet(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "privateendpoints.PrivateEndpointsClient", "Get", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "privateendpoints.PrivateEndpointsClient", "Get", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForGet(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "privateendpoints.PrivateEndpointsClient", "Get", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForGet prepares the Get request. +func (c PrivateEndpointsClient) preparerForGet(ctx context.Context, id PrivateEndpointId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(id.ID()), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForGet handles the response to the Get request. The method always +// closes the http.Response Body. 
+func (c PrivateEndpointsClient) responderForGet(resp *http.Response) (result GetOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_listbycluster_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_listbycluster_autorest.go new file mode 100644 index 000000000000..4d3fb50466ae --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/method_listbycluster_autorest.go @@ -0,0 +1,186 @@ +package privateendpoints + +import ( + "context" + "fmt" + "net/http" + "net/url" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ListByClusterOperationResponse struct { + HttpResponse *http.Response + Model *[]PrivateEndpoint + + nextLink *string + nextPageFunc func(ctx context.Context, nextLink string) (ListByClusterOperationResponse, error) +} + +type ListByClusterCompleteResult struct { + Items []PrivateEndpoint +} + +func (r ListByClusterOperationResponse) HasMore() bool { + return r.nextLink != nil +} + +func (r ListByClusterOperationResponse) LoadMore(ctx context.Context) (resp ListByClusterOperationResponse, err error) { + if !r.HasMore() { + err = fmt.Errorf("no more pages returned") + return + } + return r.nextPageFunc(ctx, *r.nextLink) +} + +// ListByCluster ... +func (c PrivateEndpointsClient) ListByCluster(ctx context.Context, id ClusterId) (resp ListByClusterOperationResponse, err error) { + req, err := c.preparerForListByCluster(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "privateendpoints.PrivateEndpointsClient", "ListByCluster", nil, "Failure preparing request") + return + } + + resp.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "privateendpoints.PrivateEndpointsClient", "ListByCluster", resp.HttpResponse, "Failure sending request") + return + } + + resp, err = c.responderForListByCluster(resp.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "privateendpoints.PrivateEndpointsClient", "ListByCluster", resp.HttpResponse, "Failure responding to request") + return + } + return +} + +// preparerForListByCluster prepares the ListByCluster request. +func (c PrivateEndpointsClient) preparerForListByCluster(ctx context.Context, id ClusterId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(fmt.Sprintf("%s/privateEndpoints", id.ID())), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// preparerForListByClusterWithNextLink prepares the ListByCluster request with the given nextLink token. 
+func (c PrivateEndpointsClient) preparerForListByClusterWithNextLink(ctx context.Context, nextLink string) (*http.Request, error) { + uri, err := url.Parse(nextLink) + if err != nil { + return nil, fmt.Errorf("parsing nextLink %q: %+v", nextLink, err) + } + queryParameters := map[string]interface{}{} + for k, v := range uri.Query() { + if len(v) == 0 { + continue + } + val := v[0] + val = autorest.Encode("query", val) + queryParameters[k] = val + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(uri.Path), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForListByCluster handles the response to the ListByCluster request. The method always +// closes the http.Response Body. +func (c PrivateEndpointsClient) responderForListByCluster(resp *http.Response) (result ListByClusterOperationResponse, err error) { + type page struct { + Values []PrivateEndpoint `json:"value"` + NextLink *string `json:"nextLink"` + } + var respObj page + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&respObj), + autorest.ByClosing()) + result.HttpResponse = resp + result.Model = &respObj.Values + result.nextLink = respObj.NextLink + if respObj.NextLink != nil { + result.nextPageFunc = func(ctx context.Context, nextLink string) (result ListByClusterOperationResponse, err error) { + req, err := c.preparerForListByClusterWithNextLink(ctx, nextLink) + if err != nil { + err = autorest.NewErrorWithError(err, "privateendpoints.PrivateEndpointsClient", "ListByCluster", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "privateendpoints.PrivateEndpointsClient", "ListByCluster", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForListByCluster(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "privateendpoints.PrivateEndpointsClient", "ListByCluster", result.HttpResponse, "Failure responding to request") + return + } + + return + } + } + return +} + +// ListByClusterComplete retrieves all of the results into a single object +func (c PrivateEndpointsClient) ListByClusterComplete(ctx context.Context, id ClusterId) (ListByClusterCompleteResult, error) { + return c.ListByClusterCompleteMatchingPredicate(ctx, id, PrivateEndpointOperationPredicate{}) +} + +// ListByClusterCompleteMatchingPredicate retrieves all of the results and then applied the predicate +func (c PrivateEndpointsClient) ListByClusterCompleteMatchingPredicate(ctx context.Context, id ClusterId, predicate PrivateEndpointOperationPredicate) (resp ListByClusterCompleteResult, err error) { + items := make([]PrivateEndpoint, 0) + + page, err := c.ListByCluster(ctx, id) + if err != nil { + err = fmt.Errorf("loading the initial page: %+v", err) + return + } + if page.Model != nil { + for _, v := range *page.Model { + if predicate.Matches(v) { + items = append(items, v) + } + } + } + + for page.HasMore() { + page, err = page.LoadMore(ctx) + if err != nil { + err = fmt.Errorf("loading the next page: %+v", err) + return + } + + if page.Model != nil { + for _, v := range *page.Model { + if predicate.Matches(v) { + items = append(items, v) + } + } + } + } + + out := ListByClusterCompleteResult{ 
+ Items: items, + } + return out, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privateendpoint.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privateendpoint.go new file mode 100644 index 000000000000..98a5a3b9fcfa --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privateendpoint.go @@ -0,0 +1,12 @@ +package privateendpoints + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type PrivateEndpoint struct { + Etag *string `json:"etag,omitempty"` + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Properties *PrivateEndpointProperties `json:"properties,omitempty"` + Type *string `json:"type,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privateendpointproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privateendpointproperties.go new file mode 100644 index 000000000000..44593496a39b --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privateendpointproperties.go @@ -0,0 +1,9 @@ +package privateendpoints + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type PrivateEndpointProperties struct { + CreatedDate *string `json:"createdDate,omitempty"` + ManualPrivateLinkServiceConnections *[]PrivateLinkServiceConnection `json:"manualPrivateLinkServiceConnections,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privatelinkconnectionstate.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privatelinkconnectionstate.go new file mode 100644 index 000000000000..d47ab7cdf5f8 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privatelinkconnectionstate.go @@ -0,0 +1,10 @@ +package privateendpoints + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type PrivateLinkConnectionState struct { + ActionsRequired *string `json:"actionsRequired,omitempty"` + Description *string `json:"description,omitempty"` + Status *string `json:"status,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privatelinkserviceconnection.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privatelinkserviceconnection.go new file mode 100644 index 000000000000..697a6cafc591 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privatelinkserviceconnection.go @@ -0,0 +1,8 @@ +package privateendpoints + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
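The `ListByCluster` plumbing above follows the generated pagination pattern: each page response carries a `nextLink`, and `ListByClusterComplete` drains every page into a single result. A minimal usage sketch in the style of this SDK's READMEs, assuming a configured `authorizer` and the `NewClusterID` constructor generated alongside `ClusterId` in this package (the cluster coordinates are placeholders):

```go
ctx := context.TODO()
client := privateendpoints.NewPrivateEndpointsClientWithBaseURI("https://management.azure.com")
client.Client.Authorizer = authorizer

id := privateendpoints.NewClusterID("12345678-1234-9876-4563-123456789012", "example-resource-group", "clusterValue")

// alternatively `client.ListByCluster(ctx, id)` can be used to do batched pagination
items, err := client.ListByClusterComplete(ctx, id)
if err != nil {
	// handle the error
}
for _, item := range items.Items {
	// do something with each PrivateEndpoint
}
```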
+
+type PrivateLinkServiceConnection struct {
+	Properties *PrivateLinkServiceConnectionProperties `json:"properties,omitempty"`
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privatelinkserviceconnectionproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privatelinkserviceconnectionproperties.go
new file mode 100644
index 000000000000..c1687bf345f0
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/model_privatelinkserviceconnectionproperties.go
@@ -0,0 +1,11 @@
+package privateendpoints
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See NOTICE.txt in the project root for license information.
+
+type PrivateLinkServiceConnectionProperties struct {
+	GroupIds                          *[]string                   `json:"groupIds,omitempty"`
+	PrivateLinkServiceConnectionState *PrivateLinkConnectionState `json:"privateLinkServiceConnectionState,omitempty"`
+	PrivateLinkServiceId              *string                     `json:"privateLinkServiceId,omitempty"`
+	RequestMessage                    *string                     `json:"requestMessage,omitempty"`
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/predicates.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/predicates.go
new file mode 100644
index 000000000000..4f9b8f2b730f
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/predicates.go
@@ -0,0 +1,29 @@
+package privateendpoints
+
+type PrivateEndpointOperationPredicate struct {
+	Etag *string
+	Id   *string
+	Name *string
+	Type *string
+}
+
+func (p PrivateEndpointOperationPredicate) Matches(input PrivateEndpoint) bool {
+
+	if p.Etag != nil && (input.Etag == nil || *p.Etag != *input.Etag) {
+		return false
+	}
+
+	if p.Id != nil && (input.Id == nil || *p.Id != *input.Id) {
+		return false
+	}
+
+	if p.Name != nil && (input.Name == nil || *p.Name != *input.Name) {
+		return false
+	}
+
+	if p.Type != nil && (input.Type == nil || *p.Type != *input.Type) {
+		return false
+	}
+
+	return true
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/version.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/version.go
new file mode 100644
index 000000000000..86fff8201db2
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/privateendpoints/version.go
@@ -0,0 +1,12 @@
+package privateendpoints
+
+import "fmt"
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See NOTICE.txt in the project root for license information.
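The predicate type above is what gives `ListByClusterCompleteMatchingPredicate` its filter: only non-nil fields take part in the match, so a zero-value predicate matches every item. A hedged sketch that reuses the client and cluster ID from the previous example and keeps only endpoints with a given name (the name is a placeholder):

```go
name := "privateEndpointValue"
predicate := privateendpoints.PrivateEndpointOperationPredicate{
	Name: &name,
}

filtered, err := client.ListByClusterCompleteMatchingPredicate(ctx, id, predicate)
if err != nil {
	// handle the error
}
for _, item := range filtered.Items {
	// only endpoints named "privateEndpointValue" arrive here
}
```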
+
+const defaultApiVersion = "2020-03-01"
+
+func userAgent() string {
+	return fmt.Sprintf("hashicorp/go-azure-sdk/privateendpoints/%s", defaultApiVersion)
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/README.md b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/README.md
new file mode 100644
index 000000000000..f6b4179ad042
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/README.md
@@ -0,0 +1,166 @@
+
+## `github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs` Documentation
+
+The `streamingjobs` SDK allows for interaction with the Azure Resource Manager Service `streamanalytics` (API Version `2020-03-01`).
+
+This readme covers example usages, but further information on [using this SDK can be found in the project root](https://github.com/hashicorp/go-azure-sdk/tree/main/docs).
+
+### Import Path
+
+```go
+import "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs"
+```
+
+
+### Client Initialization
+
+```go
+client := streamingjobs.NewStreamingJobsClientWithBaseURI("https://management.azure.com")
+client.Client.Authorizer = authorizer
+```
+
+
+### Example Usage: `StreamingJobsClient.CreateOrReplace`
+
+```go
+ctx := context.TODO()
+id := streamingjobs.NewStreamingJobID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue")
+
+payload := streamingjobs.StreamingJob{
+	// ...
+}
+
+
+if err := client.CreateOrReplaceThenPoll(ctx, id, payload, streamingjobs.DefaultCreateOrReplaceOperationOptions()); err != nil {
+	// handle the error
+}
+```
+
+
+### Example Usage: `StreamingJobsClient.Delete`
+
+```go
+ctx := context.TODO()
+id := streamingjobs.NewStreamingJobID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue")
+
+if err := client.DeleteThenPoll(ctx, id); err != nil {
+	// handle the error
+}
+```
+
+
+### Example Usage: `StreamingJobsClient.Get`
+
+```go
+ctx := context.TODO()
+id := streamingjobs.NewStreamingJobID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue")
+
+read, err := client.Get(ctx, id, streamingjobs.DefaultGetOperationOptions())
+if err != nil {
+	// handle the error
+}
+if model := read.Model; model != nil {
+	// do something with the model/response object
+}
+```
+
+
+### Example Usage: `StreamingJobsClient.List`
+
+```go
+ctx := context.TODO()
+id := commonids.NewSubscriptionID("12345678-1234-9876-4563-123456789012")
+
+// alternatively `client.List(ctx, id, streamingjobs.DefaultListOperationOptions())` can be used to do batched pagination
+items, err := client.ListComplete(ctx, id, streamingjobs.DefaultListOperationOptions())
+if err != nil {
+	// handle the error
+}
+for _, item := range items.Items {
+	// do something
+}
+```
+
+
+### Example Usage: `StreamingJobsClient.ListByResourceGroup`
+
+```go
+ctx := context.TODO()
+id := commonids.NewResourceGroupID("12345678-1234-9876-4563-123456789012", "example-resource-group")
+
+// alternatively `client.ListByResourceGroup(ctx, id, streamingjobs.DefaultListByResourceGroupOperationOptions())` can be used to do batched pagination
+items, err := client.ListByResourceGroupComplete(ctx, id, streamingjobs.DefaultListByResourceGroupOperationOptions())
+if err != nil {
+	// handle the error
+}
+for _, item := range items.Items {
+	// do something
+}
+```
+
+
+### Example Usage:
`StreamingJobsClient.Scale` + +```go +ctx := context.TODO() +id := streamingjobs.NewStreamingJobID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue") + +payload := streamingjobs.ScaleStreamingJobParameters{ + // ... +} + + +if err := client.ScaleThenPoll(ctx, id, payload); err != nil { + // handle the error +} +``` + + +### Example Usage: `StreamingJobsClient.Start` + +```go +ctx := context.TODO() +id := streamingjobs.NewStreamingJobID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue") + +payload := streamingjobs.StartStreamingJobParameters{ + // ... +} + + +if err := client.StartThenPoll(ctx, id, payload); err != nil { + // handle the error +} +``` + + +### Example Usage: `StreamingJobsClient.Stop` + +```go +ctx := context.TODO() +id := streamingjobs.NewStreamingJobID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue") + +if err := client.StopThenPoll(ctx, id); err != nil { + // handle the error +} +``` + + +### Example Usage: `StreamingJobsClient.Update` + +```go +ctx := context.TODO() +id := streamingjobs.NewStreamingJobID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue") + +payload := streamingjobs.StreamingJob{ + // ... +} + + +read, err := client.Update(ctx, id, payload, streamingjobs.DefaultUpdateOperationOptions()) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/client.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/client.go new file mode 100644 index 000000000000..0e0a69762911 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/client.go @@ -0,0 +1,18 @@ +package streamingjobs + +import "github.com/Azure/go-autorest/autorest" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type StreamingJobsClient struct { + Client autorest.Client + baseUri string +} + +func NewStreamingJobsClientWithBaseURI(endpoint string) StreamingJobsClient { + return StreamingJobsClient{ + Client: autorest.NewClientWithUserAgent(userAgent()), + baseUri: endpoint, + } +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/constants.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/constants.go new file mode 100644 index 000000000000..74654076221e --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/constants.go @@ -0,0 +1,382 @@ +package streamingjobs + +import "strings" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
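The enum types that follow all share one generated shape: typed string constants, an exported `PossibleValuesFor...` helper, and an unexported, case-insensitive `parse...` function that best-efforts unknown values rather than failing. On the provider side it is the exported helper that typically feeds schema validation; a sketch of that wiring, with an illustrative schema field:

```go
"authentication_mode": {
	Type:     pluginsdk.TypeString,
	Optional: true,
	Default:  string(streamingjobs.AuthenticationModeConnectionString),
	ValidateFunc: validation.StringInSlice(
		streamingjobs.PossibleValuesForAuthenticationMode(),
		false, // require the service's canonical casing
	),
},
```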
+ +type AuthenticationMode string + +const ( + AuthenticationModeConnectionString AuthenticationMode = "ConnectionString" + AuthenticationModeMsi AuthenticationMode = "Msi" + AuthenticationModeUserToken AuthenticationMode = "UserToken" +) + +func PossibleValuesForAuthenticationMode() []string { + return []string{ + string(AuthenticationModeConnectionString), + string(AuthenticationModeMsi), + string(AuthenticationModeUserToken), + } +} + +func parseAuthenticationMode(input string) (*AuthenticationMode, error) { + vals := map[string]AuthenticationMode{ + "connectionstring": AuthenticationModeConnectionString, + "msi": AuthenticationModeMsi, + "usertoken": AuthenticationModeUserToken, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := AuthenticationMode(input) + return &out, nil +} + +type CompatibilityLevel string + +const ( + CompatibilityLevelOnePointTwo CompatibilityLevel = "1.2" + CompatibilityLevelOnePointZero CompatibilityLevel = "1.0" +) + +func PossibleValuesForCompatibilityLevel() []string { + return []string{ + string(CompatibilityLevelOnePointTwo), + string(CompatibilityLevelOnePointZero), + } +} + +func parseCompatibilityLevel(input string) (*CompatibilityLevel, error) { + vals := map[string]CompatibilityLevel{ + "1.2": CompatibilityLevelOnePointTwo, + "1.0": CompatibilityLevelOnePointZero, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := CompatibilityLevel(input) + return &out, nil +} + +type CompressionType string + +const ( + CompressionTypeDeflate CompressionType = "Deflate" + CompressionTypeGZip CompressionType = "GZip" + CompressionTypeNone CompressionType = "None" +) + +func PossibleValuesForCompressionType() []string { + return []string{ + string(CompressionTypeDeflate), + string(CompressionTypeGZip), + string(CompressionTypeNone), + } +} + +func parseCompressionType(input string) (*CompressionType, error) { + vals := map[string]CompressionType{ + "deflate": CompressionTypeDeflate, + "gzip": CompressionTypeGZip, + "none": CompressionTypeNone, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := CompressionType(input) + return &out, nil +} + +type ContentStoragePolicy string + +const ( + ContentStoragePolicyJobStorageAccount ContentStoragePolicy = "JobStorageAccount" + ContentStoragePolicySystemAccount ContentStoragePolicy = "SystemAccount" +) + +func PossibleValuesForContentStoragePolicy() []string { + return []string{ + string(ContentStoragePolicyJobStorageAccount), + string(ContentStoragePolicySystemAccount), + } +} + +func parseContentStoragePolicy(input string) (*ContentStoragePolicy, error) { + vals := map[string]ContentStoragePolicy{ + "jobstorageaccount": ContentStoragePolicyJobStorageAccount, + "systemaccount": ContentStoragePolicySystemAccount, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := ContentStoragePolicy(input) + return &out, nil +} + +type Encoding string + +const ( + EncodingUTFEight Encoding = "UTF8" +) + +func PossibleValuesForEncoding() []string { + return []string{ + string(EncodingUTFEight), + } +} + +func parseEncoding(input string) (*Encoding, error) { + vals := map[string]Encoding{ + "utf8": EncodingUTFEight, + } + if v, ok := 
vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := Encoding(input) + return &out, nil +} + +type EventSerializationType string + +const ( + EventSerializationTypeAvro EventSerializationType = "Avro" + EventSerializationTypeCsv EventSerializationType = "Csv" + EventSerializationTypeJson EventSerializationType = "Json" + EventSerializationTypeParquet EventSerializationType = "Parquet" +) + +func PossibleValuesForEventSerializationType() []string { + return []string{ + string(EventSerializationTypeAvro), + string(EventSerializationTypeCsv), + string(EventSerializationTypeJson), + string(EventSerializationTypeParquet), + } +} + +func parseEventSerializationType(input string) (*EventSerializationType, error) { + vals := map[string]EventSerializationType{ + "avro": EventSerializationTypeAvro, + "csv": EventSerializationTypeCsv, + "json": EventSerializationTypeJson, + "parquet": EventSerializationTypeParquet, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := EventSerializationType(input) + return &out, nil +} + +type EventsOutOfOrderPolicy string + +const ( + EventsOutOfOrderPolicyAdjust EventsOutOfOrderPolicy = "Adjust" + EventsOutOfOrderPolicyDrop EventsOutOfOrderPolicy = "Drop" +) + +func PossibleValuesForEventsOutOfOrderPolicy() []string { + return []string{ + string(EventsOutOfOrderPolicyAdjust), + string(EventsOutOfOrderPolicyDrop), + } +} + +func parseEventsOutOfOrderPolicy(input string) (*EventsOutOfOrderPolicy, error) { + vals := map[string]EventsOutOfOrderPolicy{ + "adjust": EventsOutOfOrderPolicyAdjust, + "drop": EventsOutOfOrderPolicyDrop, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := EventsOutOfOrderPolicy(input) + return &out, nil +} + +type JobType string + +const ( + JobTypeCloud JobType = "Cloud" + JobTypeEdge JobType = "Edge" +) + +func PossibleValuesForJobType() []string { + return []string{ + string(JobTypeCloud), + string(JobTypeEdge), + } +} + +func parseJobType(input string) (*JobType, error) { + vals := map[string]JobType{ + "cloud": JobTypeCloud, + "edge": JobTypeEdge, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := JobType(input) + return &out, nil +} + +type JsonOutputSerializationFormat string + +const ( + JsonOutputSerializationFormatArray JsonOutputSerializationFormat = "Array" + JsonOutputSerializationFormatLineSeparated JsonOutputSerializationFormat = "LineSeparated" +) + +func PossibleValuesForJsonOutputSerializationFormat() []string { + return []string{ + string(JsonOutputSerializationFormatArray), + string(JsonOutputSerializationFormatLineSeparated), + } +} + +func parseJsonOutputSerializationFormat(input string) (*JsonOutputSerializationFormat, error) { + vals := map[string]JsonOutputSerializationFormat{ + "array": JsonOutputSerializationFormatArray, + "lineseparated": JsonOutputSerializationFormatLineSeparated, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := JsonOutputSerializationFormat(input) + return &out, nil +} + +type OutputErrorPolicy string + +const ( + OutputErrorPolicyDrop OutputErrorPolicy = "Drop" + OutputErrorPolicyStop OutputErrorPolicy = "Stop" +) + +func 
PossibleValuesForOutputErrorPolicy() []string { + return []string{ + string(OutputErrorPolicyDrop), + string(OutputErrorPolicyStop), + } +} + +func parseOutputErrorPolicy(input string) (*OutputErrorPolicy, error) { + vals := map[string]OutputErrorPolicy{ + "drop": OutputErrorPolicyDrop, + "stop": OutputErrorPolicyStop, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := OutputErrorPolicy(input) + return &out, nil +} + +type OutputStartMode string + +const ( + OutputStartModeCustomTime OutputStartMode = "CustomTime" + OutputStartModeJobStartTime OutputStartMode = "JobStartTime" + OutputStartModeLastOutputEventTime OutputStartMode = "LastOutputEventTime" +) + +func PossibleValuesForOutputStartMode() []string { + return []string{ + string(OutputStartModeCustomTime), + string(OutputStartModeJobStartTime), + string(OutputStartModeLastOutputEventTime), + } +} + +func parseOutputStartMode(input string) (*OutputStartMode, error) { + vals := map[string]OutputStartMode{ + "customtime": OutputStartModeCustomTime, + "jobstarttime": OutputStartModeJobStartTime, + "lastoutputeventtime": OutputStartModeLastOutputEventTime, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := OutputStartMode(input) + return &out, nil +} + +type RefreshType string + +const ( + RefreshTypeRefreshPeriodicallyWithDelta RefreshType = "RefreshPeriodicallyWithDelta" + RefreshTypeRefreshPeriodicallyWithFull RefreshType = "RefreshPeriodicallyWithFull" + RefreshTypeStatic RefreshType = "Static" +) + +func PossibleValuesForRefreshType() []string { + return []string{ + string(RefreshTypeRefreshPeriodicallyWithDelta), + string(RefreshTypeRefreshPeriodicallyWithFull), + string(RefreshTypeStatic), + } +} + +func parseRefreshType(input string) (*RefreshType, error) { + vals := map[string]RefreshType{ + "refreshperiodicallywithdelta": RefreshTypeRefreshPeriodicallyWithDelta, + "refreshperiodicallywithfull": RefreshTypeRefreshPeriodicallyWithFull, + "static": RefreshTypeStatic, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := RefreshType(input) + return &out, nil +} + +type SkuName string + +const ( + SkuNameStandard SkuName = "Standard" +) + +func PossibleValuesForSkuName() []string { + return []string{ + string(SkuNameStandard), + } +} + +func parseSkuName(input string) (*SkuName, error) { + vals := map[string]SkuName{ + "standard": SkuNameStandard, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := SkuName(input) + return &out, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/id_streamingjob.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/id_streamingjob.go new file mode 100644 index 000000000000..49d9cdc05869 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/id_streamingjob.go @@ -0,0 +1,124 @@ +package streamingjobs + +import ( + "fmt" + "strings" + + "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" +) + +var _ resourceids.ResourceId = StreamingJobId{} + +// StreamingJobId is a struct representing the Resource ID for a Streaming Job 
+type StreamingJobId struct { + SubscriptionId string + ResourceGroupName string + JobName string +} + +// NewStreamingJobID returns a new StreamingJobId struct +func NewStreamingJobID(subscriptionId string, resourceGroupName string, jobName string) StreamingJobId { + return StreamingJobId{ + SubscriptionId: subscriptionId, + ResourceGroupName: resourceGroupName, + JobName: jobName, + } +} + +// ParseStreamingJobID parses 'input' into a StreamingJobId +func ParseStreamingJobID(input string) (*StreamingJobId, error) { + parser := resourceids.NewParserFromResourceIdType(StreamingJobId{}) + parsed, err := parser.Parse(input, false) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := StreamingJobId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ParseStreamingJobIDInsensitively parses 'input' case-insensitively into a StreamingJobId +// note: this method should only be used for API response data and not user input +func ParseStreamingJobIDInsensitively(input string) (*StreamingJobId, error) { + parser := resourceids.NewParserFromResourceIdType(StreamingJobId{}) + parsed, err := parser.Parse(input, true) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := StreamingJobId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ValidateStreamingJobID checks that 'input' can be parsed as a Streaming Job ID +func ValidateStreamingJobID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := ParseStreamingJobID(v); err != nil { + errors = append(errors, err) + } + + return +} + +// ID returns the formatted Streaming Job ID +func (id StreamingJobId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingJobs/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.JobName) +} + +// Segments returns a slice of Resource ID Segments which comprise this Streaming Job ID +func (id StreamingJobId) Segments() []resourceids.Segment { + return []resourceids.Segment{ + resourceids.StaticSegment("staticSubscriptions", "subscriptions", "subscriptions"), + resourceids.SubscriptionIdSegment("subscriptionId", "12345678-1234-9876-4563-123456789012"), + resourceids.StaticSegment("staticResourceGroups", "resourceGroups", "resourceGroups"), + resourceids.ResourceGroupSegment("resourceGroupName", "example-resource-group"), + 
resourceids.StaticSegment("staticProviders", "providers", "providers"), + resourceids.ResourceProviderSegment("staticMicrosoftStreamAnalytics", "Microsoft.StreamAnalytics", "Microsoft.StreamAnalytics"), + resourceids.StaticSegment("staticStreamingJobs", "streamingJobs", "streamingJobs"), + resourceids.UserSpecifiedSegment("jobName", "jobValue"), + } +} + +// String returns a human-readable description of this Streaming Job ID +func (id StreamingJobId) String() string { + components := []string{ + fmt.Sprintf("Subscription: %q", id.SubscriptionId), + fmt.Sprintf("Resource Group Name: %q", id.ResourceGroupName), + fmt.Sprintf("Job Name: %q", id.JobName), + } + return fmt.Sprintf("Streaming Job (%s)", strings.Join(components, "\n")) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_createorreplace_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_createorreplace_autorest.go new file mode 100644 index 000000000000..10ce1e3e941d --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_createorreplace_autorest.go @@ -0,0 +1,113 @@ +package streamingjobs + +import ( + "context" + "fmt" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/hashicorp/go-azure-helpers/polling" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type CreateOrReplaceOperationResponse struct { + Poller polling.LongRunningPoller + HttpResponse *http.Response +} + +type CreateOrReplaceOperationOptions struct { + IfMatch *string + IfNoneMatch *string +} + +func DefaultCreateOrReplaceOperationOptions() CreateOrReplaceOperationOptions { + return CreateOrReplaceOperationOptions{} +} + +func (o CreateOrReplaceOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + if o.IfMatch != nil { + out["If-Match"] = *o.IfMatch + } + + if o.IfNoneMatch != nil { + out["If-None-Match"] = *o.IfNoneMatch + } + + return out +} + +func (o CreateOrReplaceOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +// CreateOrReplace ... 
+func (c StreamingJobsClient) CreateOrReplace(ctx context.Context, id StreamingJobId, input StreamingJob, options CreateOrReplaceOperationOptions) (result CreateOrReplaceOperationResponse, err error) { + req, err := c.preparerForCreateOrReplace(ctx, id, input, options) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "CreateOrReplace", nil, "Failure preparing request") + return + } + + result, err = c.senderForCreateOrReplace(ctx, req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "CreateOrReplace", result.HttpResponse, "Failure sending request") + return + } + + return +} + +// CreateOrReplaceThenPoll performs CreateOrReplace then polls until it's completed +func (c StreamingJobsClient) CreateOrReplaceThenPoll(ctx context.Context, id StreamingJobId, input StreamingJob, options CreateOrReplaceOperationOptions) error { + result, err := c.CreateOrReplace(ctx, id, input, options) + if err != nil { + return fmt.Errorf("performing CreateOrReplace: %+v", err) + } + + if err := result.Poller.PollUntilDone(); err != nil { + return fmt.Errorf("polling after CreateOrReplace: %+v", err) + } + + return nil +} + +// preparerForCreateOrReplace prepares the CreateOrReplace request. +func (c StreamingJobsClient) preparerForCreateOrReplace(ctx context.Context, id StreamingJobId, input StreamingJob, options CreateOrReplaceOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(id.ID()), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// senderForCreateOrReplace sends the CreateOrReplace request. The method will close the +// http.Response Body if it receives an error. +func (c StreamingJobsClient) senderForCreateOrReplace(ctx context.Context, req *http.Request) (future CreateOrReplaceOperationResponse, err error) { + var resp *http.Response + resp, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + return + } + + future.Poller, err = polling.NewPollerFromResponse(ctx, resp, c.Client, req.Method) + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_delete_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_delete_autorest.go new file mode 100644 index 000000000000..d619132add70 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_delete_autorest.go @@ -0,0 +1,78 @@ +package streamingjobs + +import ( + "context" + "fmt" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/hashicorp/go-azure-helpers/polling" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type DeleteOperationResponse struct { + Poller polling.LongRunningPoller + HttpResponse *http.Response +} + +// Delete ... 
+func (c StreamingJobsClient) Delete(ctx context.Context, id StreamingJobId) (result DeleteOperationResponse, err error) { + req, err := c.preparerForDelete(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "Delete", nil, "Failure preparing request") + return + } + + result, err = c.senderForDelete(ctx, req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "Delete", result.HttpResponse, "Failure sending request") + return + } + + return +} + +// DeleteThenPoll performs Delete then polls until it's completed +func (c StreamingJobsClient) DeleteThenPoll(ctx context.Context, id StreamingJobId) error { + result, err := c.Delete(ctx, id) + if err != nil { + return fmt.Errorf("performing Delete: %+v", err) + } + + if err := result.Poller.PollUntilDone(); err != nil { + return fmt.Errorf("polling after Delete: %+v", err) + } + + return nil +} + +// preparerForDelete prepares the Delete request. +func (c StreamingJobsClient) preparerForDelete(ctx context.Context, id StreamingJobId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsDelete(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(id.ID()), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// senderForDelete sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (c StreamingJobsClient) senderForDelete(ctx context.Context, req *http.Request) (future DeleteOperationResponse, err error) { + var resp *http.Response + resp, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + return + } + + future.Poller, err = polling.NewPollerFromResponse(ctx, resp, c.Client, req.Method) + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_get_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_get_autorest.go new file mode 100644 index 000000000000..f50e1aaa9e8a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_get_autorest.go @@ -0,0 +1,97 @@ +package streamingjobs + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type GetOperationResponse struct { + HttpResponse *http.Response + Model *StreamingJob +} + +type GetOperationOptions struct { + Expand *string +} + +func DefaultGetOperationOptions() GetOperationOptions { + return GetOperationOptions{} +} + +func (o GetOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +func (o GetOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + if o.Expand != nil { + out["$expand"] = *o.Expand + } + + return out +} + +// Get ... 
+func (c StreamingJobsClient) Get(ctx context.Context, id StreamingJobId, options GetOperationOptions) (result GetOperationResponse, err error) { + req, err := c.preparerForGet(ctx, id, options) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "Get", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "Get", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForGet(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "Get", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForGet prepares the Get request. +func (c StreamingJobsClient) preparerForGet(ctx context.Context, id StreamingJobId, options GetOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(id.ID()), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForGet handles the response to the Get request. The method always +// closes the http.Response Body. +func (c StreamingJobsClient) responderForGet(resp *http.Response) (result GetOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_list_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_list_autorest.go new file mode 100644 index 000000000000..9c325f2f2216 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_list_autorest.go @@ -0,0 +1,216 @@ +package streamingjobs + +import ( + "context" + "fmt" + "net/http" + "net/url" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/hashicorp/go-azure-helpers/resourcemanager/commonids" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
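The `GetOperationOptions` above surface the `$expand` query parameter, which is how a caller retrieves a job together with its nested resources in one round trip. A sketch in the README's style; the expand list mirrors the value this provider has historically passed to the jobs API, so treat it as an assumption rather than a guarantee:

```go
ctx := context.TODO()
id := streamingjobs.NewStreamingJobID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue")

expand := "inputs,outputs,transformation,functions"
read, err := client.Get(ctx, id, streamingjobs.GetOperationOptions{
	Expand: &expand,
})
if err != nil {
	// handle the error
}
if model := read.Model; model != nil {
	// model now includes the expanded child resources
}
```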
+ +type ListOperationResponse struct { + HttpResponse *http.Response + Model *[]StreamingJob + + nextLink *string + nextPageFunc func(ctx context.Context, nextLink string) (ListOperationResponse, error) +} + +type ListCompleteResult struct { + Items []StreamingJob +} + +func (r ListOperationResponse) HasMore() bool { + return r.nextLink != nil +} + +func (r ListOperationResponse) LoadMore(ctx context.Context) (resp ListOperationResponse, err error) { + if !r.HasMore() { + err = fmt.Errorf("no more pages returned") + return + } + return r.nextPageFunc(ctx, *r.nextLink) +} + +type ListOperationOptions struct { + Expand *string +} + +func DefaultListOperationOptions() ListOperationOptions { + return ListOperationOptions{} +} + +func (o ListOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +func (o ListOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + if o.Expand != nil { + out["$expand"] = *o.Expand + } + + return out +} + +// List ... +func (c StreamingJobsClient) List(ctx context.Context, id commonids.SubscriptionId, options ListOperationOptions) (resp ListOperationResponse, err error) { + req, err := c.preparerForList(ctx, id, options) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "List", nil, "Failure preparing request") + return + } + + resp.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "List", resp.HttpResponse, "Failure sending request") + return + } + + resp, err = c.responderForList(resp.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "List", resp.HttpResponse, "Failure responding to request") + return + } + return +} + +// preparerForList prepares the List request. +func (c StreamingJobsClient) preparerForList(ctx context.Context, id commonids.SubscriptionId, options ListOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(fmt.Sprintf("%s/providers/Microsoft.StreamAnalytics/streamingJobs", id.ID())), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// preparerForListWithNextLink prepares the List request with the given nextLink token. 
+func (c StreamingJobsClient) preparerForListWithNextLink(ctx context.Context, nextLink string) (*http.Request, error) { + uri, err := url.Parse(nextLink) + if err != nil { + return nil, fmt.Errorf("parsing nextLink %q: %+v", nextLink, err) + } + queryParameters := map[string]interface{}{} + for k, v := range uri.Query() { + if len(v) == 0 { + continue + } + val := v[0] + val = autorest.Encode("query", val) + queryParameters[k] = val + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(uri.Path), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForList handles the response to the List request. The method always +// closes the http.Response Body. +func (c StreamingJobsClient) responderForList(resp *http.Response) (result ListOperationResponse, err error) { + type page struct { + Values []StreamingJob `json:"value"` + NextLink *string `json:"nextLink"` + } + var respObj page + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&respObj), + autorest.ByClosing()) + result.HttpResponse = resp + result.Model = &respObj.Values + result.nextLink = respObj.NextLink + if respObj.NextLink != nil { + result.nextPageFunc = func(ctx context.Context, nextLink string) (result ListOperationResponse, err error) { + req, err := c.preparerForListWithNextLink(ctx, nextLink) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "List", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "List", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForList(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "List", result.HttpResponse, "Failure responding to request") + return + } + + return + } + } + return +} + +// ListComplete retrieves all of the results into a single object +func (c StreamingJobsClient) ListComplete(ctx context.Context, id commonids.SubscriptionId, options ListOperationOptions) (ListCompleteResult, error) { + return c.ListCompleteMatchingPredicate(ctx, id, options, StreamingJobOperationPredicate{}) +} + +// ListCompleteMatchingPredicate retrieves all of the results and then applied the predicate +func (c StreamingJobsClient) ListCompleteMatchingPredicate(ctx context.Context, id commonids.SubscriptionId, options ListOperationOptions, predicate StreamingJobOperationPredicate) (resp ListCompleteResult, err error) { + items := make([]StreamingJob, 0) + + page, err := c.List(ctx, id, options) + if err != nil { + err = fmt.Errorf("loading the initial page: %+v", err) + return + } + if page.Model != nil { + for _, v := range *page.Model { + if predicate.Matches(v) { + items = append(items, v) + } + } + } + + for page.HasMore() { + page, err = page.LoadMore(ctx) + if err != nil { + err = fmt.Errorf("loading the next page: %+v", err) + return + } + + if page.Model != nil { + for _, v := range *page.Model { + if predicate.Matches(v) { + items = append(items, v) + } + } + } + } + + out := ListCompleteResult{ + Items: items, + } + return out, nil +} diff --git 
a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_listbyresourcegroup_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_listbyresourcegroup_autorest.go new file mode 100644 index 000000000000..97037ff29751 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_listbyresourcegroup_autorest.go @@ -0,0 +1,216 @@ +package streamingjobs + +import ( + "context" + "fmt" + "net/http" + "net/url" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/hashicorp/go-azure-helpers/resourcemanager/commonids" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ListByResourceGroupOperationResponse struct { + HttpResponse *http.Response + Model *[]StreamingJob + + nextLink *string + nextPageFunc func(ctx context.Context, nextLink string) (ListByResourceGroupOperationResponse, error) +} + +type ListByResourceGroupCompleteResult struct { + Items []StreamingJob +} + +func (r ListByResourceGroupOperationResponse) HasMore() bool { + return r.nextLink != nil +} + +func (r ListByResourceGroupOperationResponse) LoadMore(ctx context.Context) (resp ListByResourceGroupOperationResponse, err error) { + if !r.HasMore() { + err = fmt.Errorf("no more pages returned") + return + } + return r.nextPageFunc(ctx, *r.nextLink) +} + +type ListByResourceGroupOperationOptions struct { + Expand *string +} + +func DefaultListByResourceGroupOperationOptions() ListByResourceGroupOperationOptions { + return ListByResourceGroupOperationOptions{} +} + +func (o ListByResourceGroupOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +func (o ListByResourceGroupOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + if o.Expand != nil { + out["$expand"] = *o.Expand + } + + return out +} + +// ListByResourceGroup ... +func (c StreamingJobsClient) ListByResourceGroup(ctx context.Context, id commonids.ResourceGroupId, options ListByResourceGroupOperationOptions) (resp ListByResourceGroupOperationResponse, err error) { + req, err := c.preparerForListByResourceGroup(ctx, id, options) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "ListByResourceGroup", nil, "Failure preparing request") + return + } + + resp.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "ListByResourceGroup", resp.HttpResponse, "Failure sending request") + return + } + + resp, err = c.responderForListByResourceGroup(resp.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "ListByResourceGroup", resp.HttpResponse, "Failure responding to request") + return + } + return +} + +// preparerForListByResourceGroup prepares the ListByResourceGroup request. 
+func (c StreamingJobsClient) preparerForListByResourceGroup(ctx context.Context, id commonids.ResourceGroupId, options ListByResourceGroupOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(fmt.Sprintf("%s/providers/Microsoft.StreamAnalytics/streamingJobs", id.ID())), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// preparerForListByResourceGroupWithNextLink prepares the ListByResourceGroup request with the given nextLink token. +func (c StreamingJobsClient) preparerForListByResourceGroupWithNextLink(ctx context.Context, nextLink string) (*http.Request, error) { + uri, err := url.Parse(nextLink) + if err != nil { + return nil, fmt.Errorf("parsing nextLink %q: %+v", nextLink, err) + } + queryParameters := map[string]interface{}{} + for k, v := range uri.Query() { + if len(v) == 0 { + continue + } + val := v[0] + val = autorest.Encode("query", val) + queryParameters[k] = val + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(uri.Path), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForListByResourceGroup handles the response to the ListByResourceGroup request. The method always +// closes the http.Response Body. 
+func (c StreamingJobsClient) responderForListByResourceGroup(resp *http.Response) (result ListByResourceGroupOperationResponse, err error) {
+	type page struct {
+		Values   []StreamingJob `json:"value"`
+		NextLink *string        `json:"nextLink"`
+	}
+	var respObj page
+	err = autorest.Respond(
+		resp,
+		azure.WithErrorUnlessStatusCode(http.StatusOK),
+		autorest.ByUnmarshallingJSON(&respObj),
+		autorest.ByClosing())
+	result.HttpResponse = resp
+	result.Model = &respObj.Values
+	result.nextLink = respObj.NextLink
+	if respObj.NextLink != nil {
+		result.nextPageFunc = func(ctx context.Context, nextLink string) (result ListByResourceGroupOperationResponse, err error) {
+			req, err := c.preparerForListByResourceGroupWithNextLink(ctx, nextLink)
+			if err != nil {
+				err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "ListByResourceGroup", nil, "Failure preparing request")
+				return
+			}
+
+			result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client))
+			if err != nil {
+				err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "ListByResourceGroup", result.HttpResponse, "Failure sending request")
+				return
+			}
+
+			result, err = c.responderForListByResourceGroup(result.HttpResponse)
+			if err != nil {
+				err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "ListByResourceGroup", result.HttpResponse, "Failure responding to request")
+				return
+			}
+
+			return
+		}
+	}
+	return
+}
+
+// ListByResourceGroupComplete retrieves all of the results into a single object
+func (c StreamingJobsClient) ListByResourceGroupComplete(ctx context.Context, id commonids.ResourceGroupId, options ListByResourceGroupOperationOptions) (ListByResourceGroupCompleteResult, error) {
+	return c.ListByResourceGroupCompleteMatchingPredicate(ctx, id, options, StreamingJobOperationPredicate{})
+}
+
+// ListByResourceGroupCompleteMatchingPredicate retrieves all of the results and then applies the predicate
+func (c StreamingJobsClient) ListByResourceGroupCompleteMatchingPredicate(ctx context.Context, id commonids.ResourceGroupId, options ListByResourceGroupOperationOptions, predicate StreamingJobOperationPredicate) (resp ListByResourceGroupCompleteResult, err error) {
+	items := make([]StreamingJob, 0)
+
+	page, err := c.ListByResourceGroup(ctx, id, options)
+	if err != nil {
+		err = fmt.Errorf("loading the initial page: %+v", err)
+		return
+	}
+	if page.Model != nil {
+		for _, v := range *page.Model {
+			if predicate.Matches(v) {
+				items = append(items, v)
+			}
+		}
+	}
+
+	for page.HasMore() {
+		page, err = page.LoadMore(ctx)
+		if err != nil {
+			err = fmt.Errorf("loading the next page: %+v", err)
+			return
+		}
+
+		if page.Model != nil {
+			for _, v := range *page.Model {
+				if predicate.Matches(v) {
+					items = append(items, v)
+				}
+			}
+		}
+	}
+
+	out := ListByResourceGroupCompleteResult{
+		Items: items,
+	}
+	return out, nil
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_scale_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_scale_autorest.go
new file mode 100644
index 000000000000..5a54d0da2f4c
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_scale_autorest.go
@@ -0,0 +1,79 @@
+package streamingjobs
+
+import (
+	"context"
+	"fmt"
+	"net/http"
+
+	"github.com/Azure/go-autorest/autorest"
+	"github.com/Azure/go-autorest/autorest/azure"
+
"github.com/hashicorp/go-azure-helpers/polling" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ScaleOperationResponse struct { + Poller polling.LongRunningPoller + HttpResponse *http.Response +} + +// Scale ... +func (c StreamingJobsClient) Scale(ctx context.Context, id StreamingJobId, input ScaleStreamingJobParameters) (result ScaleOperationResponse, err error) { + req, err := c.preparerForScale(ctx, id, input) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "Scale", nil, "Failure preparing request") + return + } + + result, err = c.senderForScale(ctx, req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "Scale", result.HttpResponse, "Failure sending request") + return + } + + return +} + +// ScaleThenPoll performs Scale then polls until it's completed +func (c StreamingJobsClient) ScaleThenPoll(ctx context.Context, id StreamingJobId, input ScaleStreamingJobParameters) error { + result, err := c.Scale(ctx, id, input) + if err != nil { + return fmt.Errorf("performing Scale: %+v", err) + } + + if err := result.Poller.PollUntilDone(); err != nil { + return fmt.Errorf("polling after Scale: %+v", err) + } + + return nil +} + +// preparerForScale prepares the Scale request. +func (c StreamingJobsClient) preparerForScale(ctx context.Context, id StreamingJobId, input ScaleStreamingJobParameters) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(fmt.Sprintf("%s/scale", id.ID())), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// senderForScale sends the Scale request. The method will close the +// http.Response Body if it receives an error. +func (c StreamingJobsClient) senderForScale(ctx context.Context, req *http.Request) (future ScaleOperationResponse, err error) { + var resp *http.Response + resp, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + return + } + + future.Poller, err = polling.NewPollerFromResponse(ctx, resp, c.Client, req.Method) + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_start_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_start_autorest.go new file mode 100644 index 000000000000..eb10a45f0983 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_start_autorest.go @@ -0,0 +1,79 @@ +package streamingjobs + +import ( + "context" + "fmt" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/hashicorp/go-azure-helpers/polling" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type StartOperationResponse struct { + Poller polling.LongRunningPoller + HttpResponse *http.Response +} + +// Start ... 
+func (c StreamingJobsClient) Start(ctx context.Context, id StreamingJobId, input StartStreamingJobParameters) (result StartOperationResponse, err error) { + req, err := c.preparerForStart(ctx, id, input) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "Start", nil, "Failure preparing request") + return + } + + result, err = c.senderForStart(ctx, req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "Start", result.HttpResponse, "Failure sending request") + return + } + + return +} + +// StartThenPoll performs Start then polls until it's completed +func (c StreamingJobsClient) StartThenPoll(ctx context.Context, id StreamingJobId, input StartStreamingJobParameters) error { + result, err := c.Start(ctx, id, input) + if err != nil { + return fmt.Errorf("performing Start: %+v", err) + } + + if err := result.Poller.PollUntilDone(); err != nil { + return fmt.Errorf("polling after Start: %+v", err) + } + + return nil +} + +// preparerForStart prepares the Start request. +func (c StreamingJobsClient) preparerForStart(ctx context.Context, id StreamingJobId, input StartStreamingJobParameters) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(fmt.Sprintf("%s/start", id.ID())), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// senderForStart sends the Start request. The method will close the +// http.Response Body if it receives an error. +func (c StreamingJobsClient) senderForStart(ctx context.Context, req *http.Request) (future StartOperationResponse, err error) { + var resp *http.Response + resp, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + return + } + + future.Poller, err = polling.NewPollerFromResponse(ctx, resp, c.Client, req.Method) + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_stop_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_stop_autorest.go new file mode 100644 index 000000000000..1203d28307a6 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_stop_autorest.go @@ -0,0 +1,78 @@ +package streamingjobs + +import ( + "context" + "fmt" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/hashicorp/go-azure-helpers/polling" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type StopOperationResponse struct { + Poller polling.LongRunningPoller + HttpResponse *http.Response +} + +// Stop ... 
+func (c StreamingJobsClient) Stop(ctx context.Context, id StreamingJobId) (result StopOperationResponse, err error) { + req, err := c.preparerForStop(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "Stop", nil, "Failure preparing request") + return + } + + result, err = c.senderForStop(ctx, req) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "Stop", result.HttpResponse, "Failure sending request") + return + } + + return +} + +// StopThenPoll performs Stop then polls until it's completed +func (c StreamingJobsClient) StopThenPoll(ctx context.Context, id StreamingJobId) error { + result, err := c.Stop(ctx, id) + if err != nil { + return fmt.Errorf("performing Stop: %+v", err) + } + + if err := result.Poller.PollUntilDone(); err != nil { + return fmt.Errorf("polling after Stop: %+v", err) + } + + return nil +} + +// preparerForStop prepares the Stop request. +func (c StreamingJobsClient) preparerForStop(ctx context.Context, id StreamingJobId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(fmt.Sprintf("%s/stop", id.ID())), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// senderForStop sends the Stop request. The method will close the +// http.Response Body if it receives an error. +func (c StreamingJobsClient) senderForStop(ctx context.Context, req *http.Request) (future StopOperationResponse, err error) { + var resp *http.Response + resp, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + return + } + + future.Poller, err = polling.NewPollerFromResponse(ctx, resp, c.Client, req.Method) + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_update_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_update_autorest.go new file mode 100644 index 000000000000..863564082e15 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/method_update_autorest.go @@ -0,0 +1,98 @@ +package streamingjobs + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type UpdateOperationResponse struct { + HttpResponse *http.Response + Model *StreamingJob +} + +type UpdateOperationOptions struct { + IfMatch *string +} + +func DefaultUpdateOperationOptions() UpdateOperationOptions { + return UpdateOperationOptions{} +} + +func (o UpdateOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + if o.IfMatch != nil { + out["If-Match"] = *o.IfMatch + } + + return out +} + +func (o UpdateOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +// Update ... 
+func (c StreamingJobsClient) Update(ctx context.Context, id StreamingJobId, input StreamingJob, options UpdateOperationOptions) (result UpdateOperationResponse, err error) { + req, err := c.preparerForUpdate(ctx, id, input, options) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "Update", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "Update", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForUpdate(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "streamingjobs.StreamingJobsClient", "Update", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForUpdate prepares the Update request. +func (c StreamingJobsClient) preparerForUpdate(ctx context.Context, id StreamingJobId, input StreamingJob, options UpdateOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(id.ID()), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForUpdate handles the response to the Update request. The method always +// closes the http.Response Body. +func (c StreamingJobsClient) responderForUpdate(resp *http.Response) (result UpdateOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_aggregatefunctionproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_aggregatefunctionproperties.go new file mode 100644 index 000000000000..0c646de6aa7e --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_aggregatefunctionproperties.go @@ -0,0 +1,42 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
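+// The MarshalJSON implementation below follows the discriminated-union pattern
+// used throughout this package: the value is first marshaled through a local
+// wrapper type (which strips the MarshalJSON method and so avoids infinite
+// recursion), decoded into a map, and the "type" discriminator is injected
+// before re-marshaling. A minimal sketch of the pattern, using a hypothetical
+// type whose discriminator is "Example":
+//
+//	type wrapper ExampleProperties
+//	encoded, _ := json.Marshal(wrapper(s))
+//	var decoded map[string]interface{}
+//	_ = json.Unmarshal(encoded, &decoded)
+//	decoded["type"] = "Example"
+//	encoded, _ = json.Marshal(decoded)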
+ +var _ FunctionProperties = AggregateFunctionProperties{} + +type AggregateFunctionProperties struct { + + // Fields inherited from FunctionProperties + Etag *string `json:"etag,omitempty"` + Properties *FunctionConfiguration `json:"properties,omitempty"` +} + +var _ json.Marshaler = AggregateFunctionProperties{} + +func (s AggregateFunctionProperties) MarshalJSON() ([]byte, error) { + type wrapper AggregateFunctionProperties + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AggregateFunctionProperties: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AggregateFunctionProperties: %+v", err) + } + decoded["type"] = "Aggregate" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AggregateFunctionProperties: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_avroserialization.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_avroserialization.go new file mode 100644 index 000000000000..2ee800b1446b --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_avroserialization.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ Serialization = AvroSerialization{} + +type AvroSerialization struct { + Properties *interface{} `json:"properties,omitempty"` + + // Fields inherited from Serialization +} + +var _ json.Marshaler = AvroSerialization{} + +func (s AvroSerialization) MarshalJSON() ([]byte, error) { + type wrapper AvroSerialization + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AvroSerialization: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AvroSerialization: %+v", err) + } + decoded["type"] = "Avro" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AvroSerialization: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuredatalakestoreoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuredatalakestoreoutputdatasource.go new file mode 100644 index 000000000000..b4972c83a448 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuredatalakestoreoutputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
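+// The Data Lake Store (Gen1) output authenticates either via the job's managed
+// identity or via a user-delegated OAuth token; the RefreshToken and TokenUser*
+// fields on the corresponding properties model carry the token-based variant.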
+ +var _ OutputDataSource = AzureDataLakeStoreOutputDataSource{} + +type AzureDataLakeStoreOutputDataSource struct { + Properties *AzureDataLakeStoreOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = AzureDataLakeStoreOutputDataSource{} + +func (s AzureDataLakeStoreOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper AzureDataLakeStoreOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureDataLakeStoreOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureDataLakeStoreOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.DataLake/Accounts" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureDataLakeStoreOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuredatalakestoreoutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuredatalakestoreoutputdatasourceproperties.go new file mode 100644 index 000000000000..69069fcc8ada --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuredatalakestoreoutputdatasourceproperties.go @@ -0,0 +1,16 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type AzureDataLakeStoreOutputDataSourceProperties struct { + AccountName *string `json:"accountName,omitempty"` + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + DateFormat *string `json:"dateFormat,omitempty"` + FilePathPrefix *string `json:"filePathPrefix,omitempty"` + RefreshToken *string `json:"refreshToken,omitempty"` + TenantId *string `json:"tenantId,omitempty"` + TimeFormat *string `json:"timeFormat,omitempty"` + TokenUserDisplayName *string `json:"tokenUserDisplayName,omitempty"` + TokenUserPrincipalName *string `json:"tokenUserPrincipalName,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azurefunctionoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azurefunctionoutputdatasource.go new file mode 100644 index 000000000000..659bcd759a45 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azurefunctionoutputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
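+// A minimal construction sketch (hypothetical values, not part of this package):
+//
+//	app := "example-app"
+//	fn := "example-function"
+//	ds := AzureFunctionOutputDataSource{
+//		Properties: &AzureFunctionOutputDataSourceProperties{
+//			FunctionAppName: &app,
+//			FunctionName:    &fn,
+//		},
+//	}
+//
+// Marshaling ds emits the "Microsoft.AzureFunction" discriminator alongside
+// the nested properties, per the MarshalJSON implementation below.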
+ +var _ OutputDataSource = AzureFunctionOutputDataSource{} + +type AzureFunctionOutputDataSource struct { + Properties *AzureFunctionOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = AzureFunctionOutputDataSource{} + +func (s AzureFunctionOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper AzureFunctionOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureFunctionOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureFunctionOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.AzureFunction" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureFunctionOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azurefunctionoutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azurefunctionoutputdatasourceproperties.go new file mode 100644 index 000000000000..b73fb94709b4 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azurefunctionoutputdatasourceproperties.go @@ -0,0 +1,12 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type AzureFunctionOutputDataSourceProperties struct { + ApiKey *string `json:"apiKey,omitempty"` + FunctionAppName *string `json:"functionAppName,omitempty"` + FunctionName *string `json:"functionName,omitempty"` + MaxBatchCount *float64 `json:"maxBatchCount,omitempty"` + MaxBatchSize *float64 `json:"maxBatchSize,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebservicefunctionbinding.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebservicefunctionbinding.go new file mode 100644 index 000000000000..0da4fea551ec --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebservicefunctionbinding.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
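+// This binding attaches a Stream Analytics function to an Azure Machine
+// Learning (classic/Studio) web service; the properties model that follows
+// describes the endpoint, its API key, the per-request batch size, and the
+// input/output column schema.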
+ +var _ FunctionBinding = AzureMachineLearningWebServiceFunctionBinding{} + +type AzureMachineLearningWebServiceFunctionBinding struct { + Properties *AzureMachineLearningWebServiceFunctionBindingProperties `json:"properties,omitempty"` + + // Fields inherited from FunctionBinding +} + +var _ json.Marshaler = AzureMachineLearningWebServiceFunctionBinding{} + +func (s AzureMachineLearningWebServiceFunctionBinding) MarshalJSON() ([]byte, error) { + type wrapper AzureMachineLearningWebServiceFunctionBinding + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureMachineLearningWebServiceFunctionBinding: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureMachineLearningWebServiceFunctionBinding: %+v", err) + } + decoded["type"] = "Microsoft.MachineLearning/WebService" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureMachineLearningWebServiceFunctionBinding: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebservicefunctionbindingproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebservicefunctionbindingproperties.go new file mode 100644 index 000000000000..8d05b266f686 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebservicefunctionbindingproperties.go @@ -0,0 +1,12 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type AzureMachineLearningWebServiceFunctionBindingProperties struct { + ApiKey *string `json:"apiKey,omitempty"` + BatchSize *int64 `json:"batchSize,omitempty"` + Endpoint *string `json:"endpoint,omitempty"` + Inputs *AzureMachineLearningWebServiceInputs `json:"inputs,omitempty"` + Outputs *[]AzureMachineLearningWebServiceOutputColumn `json:"outputs,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebserviceinputcolumn.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebserviceinputcolumn.go new file mode 100644 index 000000000000..130e0ba9fe67 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebserviceinputcolumn.go @@ -0,0 +1,10 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
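+// MapTo appears to carry the zero-based index of the function parameter this
+// input column maps to, per the Azure ML web service binding contract.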
+ +type AzureMachineLearningWebServiceInputColumn struct { + DataType *string `json:"dataType,omitempty"` + MapTo *int64 `json:"mapTo,omitempty"` + Name *string `json:"name,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebserviceinputs.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebserviceinputs.go new file mode 100644 index 000000000000..9cd1b57859cc --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebserviceinputs.go @@ -0,0 +1,9 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type AzureMachineLearningWebServiceInputs struct { + ColumnNames *[]AzureMachineLearningWebServiceInputColumn `json:"columnNames,omitempty"` + Name *string `json:"name,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebserviceoutputcolumn.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebserviceoutputcolumn.go new file mode 100644 index 000000000000..979ce7de4bd0 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuremachinelearningwebserviceoutputcolumn.go @@ -0,0 +1,9 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type AzureMachineLearningWebServiceOutputColumn struct { + DataType *string `json:"dataType,omitempty"` + Name *string `json:"name,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqldatabasedatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqldatabasedatasourceproperties.go new file mode 100644 index 000000000000..3b228484ce1a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqldatabasedatasourceproperties.go @@ -0,0 +1,15 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
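+// MaxBatchCount and MaxWriterCount are generated as *float64 because the
+// 2020-03-01 API models them as JSON numbers, although the service treats
+// them as integral counts.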
+ +type AzureSqlDatabaseDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + Database *string `json:"database,omitempty"` + MaxBatchCount *float64 `json:"maxBatchCount,omitempty"` + MaxWriterCount *float64 `json:"maxWriterCount,omitempty"` + Password *string `json:"password,omitempty"` + Server *string `json:"server,omitempty"` + Table *string `json:"table,omitempty"` + User *string `json:"user,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqldatabaseoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqldatabaseoutputdatasource.go new file mode 100644 index 000000000000..586193d027a1 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqldatabaseoutputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ OutputDataSource = AzureSqlDatabaseOutputDataSource{} + +type AzureSqlDatabaseOutputDataSource struct { + Properties *AzureSqlDatabaseDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = AzureSqlDatabaseOutputDataSource{} + +func (s AzureSqlDatabaseOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper AzureSqlDatabaseOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureSqlDatabaseOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureSqlDatabaseOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Sql/Server/Database" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureSqlDatabaseOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqlreferenceinputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqlreferenceinputdatasource.go new file mode 100644 index 000000000000..e0ced0b72072 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqlreferenceinputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
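+// Note that this reference input shares the "Microsoft.Sql/Server/Database"
+// discriminator with AzureSqlDatabaseOutputDataSource above; the two are told
+// apart by the interface they implement and by where they sit in the job
+// payload, not by the type string.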
+ +var _ ReferenceInputDataSource = AzureSqlReferenceInputDataSource{} + +type AzureSqlReferenceInputDataSource struct { + Properties *AzureSqlReferenceInputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from ReferenceInputDataSource +} + +var _ json.Marshaler = AzureSqlReferenceInputDataSource{} + +func (s AzureSqlReferenceInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper AzureSqlReferenceInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureSqlReferenceInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureSqlReferenceInputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Sql/Server/Database" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureSqlReferenceInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqlreferenceinputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqlreferenceinputdatasourceproperties.go new file mode 100644 index 000000000000..37159e0dfd5b --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresqlreferenceinputdatasourceproperties.go @@ -0,0 +1,16 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type AzureSqlReferenceInputDataSourceProperties struct { + Database *string `json:"database,omitempty"` + DeltaSnapshotQuery *string `json:"deltaSnapshotQuery,omitempty"` + FullSnapshotQuery *string `json:"fullSnapshotQuery,omitempty"` + Password *string `json:"password,omitempty"` + RefreshRate *string `json:"refreshRate,omitempty"` + RefreshType *RefreshType `json:"refreshType,omitempty"` + Server *string `json:"server,omitempty"` + Table *string `json:"table,omitempty"` + User *string `json:"user,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresynapsedatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresynapsedatasourceproperties.go new file mode 100644 index 000000000000..ca545cbf783b --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresynapsedatasourceproperties.go @@ -0,0 +1,12 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type AzureSynapseDataSourceProperties struct { + Database *string `json:"database,omitempty"` + Password *string `json:"password,omitempty"` + Server *string `json:"server,omitempty"` + Table *string `json:"table,omitempty"` + User *string `json:"user,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresynapseoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresynapseoutputdatasource.go new file mode 100644 index 000000000000..903fb2032157 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuresynapseoutputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ OutputDataSource = AzureSynapseOutputDataSource{} + +type AzureSynapseOutputDataSource struct { + Properties *AzureSynapseDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = AzureSynapseOutputDataSource{} + +func (s AzureSynapseOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper AzureSynapseOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureSynapseOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureSynapseOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Sql/Server/DataWarehouse" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureSynapseOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuretableoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuretableoutputdatasource.go new file mode 100644 index 000000000000..6fbde873ddcc --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuretableoutputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
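+// For the Table output that follows, PartitionKey and RowKey name columns in
+// the query output whose values become the entity keys, and ColumnsToRemove
+// lists output columns to drop before the entity is written.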
+ +var _ OutputDataSource = AzureTableOutputDataSource{} + +type AzureTableOutputDataSource struct { + Properties *AzureTableOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = AzureTableOutputDataSource{} + +func (s AzureTableOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper AzureTableOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureTableOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureTableOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Storage/Table" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureTableOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuretableoutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuretableoutputdatasourceproperties.go new file mode 100644 index 000000000000..38aa77d9be24 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_azuretableoutputdatasourceproperties.go @@ -0,0 +1,14 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type AzureTableOutputDataSourceProperties struct { + AccountKey *string `json:"accountKey,omitempty"` + AccountName *string `json:"accountName,omitempty"` + BatchSize *int64 `json:"batchSize,omitempty"` + ColumnsToRemove *[]string `json:"columnsToRemove,omitempty"` + PartitionKey *string `json:"partitionKey,omitempty"` + RowKey *string `json:"rowKey,omitempty"` + Table *string `json:"table,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobdatasourceproperties.go new file mode 100644 index 000000000000..554f05675623 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobdatasourceproperties.go @@ -0,0 +1,13 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type BlobDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + Container *string `json:"container,omitempty"` + DateFormat *string `json:"dateFormat,omitempty"` + PathPattern *string `json:"pathPattern,omitempty"` + StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"` + TimeFormat *string `json:"timeFormat,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_bloboutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_bloboutputdatasource.go new file mode 100644 index 000000000000..97864687f916 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_bloboutputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ OutputDataSource = BlobOutputDataSource{} + +type BlobOutputDataSource struct { + Properties *BlobOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = BlobOutputDataSource{} + +func (s BlobOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper BlobOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling BlobOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling BlobOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Storage/Blob" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling BlobOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_bloboutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_bloboutputdatasourceproperties.go new file mode 100644 index 000000000000..7de605abf421 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_bloboutputdatasourceproperties.go @@ -0,0 +1,14 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
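+// PathPattern may contain the {date} and {time} tokens, which the service
+// expands using the DateFormat and TimeFormat values declared alongside it.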
+ +type BlobOutputDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + BlobPathPrefix *string `json:"blobPathPrefix,omitempty"` + Container *string `json:"container,omitempty"` + DateFormat *string `json:"dateFormat,omitempty"` + PathPattern *string `json:"pathPattern,omitempty"` + StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"` + TimeFormat *string `json:"timeFormat,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobreferenceinputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobreferenceinputdatasource.go new file mode 100644 index 000000000000..24afd52e61fa --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobreferenceinputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ ReferenceInputDataSource = BlobReferenceInputDataSource{} + +type BlobReferenceInputDataSource struct { + Properties *BlobDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from ReferenceInputDataSource +} + +var _ json.Marshaler = BlobReferenceInputDataSource{} + +func (s BlobReferenceInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper BlobReferenceInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling BlobReferenceInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling BlobReferenceInputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Storage/Blob" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling BlobReferenceInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobstreaminputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobstreaminputdatasource.go new file mode 100644 index 000000000000..38e280e54b6e --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobstreaminputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
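+// The stream-input flavour of the blob source uses its own properties model
+// rather than BlobDataSourceProperties above: BlobStreamInputDataSourceProperties
+// adds SourcePartitionCount on top of the shared fields.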
+ +var _ StreamInputDataSource = BlobStreamInputDataSource{} + +type BlobStreamInputDataSource struct { + Properties *BlobStreamInputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from StreamInputDataSource +} + +var _ json.Marshaler = BlobStreamInputDataSource{} + +func (s BlobStreamInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper BlobStreamInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling BlobStreamInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling BlobStreamInputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Storage/Blob" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling BlobStreamInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobstreaminputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobstreaminputdatasourceproperties.go new file mode 100644 index 000000000000..ca1b3b4a7156 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_blobstreaminputdatasourceproperties.go @@ -0,0 +1,14 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type BlobStreamInputDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + Container *string `json:"container,omitempty"` + DateFormat *string `json:"dateFormat,omitempty"` + PathPattern *string `json:"pathPattern,omitempty"` + SourcePartitionCount *int64 `json:"sourcePartitionCount,omitempty"` + StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"` + TimeFormat *string `json:"timeFormat,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_clusterinfo.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_clusterinfo.go new file mode 100644 index 000000000000..cc5d388f2f09 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_clusterinfo.go @@ -0,0 +1,8 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ClusterInfo struct { + Id *string `json:"id,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_compression.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_compression.go new file mode 100644 index 000000000000..e39a8efc2c6a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_compression.go @@ -0,0 +1,8 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
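+// Unlike most fields in this package, Type is neither a pointer nor tagged
+// omitempty: a Compression block always serializes its "type" discriminator
+// (e.g. "None", "GZip", "Deflate").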
+ +type Compression struct { + Type CompressionType `json:"type"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_csvserialization.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_csvserialization.go new file mode 100644 index 000000000000..ca3eacfc0714 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_csvserialization.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ Serialization = CsvSerialization{} + +type CsvSerialization struct { + Properties *CsvSerializationProperties `json:"properties,omitempty"` + + // Fields inherited from Serialization +} + +var _ json.Marshaler = CsvSerialization{} + +func (s CsvSerialization) MarshalJSON() ([]byte, error) { + type wrapper CsvSerialization + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling CsvSerialization: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling CsvSerialization: %+v", err) + } + decoded["type"] = "Csv" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling CsvSerialization: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_csvserializationproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_csvserializationproperties.go new file mode 100644 index 000000000000..af4352dafd9a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_csvserializationproperties.go @@ -0,0 +1,9 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type CsvSerializationProperties struct { + Encoding *Encoding `json:"encoding,omitempty"` + FieldDelimiter *string `json:"fieldDelimiter,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_diagnosticcondition.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_diagnosticcondition.go new file mode 100644 index 000000000000..0604396868e4 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_diagnosticcondition.go @@ -0,0 +1,10 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type DiagnosticCondition struct { + Code *string `json:"code,omitempty"` + Message *string `json:"message,omitempty"` + Since *string `json:"since,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_diagnostics.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_diagnostics.go new file mode 100644 index 000000000000..aca09fcef944 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_diagnostics.go @@ -0,0 +1,8 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type Diagnostics struct { + Conditions *[]DiagnosticCondition `json:"conditions,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_documentdboutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_documentdboutputdatasource.go new file mode 100644 index 000000000000..15079a002be4 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_documentdboutputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ OutputDataSource = DocumentDbOutputDataSource{} + +type DocumentDbOutputDataSource struct { + Properties *DocumentDbOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = DocumentDbOutputDataSource{} + +func (s DocumentDbOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper DocumentDbOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling DocumentDbOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling DocumentDbOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Storage/DocumentDB" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling DocumentDbOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_documentdboutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_documentdboutputdatasourceproperties.go new file mode 100644 index 000000000000..627ffac184b3 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_documentdboutputdatasourceproperties.go @@ -0,0 +1,13 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type DocumentDbOutputDataSourceProperties struct { + AccountId *string `json:"accountId,omitempty"` + AccountKey *string `json:"accountKey,omitempty"` + CollectionNamePattern *string `json:"collectionNamePattern,omitempty"` + Database *string `json:"database,omitempty"` + DocumentId *string `json:"documentId,omitempty"` + PartitionKey *string `json:"partitionKey,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhuboutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhuboutputdatasource.go new file mode 100644 index 000000000000..291857248825 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhuboutputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ OutputDataSource = EventHubOutputDataSource{} + +type EventHubOutputDataSource struct { + Properties *EventHubOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = EventHubOutputDataSource{} + +func (s EventHubOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper EventHubOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling EventHubOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling EventHubOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.ServiceBus/EventHub" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling EventHubOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhuboutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhuboutputdatasourceproperties.go new file mode 100644 index 000000000000..356546748867 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhuboutputdatasourceproperties.go @@ -0,0 +1,14 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type EventHubOutputDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + EventHubName *string `json:"eventHubName,omitempty"` + PartitionKey *string `json:"partitionKey,omitempty"` + PropertyColumns *[]string `json:"propertyColumns,omitempty"` + ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubstreaminputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubstreaminputdatasource.go new file mode 100644 index 000000000000..cacc6f7ee788 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubstreaminputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ StreamInputDataSource = EventHubStreamInputDataSource{} + +type EventHubStreamInputDataSource struct { + Properties *EventHubStreamInputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from StreamInputDataSource +} + +var _ json.Marshaler = EventHubStreamInputDataSource{} + +func (s EventHubStreamInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper EventHubStreamInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling EventHubStreamInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling EventHubStreamInputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.ServiceBus/EventHub" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling EventHubStreamInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubstreaminputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubstreaminputdatasourceproperties.go new file mode 100644 index 000000000000..c797e4ef47be --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubstreaminputdatasourceproperties.go @@ -0,0 +1,13 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type EventHubStreamInputDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + ConsumerGroupName *string `json:"consumerGroupName,omitempty"` + EventHubName *string `json:"eventHubName,omitempty"` + ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubv2outputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubv2outputdatasource.go new file mode 100644 index 000000000000..eb9c2affe6ea --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubv2outputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ OutputDataSource = EventHubV2OutputDataSource{} + +type EventHubV2OutputDataSource struct { + Properties *EventHubOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = EventHubV2OutputDataSource{} + +func (s EventHubV2OutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper EventHubV2OutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling EventHubV2OutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling EventHubV2OutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.EventHub/EventHub" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling EventHubV2OutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubv2streaminputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubv2streaminputdatasource.go new file mode 100644 index 000000000000..9236cbf7ed4f --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_eventhubv2streaminputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ StreamInputDataSource = EventHubV2StreamInputDataSource{} + +type EventHubV2StreamInputDataSource struct { + Properties *EventHubStreamInputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from StreamInputDataSource +} + +var _ json.Marshaler = EventHubV2StreamInputDataSource{} + +func (s EventHubV2StreamInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper EventHubV2StreamInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling EventHubV2StreamInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling EventHubV2StreamInputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.EventHub/EventHub" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling EventHubV2StreamInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_filereferenceinputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_filereferenceinputdatasource.go new file mode 100644 index 000000000000..1d09bf0cc6f0 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_filereferenceinputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ ReferenceInputDataSource = FileReferenceInputDataSource{} + +type FileReferenceInputDataSource struct { + Properties *FileReferenceInputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from ReferenceInputDataSource +} + +var _ json.Marshaler = FileReferenceInputDataSource{} + +func (s FileReferenceInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper FileReferenceInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling FileReferenceInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling FileReferenceInputDataSource: %+v", err) + } + decoded["type"] = "File" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling FileReferenceInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_filereferenceinputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_filereferenceinputdatasourceproperties.go new file mode 100644 index 000000000000..610e6a0d1183 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_filereferenceinputdatasourceproperties.go @@ -0,0 +1,8 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type FileReferenceInputDataSourceProperties struct { + Path *string `json:"path,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_function.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_function.go new file mode 100644 index 000000000000..c7a271194d06 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_function.go @@ -0,0 +1,44 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type Function struct { + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Properties FunctionProperties `json:"properties"` + Type *string `json:"type,omitempty"` +} + +var _ json.Unmarshaler = &Function{} + +func (s *Function) UnmarshalJSON(bytes []byte) error { + type alias Function + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into Function: %+v", err) + } + + s.Id = decoded.Id + s.Name = decoded.Name + s.Type = decoded.Type + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling Function into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["properties"]; ok { + impl, err := unmarshalFunctionPropertiesImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Properties' for 'Function': %+v", err) + } + s.Properties = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionbinding.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionbinding.go new file mode 100644 index 000000000000..5574ecac8bbe --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionbinding.go @@ -0,0 +1,56 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
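Note: Function above cannot decode `properties` directly because the field is interface-typed, hence the two-pass UnmarshalJSON: an alias pass for the concrete fields, then a map[string]json.RawMessage pass that hands the raw payload to the discriminator dispatch. A standalone sketch of the same two-pass approach, with simplified hypothetical types:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

type Props interface{}

type ScalarProps struct {
	Etag *string `json:"etag,omitempty"`
}

type Function struct {
	Name       *string `json:"name,omitempty"`
	Properties Props   `json:"properties"`
}

func (s *Function) UnmarshalJSON(b []byte) error {
	// Pass 1: the alias type drops this method, so the plain fields
	// decode without recursing back into UnmarshalJSON.
	type alias Function
	var decoded alias
	if err := json.Unmarshal(b, &decoded); err != nil {
		return fmt.Errorf("unmarshaling into Function: %+v", err)
	}
	s.Name = decoded.Name

	// Pass 2: keep properties as raw bytes and dispatch on "type".
	var temp map[string]json.RawMessage
	if err := json.Unmarshal(b, &temp); err != nil {
		return err
	}
	if v, ok := temp["properties"]; ok {
		var disc struct {
			Type string `json:"type"`
		}
		if err := json.Unmarshal(v, &disc); err != nil {
			return err
		}
		if strings.EqualFold(disc.Type, "Scalar") {
			var out ScalarProps
			if err := json.Unmarshal(v, &out); err != nil {
				return err
			}
			s.Properties = out
		}
	}
	return nil
}

func main() {
	var f Function
	_ = json.Unmarshal([]byte(`{"name":"udf1","properties":{"type":"Scalar","etag":"abc"}}`), &f)
	fmt.Printf("%T\n", f.Properties) // main.ScalarProps
}
```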
+ +type FunctionBinding interface { +} + +func unmarshalFunctionBindingImplementation(input []byte) (FunctionBinding, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling FunctionBinding into map[string]interface: %+v", err) + } + + value, ok := temp["type"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Microsoft.MachineLearning/WebService") { + var out AzureMachineLearningWebServiceFunctionBinding + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureMachineLearningWebServiceFunctionBinding: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.StreamAnalytics/JavascriptUdf") { + var out JavaScriptFunctionBinding + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into JavaScriptFunctionBinding: %+v", err) + } + return out, nil + } + + type RawFunctionBindingImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawFunctionBindingImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionconfiguration.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionconfiguration.go new file mode 100644 index 000000000000..5216fe6262ab --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionconfiguration.go @@ -0,0 +1,42 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type FunctionConfiguration struct { + Binding FunctionBinding `json:"binding"` + Inputs *[]FunctionInput `json:"inputs,omitempty"` + Output *FunctionOutput `json:"output,omitempty"` +} + +var _ json.Unmarshaler = &FunctionConfiguration{} + +func (s *FunctionConfiguration) UnmarshalJSON(bytes []byte) error { + type alias FunctionConfiguration + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into FunctionConfiguration: %+v", err) + } + + s.Inputs = decoded.Inputs + s.Output = decoded.Output + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling FunctionConfiguration into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["binding"]; ok { + impl, err := unmarshalFunctionBindingImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Binding' for 'FunctionConfiguration': %+v", err) + } + s.Binding = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functioninput.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functioninput.go new file mode 100644 index 000000000000..33c784c6688c --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functioninput.go @@ -0,0 +1,9 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information.
+ +type FunctionInput struct { + DataType *string `json:"dataType,omitempty"` + IsConfigurationParameter *bool `json:"isConfigurationParameter,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionoutput.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionoutput.go new file mode 100644 index 000000000000..b61c0863dfbf --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionoutput.go @@ -0,0 +1,8 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type FunctionOutput struct { + DataType *string `json:"dataType,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionproperties.go new file mode 100644 index 000000000000..548b85141fe5 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_functionproperties.go @@ -0,0 +1,56 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type FunctionProperties interface { +} + +func unmarshalFunctionPropertiesImplementation(input []byte) (FunctionProperties, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling FunctionProperties into map[string]interface: %+v", err) + } + + value, ok := temp["type"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Aggregate") { + var out AggregateFunctionProperties + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AggregateFunctionProperties: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Scalar") { + var out ScalarFunctionProperties + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into ScalarFunctionProperties: %+v", err) + } + return out, nil + } + + type RawFunctionPropertiesImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawFunctionPropertiesImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_gatewaymessagebusoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_gatewaymessagebusoutputdatasource.go new file mode 100644 index 000000000000..9f157cca9317 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_gatewaymessagebusoutputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information.
+ +var _ OutputDataSource = GatewayMessageBusOutputDataSource{} + +type GatewayMessageBusOutputDataSource struct { + Properties *GatewayMessageBusSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = GatewayMessageBusOutputDataSource{} + +func (s GatewayMessageBusOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper GatewayMessageBusOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling GatewayMessageBusOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling GatewayMessageBusOutputDataSource: %+v", err) + } + decoded["type"] = "GatewayMessageBus" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling GatewayMessageBusOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_gatewaymessagebussourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_gatewaymessagebussourceproperties.go new file mode 100644 index 000000000000..8b2e278dc588 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_gatewaymessagebussourceproperties.go @@ -0,0 +1,8 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type GatewayMessageBusSourceProperties struct { + Topic *string `json:"topic,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_gatewaymessagebusstreaminputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_gatewaymessagebusstreaminputdatasource.go new file mode 100644 index 000000000000..5f9bcf69fcae --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_gatewaymessagebusstreaminputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ StreamInputDataSource = GatewayMessageBusStreamInputDataSource{} + +type GatewayMessageBusStreamInputDataSource struct { + Properties *GatewayMessageBusSourceProperties `json:"properties,omitempty"` + + // Fields inherited from StreamInputDataSource +} + +var _ json.Marshaler = GatewayMessageBusStreamInputDataSource{} + +func (s GatewayMessageBusStreamInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper GatewayMessageBusStreamInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling GatewayMessageBusStreamInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling GatewayMessageBusStreamInputDataSource: %+v", err) + } + decoded["type"] = "GatewayMessageBus" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling GatewayMessageBusStreamInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_identity.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_identity.go new file mode 100644 index 000000000000..aedf5b659e94 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_identity.go @@ -0,0 +1,10 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type Identity struct { + PrincipalId *string `json:"principalId,omitempty"` + TenantId *string `json:"tenantId,omitempty"` + Type *string `json:"type,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_input.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_input.go new file mode 100644 index 000000000000..a68319e701f1 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_input.go @@ -0,0 +1,44 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type Input struct { + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Properties InputProperties `json:"properties"` + Type *string `json:"type,omitempty"` +} + +var _ json.Unmarshaler = &Input{} + +func (s *Input) UnmarshalJSON(bytes []byte) error { + type alias Input + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into Input: %+v", err) + } + + s.Id = decoded.Id + s.Name = decoded.Name + s.Type = decoded.Type + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling Input into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["properties"]; ok { + impl, err := unmarshalInputPropertiesImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Properties' for 'Input': %+v", err) + } + s.Properties = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_inputproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_inputproperties.go new file mode 100644 index 000000000000..85734d0cdd01 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_inputproperties.go @@ -0,0 +1,56 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type InputProperties interface { +} + +func unmarshalInputPropertiesImplementation(input []byte) (InputProperties, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling InputProperties into map[string]interface: %+v", err) + } + + value, ok := temp["type"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Reference") { + var out ReferenceInputProperties + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into ReferenceInputProperties: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Stream") { + var out StreamInputProperties + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into StreamInputProperties: %+v", err) + } + return out, nil + } + + type RawInputPropertiesImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawInputPropertiesImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_iothubstreaminputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_iothubstreaminputdatasource.go new file mode 100644 index 000000000000..dd9c76103f20 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_iothubstreaminputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
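Note: an unrecognised `type` value is deliberately not an error in unmarshalInputPropertiesImplementation and its siblings: the helpers fall back to a Raw...Impl holder that keeps the discriminator string and the raw key/value map, so variants added by newer API versions decode without failing the whole response. A hedged standalone sketch of that fallback, again with simplified types rather than the generated ones:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

type InputProps interface{}

type StreamProps struct {
	PartitionKey *string `json:"partitionKey,omitempty"`
}

// RawInputProps mirrors the generated Raw*Impl holders: unknown
// variants keep the discriminator plus the raw decoded payload.
type RawInputProps struct {
	Type   string
	Values map[string]interface{}
}

func unmarshalInputProps(input []byte) (InputProps, error) {
	var temp map[string]interface{}
	if err := json.Unmarshal(input, &temp); err != nil {
		return nil, err
	}
	value, ok := temp["type"].(string)
	if !ok {
		return nil, nil
	}
	if strings.EqualFold(value, "Stream") {
		var out StreamProps
		if err := json.Unmarshal(input, &out); err != nil {
			return nil, err
		}
		return out, nil
	}
	// No known variant matched: keep everything we saw.
	return RawInputProps{Type: value, Values: temp}, nil
}

func main() {
	got, _ := unmarshalInputProps([]byte(`{"type":"SomeFutureKind","foo":1}`))
	fmt.Printf("%#v\n", got) // main.RawInputProps{Type:"SomeFutureKind", ...}
}
```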
+ +var _ StreamInputDataSource = IoTHubStreamInputDataSource{} + +type IoTHubStreamInputDataSource struct { + Properties *IoTHubStreamInputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from StreamInputDataSource +} + +var _ json.Marshaler = IoTHubStreamInputDataSource{} + +func (s IoTHubStreamInputDataSource) MarshalJSON() ([]byte, error) { + type wrapper IoTHubStreamInputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling IoTHubStreamInputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling IoTHubStreamInputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.Devices/IotHubs" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling IoTHubStreamInputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_iothubstreaminputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_iothubstreaminputdatasourceproperties.go new file mode 100644 index 000000000000..cbf5a58df7a8 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_iothubstreaminputdatasourceproperties.go @@ -0,0 +1,12 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type IoTHubStreamInputDataSourceProperties struct { + ConsumerGroupName *string `json:"consumerGroupName,omitempty"` + Endpoint *string `json:"endpoint,omitempty"` + IotHubNamespace *string `json:"iotHubNamespace,omitempty"` + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_javascriptfunctionbinding.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_javascriptfunctionbinding.go new file mode 100644 index 000000000000..72a1c06fd0a0 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_javascriptfunctionbinding.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ FunctionBinding = JavaScriptFunctionBinding{} + +type JavaScriptFunctionBinding struct { + Properties *JavaScriptFunctionBindingProperties `json:"properties,omitempty"` + + // Fields inherited from FunctionBinding +} + +var _ json.Marshaler = JavaScriptFunctionBinding{} + +func (s JavaScriptFunctionBinding) MarshalJSON() ([]byte, error) { + type wrapper JavaScriptFunctionBinding + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling JavaScriptFunctionBinding: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling JavaScriptFunctionBinding: %+v", err) + } + decoded["type"] = "Microsoft.StreamAnalytics/JavascriptUdf" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling JavaScriptFunctionBinding: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_javascriptfunctionbindingproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_javascriptfunctionbindingproperties.go new file mode 100644 index 000000000000..e6188434e02a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_javascriptfunctionbindingproperties.go @@ -0,0 +1,8 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type JavaScriptFunctionBindingProperties struct { + Script *string `json:"script,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_jobstorageaccount.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_jobstorageaccount.go new file mode 100644 index 000000000000..79335cf0ed88 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_jobstorageaccount.go @@ -0,0 +1,10 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type JobStorageAccount struct { + AccountKey *string `json:"accountKey,omitempty"` + AccountName *string `json:"accountName,omitempty"` + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_jsonserialization.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_jsonserialization.go new file mode 100644 index 000000000000..e1e25c3058e8 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_jsonserialization.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ Serialization = JsonSerialization{} + +type JsonSerialization struct { + Properties *JsonSerializationProperties `json:"properties,omitempty"` + + // Fields inherited from Serialization +} + +var _ json.Marshaler = JsonSerialization{} + +func (s JsonSerialization) MarshalJSON() ([]byte, error) { + type wrapper JsonSerialization + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling JsonSerialization: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling JsonSerialization: %+v", err) + } + decoded["type"] = "Json" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling JsonSerialization: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_jsonserializationproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_jsonserializationproperties.go new file mode 100644 index 000000000000..ee01fd28fd05 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_jsonserializationproperties.go @@ -0,0 +1,9 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type JsonSerializationProperties struct { + Encoding *Encoding `json:"encoding,omitempty"` + Format *JsonOutputSerializationFormat `json:"format,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_output.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_output.go new file mode 100644 index 000000000000..bb53d4ccbafb --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_output.go @@ -0,0 +1,11 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type Output struct { + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Properties *OutputProperties `json:"properties,omitempty"` + Type *string `json:"type,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_outputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_outputdatasource.go new file mode 100644 index 000000000000..dae4cb7ae29a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_outputdatasource.go @@ -0,0 +1,144 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type OutputDataSource interface { +} + +func unmarshalOutputDataSourceImplementation(input []byte) (OutputDataSource, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling OutputDataSource into map[string]interface: %+v", err) + } + + value, ok := temp["type"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Microsoft.DataLake/Accounts") { + var out AzureDataLakeStoreOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureDataLakeStoreOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.AzureFunction") { + var out AzureFunctionOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureFunctionOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.Sql/Server/Database") { + var out AzureSqlDatabaseOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureSqlDatabaseOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.Sql/Server/DataWarehouse") { + var out AzureSynapseOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureSynapseOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.Storage/Table") { + var out AzureTableOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureTableOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.Storage/Blob") { + var out BlobOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into BlobOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.Storage/DocumentDB") { + var out DocumentDbOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into DocumentDbOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.ServiceBus/EventHub") { + var out EventHubOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into EventHubOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.EventHub/EventHub") { + var out EventHubV2OutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into EventHubV2OutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "GatewayMessageBus") { + var out GatewayMessageBusOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into GatewayMessageBusOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "PowerBI") { + var out PowerBIOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into PowerBIOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.ServiceBus/Queue") { + var out ServiceBusQueueOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into ServiceBusQueueOutputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.ServiceBus/Topic") { + var out ServiceBusTopicOutputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into ServiceBusTopicOutputDataSource: %+v", err) + } + return out, nil + } + + type RawOutputDataSourceImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawOutputDataSourceImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_outputproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_outputproperties.go new file mode 100644 index 000000000000..a979ead0a94d --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_outputproperties.go @@ -0,0 +1,55 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type OutputProperties struct { + Datasource OutputDataSource `json:"datasource"` + Diagnostics *Diagnostics `json:"diagnostics,omitempty"` + Etag *string `json:"etag,omitempty"` + Serialization Serialization `json:"serialization"` + SizeWindow *float64 `json:"sizeWindow,omitempty"` + TimeWindow *string `json:"timeWindow,omitempty"` +} + +var _ json.Unmarshaler = &OutputProperties{} + +func (s *OutputProperties) UnmarshalJSON(bytes []byte) error { + type alias OutputProperties + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into OutputProperties: %+v", err) + } + + s.Diagnostics = decoded.Diagnostics + s.Etag = decoded.Etag + s.SizeWindow = decoded.SizeWindow + s.TimeWindow = decoded.TimeWindow + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling OutputProperties into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["datasource"]; ok { + impl, err := unmarshalOutputDataSourceImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Datasource' for 'OutputProperties': %+v", err) + } + s.Datasource = impl + } + + if v, ok := temp["serialization"]; ok { + impl, err := unmarshalSerializationImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Serialization' for 'OutputProperties': %+v", err) + } + s.Serialization = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_parquetserialization.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_parquetserialization.go new file mode 100644 index 000000000000..2e44cd8b9095 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_parquetserialization.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
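Note: on the marshal side none of this machinery needs calling directly; because OutputProperties' interface-typed fields hold values whose MarshalJSON injects the discriminators, building a payload is plain struct literals. A sketch of the expected wire shape, assuming the package is imported from the vendored path above and that the BlobOutputDataSource model mirrors the sibling models shown here:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs"
)

func main() {
	container := "logs"
	props := streamingjobs.OutputProperties{
		Datasource: streamingjobs.BlobOutputDataSource{
			Properties: &streamingjobs.BlobOutputDataSourceProperties{
				Container: &container,
			},
		},
		Serialization: streamingjobs.JsonSerialization{},
	}

	b, _ := json.Marshal(props)
	fmt.Println(string(b))
	// {"datasource":{"properties":{"container":"logs"},"type":"Microsoft.Storage/Blob"},
	//  "serialization":{"type":"Json"}}
}
```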
+ +var _ Serialization = ParquetSerialization{} + +type ParquetSerialization struct { + Properties *interface{} `json:"properties,omitempty"` + + // Fields inherited from Serialization +} + +var _ json.Marshaler = ParquetSerialization{} + +func (s ParquetSerialization) MarshalJSON() ([]byte, error) { + type wrapper ParquetSerialization + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling ParquetSerialization: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling ParquetSerialization: %+v", err) + } + decoded["type"] = "Parquet" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling ParquetSerialization: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_powerbioutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_powerbioutputdatasource.go new file mode 100644 index 000000000000..2a64ef14547a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_powerbioutputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ OutputDataSource = PowerBIOutputDataSource{} + +type PowerBIOutputDataSource struct { + Properties *PowerBIOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = PowerBIOutputDataSource{} + +func (s PowerBIOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper PowerBIOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling PowerBIOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling PowerBIOutputDataSource: %+v", err) + } + decoded["type"] = "PowerBI" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling PowerBIOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_powerbioutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_powerbioutputdatasourceproperties.go new file mode 100644 index 000000000000..88be280fca5a --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_powerbioutputdatasourceproperties.go @@ -0,0 +1,15 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type PowerBIOutputDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + Dataset *string `json:"dataset,omitempty"` + GroupId *string `json:"groupId,omitempty"` + GroupName *string `json:"groupName,omitempty"` + RefreshToken *string `json:"refreshToken,omitempty"` + Table *string `json:"table,omitempty"` + TokenUserDisplayName *string `json:"tokenUserDisplayName,omitempty"` + TokenUserPrincipalName *string `json:"tokenUserPrincipalName,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_referenceinputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_referenceinputdatasource.go new file mode 100644 index 000000000000..bf17be7eddb1 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_referenceinputdatasource.go @@ -0,0 +1,64 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ReferenceInputDataSource interface { +} + +func unmarshalReferenceInputDataSourceImplementation(input []byte) (ReferenceInputDataSource, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling ReferenceInputDataSource into map[string]interface: %+v", err) + } + + value, ok := temp["type"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Microsoft.Sql/Server/Database") { + var out AzureSqlReferenceInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureSqlReferenceInputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.Storage/Blob") { + var out BlobReferenceInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into BlobReferenceInputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "File") { + var out FileReferenceInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into FileReferenceInputDataSource: %+v", err) + } + return out, nil + } + + type RawReferenceInputDataSourceImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawReferenceInputDataSourceImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_referenceinputproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_referenceinputproperties.go new file mode 100644 index 000000000000..97fac7b527cc --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_referenceinputproperties.go @@ -0,0 +1,83 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ InputProperties = ReferenceInputProperties{} + +type ReferenceInputProperties struct { + Datasource ReferenceInputDataSource `json:"datasource"` + + // Fields inherited from InputProperties + Compression *Compression `json:"compression,omitempty"` + Diagnostics *Diagnostics `json:"diagnostics,omitempty"` + Etag *string `json:"etag,omitempty"` + PartitionKey *string `json:"partitionKey,omitempty"` + Serialization Serialization `json:"serialization"` +} + +var _ json.Marshaler = ReferenceInputProperties{} + +func (s ReferenceInputProperties) MarshalJSON() ([]byte, error) { + type wrapper ReferenceInputProperties + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling ReferenceInputProperties: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling ReferenceInputProperties: %+v", err) + } + decoded["type"] = "Reference" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling ReferenceInputProperties: %+v", err) + } + + return encoded, nil +} + +var _ json.Unmarshaler = &ReferenceInputProperties{} + +func (s *ReferenceInputProperties) UnmarshalJSON(bytes []byte) error { + type alias ReferenceInputProperties + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into ReferenceInputProperties: %+v", err) + } + + s.Compression = decoded.Compression + s.Diagnostics = decoded.Diagnostics + s.Etag = decoded.Etag + s.PartitionKey = decoded.PartitionKey + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling ReferenceInputProperties into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["datasource"]; ok { + impl, err := unmarshalReferenceInputDataSourceImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Datasource' for 'ReferenceInputProperties': %+v", err) + } + s.Datasource = impl + } + + if v, ok := temp["serialization"]; ok { + impl, err := unmarshalSerializationImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Serialization' for 'ReferenceInputProperties': %+v", err) + } + s.Serialization = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_scalarfunctionproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_scalarfunctionproperties.go new file mode 100644 index 000000000000..5804ccf9bcda --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_scalarfunctionproperties.go @@ -0,0 +1,42 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
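As a usage sketch (illustrative only, not part of the vendored code): the custom UnmarshalJSON above is what lets callers recover the concrete datasource and serialization types from a generic payload. The JSON below is hand-written for illustration, not captured from the API.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs"
)

func main() {
	// The "type" discriminators select the concrete ReferenceInputDataSource
	// and Serialization implementations during unmarshalling.
	raw := []byte(`{
		"type": "Reference",
		"datasource": {"type": "Microsoft.Storage/Blob"},
		"serialization": {"type": "Json"}
	}`)

	var props streamingjobs.ReferenceInputProperties
	if err := json.Unmarshal(raw, &props); err != nil {
		panic(err)
	}

	// Datasource holds the concrete type, so a plain type assertion works.
	if _, ok := props.Datasource.(streamingjobs.BlobReferenceInputDataSource); ok {
		fmt.Println("blob-backed reference input")
	}
}
```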
+ +var _ FunctionProperties = ScalarFunctionProperties{} + +type ScalarFunctionProperties struct { + + // Fields inherited from FunctionProperties + Etag *string `json:"etag,omitempty"` + Properties *FunctionConfiguration `json:"properties,omitempty"` +} + +var _ json.Marshaler = ScalarFunctionProperties{} + +func (s ScalarFunctionProperties) MarshalJSON() ([]byte, error) { + type wrapper ScalarFunctionProperties + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling ScalarFunctionProperties: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling ScalarFunctionProperties: %+v", err) + } + decoded["type"] = "Scalar" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling ScalarFunctionProperties: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_scalestreamingjobparameters.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_scalestreamingjobparameters.go new file mode 100644 index 000000000000..f90851d7a4ea --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_scalestreamingjobparameters.go @@ -0,0 +1,8 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ScaleStreamingJobParameters struct { + StreamingUnits *int64 `json:"streamingUnits,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_serialization.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_serialization.go new file mode 100644 index 000000000000..bc423e8ce0ed --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_serialization.go @@ -0,0 +1,72 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type Serialization interface { +} + +func unmarshalSerializationImplementation(input []byte) (Serialization, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling Serialization into map[string]interface: %+v", err) + } + + value, ok := temp["type"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Avro") { + var out AvroSerialization + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AvroSerialization: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Csv") { + var out CsvSerialization + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into CsvSerialization: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Json") { + var out JsonSerialization + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into JsonSerialization: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Parquet") { + var out ParquetSerialization + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into ParquetSerialization: %+v", err) + } + return out, nil + } + + type RawSerializationImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawSerializationImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebusqueueoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebusqueueoutputdatasource.go new file mode 100644 index 000000000000..790e58f461bf --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebusqueueoutputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
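The marshal side is the mirror image: each concrete Serialization re-injects its own discriminator, so round-tripping through the unmarshaller above is lossless. A minimal sketch, assuming CsvSerialization carries the same wrapper-style MarshalJSON as the ParquetSerialization shown earlier:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs"
)

func main() {
	// MarshalJSON on the wrapper type adds the "type" discriminator that
	// unmarshalSerializationImplementation dispatches on.
	payload, err := json.Marshal(streamingjobs.CsvSerialization{})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(payload)) // expected: {"type":"Csv"}
}
```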
+ +var _ OutputDataSource = ServiceBusQueueOutputDataSource{} + +type ServiceBusQueueOutputDataSource struct { + Properties *ServiceBusQueueOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = ServiceBusQueueOutputDataSource{} + +func (s ServiceBusQueueOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper ServiceBusQueueOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling ServiceBusQueueOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling ServiceBusQueueOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.ServiceBus/Queue" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling ServiceBusQueueOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebusqueueoutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebusqueueoutputdatasourceproperties.go new file mode 100644 index 000000000000..6043b3ac5144 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebusqueueoutputdatasourceproperties.go @@ -0,0 +1,14 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ServiceBusQueueOutputDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + PropertyColumns *[]string `json:"propertyColumns,omitempty"` + QueueName *string `json:"queueName,omitempty"` + ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` + SystemPropertyColumns *interface{} `json:"systemPropertyColumns,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebustopicoutputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebustopicoutputdatasource.go new file mode 100644 index 000000000000..57f1938b5ee1 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebustopicoutputdatasource.go @@ -0,0 +1,41 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ OutputDataSource = ServiceBusTopicOutputDataSource{} + +type ServiceBusTopicOutputDataSource struct { + Properties *ServiceBusTopicOutputDataSourceProperties `json:"properties,omitempty"` + + // Fields inherited from OutputDataSource +} + +var _ json.Marshaler = ServiceBusTopicOutputDataSource{} + +func (s ServiceBusTopicOutputDataSource) MarshalJSON() ([]byte, error) { + type wrapper ServiceBusTopicOutputDataSource + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling ServiceBusTopicOutputDataSource: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling ServiceBusTopicOutputDataSource: %+v", err) + } + decoded["type"] = "Microsoft.ServiceBus/Topic" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling ServiceBusTopicOutputDataSource: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebustopicoutputdatasourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebustopicoutputdatasourceproperties.go new file mode 100644 index 000000000000..aa19027e00f1 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_servicebustopicoutputdatasourceproperties.go @@ -0,0 +1,14 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ServiceBusTopicOutputDataSourceProperties struct { + AuthenticationMode *AuthenticationMode `json:"authenticationMode,omitempty"` + PropertyColumns *[]string `json:"propertyColumns,omitempty"` + ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` + SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` + SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` + SystemPropertyColumns *map[string]string `json:"systemPropertyColumns,omitempty"` + TopicName *string `json:"topicName,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_sku.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_sku.go new file mode 100644 index 000000000000..596a07156374 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_sku.go @@ -0,0 +1,8 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type Sku struct { + Name *SkuName `json:"name,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_startstreamingjobparameters.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_startstreamingjobparameters.go new file mode 100644 index 000000000000..d16eab920d25 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_startstreamingjobparameters.go @@ -0,0 +1,27 @@ +package streamingjobs + +import ( + "time" + + "github.com/hashicorp/go-azure-helpers/lang/dates" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type StartStreamingJobParameters struct { + OutputStartMode *OutputStartMode `json:"outputStartMode,omitempty"` + OutputStartTime *string `json:"outputStartTime,omitempty"` +} + +func (o *StartStreamingJobParameters) GetOutputStartTimeAsTime() (*time.Time, error) { + if o.OutputStartTime == nil { + return nil, nil + } + return dates.ParseAsFormat(o.OutputStartTime, "2006-01-02T15:04:05Z07:00") +} + +func (o *StartStreamingJobParameters) SetOutputStartTimeAsTime(input time.Time) { + formatted := input.Format("2006-01-02T15:04:05Z07:00") + o.OutputStartTime = &formatted +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_storageaccount.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_storageaccount.go new file mode 100644 index 000000000000..62276225e2c0 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_storageaccount.go @@ -0,0 +1,9 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type StorageAccount struct { + AccountKey *string `json:"accountKey,omitempty"` + AccountName *string `json:"accountName,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streamingjob.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streamingjob.go new file mode 100644 index 000000000000..ac7a90da7728 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streamingjob.go @@ -0,0 +1,14 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
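The Get/Set*AsTime helpers spare callers from hand-formatting RFC 3339 timestamps. A minimal sketch; the OutputStartModeCustomTime constant name is an assumption, since this package's enums are not part of this hunk:

```go
package main

import (
	"fmt"
	"time"

	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs"
)

func main() {
	mode := streamingjobs.OutputStartModeCustomTime // assumed constant name
	params := streamingjobs.StartStreamingJobParameters{OutputStartMode: &mode}

	// SetOutputStartTimeAsTime stores the RFC 3339 form the API expects.
	params.SetOutputStartTimeAsTime(time.Date(2022, time.November, 22, 18, 0, 0, 0, time.UTC))
	fmt.Println(*params.OutputStartTime) // 2022-11-22T18:00:00Z
}
```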
+ +type StreamingJob struct { + Id *string `json:"id,omitempty"` + Identity *Identity `json:"identity,omitempty"` + Location *string `json:"location,omitempty"` + Name *string `json:"name,omitempty"` + Properties *StreamingJobProperties `json:"properties,omitempty"` + Tags *map[string]string `json:"tags,omitempty"` + Type *string `json:"type,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streamingjobproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streamingjobproperties.go new file mode 100644 index 000000000000..ad27925424c7 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streamingjobproperties.go @@ -0,0 +1,72 @@ +package streamingjobs + +import ( + "time" + + "github.com/hashicorp/go-azure-helpers/lang/dates" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type StreamingJobProperties struct { + Cluster *ClusterInfo `json:"cluster,omitempty"` + CompatibilityLevel *CompatibilityLevel `json:"compatibilityLevel,omitempty"` + ContentStoragePolicy *ContentStoragePolicy `json:"contentStoragePolicy,omitempty"` + CreatedDate *string `json:"createdDate,omitempty"` + DataLocale *string `json:"dataLocale,omitempty"` + Etag *string `json:"etag,omitempty"` + EventsLateArrivalMaxDelayInSeconds *int64 `json:"eventsLateArrivalMaxDelayInSeconds,omitempty"` + EventsOutOfOrderMaxDelayInSeconds *int64 `json:"eventsOutOfOrderMaxDelayInSeconds,omitempty"` + EventsOutOfOrderPolicy *EventsOutOfOrderPolicy `json:"eventsOutOfOrderPolicy,omitempty"` + Functions *[]Function `json:"functions,omitempty"` + Inputs *[]Input `json:"inputs,omitempty"` + JobId *string `json:"jobId,omitempty"` + JobState *string `json:"jobState,omitempty"` + JobStorageAccount *JobStorageAccount `json:"jobStorageAccount,omitempty"` + JobType *JobType `json:"jobType,omitempty"` + LastOutputEventTime *string `json:"lastOutputEventTime,omitempty"` + OutputErrorPolicy *OutputErrorPolicy `json:"outputErrorPolicy,omitempty"` + OutputStartMode *OutputStartMode `json:"outputStartMode,omitempty"` + OutputStartTime *string `json:"outputStartTime,omitempty"` + Outputs *[]Output `json:"outputs,omitempty"` + ProvisioningState *string `json:"provisioningState,omitempty"` + Sku *Sku `json:"sku,omitempty"` + Transformation *Transformation `json:"transformation,omitempty"` +} + +func (o *StreamingJobProperties) GetCreatedDateAsTime() (*time.Time, error) { + if o.CreatedDate == nil { + return nil, nil + } + return dates.ParseAsFormat(o.CreatedDate, "2006-01-02T15:04:05Z07:00") +} + +func (o *StreamingJobProperties) SetCreatedDateAsTime(input time.Time) { + formatted := input.Format("2006-01-02T15:04:05Z07:00") + o.CreatedDate = &formatted +} + +func (o *StreamingJobProperties) GetLastOutputEventTimeAsTime() (*time.Time, error) { + if o.LastOutputEventTime == nil { + return nil, nil + } + return dates.ParseAsFormat(o.LastOutputEventTime, "2006-01-02T15:04:05Z07:00") +} + +func (o *StreamingJobProperties) SetLastOutputEventTimeAsTime(input time.Time) { + formatted := input.Format("2006-01-02T15:04:05Z07:00") + o.LastOutputEventTime = &formatted +} + +func (o *StreamingJobProperties) GetOutputStartTimeAsTime() (*time.Time, error) { + if o.OutputStartTime == nil { + return nil, nil + } + return 
dates.ParseAsFormat(o.OutputStartTime, "2006-01-02T15:04:05Z07:00") +} + +func (o *StreamingJobProperties) SetOutputStartTimeAsTime(input time.Time) { + formatted := input.Format("2006-01-02T15:04:05Z07:00") + o.OutputStartTime = &formatted +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streaminputdatasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streaminputdatasource.go new file mode 100644 index 000000000000..ea8260944bde --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streaminputdatasource.go @@ -0,0 +1,80 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type StreamInputDataSource interface { +} + +func unmarshalStreamInputDataSourceImplementation(input []byte) (StreamInputDataSource, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling StreamInputDataSource into map[string]interface: %+v", err) + } + + value, ok := temp["type"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "Microsoft.Storage/Blob") { + var out BlobStreamInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into BlobStreamInputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.ServiceBus/EventHub") { + var out EventHubStreamInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into EventHubStreamInputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.EventHub/EventHub") { + var out EventHubV2StreamInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into EventHubV2StreamInputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "GatewayMessageBus") { + var out GatewayMessageBusStreamInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into GatewayMessageBusStreamInputDataSource: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Microsoft.Devices/IotHubs") { + var out IoTHubStreamInputDataSource + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into IoTHubStreamInputDataSource: %+v", err) + } + return out, nil + } + + type RawStreamInputDataSourceImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawStreamInputDataSourceImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streaminputproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streaminputproperties.go new file mode 100644 index 000000000000..5b50b35b40ba --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_streaminputproperties.go @@ -0,0 +1,83 @@ +package streamingjobs + +import ( + "encoding/json" + "fmt" +) 
+ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ InputProperties = StreamInputProperties{} + +type StreamInputProperties struct { + Datasource StreamInputDataSource `json:"datasource"` + + // Fields inherited from InputProperties + Compression *Compression `json:"compression,omitempty"` + Diagnostics *Diagnostics `json:"diagnostics,omitempty"` + Etag *string `json:"etag,omitempty"` + PartitionKey *string `json:"partitionKey,omitempty"` + Serialization Serialization `json:"serialization"` +} + +var _ json.Marshaler = StreamInputProperties{} + +func (s StreamInputProperties) MarshalJSON() ([]byte, error) { + type wrapper StreamInputProperties + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling StreamInputProperties: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling StreamInputProperties: %+v", err) + } + decoded["type"] = "Stream" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling StreamInputProperties: %+v", err) + } + + return encoded, nil +} + +var _ json.Unmarshaler = &StreamInputProperties{} + +func (s *StreamInputProperties) UnmarshalJSON(bytes []byte) error { + type alias StreamInputProperties + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into StreamInputProperties: %+v", err) + } + + s.Compression = decoded.Compression + s.Diagnostics = decoded.Diagnostics + s.Etag = decoded.Etag + s.PartitionKey = decoded.PartitionKey + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling StreamInputProperties into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["datasource"]; ok { + impl, err := unmarshalStreamInputDataSourceImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Datasource' for 'StreamInputProperties': %+v", err) + } + s.Datasource = impl + } + + if v, ok := temp["serialization"]; ok { + impl, err := unmarshalSerializationImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Serialization' for 'StreamInputProperties': %+v", err) + } + s.Serialization = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_transformation.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_transformation.go new file mode 100644 index 000000000000..9f12c85252fa --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_transformation.go @@ -0,0 +1,11 @@ +package streamingjobs + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+
+type Transformation struct {
+	Id         *string                   `json:"id,omitempty"`
+	Name       *string                   `json:"name,omitempty"`
+	Properties *TransformationProperties `json:"properties,omitempty"`
+	Type       *string                   `json:"type,omitempty"`
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_transformationproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_transformationproperties.go
new file mode 100644
index 000000000000..383af40f4687
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/model_transformationproperties.go
@@ -0,0 +1,11 @@
+package streamingjobs
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See NOTICE.txt in the project root for license information.
+
+type TransformationProperties struct {
+	Etag                *string  `json:"etag,omitempty"`
+	Query               *string  `json:"query,omitempty"`
+	StreamingUnits      *int64   `json:"streamingUnits,omitempty"`
+	ValidStreamingUnits *[]int64 `json:"validStreamingUnits,omitempty"`
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/predicates.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/predicates.go
new file mode 100644
index 000000000000..1e49cca3b1dd
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/predicates.go
@@ -0,0 +1,29 @@
+package streamingjobs
+
+type StreamingJobOperationPredicate struct {
+	Id       *string
+	Location *string
+	Name     *string
+	Type     *string
+}
+
+func (p StreamingJobOperationPredicate) Matches(input StreamingJob) bool {
+
+	if p.Id != nil && (input.Id == nil || *p.Id != *input.Id) {
+		return false
+	}
+
+	if p.Location != nil && (input.Location == nil || *p.Location != *input.Location) {
+		return false
+	}
+
+	if p.Name != nil && (input.Name == nil || *p.Name != *input.Name) {
+		return false
+	}
+
+	if p.Type != nil && (input.Type == nil || *p.Type != *input.Type) {
+		return false
+	}
+
+	return true
+}
diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/version.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/version.go
new file mode 100644
index 000000000000..2fcd3509c281
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs/version.go
@@ -0,0 +1,12 @@
+package streamingjobs
+
+import "fmt"
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See NOTICE.txt in the project root for license information.
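A usage sketch for the predicate (illustrative values only): list responses are filtered client-side by comparing only the non-nil predicate fields.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs"
)

func main() {
	name, other := "example-job", "another-job"
	jobs := []streamingjobs.StreamingJob{{Name: &name}, {Name: &other}, {}}

	// Only non-nil predicate fields participate in the match.
	predicate := streamingjobs.StreamingJobOperationPredicate{Name: &name}

	matched := 0
	for _, job := range jobs {
		if predicate.Matches(job) {
			matched++
		}
	}
	fmt.Printf("%d of %d jobs matched\n", matched, len(jobs)) // 1 of 3
}
```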
+ +const defaultApiVersion = "2020-03-01" + +func userAgent() string { + return fmt.Sprintf("hashicorp/go-azure-sdk/streamingjobs/%s", defaultApiVersion) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/README.md b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/README.md new file mode 100644 index 000000000000..0e01725a5f72 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/README.md @@ -0,0 +1,78 @@ + +## `github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations` Documentation + +The `transformations` SDK allows for interaction with the Azure Resource Manager Service `streamanalytics` (API Version `2020-03-01`). + +This readme covers example usages, but further information on [using this SDK can be found in the project root](https://github.com/hashicorp/go-azure-sdk/tree/main/docs). + +### Import Path + +```go +import "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations" +``` + + +### Client Initialization + +```go +client := transformations.NewTransformationsClientWithBaseURI("https://management.azure.com") +client.Client.Authorizer = authorizer +``` + + +### Example Usage: `TransformationsClient.CreateOrReplace` + +```go +ctx := context.TODO() +id := transformations.NewTransformationID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "transformationValue") + +payload := transformations.Transformation{ + // ... +} + + +read, err := client.CreateOrReplace(ctx, id, payload, transformations.DefaultCreateOrReplaceOperationOptions()) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` + + +### Example Usage: `TransformationsClient.Get` + +```go +ctx := context.TODO() +id := transformations.NewTransformationID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "transformationValue") + +read, err := client.Get(ctx, id) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` + + +### Example Usage: `TransformationsClient.Update` + +```go +ctx := context.TODO() +id := transformations.NewTransformationID("12345678-1234-9876-4563-123456789012", "example-resource-group", "jobValue", "transformationValue") + +payload := transformations.Transformation{ + // ... +} + + +read, err := client.Update(ctx, id, payload, transformations.DefaultUpdateOperationOptions()) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/client.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/client.go new file mode 100644 index 000000000000..c2c425400f1f --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/client.go @@ -0,0 +1,18 @@ +package transformations + +import "github.com/Azure/go-autorest/autorest" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type TransformationsClient struct { + Client autorest.Client + baseUri string +} + +func NewTransformationsClientWithBaseURI(endpoint string) TransformationsClient { + return TransformationsClient{ + Client: autorest.NewClientWithUserAgent(userAgent()), + baseUri: endpoint, + } +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/id_transformation.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/id_transformation.go new file mode 100644 index 000000000000..19fe470b4f8e --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/id_transformation.go @@ -0,0 +1,137 @@ +package transformations + +import ( + "fmt" + "strings" + + "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" +) + +var _ resourceids.ResourceId = TransformationId{} + +// TransformationId is a struct representing the Resource ID for a Transformation +type TransformationId struct { + SubscriptionId string + ResourceGroupName string + JobName string + TransformationName string +} + +// NewTransformationID returns a new TransformationId struct +func NewTransformationID(subscriptionId string, resourceGroupName string, jobName string, transformationName string) TransformationId { + return TransformationId{ + SubscriptionId: subscriptionId, + ResourceGroupName: resourceGroupName, + JobName: jobName, + TransformationName: transformationName, + } +} + +// ParseTransformationID parses 'input' into a TransformationId +func ParseTransformationID(input string) (*TransformationId, error) { + parser := resourceids.NewParserFromResourceIdType(TransformationId{}) + parsed, err := parser.Parse(input, false) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := TransformationId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + if id.TransformationName, ok = parsed.Parsed["transformationName"]; !ok { + return nil, fmt.Errorf("the segment 'transformationName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ParseTransformationIDInsensitively parses 'input' case-insensitively into a TransformationId +// note: this method should only be used for API response data and not user input +func ParseTransformationIDInsensitively(input string) (*TransformationId, error) { + parser := resourceids.NewParserFromResourceIdType(TransformationId{}) + parsed, err := parser.Parse(input, true) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := TransformationId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.JobName, ok = parsed.Parsed["jobName"]; !ok { + return nil, 
fmt.Errorf("the segment 'jobName' was not found in the resource id %q", input) + } + + if id.TransformationName, ok = parsed.Parsed["transformationName"]; !ok { + return nil, fmt.Errorf("the segment 'transformationName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ValidateTransformationID checks that 'input' can be parsed as a Transformation ID +func ValidateTransformationID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := ParseTransformationID(v); err != nil { + errors = append(errors, err) + } + + return +} + +// ID returns the formatted Transformation ID +func (id TransformationId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.StreamAnalytics/streamingJobs/%s/transformations/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.JobName, id.TransformationName) +} + +// Segments returns a slice of Resource ID Segments which comprise this Transformation ID +func (id TransformationId) Segments() []resourceids.Segment { + return []resourceids.Segment{ + resourceids.StaticSegment("staticSubscriptions", "subscriptions", "subscriptions"), + resourceids.SubscriptionIdSegment("subscriptionId", "12345678-1234-9876-4563-123456789012"), + resourceids.StaticSegment("staticResourceGroups", "resourceGroups", "resourceGroups"), + resourceids.ResourceGroupSegment("resourceGroupName", "example-resource-group"), + resourceids.StaticSegment("staticProviders", "providers", "providers"), + resourceids.ResourceProviderSegment("staticMicrosoftStreamAnalytics", "Microsoft.StreamAnalytics", "Microsoft.StreamAnalytics"), + resourceids.StaticSegment("staticStreamingJobs", "streamingJobs", "streamingJobs"), + resourceids.UserSpecifiedSegment("jobName", "jobValue"), + resourceids.StaticSegment("staticTransformations", "transformations", "transformations"), + resourceids.UserSpecifiedSegment("transformationName", "transformationValue"), + } +} + +// String returns a human-readable description of this Transformation ID +func (id TransformationId) String() string { + components := []string{ + fmt.Sprintf("Subscription: %q", id.SubscriptionId), + fmt.Sprintf("Resource Group Name: %q", id.ResourceGroupName), + fmt.Sprintf("Job Name: %q", id.JobName), + fmt.Sprintf("Transformation Name: %q", id.TransformationName), + } + return fmt.Sprintf("Transformation (%s)", strings.Join(components, "\n")) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/method_createorreplace_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/method_createorreplace_autorest.go new file mode 100644 index 000000000000..aec7a5468157 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/method_createorreplace_autorest.go @@ -0,0 +1,103 @@ +package transformations + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type CreateOrReplaceOperationResponse struct { + HttpResponse *http.Response + Model *Transformation +} + +type CreateOrReplaceOperationOptions struct { + IfMatch *string + IfNoneMatch *string +} + +func DefaultCreateOrReplaceOperationOptions() CreateOrReplaceOperationOptions { + return CreateOrReplaceOperationOptions{} +} + +func (o CreateOrReplaceOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + if o.IfMatch != nil { + out["If-Match"] = *o.IfMatch + } + + if o.IfNoneMatch != nil { + out["If-None-Match"] = *o.IfNoneMatch + } + + return out +} + +func (o CreateOrReplaceOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +// CreateOrReplace ... +func (c TransformationsClient) CreateOrReplace(ctx context.Context, id TransformationId, input Transformation, options CreateOrReplaceOperationOptions) (result CreateOrReplaceOperationResponse, err error) { + req, err := c.preparerForCreateOrReplace(ctx, id, input, options) + if err != nil { + err = autorest.NewErrorWithError(err, "transformations.TransformationsClient", "CreateOrReplace", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "transformations.TransformationsClient", "CreateOrReplace", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForCreateOrReplace(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "transformations.TransformationsClient", "CreateOrReplace", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForCreateOrReplace prepares the CreateOrReplace request. +func (c TransformationsClient) preparerForCreateOrReplace(ctx context.Context, id TransformationId, input Transformation, options CreateOrReplaceOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(id.ID()), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForCreateOrReplace handles the response to the CreateOrReplace request. The method always +// closes the http.Response Body. 
+func (c TransformationsClient) responderForCreateOrReplace(resp *http.Response) (result CreateOrReplaceOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusCreated, http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/method_get_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/method_get_autorest.go new file mode 100644 index 000000000000..d8ebe853c6ad --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/method_get_autorest.go @@ -0,0 +1,68 @@ +package transformations + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type GetOperationResponse struct { + HttpResponse *http.Response + Model *Transformation +} + +// Get ... +func (c TransformationsClient) Get(ctx context.Context, id TransformationId) (result GetOperationResponse, err error) { + req, err := c.preparerForGet(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "transformations.TransformationsClient", "Get", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "transformations.TransformationsClient", "Get", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForGet(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "transformations.TransformationsClient", "Get", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForGet prepares the Get request. +func (c TransformationsClient) preparerForGet(ctx context.Context, id TransformationId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(id.ID()), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForGet handles the response to the Get request. The method always +// closes the http.Response Body. 
+func (c TransformationsClient) responderForGet(resp *http.Response) (result GetOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/method_update_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/method_update_autorest.go new file mode 100644 index 000000000000..a5d5ea207987 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/method_update_autorest.go @@ -0,0 +1,98 @@ +package transformations + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type UpdateOperationResponse struct { + HttpResponse *http.Response + Model *Transformation +} + +type UpdateOperationOptions struct { + IfMatch *string +} + +func DefaultUpdateOperationOptions() UpdateOperationOptions { + return UpdateOperationOptions{} +} + +func (o UpdateOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + if o.IfMatch != nil { + out["If-Match"] = *o.IfMatch + } + + return out +} + +func (o UpdateOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +// Update ... +func (c TransformationsClient) Update(ctx context.Context, id TransformationId, input Transformation, options UpdateOperationOptions) (result UpdateOperationResponse, err error) { + req, err := c.preparerForUpdate(ctx, id, input, options) + if err != nil { + err = autorest.NewErrorWithError(err, "transformations.TransformationsClient", "Update", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "transformations.TransformationsClient", "Update", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForUpdate(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "transformations.TransformationsClient", "Update", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForUpdate prepares the Update request. +func (c TransformationsClient) preparerForUpdate(ctx context.Context, id TransformationId, input Transformation, options UpdateOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(id.ID()), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForUpdate handles the response to the Update request. The method always +// closes the http.Response Body. 
+func (c TransformationsClient) responderForUpdate(resp *http.Response) (result UpdateOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/model_transformation.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/model_transformation.go new file mode 100644 index 000000000000..e98b3b23385d --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/model_transformation.go @@ -0,0 +1,11 @@ +package transformations + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type Transformation struct { + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Properties *TransformationProperties `json:"properties,omitempty"` + Type *string `json:"type,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/model_transformationproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/model_transformationproperties.go new file mode 100644 index 000000000000..fb06ee8ac0f0 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/model_transformationproperties.go @@ -0,0 +1,11 @@ +package transformations + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type TransformationProperties struct { + Etag *string `json:"etag,omitempty"` + Query *string `json:"query,omitempty"` + StreamingUnits *int64 `json:"streamingUnits,omitempty"` + ValidStreamingUnits *[]int64 `json:"validStreamingUnits,omitempty"` +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/version.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/version.go new file mode 100644 index 000000000000..fc341f6862f1 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations/version.go @@ -0,0 +1,12 @@ +package transformations + +import "fmt" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
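The If-Match plumbing above enables optimistic concurrency on updates. A sketch under stated assumptions: client, id and payload are constructed as in the README examples, and the ETag is a hypothetical value read from a prior Get:

```go
package example

import (
	"context"
	"fmt"

	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/transformations"
)

// updateIfUnchanged only patches the transformation when the server-side ETag
// still matches the one read earlier; toHeaders() sends it as an If-Match header.
func updateIfUnchanged(ctx context.Context, client transformations.TransformationsClient, id transformations.TransformationId, payload transformations.Transformation, etag string) error {
	opts := transformations.DefaultUpdateOperationOptions()
	opts.IfMatch = &etag // hypothetical ETag from a prior Get

	if _, err := client.Update(ctx, id, payload, opts); err != nil {
		return fmt.Errorf("updating %s: %+v", id, err)
	}
	return nil
}
```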
+ +const defaultApiVersion = "2020-03-01" + +func userAgent() string { + return fmt.Sprintf("hashicorp/go-azure-sdk/transformations/%s", defaultApiVersion) +} From 50f773491d3d83980cc09e81248e5e8fb036b710 Mon Sep 17 00:00:00 2001 From: Steph Date: Tue, 22 Nov 2022 18:09:35 +0100 Subject: [PATCH 05/14] revert changes to app service and firewall test --- .../services/appservice/source_control_token_resource_test.go | 2 +- .../firewall/firewall_network_rule_collection_resource_test.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/services/appservice/source_control_token_resource_test.go b/internal/services/appservice/source_control_token_resource_test.go index f7133fafc33d..19b6643101bd 100644 --- a/internal/services/appservice/source_control_token_resource_test.go +++ b/internal/services/appservice/source_control_token_resource_test.go @@ -65,7 +65,7 @@ func (r AppServiceGitHubTokenResource) Exists(ctx context.Context, client *clien resp, err := client.AppService.BaseClient.GetSourceControl(ctx, "GitHub") if err != nil { if utils.ResponseWasNotFound(resp.Response) { - return nil, err + return utils.Bool(false), err } return nil, fmt.Errorf("retrieving Source Control GitHub Token") } diff --git a/internal/services/firewall/firewall_network_rule_collection_resource_test.go b/internal/services/firewall/firewall_network_rule_collection_resource_test.go index 456b5517833f..1a32bfc91d69 100644 --- a/internal/services/firewall/firewall_network_rule_collection_resource_test.go +++ b/internal/services/firewall/firewall_network_rule_collection_resource_test.go @@ -395,7 +395,7 @@ func (FirewallNetworkRuleCollectionResource) Destroy(ctx context.Context, client read, err := clients.Firewall.AzureFirewallsClient.Get(ctx, resourceGroup, firewallName) if err != nil { - return nil, err + return utils.Bool(false), err } rules := make([]network.AzureFirewallNetworkRuleCollection, 0) From 462132dacd1d9519a5df3a2ee57c2ba5975d297f Mon Sep 17 00:00:00 2001 From: Steph Date: Tue, 22 Nov 2022 18:10:09 +0100 Subject: [PATCH 06/14] go mod vendor --- .../2020-03-01/streamanalytics/CHANGELOG.md | 48 - .../2020-03-01/streamanalytics/_meta.json | 11 - .../mgmt/2020-03-01/streamanalytics/client.go | 43 - .../2020-03-01/streamanalytics/clusters.go | 793 -- .../mgmt/2020-03-01/streamanalytics/enums.go | 450 - .../2020-03-01/streamanalytics/functions.go | 743 -- .../mgmt/2020-03-01/streamanalytics/inputs.go | 646 -- .../mgmt/2020-03-01/streamanalytics/models.go | 7670 ----------------- .../2020-03-01/streamanalytics/operations.go | 140 - .../2020-03-01/streamanalytics/outputs.go | 646 -- .../streamanalytics/privateendpoints.go | 444 - .../streamanalytics/streamingjobs.go | 954 -- .../streamanalytics/subscriptions.go | 113 - .../streamanalytics/transformations.go | 326 - .../2020-03-01/streamanalytics/version.go | 19 - vendor/modules.txt | 1 - 16 files changed, 13047 deletions(-) delete mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/CHANGELOG.md delete mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/_meta.json delete mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/client.go delete mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/clusters.go delete mode 100644 
vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/enums.go delete mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/functions.go delete mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/inputs.go delete mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/models.go delete mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/operations.go delete mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/outputs.go delete mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/privateendpoints.go delete mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/streamingjobs.go delete mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/subscriptions.go delete mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/transformations.go delete mode 100644 vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/version.go diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/CHANGELOG.md b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/CHANGELOG.md deleted file mode 100644 index 1f2276184be6..000000000000 --- a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/CHANGELOG.md +++ /dev/null @@ -1,48 +0,0 @@ -# Change History - -## Breaking Changes - -### Removed Funcs - -1. *ScalarFunctionConfiguration.UnmarshalJSON([]byte) error - -### Struct Changes - -#### Removed Structs - -1. ScalarFunctionConfiguration - -#### Removed Struct Fields - -1. ScalarFunctionProperties.*ScalarFunctionConfiguration - -## Additive Changes - -### New Constants - -1. TypeBasicFunctionProperties.TypeBasicFunctionPropertiesTypeAggregate - -### New Funcs - -1. *AggregateFunctionProperties.UnmarshalJSON([]byte) error -1. *FunctionConfiguration.UnmarshalJSON([]byte) error -1. *FunctionProperties.UnmarshalJSON([]byte) error -1. AggregateFunctionProperties.AsAggregateFunctionProperties() (*AggregateFunctionProperties, bool) -1. AggregateFunctionProperties.AsBasicFunctionProperties() (BasicFunctionProperties, bool) -1. AggregateFunctionProperties.AsFunctionProperties() (*FunctionProperties, bool) -1. AggregateFunctionProperties.AsScalarFunctionProperties() (*ScalarFunctionProperties, bool) -1. AggregateFunctionProperties.MarshalJSON() ([]byte, error) -1. FunctionProperties.AsAggregateFunctionProperties() (*AggregateFunctionProperties, bool) -1. ScalarFunctionProperties.AsAggregateFunctionProperties() (*AggregateFunctionProperties, bool) - -### Struct Changes - -#### New Structs - -1. AggregateFunctionProperties -1. FunctionConfiguration - -#### New Struct Fields - -1. FunctionProperties.*FunctionConfiguration -1. 
ScalarFunctionProperties.*FunctionConfiguration diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/_meta.json b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/_meta.json deleted file mode 100644 index 8438509bd773..000000000000 --- a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/_meta.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "commit": "61218420e95ad3435a029144877b110b56418958", - "readme": "/_/azure-rest-api-specs/specification/streamanalytics/resource-manager/readme.md", - "tag": "package-pure-2020-03", - "use": "@microsoft.azure/autorest.go@2.1.187", - "repository_url": "https://github.com/Azure/azure-rest-api-specs.git", - "autorest_command": "autorest --use=@microsoft.azure/autorest.go@2.1.187 --tag=package-pure-2020-03 --go-sdk-folder=/_/azure-sdk-for-go --go --verbose --use-onever --version=2.0.4421 --go.license-header=MICROSOFT_MIT_NO_VERSION --enum-prefix /_/azure-rest-api-specs/specification/streamanalytics/resource-manager/readme.md", - "additional_properties": { - "additional_options": "--go --verbose --use-onever --version=2.0.4421 --go.license-header=MICROSOFT_MIT_NO_VERSION --enum-prefix" - } -} \ No newline at end of file diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/client.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/client.go deleted file mode 100644 index 64ef541c065a..000000000000 --- a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/client.go +++ /dev/null @@ -1,43 +0,0 @@ -// Deprecated: Please note, this package has been deprecated. A replacement package is available [github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/streamanalytics/armstreamanalytics](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/streamanalytics/armstreamanalytics). We strongly encourage you to upgrade to continue receiving updates. See [Migration Guide](https://aka.ms/azsdk/golang/t2/migration) for guidance on upgrading. Refer to our [deprecation policy](https://azure.github.io/azure-sdk/policies_support.html) for more details. -// -// Package streamanalytics implements the Azure ARM Streamanalytics service API version 2020-03-01. -// -// Stream Analytics Client -package streamanalytics - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. - -import ( - "github.com/Azure/go-autorest/autorest" -) - -const ( - // DefaultBaseURI is the default URI used for the service Streamanalytics - DefaultBaseURI = "https://management.azure.com" -) - -// BaseClient is the base client for Streamanalytics. -type BaseClient struct { - autorest.Client - BaseURI string - SubscriptionID string -} - -// New creates an instance of the BaseClient client. -func New(subscriptionID string) BaseClient { - return NewWithBaseURI(DefaultBaseURI, subscriptionID) -} - -// NewWithBaseURI creates an instance of the BaseClient client using a custom endpoint. Use this when interacting with -// an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). 
-func NewWithBaseURI(baseURI string, subscriptionID string) BaseClient { - return BaseClient{ - Client: autorest.NewClientWithUserAgent(UserAgent()), - BaseURI: baseURI, - SubscriptionID: subscriptionID, - } -} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/clusters.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/clusters.go deleted file mode 100644 index 65f6b03ff92a..000000000000 --- a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/clusters.go +++ /dev/null @@ -1,793 +0,0 @@ -package streamanalytics - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. - -import ( - "context" - "github.com/Azure/go-autorest/autorest" - "github.com/Azure/go-autorest/autorest/azure" - "github.com/Azure/go-autorest/autorest/validation" - "github.com/Azure/go-autorest/tracing" - "net/http" -) - -// ClustersClient is the stream Analytics Client -type ClustersClient struct { - BaseClient -} - -// NewClustersClient creates an instance of the ClustersClient client. -func NewClustersClient(subscriptionID string) ClustersClient { - return NewClustersClientWithBaseURI(DefaultBaseURI, subscriptionID) -} - -// NewClustersClientWithBaseURI creates an instance of the ClustersClient client using a custom endpoint. Use this -// when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). -func NewClustersClientWithBaseURI(baseURI string, subscriptionID string) ClustersClient { - return ClustersClient{NewWithBaseURI(baseURI, subscriptionID)} -} - -// CreateOrUpdate creates a Stream Analytics Cluster or replaces an already existing cluster. -// Parameters: -// cluster - the definition of the cluster that will be used to create a new cluster or replace the existing -// one. -// resourceGroupName - the name of the resource group. The name is case insensitive. -// clusterName - the name of the cluster. -// ifMatch - the ETag of the resource. Omit this value to always overwrite the current record set. Specify the -// last-seen ETag value to prevent accidentally overwriting concurrent changes. -// ifNoneMatch - set to '*' to allow a new resource to be created, but to prevent updating an existing record -// set. Other values will result in a 412 Pre-condition Failed response. 
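A note for readers skimming this deletion: the ifMatch/ifNoneMatch parameters documented above are the track-1 client's optimistic-concurrency hooks. A minimal consumption sketch, assuming only what this file shows (Get returning a Cluster whose Etag is a *string, and the generated future's WaitForCompletionRef helper); the resource-group and cluster names are placeholders:

    // Read-modify-write guarded by the last-seen ETag: if another writer
    // changed the cluster in between, the service answers 412 instead of
    // silently overwriting their change.
    func updateClusterWithETag(ctx context.Context, client streamanalytics.ClustersClient) error {
        existing, err := client.Get(ctx, "example-rg", "example-cluster")
        if err != nil {
            return err
        }
        etag := ""
        if existing.Etag != nil {
            etag = *existing.Etag
        }
        // Empty If-None-Match: no create-only guard is requested here.
        future, err := client.CreateOrUpdate(ctx, existing, "example-rg", "example-cluster", etag, "")
        if err != nil {
            return err
        }
        return future.WaitForCompletionRef(ctx, client.Client)
    }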
-func (client ClustersClient) CreateOrUpdate(ctx context.Context, cluster Cluster, resourceGroupName string, clusterName string, ifMatch string, ifNoneMatch string) (result ClustersCreateOrUpdateFuture, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.CreateOrUpdate") - defer func() { - sc := -1 - if result.FutureAPI != nil && result.FutureAPI.Response() != nil { - sc = result.FutureAPI.Response().StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: cluster, - Constraints: []validation.Constraint{{Target: "cluster.Sku", Name: validation.Null, Rule: false, - Chain: []validation.Constraint{{Target: "cluster.Sku.Capacity", Name: validation.Null, Rule: false, - Chain: []validation.Constraint{{Target: "cluster.Sku.Capacity", Name: validation.InclusiveMaximum, Rule: int64(216), Chain: nil}, - {Target: "cluster.Sku.Capacity", Name: validation.InclusiveMinimum, Rule: int64(36), Chain: nil}, - }}, - }}}}, - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.ClustersClient", "CreateOrUpdate", err.Error()) - } - - req, err := client.CreateOrUpdatePreparer(ctx, cluster, resourceGroupName, clusterName, ifMatch, ifNoneMatch) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "CreateOrUpdate", nil, "Failure preparing request") - return - } - - result, err = client.CreateOrUpdateSender(req) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "CreateOrUpdate", result.Response(), "Failure sending request") - return - } - - return -} - -// CreateOrUpdatePreparer prepares the CreateOrUpdate request. 
-func (client ClustersClient) CreateOrUpdatePreparer(ctx context.Context, cluster Cluster, resourceGroupName string, clusterName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "clusterName": autorest.Encode("path", clusterName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - cluster.Etag = nil - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPut(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}", pathParameters), - autorest.WithJSON(cluster), - autorest.WithQueryParameters(queryParameters)) - if len(ifMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-Match", autorest.String(ifMatch))) - } - if len(ifNoneMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the -// http.Response Body if it receives an error. -func (client ClustersClient) CreateOrUpdateSender(req *http.Request) (future ClustersCreateOrUpdateFuture, err error) { - var resp *http.Response - future.FutureAPI = &azure.Future{} - resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) - if err != nil { - return - } - var azf azure.Future - azf, err = azure.NewFutureFromResponse(resp) - future.FutureAPI = &azf - future.Result = future.result - return -} - -// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always -// closes the http.Response Body. -func (client ClustersClient) CreateOrUpdateResponder(resp *http.Response) (result Cluster, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// Delete deletes the specified cluster. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// clusterName - the name of the cluster. 
-func (client ClustersClient) Delete(ctx context.Context, resourceGroupName string, clusterName string) (result ClustersDeleteFuture, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.Delete") - defer func() { - sc := -1 - if result.FutureAPI != nil && result.FutureAPI.Response() != nil { - sc = result.FutureAPI.Response().StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.ClustersClient", "Delete", err.Error()) - } - - req, err := client.DeletePreparer(ctx, resourceGroupName, clusterName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "Delete", nil, "Failure preparing request") - return - } - - result, err = client.DeleteSender(req) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "Delete", result.Response(), "Failure sending request") - return - } - - return -} - -// DeletePreparer prepares the Delete request. -func (client ClustersClient) DeletePreparer(ctx context.Context, resourceGroupName string, clusterName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "clusterName": autorest.Encode("path", clusterName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsDelete(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// DeleteSender sends the Delete request. The method will close the -// http.Response Body if it receives an error. -func (client ClustersClient) DeleteSender(req *http.Request) (future ClustersDeleteFuture, err error) { - var resp *http.Response - future.FutureAPI = &azure.Future{} - resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) - if err != nil { - return - } - var azf azure.Future - azf, err = azure.NewFutureFromResponse(resp) - future.FutureAPI = &azf - future.Result = future.result - return -} - -// DeleteResponder handles the response to the Delete request. The method always -// closes the http.Response Body. -func (client ClustersClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent), - autorest.ByClosing()) - result.Response = resp - return -} - -// Get gets information about the specified cluster. -// Parameters: -// resourceGroupName - the name of the resource group. 
The name is case insensitive. -// clusterName - the name of the cluster. -func (client ClustersClient) Get(ctx context.Context, resourceGroupName string, clusterName string) (result Cluster, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.Get") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.ClustersClient", "Get", err.Error()) - } - - req, err := client.GetPreparer(ctx, resourceGroupName, clusterName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "Get", nil, "Failure preparing request") - return - } - - resp, err := client.GetSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "Get", resp, "Failure sending request") - return - } - - result, err = client.GetResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "Get", resp, "Failure responding to request") - return - } - - return -} - -// GetPreparer prepares the Get request. -func (client ClustersClient) GetPreparer(ctx context.Context, resourceGroupName string, clusterName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "clusterName": autorest.Encode("path", clusterName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// GetSender sends the Get request. The method will close the -// http.Response Body if it receives an error. -func (client ClustersClient) GetSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// GetResponder handles the response to the Get request. The method always -// closes the http.Response Body. -func (client ClustersClient) GetResponder(resp *http.Response) (result Cluster, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// ListByResourceGroup lists all of the clusters in the given resource group. -// Parameters: -// resourceGroupName - the name of the resource group. 
The name is case insensitive. -func (client ClustersClient) ListByResourceGroup(ctx context.Context, resourceGroupName string) (result ClusterListResultPage, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.ListByResourceGroup") - defer func() { - sc := -1 - if result.clr.Response.Response != nil { - sc = result.clr.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.ClustersClient", "ListByResourceGroup", err.Error()) - } - - result.fn = client.listByResourceGroupNextResults - req, err := client.ListByResourceGroupPreparer(ctx, resourceGroupName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListByResourceGroup", nil, "Failure preparing request") - return - } - - resp, err := client.ListByResourceGroupSender(req) - if err != nil { - result.clr.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListByResourceGroup", resp, "Failure sending request") - return - } - - result.clr, err = client.ListByResourceGroupResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListByResourceGroup", resp, "Failure responding to request") - return - } - if result.clr.hasNextLink() && result.clr.IsEmpty() { - err = result.NextWithContext(ctx) - return - } - - return -} - -// ListByResourceGroupPreparer prepares the ListByResourceGroup request. -func (client ClustersClient) ListByResourceGroupPreparer(ctx context.Context, resourceGroupName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// ListByResourceGroupSender sends the ListByResourceGroup request. The method will close the -// http.Response Body if it receives an error. -func (client ClustersClient) ListByResourceGroupSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// ListByResourceGroupResponder handles the response to the ListByResourceGroup request. The method always -// closes the http.Response Body. 
-func (client ClustersClient) ListByResourceGroupResponder(resp *http.Response) (result ClusterListResult, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// listByResourceGroupNextResults retrieves the next set of results, if any. -func (client ClustersClient) listByResourceGroupNextResults(ctx context.Context, lastResults ClusterListResult) (result ClusterListResult, err error) { - req, err := lastResults.clusterListResultPreparer(ctx) - if err != nil { - return result, autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listByResourceGroupNextResults", nil, "Failure preparing next results request") - } - if req == nil { - return - } - resp, err := client.ListByResourceGroupSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - return result, autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listByResourceGroupNextResults", resp, "Failure sending next results request") - } - result, err = client.ListByResourceGroupResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listByResourceGroupNextResults", resp, "Failure responding to next results request") - } - return -} - -// ListByResourceGroupComplete enumerates all values, automatically crossing page boundaries as required. -func (client ClustersClient) ListByResourceGroupComplete(ctx context.Context, resourceGroupName string) (result ClusterListResultIterator, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.ListByResourceGroup") - defer func() { - sc := -1 - if result.Response().Response.Response != nil { - sc = result.page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - result.page, err = client.ListByResourceGroup(ctx, resourceGroupName) - return -} - -// ListBySubscription lists all of the clusters in the given subscription. 
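As with all autorest track-1 clients, each List* method here comes as a Page/Iterator pair, and the *Complete variants (such as ListByResourceGroupComplete just above) return an iterator that crosses page boundaries on demand. A short draining sketch, assuming the NotDone/Value/NextWithContext methods the generator emits alongside ClusterListResult in models.go; the resource-group name is a placeholder:

    // Walk every page of ListByResourceGroup and collect the clusters.
    func listClustersInGroup(ctx context.Context, client streamanalytics.ClustersClient) ([]streamanalytics.Cluster, error) {
        it, err := client.ListByResourceGroupComplete(ctx, "example-rg")
        if err != nil {
            return nil, err
        }
        var all []streamanalytics.Cluster
        for it.NotDone() {
            all = append(all, it.Value())
            if err := it.NextWithContext(ctx); err != nil {
                return nil, err
            }
        }
        return all, nil
    }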
-func (client ClustersClient) ListBySubscription(ctx context.Context) (result ClusterListResultPage, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.ListBySubscription") - defer func() { - sc := -1 - if result.clr.Response.Response != nil { - sc = result.clr.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.ClustersClient", "ListBySubscription", err.Error()) - } - - result.fn = client.listBySubscriptionNextResults - req, err := client.ListBySubscriptionPreparer(ctx) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListBySubscription", nil, "Failure preparing request") - return - } - - resp, err := client.ListBySubscriptionSender(req) - if err != nil { - result.clr.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListBySubscription", resp, "Failure sending request") - return - } - - result.clr, err = client.ListBySubscriptionResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListBySubscription", resp, "Failure responding to request") - return - } - if result.clr.hasNextLink() && result.clr.IsEmpty() { - err = result.NextWithContext(ctx) - return - } - - return -} - -// ListBySubscriptionPreparer prepares the ListBySubscription request. -func (client ClustersClient) ListBySubscriptionPreparer(ctx context.Context) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/clusters", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// ListBySubscriptionSender sends the ListBySubscription request. The method will close the -// http.Response Body if it receives an error. -func (client ClustersClient) ListBySubscriptionSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// ListBySubscriptionResponder handles the response to the ListBySubscription request. The method always -// closes the http.Response Body. -func (client ClustersClient) ListBySubscriptionResponder(resp *http.Response) (result ClusterListResult, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// listBySubscriptionNextResults retrieves the next set of results, if any. 
-func (client ClustersClient) listBySubscriptionNextResults(ctx context.Context, lastResults ClusterListResult) (result ClusterListResult, err error) { - req, err := lastResults.clusterListResultPreparer(ctx) - if err != nil { - return result, autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listBySubscriptionNextResults", nil, "Failure preparing next results request") - } - if req == nil { - return - } - resp, err := client.ListBySubscriptionSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - return result, autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listBySubscriptionNextResults", resp, "Failure sending next results request") - } - result, err = client.ListBySubscriptionResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listBySubscriptionNextResults", resp, "Failure responding to next results request") - } - return -} - -// ListBySubscriptionComplete enumerates all values, automatically crossing page boundaries as required. -func (client ClustersClient) ListBySubscriptionComplete(ctx context.Context) (result ClusterListResultIterator, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.ListBySubscription") - defer func() { - sc := -1 - if result.Response().Response.Response != nil { - sc = result.page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - result.page, err = client.ListBySubscription(ctx) - return -} - -// ListStreamingJobs lists all of the streaming jobs in the given cluster. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// clusterName - the name of the cluster. -func (client ClustersClient) ListStreamingJobs(ctx context.Context, resourceGroupName string, clusterName string) (result ClusterJobListResultPage, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.ListStreamingJobs") - defer func() { - sc := -1 - if result.cjlr.Response.Response != nil { - sc = result.cjlr.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.ClustersClient", "ListStreamingJobs", err.Error()) - } - - result.fn = client.listStreamingJobsNextResults - req, err := client.ListStreamingJobsPreparer(ctx, resourceGroupName, clusterName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListStreamingJobs", nil, "Failure preparing request") - return - } - - resp, err := client.ListStreamingJobsSender(req) - if err != nil { - result.cjlr.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListStreamingJobs", resp, "Failure sending request") - return - } - - result.cjlr, err = client.ListStreamingJobsResponder(resp) - if err != nil { - err = 
autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "ListStreamingJobs", resp, "Failure responding to request") - return - } - if result.cjlr.hasNextLink() && result.cjlr.IsEmpty() { - err = result.NextWithContext(ctx) - return - } - - return -} - -// ListStreamingJobsPreparer prepares the ListStreamingJobs request. -func (client ClustersClient) ListStreamingJobsPreparer(ctx context.Context, resourceGroupName string, clusterName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "clusterName": autorest.Encode("path", clusterName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsPost(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/listStreamingJobs", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// ListStreamingJobsSender sends the ListStreamingJobs request. The method will close the -// http.Response Body if it receives an error. -func (client ClustersClient) ListStreamingJobsSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// ListStreamingJobsResponder handles the response to the ListStreamingJobs request. The method always -// closes the http.Response Body. -func (client ClustersClient) ListStreamingJobsResponder(resp *http.Response) (result ClusterJobListResult, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// listStreamingJobsNextResults retrieves the next set of results, if any. -func (client ClustersClient) listStreamingJobsNextResults(ctx context.Context, lastResults ClusterJobListResult) (result ClusterJobListResult, err error) { - req, err := lastResults.clusterJobListResultPreparer(ctx) - if err != nil { - return result, autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listStreamingJobsNextResults", nil, "Failure preparing next results request") - } - if req == nil { - return - } - resp, err := client.ListStreamingJobsSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - return result, autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listStreamingJobsNextResults", resp, "Failure sending next results request") - } - result, err = client.ListStreamingJobsResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "listStreamingJobsNextResults", resp, "Failure responding to next results request") - } - return -} - -// ListStreamingJobsComplete enumerates all values, automatically crossing page boundaries as required. 
-func (client ClustersClient) ListStreamingJobsComplete(ctx context.Context, resourceGroupName string, clusterName string) (result ClusterJobListResultIterator, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.ListStreamingJobs") - defer func() { - sc := -1 - if result.Response().Response.Response != nil { - sc = result.page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - result.page, err = client.ListStreamingJobs(ctx, resourceGroupName, clusterName) - return -} - -// Update updates an existing cluster. This can be used to partially update (ie. update one or two properties) a -// cluster without affecting the rest of the cluster definition. -// Parameters: -// cluster - the properties specified here will overwrite the corresponding properties in the existing cluster -// (ie. Those properties will be updated). -// resourceGroupName - the name of the resource group. The name is case insensitive. -// clusterName - the name of the cluster. -// ifMatch - the ETag of the resource. Omit this value to always overwrite the current record set. Specify the -// last-seen ETag value to prevent accidentally overwriting concurrent changes. -func (client ClustersClient) Update(ctx context.Context, cluster Cluster, resourceGroupName string, clusterName string, ifMatch string) (result ClustersUpdateFuture, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/ClustersClient.Update") - defer func() { - sc := -1 - if result.FutureAPI != nil && result.FutureAPI.Response() != nil { - sc = result.FutureAPI.Response().StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.ClustersClient", "Update", err.Error()) - } - - req, err := client.UpdatePreparer(ctx, cluster, resourceGroupName, clusterName, ifMatch) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "Update", nil, "Failure preparing request") - return - } - - result, err = client.UpdateSender(req) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.ClustersClient", "Update", result.Response(), "Failure sending request") - return - } - - return -} - -// UpdatePreparer prepares the Update request. 
-func (client ClustersClient) UpdatePreparer(ctx context.Context, cluster Cluster, resourceGroupName string, clusterName string, ifMatch string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "clusterName": autorest.Encode("path", clusterName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - cluster.Etag = nil - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPatch(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}", pathParameters), - autorest.WithJSON(cluster), - autorest.WithQueryParameters(queryParameters)) - if len(ifMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-Match", autorest.String(ifMatch))) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// UpdateSender sends the Update request. The method will close the -// http.Response Body if it receives an error. -func (client ClustersClient) UpdateSender(req *http.Request) (future ClustersUpdateFuture, err error) { - var resp *http.Response - future.FutureAPI = &azure.Future{} - resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) - if err != nil { - return - } - var azf azure.Future - azf, err = azure.NewFutureFromResponse(resp) - future.FutureAPI = &azf - future.Result = future.result - return -} - -// UpdateResponder handles the response to the Update request. The method always -// closes the http.Response Body. -func (client ClustersClient) UpdateResponder(resp *http.Response) (result Cluster, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/enums.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/enums.go deleted file mode 100644 index c160d225282a..000000000000 --- a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/enums.go +++ /dev/null @@ -1,450 +0,0 @@ -package streamanalytics - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. - -// AuthenticationMode enumerates the values for authentication mode. -type AuthenticationMode string - -const ( - // AuthenticationModeConnectionString ... - AuthenticationModeConnectionString AuthenticationMode = "ConnectionString" - // AuthenticationModeMsi ... - AuthenticationModeMsi AuthenticationMode = "Msi" - // AuthenticationModeUserToken ... - AuthenticationModeUserToken AuthenticationMode = "UserToken" -) - -// PossibleAuthenticationModeValues returns an array of possible values for the AuthenticationMode const type. 
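The enums.go deletion that begins above removes the string-typed constants and their Possible*Values helpers. Inside the provider these enums usually back schema validation rather than being ranged over directly; a hypothetical schema fragment in the provider's usual style (the field name and default are invented for illustration, and validation.StringInSlice is the standard terraform-plugin-sdk validator):

    // Constrain a hypothetical field to two of the AuthenticationMode
    // values defined in the deleted enums.go.
    "authentication_mode": {
        Type:     pluginsdk.TypeString,
        Optional: true,
        Default:  string(streamanalytics.AuthenticationModeConnectionString),
        ValidateFunc: validation.StringInSlice([]string{
            string(streamanalytics.AuthenticationModeConnectionString),
            string(streamanalytics.AuthenticationModeMsi),
        }, false),
    },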
-func PossibleAuthenticationModeValues() []AuthenticationMode { - return []AuthenticationMode{AuthenticationModeConnectionString, AuthenticationModeMsi, AuthenticationModeUserToken} -} - -// BindingType enumerates the values for binding type. -type BindingType string - -const ( - // BindingTypeFunctionRetrieveDefaultDefinitionParameters ... - BindingTypeFunctionRetrieveDefaultDefinitionParameters BindingType = "FunctionRetrieveDefaultDefinitionParameters" - // BindingTypeMicrosoftMachineLearningWebService ... - BindingTypeMicrosoftMachineLearningWebService BindingType = "Microsoft.MachineLearning/WebService" - // BindingTypeMicrosoftStreamAnalyticsJavascriptUdf ... - BindingTypeMicrosoftStreamAnalyticsJavascriptUdf BindingType = "Microsoft.StreamAnalytics/JavascriptUdf" -) - -// PossibleBindingTypeValues returns an array of possible values for the BindingType const type. -func PossibleBindingTypeValues() []BindingType { - return []BindingType{BindingTypeFunctionRetrieveDefaultDefinitionParameters, BindingTypeMicrosoftMachineLearningWebService, BindingTypeMicrosoftStreamAnalyticsJavascriptUdf} -} - -// ClusterProvisioningState enumerates the values for cluster provisioning state. -type ClusterProvisioningState string - -const ( - // ClusterProvisioningStateCanceled The cluster provisioning was canceled. - ClusterProvisioningStateCanceled ClusterProvisioningState = "Canceled" - // ClusterProvisioningStateFailed The cluster provisioning failed. - ClusterProvisioningStateFailed ClusterProvisioningState = "Failed" - // ClusterProvisioningStateInProgress The cluster provisioning was inprogress. - ClusterProvisioningStateInProgress ClusterProvisioningState = "InProgress" - // ClusterProvisioningStateSucceeded The cluster provisioning succeeded. - ClusterProvisioningStateSucceeded ClusterProvisioningState = "Succeeded" -) - -// PossibleClusterProvisioningStateValues returns an array of possible values for the ClusterProvisioningState const type. -func PossibleClusterProvisioningStateValues() []ClusterProvisioningState { - return []ClusterProvisioningState{ClusterProvisioningStateCanceled, ClusterProvisioningStateFailed, ClusterProvisioningStateInProgress, ClusterProvisioningStateSucceeded} -} - -// ClusterSkuName enumerates the values for cluster sku name. -type ClusterSkuName string - -const ( - // ClusterSkuNameDefault The default SKU. - ClusterSkuNameDefault ClusterSkuName = "Default" -) - -// PossibleClusterSkuNameValues returns an array of possible values for the ClusterSkuName const type. -func PossibleClusterSkuNameValues() []ClusterSkuName { - return []ClusterSkuName{ClusterSkuNameDefault} -} - -// CompatibilityLevel enumerates the values for compatibility level. -type CompatibilityLevel string - -const ( - // CompatibilityLevelOneFullStopTwo ... - CompatibilityLevelOneFullStopTwo CompatibilityLevel = "1.2" - // CompatibilityLevelOneFullStopZero ... - CompatibilityLevelOneFullStopZero CompatibilityLevel = "1.0" -) - -// PossibleCompatibilityLevelValues returns an array of possible values for the CompatibilityLevel const type. -func PossibleCompatibilityLevelValues() []CompatibilityLevel { - return []CompatibilityLevel{CompatibilityLevelOneFullStopTwo, CompatibilityLevelOneFullStopZero} -} - -// CompressionType enumerates the values for compression type. -type CompressionType string - -const ( - // CompressionTypeDeflate ... - CompressionTypeDeflate CompressionType = "Deflate" - // CompressionTypeGZip ... - CompressionTypeGZip CompressionType = "GZip" - // CompressionTypeNone ... 
- CompressionTypeNone CompressionType = "None" -) - -// PossibleCompressionTypeValues returns an array of possible values for the CompressionType const type. -func PossibleCompressionTypeValues() []CompressionType { - return []CompressionType{CompressionTypeDeflate, CompressionTypeGZip, CompressionTypeNone} -} - -// ContentStoragePolicy enumerates the values for content storage policy. -type ContentStoragePolicy string - -const ( - // ContentStoragePolicyJobStorageAccount ... - ContentStoragePolicyJobStorageAccount ContentStoragePolicy = "JobStorageAccount" - // ContentStoragePolicySystemAccount ... - ContentStoragePolicySystemAccount ContentStoragePolicy = "SystemAccount" -) - -// PossibleContentStoragePolicyValues returns an array of possible values for the ContentStoragePolicy const type. -func PossibleContentStoragePolicyValues() []ContentStoragePolicy { - return []ContentStoragePolicy{ContentStoragePolicyJobStorageAccount, ContentStoragePolicySystemAccount} -} - -// Encoding enumerates the values for encoding. -type Encoding string - -const ( - // EncodingUTF8 ... - EncodingUTF8 Encoding = "UTF8" -) - -// PossibleEncodingValues returns an array of possible values for the Encoding const type. -func PossibleEncodingValues() []Encoding { - return []Encoding{EncodingUTF8} -} - -// EventSerializationType enumerates the values for event serialization type. -type EventSerializationType string - -const ( - // EventSerializationTypeAvro ... - EventSerializationTypeAvro EventSerializationType = "Avro" - // EventSerializationTypeCsv ... - EventSerializationTypeCsv EventSerializationType = "Csv" - // EventSerializationTypeJSON ... - EventSerializationTypeJSON EventSerializationType = "Json" - // EventSerializationTypeParquet ... - EventSerializationTypeParquet EventSerializationType = "Parquet" -) - -// PossibleEventSerializationTypeValues returns an array of possible values for the EventSerializationType const type. -func PossibleEventSerializationTypeValues() []EventSerializationType { - return []EventSerializationType{EventSerializationTypeAvro, EventSerializationTypeCsv, EventSerializationTypeJSON, EventSerializationTypeParquet} -} - -// EventsOutOfOrderPolicy enumerates the values for events out of order policy. -type EventsOutOfOrderPolicy string - -const ( - // EventsOutOfOrderPolicyAdjust ... - EventsOutOfOrderPolicyAdjust EventsOutOfOrderPolicy = "Adjust" - // EventsOutOfOrderPolicyDrop ... - EventsOutOfOrderPolicyDrop EventsOutOfOrderPolicy = "Drop" -) - -// PossibleEventsOutOfOrderPolicyValues returns an array of possible values for the EventsOutOfOrderPolicy const type. -func PossibleEventsOutOfOrderPolicyValues() []EventsOutOfOrderPolicy { - return []EventsOutOfOrderPolicy{EventsOutOfOrderPolicyAdjust, EventsOutOfOrderPolicyDrop} -} - -// JobState enumerates the values for job state. -type JobState string - -const ( - // JobStateCreated The job is currently in the Created state. - JobStateCreated JobState = "Created" - // JobStateDegraded The job is currently in the Degraded state. - JobStateDegraded JobState = "Degraded" - // JobStateDeleting The job is currently in the Deleting state. - JobStateDeleting JobState = "Deleting" - // JobStateFailed The job is currently in the Failed state. - JobStateFailed JobState = "Failed" - // JobStateRestarting The job is currently in the Restarting state. - JobStateRestarting JobState = "Restarting" - // JobStateRunning The job is currently in the Running state. 
- JobStateRunning JobState = "Running" - // JobStateScaling The job is currently in the Scaling state. - JobStateScaling JobState = "Scaling" - // JobStateStarting The job is currently in the Starting state. - JobStateStarting JobState = "Starting" - // JobStateStopped The job is currently in the Stopped state. - JobStateStopped JobState = "Stopped" - // JobStateStopping The job is currently in the Stopping state. - JobStateStopping JobState = "Stopping" -) - -// PossibleJobStateValues returns an array of possible values for the JobState const type. -func PossibleJobStateValues() []JobState { - return []JobState{JobStateCreated, JobStateDegraded, JobStateDeleting, JobStateFailed, JobStateRestarting, JobStateRunning, JobStateScaling, JobStateStarting, JobStateStopped, JobStateStopping} -} - -// JobType enumerates the values for job type. -type JobType string - -const ( - // JobTypeCloud ... - JobTypeCloud JobType = "Cloud" - // JobTypeEdge ... - JobTypeEdge JobType = "Edge" -) - -// PossibleJobTypeValues returns an array of possible values for the JobType const type. -func PossibleJobTypeValues() []JobType { - return []JobType{JobTypeCloud, JobTypeEdge} -} - -// JSONOutputSerializationFormat enumerates the values for json output serialization format. -type JSONOutputSerializationFormat string - -const ( - // JSONOutputSerializationFormatArray ... - JSONOutputSerializationFormatArray JSONOutputSerializationFormat = "Array" - // JSONOutputSerializationFormatLineSeparated ... - JSONOutputSerializationFormatLineSeparated JSONOutputSerializationFormat = "LineSeparated" -) - -// PossibleJSONOutputSerializationFormatValues returns an array of possible values for the JSONOutputSerializationFormat const type. -func PossibleJSONOutputSerializationFormatValues() []JSONOutputSerializationFormat { - return []JSONOutputSerializationFormat{JSONOutputSerializationFormatArray, JSONOutputSerializationFormatLineSeparated} -} - -// OutputErrorPolicy enumerates the values for output error policy. -type OutputErrorPolicy string - -const ( - // OutputErrorPolicyDrop ... - OutputErrorPolicyDrop OutputErrorPolicy = "Drop" - // OutputErrorPolicyStop ... - OutputErrorPolicyStop OutputErrorPolicy = "Stop" -) - -// PossibleOutputErrorPolicyValues returns an array of possible values for the OutputErrorPolicy const type. -func PossibleOutputErrorPolicyValues() []OutputErrorPolicy { - return []OutputErrorPolicy{OutputErrorPolicyDrop, OutputErrorPolicyStop} -} - -// OutputStartMode enumerates the values for output start mode. -type OutputStartMode string - -const ( - // OutputStartModeCustomTime ... - OutputStartModeCustomTime OutputStartMode = "CustomTime" - // OutputStartModeJobStartTime ... - OutputStartModeJobStartTime OutputStartMode = "JobStartTime" - // OutputStartModeLastOutputEventTime ... - OutputStartModeLastOutputEventTime OutputStartMode = "LastOutputEventTime" -) - -// PossibleOutputStartModeValues returns an array of possible values for the OutputStartMode const type. -func PossibleOutputStartModeValues() []OutputStartMode { - return []OutputStartMode{OutputStartModeCustomTime, OutputStartModeJobStartTime, OutputStartModeLastOutputEventTime} -} - -// RefreshType enumerates the values for refresh type. -type RefreshType string - -const ( - // RefreshTypeRefreshPeriodicallyWithDelta ... - RefreshTypeRefreshPeriodicallyWithDelta RefreshType = "RefreshPeriodicallyWithDelta" - // RefreshTypeRefreshPeriodicallyWithFull ... 
- RefreshTypeRefreshPeriodicallyWithFull RefreshType = "RefreshPeriodicallyWithFull" - // RefreshTypeStatic ... - RefreshTypeStatic RefreshType = "Static" -) - -// PossibleRefreshTypeValues returns an array of possible values for the RefreshType const type. -func PossibleRefreshTypeValues() []RefreshType { - return []RefreshType{RefreshTypeRefreshPeriodicallyWithDelta, RefreshTypeRefreshPeriodicallyWithFull, RefreshTypeStatic} -} - -// SkuName enumerates the values for sku name. -type SkuName string - -const ( - // SkuNameStandard ... - SkuNameStandard SkuName = "Standard" -) - -// PossibleSkuNameValues returns an array of possible values for the SkuName const type. -func PossibleSkuNameValues() []SkuName { - return []SkuName{SkuNameStandard} -} - -// Type enumerates the values for type. -type Type string - -const ( - // TypeAvro ... - TypeAvro Type = "Avro" - // TypeCsv ... - TypeCsv Type = "Csv" - // TypeJSON ... - TypeJSON Type = "Json" - // TypeParquet ... - TypeParquet Type = "Parquet" - // TypeSerialization ... - TypeSerialization Type = "Serialization" -) - -// PossibleTypeValues returns an array of possible values for the Type const type. -func PossibleTypeValues() []Type { - return []Type{TypeAvro, TypeCsv, TypeJSON, TypeParquet, TypeSerialization} -} - -// TypeBasicFunctionBinding enumerates the values for type basic function binding. -type TypeBasicFunctionBinding string - -const ( - // TypeBasicFunctionBindingTypeFunctionBinding ... - TypeBasicFunctionBindingTypeFunctionBinding TypeBasicFunctionBinding = "FunctionBinding" - // TypeBasicFunctionBindingTypeMicrosoftMachineLearningWebService ... - TypeBasicFunctionBindingTypeMicrosoftMachineLearningWebService TypeBasicFunctionBinding = "Microsoft.MachineLearning/WebService" - // TypeBasicFunctionBindingTypeMicrosoftStreamAnalyticsJavascriptUdf ... - TypeBasicFunctionBindingTypeMicrosoftStreamAnalyticsJavascriptUdf TypeBasicFunctionBinding = "Microsoft.StreamAnalytics/JavascriptUdf" -) - -// PossibleTypeBasicFunctionBindingValues returns an array of possible values for the TypeBasicFunctionBinding const type. -func PossibleTypeBasicFunctionBindingValues() []TypeBasicFunctionBinding { - return []TypeBasicFunctionBinding{TypeBasicFunctionBindingTypeFunctionBinding, TypeBasicFunctionBindingTypeMicrosoftMachineLearningWebService, TypeBasicFunctionBindingTypeMicrosoftStreamAnalyticsJavascriptUdf} -} - -// TypeBasicFunctionProperties enumerates the values for type basic function properties. -type TypeBasicFunctionProperties string - -const ( - // TypeBasicFunctionPropertiesTypeAggregate ... - TypeBasicFunctionPropertiesTypeAggregate TypeBasicFunctionProperties = "Aggregate" - // TypeBasicFunctionPropertiesTypeFunctionProperties ... - TypeBasicFunctionPropertiesTypeFunctionProperties TypeBasicFunctionProperties = "FunctionProperties" - // TypeBasicFunctionPropertiesTypeScalar ... - TypeBasicFunctionPropertiesTypeScalar TypeBasicFunctionProperties = "Scalar" -) - -// PossibleTypeBasicFunctionPropertiesValues returns an array of possible values for the TypeBasicFunctionProperties const type. -func PossibleTypeBasicFunctionPropertiesValues() []TypeBasicFunctionProperties { - return []TypeBasicFunctionProperties{TypeBasicFunctionPropertiesTypeAggregate, TypeBasicFunctionPropertiesTypeFunctionProperties, TypeBasicFunctionPropertiesTypeScalar} -} - -// TypeBasicInputProperties enumerates the values for type basic input properties. -type TypeBasicInputProperties string - -const ( - // TypeBasicInputPropertiesTypeInputProperties ... 
- TypeBasicInputPropertiesTypeInputProperties TypeBasicInputProperties = "InputProperties" - // TypeBasicInputPropertiesTypeReference ... - TypeBasicInputPropertiesTypeReference TypeBasicInputProperties = "Reference" - // TypeBasicInputPropertiesTypeStream ... - TypeBasicInputPropertiesTypeStream TypeBasicInputProperties = "Stream" -) - -// PossibleTypeBasicInputPropertiesValues returns an array of possible values for the TypeBasicInputProperties const type. -func PossibleTypeBasicInputPropertiesValues() []TypeBasicInputProperties { - return []TypeBasicInputProperties{TypeBasicInputPropertiesTypeInputProperties, TypeBasicInputPropertiesTypeReference, TypeBasicInputPropertiesTypeStream} -} - -// TypeBasicOutputDataSource enumerates the values for type basic output data source. -type TypeBasicOutputDataSource string - -const ( - // TypeBasicOutputDataSourceTypeMicrosoftAzureFunction ... - TypeBasicOutputDataSourceTypeMicrosoftAzureFunction TypeBasicOutputDataSource = "Microsoft.AzureFunction" - // TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts ... - TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts TypeBasicOutputDataSource = "Microsoft.DataLake/Accounts" - // TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub ... - TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub TypeBasicOutputDataSource = "Microsoft.EventHub/EventHub" - // TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub ... - TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub TypeBasicOutputDataSource = "Microsoft.ServiceBus/EventHub" - // TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue ... - TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue TypeBasicOutputDataSource = "Microsoft.ServiceBus/Queue" - // TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic ... - TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic TypeBasicOutputDataSource = "Microsoft.ServiceBus/Topic" - // TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase ... - TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase TypeBasicOutputDataSource = "Microsoft.Sql/Server/Database" - // TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse ... - TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse TypeBasicOutputDataSource = "Microsoft.Sql/Server/DataWarehouse" - // TypeBasicOutputDataSourceTypeMicrosoftStorageBlob ... - TypeBasicOutputDataSourceTypeMicrosoftStorageBlob TypeBasicOutputDataSource = "Microsoft.Storage/Blob" - // TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB ... - TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB TypeBasicOutputDataSource = "Microsoft.Storage/DocumentDB" - // TypeBasicOutputDataSourceTypeMicrosoftStorageTable ... - TypeBasicOutputDataSourceTypeMicrosoftStorageTable TypeBasicOutputDataSource = "Microsoft.Storage/Table" - // TypeBasicOutputDataSourceTypeOutputDataSource ... - TypeBasicOutputDataSourceTypeOutputDataSource TypeBasicOutputDataSource = "OutputDataSource" - // TypeBasicOutputDataSourceTypePowerBI ... - TypeBasicOutputDataSourceTypePowerBI TypeBasicOutputDataSource = "PowerBI" -) - -// PossibleTypeBasicOutputDataSourceValues returns an array of possible values for the TypeBasicOutputDataSource const type. 
-func PossibleTypeBasicOutputDataSourceValues() []TypeBasicOutputDataSource { - return []TypeBasicOutputDataSource{TypeBasicOutputDataSourceTypeMicrosoftAzureFunction, TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts, TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub, TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub, TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue, TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic, TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase, TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse, TypeBasicOutputDataSourceTypeMicrosoftStorageBlob, TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB, TypeBasicOutputDataSourceTypeMicrosoftStorageTable, TypeBasicOutputDataSourceTypeOutputDataSource, TypeBasicOutputDataSourceTypePowerBI} -} - -// TypeBasicReferenceInputDataSource enumerates the values for type basic reference input data source. -type TypeBasicReferenceInputDataSource string - -const ( - // TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase ... - TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase TypeBasicReferenceInputDataSource = "Microsoft.Sql/Server/Database" - // TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob ... - TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob TypeBasicReferenceInputDataSource = "Microsoft.Storage/Blob" - // TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource ... - TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource TypeBasicReferenceInputDataSource = "ReferenceInputDataSource" -) - -// PossibleTypeBasicReferenceInputDataSourceValues returns an array of possible values for the TypeBasicReferenceInputDataSource const type. -func PossibleTypeBasicReferenceInputDataSourceValues() []TypeBasicReferenceInputDataSource { - return []TypeBasicReferenceInputDataSource{TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase, TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob, TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource} -} - -// TypeBasicStreamInputDataSource enumerates the values for type basic stream input data source. -type TypeBasicStreamInputDataSource string - -const ( - // TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs ... - TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs TypeBasicStreamInputDataSource = "Microsoft.Devices/IotHubs" - // TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub ... - TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub TypeBasicStreamInputDataSource = "Microsoft.EventHub/EventHub" - // TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub ... - TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub TypeBasicStreamInputDataSource = "Microsoft.ServiceBus/EventHub" - // TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob ... - TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob TypeBasicStreamInputDataSource = "Microsoft.Storage/Blob" - // TypeBasicStreamInputDataSourceTypeStreamInputDataSource ... - TypeBasicStreamInputDataSourceTypeStreamInputDataSource TypeBasicStreamInputDataSource = "StreamInputDataSource" -) - -// PossibleTypeBasicStreamInputDataSourceValues returns an array of possible values for the TypeBasicStreamInputDataSource const type. 
-func PossibleTypeBasicStreamInputDataSourceValues() []TypeBasicStreamInputDataSource { - return []TypeBasicStreamInputDataSource{TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs, TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub, TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub, TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob, TypeBasicStreamInputDataSourceTypeStreamInputDataSource} -} - -// UdfType enumerates the values for udf type. -type UdfType string - -const ( - // UdfTypeScalar ... - UdfTypeScalar UdfType = "Scalar" -) - -// PossibleUdfTypeValues returns an array of possible values for the UdfType const type. -func PossibleUdfTypeValues() []UdfType { - return []UdfType{UdfTypeScalar} -} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/functions.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/functions.go deleted file mode 100644 index 4cc821335fed..000000000000 --- a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/functions.go +++ /dev/null @@ -1,743 +0,0 @@ -package streamanalytics - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. - -import ( - "context" - "github.com/Azure/go-autorest/autorest" - "github.com/Azure/go-autorest/autorest/azure" - "github.com/Azure/go-autorest/autorest/validation" - "github.com/Azure/go-autorest/tracing" - "net/http" -) - -// FunctionsClient is the stream Analytics Client -type FunctionsClient struct { - BaseClient -} - -// NewFunctionsClient creates an instance of the FunctionsClient client. -func NewFunctionsClient(subscriptionID string) FunctionsClient { - return NewFunctionsClientWithBaseURI(DefaultBaseURI, subscriptionID) -} - -// NewFunctionsClientWithBaseURI creates an instance of the FunctionsClient client using a custom endpoint. Use this -// when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). -func NewFunctionsClientWithBaseURI(baseURI string, subscriptionID string) FunctionsClient { - return FunctionsClient{NewWithBaseURI(baseURI, subscriptionID)} -} - -// CreateOrReplace creates a function or replaces an already existing function under an existing streaming job. -// Parameters: -// function - the definition of the function that will be used to create a new function or replace the existing -// one under the streaming job. -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// functionName - the name of the function. -// ifMatch - the ETag of the function. Omit this value to always overwrite the current function. Specify the -// last-seen ETag value to prevent accidentally overwriting concurrent changes. -// ifNoneMatch - set to '*' to allow a new function to be created, but to prevent updating an existing -// function. Other values will result in a 412 Pre-condition Failed response. 
-func (client FunctionsClient) CreateOrReplace(ctx context.Context, function Function, resourceGroupName string, jobName string, functionName string, ifMatch string, ifNoneMatch string) (result Function, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.CreateOrReplace") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.FunctionsClient", "CreateOrReplace", err.Error()) - } - - req, err := client.CreateOrReplacePreparer(ctx, function, resourceGroupName, jobName, functionName, ifMatch, ifNoneMatch) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "CreateOrReplace", nil, "Failure preparing request") - return - } - - resp, err := client.CreateOrReplaceSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "CreateOrReplace", resp, "Failure sending request") - return - } - - result, err = client.CreateOrReplaceResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "CreateOrReplace", resp, "Failure responding to request") - return - } - - return -} - -// CreateOrReplacePreparer prepares the CreateOrReplace request. -func (client FunctionsClient) CreateOrReplacePreparer(ctx context.Context, function Function, resourceGroupName string, jobName string, functionName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "functionName": autorest.Encode("path", functionName), - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPut(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}", pathParameters), - autorest.WithJSON(function), - autorest.WithQueryParameters(queryParameters)) - if len(ifMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-Match", autorest.String(ifMatch))) - } - if len(ifNoneMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// CreateOrReplaceSender sends the CreateOrReplace request. 
The method will close the -// http.Response Body if it receives an error. -func (client FunctionsClient) CreateOrReplaceSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// CreateOrReplaceResponder handles the response to the CreateOrReplace request. The method always -// closes the http.Response Body. -func (client FunctionsClient) CreateOrReplaceResponder(resp *http.Response) (result Function, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// Delete deletes a function from the streaming job. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// functionName - the name of the function. -func (client FunctionsClient) Delete(ctx context.Context, resourceGroupName string, jobName string, functionName string) (result autorest.Response, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.Delete") - defer func() { - sc := -1 - if result.Response != nil { - sc = result.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.FunctionsClient", "Delete", err.Error()) - } - - req, err := client.DeletePreparer(ctx, resourceGroupName, jobName, functionName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Delete", nil, "Failure preparing request") - return - } - - resp, err := client.DeleteSender(req) - if err != nil { - result.Response = resp - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Delete", resp, "Failure sending request") - return - } - - result, err = client.DeleteResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Delete", resp, "Failure responding to request") - return - } - - return -} - -// DeletePreparer prepares the Delete request. 
-func (client FunctionsClient) DeletePreparer(ctx context.Context, resourceGroupName string, jobName string, functionName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "functionName": autorest.Encode("path", functionName), - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsDelete(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// DeleteSender sends the Delete request. The method will close the -// http.Response Body if it receives an error. -func (client FunctionsClient) DeleteSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// DeleteResponder handles the response to the Delete request. The method always -// closes the http.Response Body. -func (client FunctionsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent), - autorest.ByClosing()) - result.Response = resp - return -} - -// Get gets details about the specified function. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// functionName - the name of the function. 
-func (client FunctionsClient) Get(ctx context.Context, resourceGroupName string, jobName string, functionName string) (result Function, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.Get") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.FunctionsClient", "Get", err.Error()) - } - - req, err := client.GetPreparer(ctx, resourceGroupName, jobName, functionName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Get", nil, "Failure preparing request") - return - } - - resp, err := client.GetSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Get", resp, "Failure sending request") - return - } - - result, err = client.GetResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Get", resp, "Failure responding to request") - return - } - - return -} - -// GetPreparer prepares the Get request. -func (client FunctionsClient) GetPreparer(ctx context.Context, resourceGroupName string, jobName string, functionName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "functionName": autorest.Encode("path", functionName), - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// GetSender sends the Get request. The method will close the -// http.Response Body if it receives an error. -func (client FunctionsClient) GetSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// GetResponder handles the response to the Get request. The method always -// closes the http.Response Body. -func (client FunctionsClient) GetResponder(resp *http.Response) (result Function, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// ListByStreamingJob lists all of the functions under the specified streaming job. 
-// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// selectParameter - the $select OData query parameter. This is a comma-separated list of structural properties -// to include in the response, or "*" to include all properties. By default, all properties are returned except -// diagnostics. Currently only accepts '*' as a valid value. -func (client FunctionsClient) ListByStreamingJob(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result FunctionListResultPage, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.ListByStreamingJob") - defer func() { - sc := -1 - if result.flr.Response.Response != nil { - sc = result.flr.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.FunctionsClient", "ListByStreamingJob", err.Error()) - } - - result.fn = client.listByStreamingJobNextResults - req, err := client.ListByStreamingJobPreparer(ctx, resourceGroupName, jobName, selectParameter) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "ListByStreamingJob", nil, "Failure preparing request") - return - } - - resp, err := client.ListByStreamingJobSender(req) - if err != nil { - result.flr.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "ListByStreamingJob", resp, "Failure sending request") - return - } - - result.flr, err = client.ListByStreamingJobResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "ListByStreamingJob", resp, "Failure responding to request") - return - } - if result.flr.hasNextLink() && result.flr.IsEmpty() { - err = result.NextWithContext(ctx) - return - } - - return -} - -// ListByStreamingJobPreparer prepares the ListByStreamingJob request. 
-func (client FunctionsClient) ListByStreamingJobPreparer(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - if len(selectParameter) > 0 { - queryParameters["$select"] = autorest.Encode("query", selectParameter) - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// ListByStreamingJobSender sends the ListByStreamingJob request. The method will close the -// http.Response Body if it receives an error. -func (client FunctionsClient) ListByStreamingJobSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// ListByStreamingJobResponder handles the response to the ListByStreamingJob request. The method always -// closes the http.Response Body. -func (client FunctionsClient) ListByStreamingJobResponder(resp *http.Response) (result FunctionListResult, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// listByStreamingJobNextResults retrieves the next set of results, if any. -func (client FunctionsClient) listByStreamingJobNextResults(ctx context.Context, lastResults FunctionListResult) (result FunctionListResult, err error) { - req, err := lastResults.functionListResultPreparer(ctx) - if err != nil { - return result, autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "listByStreamingJobNextResults", nil, "Failure preparing next results request") - } - if req == nil { - return - } - resp, err := client.ListByStreamingJobSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - return result, autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "listByStreamingJobNextResults", resp, "Failure sending next results request") - } - result, err = client.ListByStreamingJobResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "listByStreamingJobNextResults", resp, "Failure responding to next results request") - } - return -} - -// ListByStreamingJobComplete enumerates all values, automatically crossing page boundaries as required. 
-func (client FunctionsClient) ListByStreamingJobComplete(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result FunctionListResultIterator, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.ListByStreamingJob") - defer func() { - sc := -1 - if result.Response().Response.Response != nil { - sc = result.page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - result.page, err = client.ListByStreamingJob(ctx, resourceGroupName, jobName, selectParameter) - return -} - -// RetrieveDefaultDefinition retrieves the default definition of a function based on the parameters specified. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// functionName - the name of the function. -// functionRetrieveDefaultDefinitionParameters - parameters used to specify the type of function to retrieve -// the default definition for. -func (client FunctionsClient) RetrieveDefaultDefinition(ctx context.Context, resourceGroupName string, jobName string, functionName string, functionRetrieveDefaultDefinitionParameters *BasicFunctionRetrieveDefaultDefinitionParameters) (result Function, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.RetrieveDefaultDefinition") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.FunctionsClient", "RetrieveDefaultDefinition", err.Error()) - } - - req, err := client.RetrieveDefaultDefinitionPreparer(ctx, resourceGroupName, jobName, functionName, functionRetrieveDefaultDefinitionParameters) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "RetrieveDefaultDefinition", nil, "Failure preparing request") - return - } - - resp, err := client.RetrieveDefaultDefinitionSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "RetrieveDefaultDefinition", resp, "Failure sending request") - return - } - - result, err = client.RetrieveDefaultDefinitionResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "RetrieveDefaultDefinition", resp, "Failure responding to request") - return - } - - return -} - -// RetrieveDefaultDefinitionPreparer prepares the RetrieveDefaultDefinition request. 
-func (client FunctionsClient) RetrieveDefaultDefinitionPreparer(ctx context.Context, resourceGroupName string, jobName string, functionName string, functionRetrieveDefaultDefinitionParameters *BasicFunctionRetrieveDefaultDefinitionParameters) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "functionName": autorest.Encode("path", functionName), - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPost(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/retrieveDefaultDefinition", pathParameters), - autorest.WithQueryParameters(queryParameters)) - if functionRetrieveDefaultDefinitionParameters != nil { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithJSON(functionRetrieveDefaultDefinitionParameters)) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// RetrieveDefaultDefinitionSender sends the RetrieveDefaultDefinition request. The method will close the -// http.Response Body if it receives an error. -func (client FunctionsClient) RetrieveDefaultDefinitionSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// RetrieveDefaultDefinitionResponder handles the response to the RetrieveDefaultDefinition request. The method always -// closes the http.Response Body. -func (client FunctionsClient) RetrieveDefaultDefinitionResponder(resp *http.Response) (result Function, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// Test tests if the information provided for a function is valid. This can range from testing the connection to the -// underlying web service behind the function or making sure the function code provided is syntactically correct. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// functionName - the name of the function. -// function - if the function specified does not already exist, this parameter must contain the full function -// definition intended to be tested. If the function specified already exists, this parameter can be left null -// to test the existing function as is or if specified, the properties specified will overwrite the -// corresponding properties in the existing function (exactly like a PATCH operation) and the resulting -// function will be tested. 
-func (client FunctionsClient) Test(ctx context.Context, resourceGroupName string, jobName string, functionName string, function *Function) (result FunctionsTestFuture, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.Test") - defer func() { - sc := -1 - if result.FutureAPI != nil && result.FutureAPI.Response() != nil { - sc = result.FutureAPI.Response().StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.FunctionsClient", "Test", err.Error()) - } - - req, err := client.TestPreparer(ctx, resourceGroupName, jobName, functionName, function) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Test", nil, "Failure preparing request") - return - } - - result, err = client.TestSender(req) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Test", result.Response(), "Failure sending request") - return - } - - return -} - -// TestPreparer prepares the Test request. -func (client FunctionsClient) TestPreparer(ctx context.Context, resourceGroupName string, jobName string, functionName string, function *Function) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "functionName": autorest.Encode("path", functionName), - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPost(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test", pathParameters), - autorest.WithQueryParameters(queryParameters)) - if function != nil { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithJSON(function)) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// TestSender sends the Test request. The method will close the -// http.Response Body if it receives an error. -func (client FunctionsClient) TestSender(req *http.Request) (future FunctionsTestFuture, err error) { - var resp *http.Response - future.FutureAPI = &azure.Future{} - resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) - if err != nil { - return - } - var azf azure.Future - azf, err = azure.NewFutureFromResponse(resp) - future.FutureAPI = &azf - future.Result = future.result - return -} - -// TestResponder handles the response to the Test request. The method always -// closes the http.Response Body. 
-func (client FunctionsClient) TestResponder(resp *http.Response) (result ResourceTestStatus, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// Update updates an existing function under an existing streaming job. This can be used to partially update (ie. -// update one or two properties) a function without affecting the rest the job or function definition. -// Parameters: -// function - a function object. The properties specified here will overwrite the corresponding properties in -// the existing function (ie. Those properties will be updated). Any properties that are set to null here will -// mean that the corresponding property in the existing function will remain the same and not change as a -// result of this PATCH operation. -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// functionName - the name of the function. -// ifMatch - the ETag of the function. Omit this value to always overwrite the current function. Specify the -// last-seen ETag value to prevent accidentally overwriting concurrent changes. -func (client FunctionsClient) Update(ctx context.Context, function Function, resourceGroupName string, jobName string, functionName string, ifMatch string) (result Function, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/FunctionsClient.Update") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.FunctionsClient", "Update", err.Error()) - } - - req, err := client.UpdatePreparer(ctx, function, resourceGroupName, jobName, functionName, ifMatch) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Update", nil, "Failure preparing request") - return - } - - resp, err := client.UpdateSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Update", resp, "Failure sending request") - return - } - - result, err = client.UpdateResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsClient", "Update", resp, "Failure responding to request") - return - } - - return -} - -// UpdatePreparer prepares the Update request. 
-func (client FunctionsClient) UpdatePreparer(ctx context.Context, function Function, resourceGroupName string, jobName string, functionName string, ifMatch string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "functionName": autorest.Encode("path", functionName), - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPatch(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}", pathParameters), - autorest.WithJSON(function), - autorest.WithQueryParameters(queryParameters)) - if len(ifMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-Match", autorest.String(ifMatch))) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// UpdateSender sends the Update request. The method will close the -// http.Response Body if it receives an error. -func (client FunctionsClient) UpdateSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// UpdateResponder handles the response to the Update request. The method always -// closes the http.Response Body. -func (client FunctionsClient) UpdateResponder(resp *http.Response) (result Function, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/inputs.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/inputs.go deleted file mode 100644 index 0ca5c1e15010..000000000000 --- a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/inputs.go +++ /dev/null @@ -1,646 +0,0 @@ -package streamanalytics - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. - -import ( - "context" - "github.com/Azure/go-autorest/autorest" - "github.com/Azure/go-autorest/autorest/azure" - "github.com/Azure/go-autorest/autorest/validation" - "github.com/Azure/go-autorest/tracing" - "net/http" -) - -// InputsClient is the stream Analytics Client -type InputsClient struct { - BaseClient -} - -// NewInputsClient creates an instance of the InputsClient client. -func NewInputsClient(subscriptionID string) InputsClient { - return NewInputsClientWithBaseURI(DefaultBaseURI, subscriptionID) -} - -// NewInputsClientWithBaseURI creates an instance of the InputsClient client using a custom endpoint. Use this when -// interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). 
-func NewInputsClientWithBaseURI(baseURI string, subscriptionID string) InputsClient { - return InputsClient{NewWithBaseURI(baseURI, subscriptionID)} -} - -// CreateOrReplace creates an input or replaces an already existing input under an existing streaming job. -// Parameters: -// input - the definition of the input that will be used to create a new input or replace the existing one -// under the streaming job. -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// inputName - the name of the input. -// ifMatch - the ETag of the input. Omit this value to always overwrite the current input. Specify the -// last-seen ETag value to prevent accidentally overwriting concurrent changes. -// ifNoneMatch - set to '*' to allow a new input to be created, but to prevent updating an existing input. -// Other values will result in a 412 Pre-condition Failed response. -func (client InputsClient) CreateOrReplace(ctx context.Context, input Input, resourceGroupName string, jobName string, inputName string, ifMatch string, ifNoneMatch string) (result Input, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.CreateOrReplace") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.InputsClient", "CreateOrReplace", err.Error()) - } - - req, err := client.CreateOrReplacePreparer(ctx, input, resourceGroupName, jobName, inputName, ifMatch, ifNoneMatch) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "CreateOrReplace", nil, "Failure preparing request") - return - } - - resp, err := client.CreateOrReplaceSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "CreateOrReplace", resp, "Failure sending request") - return - } - - result, err = client.CreateOrReplaceResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "CreateOrReplace", resp, "Failure responding to request") - return - } - - return -} - -// CreateOrReplacePreparer prepares the CreateOrReplace request. 
-func (client InputsClient) CreateOrReplacePreparer(ctx context.Context, input Input, resourceGroupName string, jobName string, inputName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "inputName": autorest.Encode("path", inputName), - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPut(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}", pathParameters), - autorest.WithJSON(input), - autorest.WithQueryParameters(queryParameters)) - if len(ifMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-Match", autorest.String(ifMatch))) - } - if len(ifNoneMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// CreateOrReplaceSender sends the CreateOrReplace request. The method will close the -// http.Response Body if it receives an error. -func (client InputsClient) CreateOrReplaceSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// CreateOrReplaceResponder handles the response to the CreateOrReplace request. The method always -// closes the http.Response Body. -func (client InputsClient) CreateOrReplaceResponder(resp *http.Response) (result Input, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// Delete deletes an input from the streaming job. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// inputName - the name of the input. 
-func (client InputsClient) Delete(ctx context.Context, resourceGroupName string, jobName string, inputName string) (result autorest.Response, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.Delete") - defer func() { - sc := -1 - if result.Response != nil { - sc = result.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.InputsClient", "Delete", err.Error()) - } - - req, err := client.DeletePreparer(ctx, resourceGroupName, jobName, inputName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Delete", nil, "Failure preparing request") - return - } - - resp, err := client.DeleteSender(req) - if err != nil { - result.Response = resp - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Delete", resp, "Failure sending request") - return - } - - result, err = client.DeleteResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Delete", resp, "Failure responding to request") - return - } - - return -} - -// DeletePreparer prepares the Delete request. -func (client InputsClient) DeletePreparer(ctx context.Context, resourceGroupName string, jobName string, inputName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "inputName": autorest.Encode("path", inputName), - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsDelete(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// DeleteSender sends the Delete request. The method will close the -// http.Response Body if it receives an error. -func (client InputsClient) DeleteSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// DeleteResponder handles the response to the Delete request. The method always -// closes the http.Response Body. -func (client InputsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent), - autorest.ByClosing()) - result.Response = resp - return -} - -// Get gets details about the specified input. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. 
-// jobName - the name of the streaming job. -// inputName - the name of the input. -func (client InputsClient) Get(ctx context.Context, resourceGroupName string, jobName string, inputName string) (result Input, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.Get") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.InputsClient", "Get", err.Error()) - } - - req, err := client.GetPreparer(ctx, resourceGroupName, jobName, inputName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Get", nil, "Failure preparing request") - return - } - - resp, err := client.GetSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Get", resp, "Failure sending request") - return - } - - result, err = client.GetResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Get", resp, "Failure responding to request") - return - } - - return -} - -// GetPreparer prepares the Get request. -func (client InputsClient) GetPreparer(ctx context.Context, resourceGroupName string, jobName string, inputName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "inputName": autorest.Encode("path", inputName), - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// GetSender sends the Get request. The method will close the -// http.Response Body if it receives an error. -func (client InputsClient) GetSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// GetResponder handles the response to the Get request. The method always -// closes the http.Response Body. 
-func (client InputsClient) GetResponder(resp *http.Response) (result Input, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// ListByStreamingJob lists all of the inputs under the specified streaming job. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// selectParameter - the $select OData query parameter. This is a comma-separated list of structural properties -// to include in the response, or "*" to include all properties. By default, all properties are returned except -// diagnostics. Currently only accepts '*' as a valid value. -func (client InputsClient) ListByStreamingJob(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result InputListResultPage, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.ListByStreamingJob") - defer func() { - sc := -1 - if result.ilr.Response.Response != nil { - sc = result.ilr.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.InputsClient", "ListByStreamingJob", err.Error()) - } - - result.fn = client.listByStreamingJobNextResults - req, err := client.ListByStreamingJobPreparer(ctx, resourceGroupName, jobName, selectParameter) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "ListByStreamingJob", nil, "Failure preparing request") - return - } - - resp, err := client.ListByStreamingJobSender(req) - if err != nil { - result.ilr.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "ListByStreamingJob", resp, "Failure sending request") - return - } - - result.ilr, err = client.ListByStreamingJobResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "ListByStreamingJob", resp, "Failure responding to request") - return - } - if result.ilr.hasNextLink() && result.ilr.IsEmpty() { - err = result.NextWithContext(ctx) - return - } - - return -} - -// ListByStreamingJobPreparer prepares the ListByStreamingJob request. 
-func (client InputsClient) ListByStreamingJobPreparer(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - if len(selectParameter) > 0 { - queryParameters["$select"] = autorest.Encode("query", selectParameter) - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// ListByStreamingJobSender sends the ListByStreamingJob request. The method will close the -// http.Response Body if it receives an error. -func (client InputsClient) ListByStreamingJobSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// ListByStreamingJobResponder handles the response to the ListByStreamingJob request. The method always -// closes the http.Response Body. -func (client InputsClient) ListByStreamingJobResponder(resp *http.Response) (result InputListResult, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// listByStreamingJobNextResults retrieves the next set of results, if any. -func (client InputsClient) listByStreamingJobNextResults(ctx context.Context, lastResults InputListResult) (result InputListResult, err error) { - req, err := lastResults.inputListResultPreparer(ctx) - if err != nil { - return result, autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "listByStreamingJobNextResults", nil, "Failure preparing next results request") - } - if req == nil { - return - } - resp, err := client.ListByStreamingJobSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - return result, autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "listByStreamingJobNextResults", resp, "Failure sending next results request") - } - result, err = client.ListByStreamingJobResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "listByStreamingJobNextResults", resp, "Failure responding to next results request") - } - return -} - -// ListByStreamingJobComplete enumerates all values, automatically crossing page boundaries as required. 
-func (client InputsClient) ListByStreamingJobComplete(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result InputListResultIterator, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.ListByStreamingJob") - defer func() { - sc := -1 - if result.Response().Response.Response != nil { - sc = result.page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - result.page, err = client.ListByStreamingJob(ctx, resourceGroupName, jobName, selectParameter) - return -} - -// Test tests whether an input’s datasource is reachable and usable by the Azure Stream Analytics service. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// inputName - the name of the input. -// input - if the input specified does not already exist, this parameter must contain the full input definition -// intended to be tested. If the input specified already exists, this parameter can be left null to test the -// existing input as is or if specified, the properties specified will overwrite the corresponding properties -// in the existing input (exactly like a PATCH operation) and the resulting input will be tested. -func (client InputsClient) Test(ctx context.Context, resourceGroupName string, jobName string, inputName string, input *Input) (result InputsTestFuture, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.Test") - defer func() { - sc := -1 - if result.FutureAPI != nil && result.FutureAPI.Response() != nil { - sc = result.FutureAPI.Response().StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.InputsClient", "Test", err.Error()) - } - - req, err := client.TestPreparer(ctx, resourceGroupName, jobName, inputName, input) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Test", nil, "Failure preparing request") - return - } - - result, err = client.TestSender(req) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Test", result.Response(), "Failure sending request") - return - } - - return -} - -// TestPreparer prepares the Test request. 
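Test is a long-running operation; a sketch of driving it to completion with the standard autorest future helpers (names illustrative; passing nil tests the existing input as-is, per the doc comment above):

    func testExistingInput(ctx context.Context, client streamanalytics.InputsClient) error {
        future, err := client.Test(ctx, "example-rg", "example-job", "example-input", nil)
        if err != nil {
            return err
        }
        // WaitForCompletionRef is promoted from the embedded azure.FutureAPI.
        if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
            return err
        }
        status, err := future.Result(client)
        if err != nil {
            return err
        }
        if status.Status != nil {
            fmt.Printf("datasource test: %s\n", *status.Status)
        }
        return nil
    }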
-func (client InputsClient) TestPreparer(ctx context.Context, resourceGroupName string, jobName string, inputName string, input *Input) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "inputName": autorest.Encode("path", inputName), - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPost(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test", pathParameters), - autorest.WithQueryParameters(queryParameters)) - if input != nil { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithJSON(input)) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// TestSender sends the Test request. The method will close the -// http.Response Body if it receives an error. -func (client InputsClient) TestSender(req *http.Request) (future InputsTestFuture, err error) { - var resp *http.Response - future.FutureAPI = &azure.Future{} - resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) - if err != nil { - return - } - var azf azure.Future - azf, err = azure.NewFutureFromResponse(resp) - future.FutureAPI = &azf - future.Result = future.result - return -} - -// TestResponder handles the response to the Test request. The method always -// closes the http.Response Body. -func (client InputsClient) TestResponder(resp *http.Response) (result ResourceTestStatus, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// Update updates an existing input under an existing streaming job. This can be used to partially update (ie. update -// one or two properties) an input without affecting the rest the job or input definition. -// Parameters: -// input - an Input object. The properties specified here will overwrite the corresponding properties in the -// existing input (ie. Those properties will be updated). Any properties that are set to null here will mean -// that the corresponding property in the existing input will remain the same and not change as a result of -// this PATCH operation. -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// inputName - the name of the input. -// ifMatch - the ETag of the input. Omit this value to always overwrite the current input. Specify the -// last-seen ETag value to prevent accidentally overwriting concurrent changes. 
-func (client InputsClient) Update(ctx context.Context, input Input, resourceGroupName string, jobName string, inputName string, ifMatch string) (result Input, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/InputsClient.Update") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.InputsClient", "Update", err.Error()) - } - - req, err := client.UpdatePreparer(ctx, input, resourceGroupName, jobName, inputName, ifMatch) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Update", nil, "Failure preparing request") - return - } - - resp, err := client.UpdateSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Update", resp, "Failure sending request") - return - } - - result, err = client.UpdateResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.InputsClient", "Update", resp, "Failure responding to request") - return - } - - return -} - -// UpdatePreparer prepares the Update request. -func (client InputsClient) UpdatePreparer(ctx context.Context, input Input, resourceGroupName string, jobName string, inputName string, ifMatch string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "inputName": autorest.Encode("path", inputName), - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPatch(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}", pathParameters), - autorest.WithJSON(input), - autorest.WithQueryParameters(queryParameters)) - if len(ifMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-Match", autorest.String(ifMatch))) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// UpdateSender sends the Update request. The method will close the -// http.Response Body if it receives an error. -func (client InputsClient) UpdateSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// UpdateResponder handles the response to the Update request. The method always -// closes the http.Response Body. 
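A sketch of the optimistic-concurrency contract described above: pass the ETag captured from a previous read as ifMatch, or the empty string to overwrite unconditionally. The patch body and names are illustrative; only the properties set on it change, per the PATCH semantics in the doc comment:

    func patchInput(ctx context.Context, client streamanalytics.InputsClient, patch streamanalytics.Input, etag string) error {
        // patch carries only the fields being changed; nil fields are left as-is.
        _, err := client.Update(ctx, patch, "example-rg", "example-job", "example-input", etag)
        return err
    }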
-func (client InputsClient) UpdateResponder(resp *http.Response) (result Input, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/models.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/models.go deleted file mode 100644 index cdd214b2a08e..000000000000 --- a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/models.go +++ /dev/null @@ -1,7670 +0,0 @@ -package streamanalytics - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. - -import ( - "context" - "encoding/json" - "github.com/Azure/go-autorest/autorest" - "github.com/Azure/go-autorest/autorest/azure" - "github.com/Azure/go-autorest/autorest/date" - "github.com/Azure/go-autorest/autorest/to" - "github.com/Azure/go-autorest/tracing" - "net/http" -) - -// The package's fully qualified name. -const fqdn = "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics" - -// AggregateFunctionProperties the properties that are associated with an aggregate function. -type AggregateFunctionProperties struct { - // Etag - READ-ONLY; The current entity tag for the function. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. - Etag *string `json:"etag,omitempty"` - *FunctionConfiguration `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicFunctionPropertiesTypeFunctionProperties', 'TypeBasicFunctionPropertiesTypeAggregate', 'TypeBasicFunctionPropertiesTypeScalar' - Type TypeBasicFunctionProperties `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for AggregateFunctionProperties. -func (afp AggregateFunctionProperties) MarshalJSON() ([]byte, error) { - afp.Type = TypeBasicFunctionPropertiesTypeAggregate - objectMap := make(map[string]interface{}) - if afp.FunctionConfiguration != nil { - objectMap["properties"] = afp.FunctionConfiguration - } - if afp.Type != "" { - objectMap["type"] = afp.Type - } - return json.Marshal(objectMap) -} - -// AsAggregateFunctionProperties is the BasicFunctionProperties implementation for AggregateFunctionProperties. -func (afp AggregateFunctionProperties) AsAggregateFunctionProperties() (*AggregateFunctionProperties, bool) { - return &afp, true -} - -// AsScalarFunctionProperties is the BasicFunctionProperties implementation for AggregateFunctionProperties. -func (afp AggregateFunctionProperties) AsScalarFunctionProperties() (*ScalarFunctionProperties, bool) { - return nil, false -} - -// AsFunctionProperties is the BasicFunctionProperties implementation for AggregateFunctionProperties. -func (afp AggregateFunctionProperties) AsFunctionProperties() (*FunctionProperties, bool) { - return nil, false -} - -// AsBasicFunctionProperties is the BasicFunctionProperties implementation for AggregateFunctionProperties. 
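The custom marshaler above exists to stamp the discriminator even when the caller never sets Type. A sketch of the resulting wire shape (the "Aggregate" value is assumed from the enum naming; imports encoding/json):

    b, _ := json.Marshal(streamanalytics.AggregateFunctionProperties{})
    // b == []byte(`{"type":"Aggregate"}`): properties are omitted when nil,
    // but the type tag is always emitted because MarshalJSON pins it first.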
-func (afp AggregateFunctionProperties) AsBasicFunctionProperties() (BasicFunctionProperties, bool) { - return &afp, true -} - -// UnmarshalJSON is the custom unmarshaler for AggregateFunctionProperties struct. -func (afp *AggregateFunctionProperties) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "etag": - if v != nil { - var etag string - err = json.Unmarshal(*v, &etag) - if err != nil { - return err - } - afp.Etag = &etag - } - case "properties": - if v != nil { - var functionConfiguration FunctionConfiguration - err = json.Unmarshal(*v, &functionConfiguration) - if err != nil { - return err - } - afp.FunctionConfiguration = &functionConfiguration - } - case "type": - if v != nil { - var typeVar TypeBasicFunctionProperties - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - afp.Type = typeVar - } - } - } - - return nil -} - -// AvroSerialization describes how data from an input is serialized or how data is serialized when written -// to an output in Avro format. -type AvroSerialization struct { - // Properties - The properties that are associated with the Avro serialization type. Required on PUT (CreateOrReplace) requests. - Properties interface{} `json:"properties,omitempty"` - // Type - Possible values include: 'TypeSerialization', 'TypeAvro', 'TypeJSON', 'TypeCsv', 'TypeParquet' - Type Type `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for AvroSerialization. -func (as AvroSerialization) MarshalJSON() ([]byte, error) { - as.Type = TypeAvro - objectMap := make(map[string]interface{}) - if as.Properties != nil { - objectMap["properties"] = as.Properties - } - if as.Type != "" { - objectMap["type"] = as.Type - } - return json.Marshal(objectMap) -} - -// AsAvroSerialization is the BasicSerialization implementation for AvroSerialization. -func (as AvroSerialization) AsAvroSerialization() (*AvroSerialization, bool) { - return &as, true -} - -// AsJSONSerialization is the BasicSerialization implementation for AvroSerialization. -func (as AvroSerialization) AsJSONSerialization() (*JSONSerialization, bool) { - return nil, false -} - -// AsCsvSerialization is the BasicSerialization implementation for AvroSerialization. -func (as AvroSerialization) AsCsvSerialization() (*CsvSerialization, bool) { - return nil, false -} - -// AsParquetSerialization is the BasicSerialization implementation for AvroSerialization. -func (as AvroSerialization) AsParquetSerialization() (*ParquetSerialization, bool) { - return nil, false -} - -// AsSerialization is the BasicSerialization implementation for AvroSerialization. -func (as AvroSerialization) AsSerialization() (*Serialization, bool) { - return nil, false -} - -// AsBasicSerialization is the BasicSerialization implementation for AvroSerialization. -func (as AvroSerialization) AsBasicSerialization() (BasicSerialization, bool) { - return &as, true -} - -// AzureDataLakeStoreOutputDataSource describes an Azure Data Lake Store output data source. -type AzureDataLakeStoreOutputDataSource struct { - // AzureDataLakeStoreOutputDataSourceProperties - The properties that are associated with an Azure Data Lake Store output. Required on PUT (CreateOrReplace) requests. 
- *AzureDataLakeStoreOutputDataSourceProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicOutputDataSourceTypeOutputDataSource', 'TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts', 'TypeBasicOutputDataSourceTypePowerBI', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue', 'TypeBasicOutputDataSourceTypeMicrosoftAzureFunction', 'TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftStorageTable', 'TypeBasicOutputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicOutputDataSource `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for AzureDataLakeStoreOutputDataSource. -func (adlsods AzureDataLakeStoreOutputDataSource) MarshalJSON() ([]byte, error) { - adlsods.Type = TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts - objectMap := make(map[string]interface{}) - if adlsods.AzureDataLakeStoreOutputDataSourceProperties != nil { - objectMap["properties"] = adlsods.AzureDataLakeStoreOutputDataSourceProperties - } - if adlsods.Type != "" { - objectMap["type"] = adlsods.Type - } - return json.Marshal(objectMap) -} - -// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. -func (adlsods AzureDataLakeStoreOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { - return &adlsods, true -} - -// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. -func (adlsods AzureDataLakeStoreOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. -func (adlsods AzureDataLakeStoreOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. -func (adlsods AzureDataLakeStoreOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { - return nil, false -} - -// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. -func (adlsods AzureDataLakeStoreOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) { - return nil, false -} - -// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. -func (adlsods AzureDataLakeStoreOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { - return nil, false -} - -// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. -func (adlsods AzureDataLakeStoreOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) { - return nil, false -} - -// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. 
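These generated As* helpers stand in for a Go type switch across each Basic* family; a sketch against BasicSerialization (defined earlier in this file), guarding the nested pointers:

    func describeSerialization(s streamanalytics.BasicSerialization) string {
        if _, ok := s.AsAvroSerialization(); ok {
            return "Avro"
        }
        if csv, ok := s.AsCsvSerialization(); ok {
            if p := csv.CsvSerializationProperties; p != nil && p.FieldDelimiter != nil {
                return "Csv, delimiter " + *p.FieldDelimiter
            }
            return "Csv"
        }
        return "other"
    }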
-func (adlsods AzureDataLakeStoreOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { - return nil, false -} - -// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. -func (adlsods AzureDataLakeStoreOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) { - return nil, false -} - -// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. -func (adlsods AzureDataLakeStoreOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { - return nil, false -} - -// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. -func (adlsods AzureDataLakeStoreOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { - return nil, false -} - -// AsBlobOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. -func (adlsods AzureDataLakeStoreOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { - return nil, false -} - -// AsOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. -func (adlsods AzureDataLakeStoreOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { - return nil, false -} - -// AsBasicOutputDataSource is the BasicOutputDataSource implementation for AzureDataLakeStoreOutputDataSource. -func (adlsods AzureDataLakeStoreOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { - return &adlsods, true -} - -// UnmarshalJSON is the custom unmarshaler for AzureDataLakeStoreOutputDataSource struct. -func (adlsods *AzureDataLakeStoreOutputDataSource) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var azureDataLakeStoreOutputDataSourceProperties AzureDataLakeStoreOutputDataSourceProperties - err = json.Unmarshal(*v, &azureDataLakeStoreOutputDataSourceProperties) - if err != nil { - return err - } - adlsods.AzureDataLakeStoreOutputDataSourceProperties = &azureDataLakeStoreOutputDataSourceProperties - } - case "type": - if v != nil { - var typeVar TypeBasicOutputDataSource - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - adlsods.Type = typeVar - } - } - } - - return nil -} - -// AzureDataLakeStoreOutputDataSourceProperties the properties that are associated with an Azure Data Lake -// Store. -type AzureDataLakeStoreOutputDataSourceProperties struct { - // AccountName - The name of the Azure Data Lake Store account. Required on PUT (CreateOrReplace) requests. - AccountName *string `json:"accountName,omitempty"` - // TenantID - The tenant id of the user used to obtain the refresh token. Required on PUT (CreateOrReplace) requests. - TenantID *string `json:"tenantId,omitempty"` - // FilePathPrefix - The location of the file to which the output should be written to. Required on PUT (CreateOrReplace) requests. - FilePathPrefix *string `json:"filePathPrefix,omitempty"` - // DateFormat - The date format. Wherever {date} appears in filePathPrefix, the value of this property is used as the date format instead. - DateFormat *string `json:"dateFormat,omitempty"` - // TimeFormat - The time format. 
Wherever {time} appears in filePathPrefix, the value of this property is used as the time format instead. - TimeFormat *string `json:"timeFormat,omitempty"` - // AuthenticationMode - Authentication Mode. Possible values include: 'AuthenticationModeMsi', 'AuthenticationModeUserToken', 'AuthenticationModeConnectionString' - AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"` - // RefreshToken - A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only obtainable via the Azure Portal. It is recommended to put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests. - RefreshToken *string `json:"refreshToken,omitempty"` - // TokenUserPrincipalName - The user principal name (UPN) of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. - TokenUserPrincipalName *string `json:"tokenUserPrincipalName,omitempty"` - // TokenUserDisplayName - The user display name of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. - TokenUserDisplayName *string `json:"tokenUserDisplayName,omitempty"` -} - -// AzureFunctionOutputDataSource defines the metadata of AzureFunctionOutputDataSource -type AzureFunctionOutputDataSource struct { - // AzureFunctionOutputDataSourceProperties - The properties that are associated with a Azure Function output. Required on PUT (CreateOrReplace) requests. - *AzureFunctionOutputDataSourceProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicOutputDataSourceTypeOutputDataSource', 'TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts', 'TypeBasicOutputDataSourceTypePowerBI', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue', 'TypeBasicOutputDataSourceTypeMicrosoftAzureFunction', 'TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftStorageTable', 'TypeBasicOutputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicOutputDataSource `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for AzureFunctionOutputDataSource. -func (afods AzureFunctionOutputDataSource) MarshalJSON() ([]byte, error) { - afods.Type = TypeBasicOutputDataSourceTypeMicrosoftAzureFunction - objectMap := make(map[string]interface{}) - if afods.AzureFunctionOutputDataSourceProperties != nil { - objectMap["properties"] = afods.AzureFunctionOutputDataSourceProperties - } - if afods.Type != "" { - objectMap["type"] = afods.Type - } - return json.Marshal(objectMap) -} - -// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource. -func (afods AzureFunctionOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { - return nil, false -} - -// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource. 
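A sketch of an ADLS Gen1 output built from the properties documented above; the {date}/{time} tokens in FilePathPrefix are expanded using DateFormat/TimeFormat, and the RefreshToken placeholder follows the portal workflow the field doc describes (all values illustrative; to is the autorest/to helper package imported by this file):

    adls := streamanalytics.AzureDataLakeStoreOutputDataSource{
        AzureDataLakeStoreOutputDataSourceProperties: &streamanalytics.AzureDataLakeStoreOutputDataSourceProperties{
            AccountName:        to.StringPtr("exampleadls"),
            TenantID:           to.StringPtr("00000000-0000-0000-0000-000000000000"),
            FilePathPrefix:     to.StringPtr("streaming/{date}/{time}"),
            DateFormat:         to.StringPtr("yyyy/MM/dd"),
            TimeFormat:         to.StringPtr("HH"),
            RefreshToken:       to.StringPtr("dummy"), // replaced via the portal, per the doc above
            AuthenticationMode: streamanalytics.AuthenticationModeUserToken,
        },
    }

The MarshalJSON above then stamps the data-lake discriminator itself, so callers never set Type.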
-func (afods AzureFunctionOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource. -func (afods AzureFunctionOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource. -func (afods AzureFunctionOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { - return nil, false -} - -// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource. -func (afods AzureFunctionOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) { - return &afods, true -} - -// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource. -func (afods AzureFunctionOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { - return nil, false -} - -// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource. -func (afods AzureFunctionOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) { - return nil, false -} - -// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource. -func (afods AzureFunctionOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { - return nil, false -} - -// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource. -func (afods AzureFunctionOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) { - return nil, false -} - -// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource. -func (afods AzureFunctionOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { - return nil, false -} - -// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource. -func (afods AzureFunctionOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { - return nil, false -} - -// AsBlobOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource. -func (afods AzureFunctionOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { - return nil, false -} - -// AsOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource. -func (afods AzureFunctionOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { - return nil, false -} - -// AsBasicOutputDataSource is the BasicOutputDataSource implementation for AzureFunctionOutputDataSource. -func (afods AzureFunctionOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { - return &afods, true -} - -// UnmarshalJSON is the custom unmarshaler for AzureFunctionOutputDataSource struct. 
-func (afods *AzureFunctionOutputDataSource) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var azureFunctionOutputDataSourceProperties AzureFunctionOutputDataSourceProperties - err = json.Unmarshal(*v, &azureFunctionOutputDataSourceProperties) - if err != nil { - return err - } - afods.AzureFunctionOutputDataSourceProperties = &azureFunctionOutputDataSourceProperties - } - case "type": - if v != nil { - var typeVar TypeBasicOutputDataSource - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - afods.Type = typeVar - } - } - } - - return nil -} - -// AzureFunctionOutputDataSourceProperties the properties that are associated with an Azure Function -// output. -type AzureFunctionOutputDataSourceProperties struct { - // FunctionAppName - The name of your Azure Functions app. - FunctionAppName *string `json:"functionAppName,omitempty"` - // FunctionName - The name of the function in your Azure Functions app. - FunctionName *string `json:"functionName,omitempty"` - // APIKey - If you want to use an Azure Function from another subscription, you can do so by providing the key to access your function. - APIKey *string `json:"apiKey,omitempty"` - // MaxBatchSize - A property that lets you set the maximum size for each output batch that's sent to your Azure function. The input unit is in bytes. By default, this value is 262,144 bytes (256 KB). - MaxBatchSize *float64 `json:"maxBatchSize,omitempty"` - // MaxBatchCount - A property that lets you specify the maximum number of events in each batch that's sent to Azure Functions. The default value is 100. - MaxBatchCount *float64 `json:"maxBatchCount,omitempty"` -} - -// AzureMachineLearningWebServiceFunctionBinding the binding to an Azure Machine Learning web service. -type AzureMachineLearningWebServiceFunctionBinding struct { - // AzureMachineLearningWebServiceFunctionBindingProperties - The binding properties associated with an Azure Machine learning web service. - *AzureMachineLearningWebServiceFunctionBindingProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicFunctionBindingTypeFunctionBinding', 'TypeBasicFunctionBindingTypeMicrosoftStreamAnalyticsJavascriptUdf', 'TypeBasicFunctionBindingTypeMicrosoftMachineLearningWebService' - Type TypeBasicFunctionBinding `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for AzureMachineLearningWebServiceFunctionBinding. -func (amlwsfb AzureMachineLearningWebServiceFunctionBinding) MarshalJSON() ([]byte, error) { - amlwsfb.Type = TypeBasicFunctionBindingTypeMicrosoftMachineLearningWebService - objectMap := make(map[string]interface{}) - if amlwsfb.AzureMachineLearningWebServiceFunctionBindingProperties != nil { - objectMap["properties"] = amlwsfb.AzureMachineLearningWebServiceFunctionBindingProperties - } - if amlwsfb.Type != "" { - objectMap["type"] = amlwsfb.Type - } - return json.Marshal(objectMap) -} - -// AsJavaScriptFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningWebServiceFunctionBinding. -func (amlwsfb AzureMachineLearningWebServiceFunctionBinding) AsJavaScriptFunctionBinding() (*JavaScriptFunctionBinding, bool) { - return nil, false -} - -// AsAzureMachineLearningWebServiceFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningWebServiceFunctionBinding. 
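A sketch pinning the documented batching defaults explicitly (262,144 bytes and 100 events); values are illustrative, and APIKey is only needed when the function lives in another subscription, per the field docs above:

    fn := streamanalytics.AzureFunctionOutputDataSource{
        AzureFunctionOutputDataSourceProperties: &streamanalytics.AzureFunctionOutputDataSourceProperties{
            FunctionAppName: to.StringPtr("example-app"),
            FunctionName:    to.StringPtr("example-function"),
            MaxBatchSize:    to.Float64Ptr(262144),
            MaxBatchCount:   to.Float64Ptr(100),
        },
    }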
-func (amlwsfb AzureMachineLearningWebServiceFunctionBinding) AsAzureMachineLearningWebServiceFunctionBinding() (*AzureMachineLearningWebServiceFunctionBinding, bool) { - return &amlwsfb, true -} - -// AsFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningWebServiceFunctionBinding. -func (amlwsfb AzureMachineLearningWebServiceFunctionBinding) AsFunctionBinding() (*FunctionBinding, bool) { - return nil, false -} - -// AsBasicFunctionBinding is the BasicFunctionBinding implementation for AzureMachineLearningWebServiceFunctionBinding. -func (amlwsfb AzureMachineLearningWebServiceFunctionBinding) AsBasicFunctionBinding() (BasicFunctionBinding, bool) { - return &amlwsfb, true -} - -// UnmarshalJSON is the custom unmarshaler for AzureMachineLearningWebServiceFunctionBinding struct. -func (amlwsfb *AzureMachineLearningWebServiceFunctionBinding) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var azureMachineLearningWebServiceFunctionBindingProperties AzureMachineLearningWebServiceFunctionBindingProperties - err = json.Unmarshal(*v, &azureMachineLearningWebServiceFunctionBindingProperties) - if err != nil { - return err - } - amlwsfb.AzureMachineLearningWebServiceFunctionBindingProperties = &azureMachineLearningWebServiceFunctionBindingProperties - } - case "type": - if v != nil { - var typeVar TypeBasicFunctionBinding - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - amlwsfb.Type = typeVar - } - } - } - - return nil -} - -// AzureMachineLearningWebServiceFunctionBindingProperties the binding properties associated with an Azure -// Machine learning web service. -type AzureMachineLearningWebServiceFunctionBindingProperties struct { - // Endpoint - The Request-Response execute endpoint of the Azure Machine Learning web service. Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs - Endpoint *string `json:"endpoint,omitempty"` - // APIKey - The API key used to authenticate with Request-Response endpoint. - APIKey *string `json:"apiKey,omitempty"` - // Inputs - The inputs for the Azure Machine Learning web service endpoint. - Inputs *AzureMachineLearningWebServiceInputs `json:"inputs,omitempty"` - // Outputs - A list of outputs from the Azure Machine Learning web service endpoint execution. - Outputs *[]AzureMachineLearningWebServiceOutputColumn `json:"outputs,omitempty"` - // BatchSize - Number between 1 and 10000 describing maximum number of rows for every Azure ML RRS execute request. Default is 1000. - BatchSize *int32 `json:"batchSize,omitempty"` -} - -// AzureMachineLearningWebServiceFunctionBindingRetrievalProperties the binding retrieval properties -// associated with an Azure Machine learning web service. -type AzureMachineLearningWebServiceFunctionBindingRetrievalProperties struct { - // ExecuteEndpoint - The Request-Response execute endpoint of the Azure Machine Learning web service. Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs - ExecuteEndpoint *string `json:"executeEndpoint,omitempty"` - // UdfType - The function type. 
Possible values include: 'UdfTypeScalar' - UdfType UdfType `json:"udfType,omitempty"` -} - -// AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters the parameters needed to -// retrieve the default function definition for an Azure Machine Learning web service function. -type AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters struct { - // AzureMachineLearningWebServiceFunctionBindingRetrievalProperties - The binding retrieval properties associated with an Azure Machine learning web service. - *AzureMachineLearningWebServiceFunctionBindingRetrievalProperties `json:"bindingRetrievalProperties,omitempty"` - // BindingType - Possible values include: 'BindingTypeFunctionRetrieveDefaultDefinitionParameters', 'BindingTypeMicrosoftMachineLearningWebService', 'BindingTypeMicrosoftStreamAnalyticsJavascriptUdf' - BindingType BindingType `json:"bindingType,omitempty"` -} - -// MarshalJSON is the custom marshaler for AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters. -func (amlwsfrddp AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters) MarshalJSON() ([]byte, error) { - amlwsfrddp.BindingType = BindingTypeMicrosoftMachineLearningWebService - objectMap := make(map[string]interface{}) - if amlwsfrddp.AzureMachineLearningWebServiceFunctionBindingRetrievalProperties != nil { - objectMap["bindingRetrievalProperties"] = amlwsfrddp.AzureMachineLearningWebServiceFunctionBindingRetrievalProperties - } - if amlwsfrddp.BindingType != "" { - objectMap["bindingType"] = amlwsfrddp.BindingType - } - return json.Marshal(objectMap) -} - -// AsAzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters. -func (amlwsfrddp AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, bool) { - return &amlwsfrddp, true -} - -// AsJavaScriptFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters. -func (amlwsfrddp AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters) AsJavaScriptFunctionRetrieveDefaultDefinitionParameters() (*JavaScriptFunctionRetrieveDefaultDefinitionParameters, bool) { - return nil, false -} - -// AsFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters. -func (amlwsfrddp AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters) AsFunctionRetrieveDefaultDefinitionParameters() (*FunctionRetrieveDefaultDefinitionParameters, bool) { - return nil, false -} - -// AsBasicFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters. 
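A sketch of the AML web service binding these retrieval parameters pair with (the column types are defined just below): MapTo is the zero-based UDF parameter index and BatchSize stays within the documented 1-10000 range; endpoint, key and column names are illustrative:

    binding := streamanalytics.AzureMachineLearningWebServiceFunctionBinding{
        AzureMachineLearningWebServiceFunctionBindingProperties: &streamanalytics.AzureMachineLearningWebServiceFunctionBindingProperties{
            Endpoint: to.StringPtr("https://example.services.azureml.net/execute"),
            APIKey:   to.StringPtr("example-key"),
            Inputs: &streamanalytics.AzureMachineLearningWebServiceInputs{
                Name: to.StringPtr("input1"),
                ColumnNames: &[]streamanalytics.AzureMachineLearningWebServiceInputColumn{
                    {Name: to.StringPtr("tweet"), DataType: to.StringPtr("string"), MapTo: to.Int32Ptr(0)},
                },
            },
            Outputs: &[]streamanalytics.AzureMachineLearningWebServiceOutputColumn{
                {Name: to.StringPtr("Sentiment"), DataType: to.StringPtr("string")},
            },
            BatchSize: to.Int32Ptr(1000), // documented default
        },
    }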
-func (amlwsfrddp AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters) AsBasicFunctionRetrieveDefaultDefinitionParameters() (BasicFunctionRetrieveDefaultDefinitionParameters, bool) { - return &amlwsfrddp, true -} - -// UnmarshalJSON is the custom unmarshaler for AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters struct. -func (amlwsfrddp *AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "bindingRetrievalProperties": - if v != nil { - var azureMachineLearningWebServiceFunctionBindingRetrievalProperties AzureMachineLearningWebServiceFunctionBindingRetrievalProperties - err = json.Unmarshal(*v, &azureMachineLearningWebServiceFunctionBindingRetrievalProperties) - if err != nil { - return err - } - amlwsfrddp.AzureMachineLearningWebServiceFunctionBindingRetrievalProperties = &azureMachineLearningWebServiceFunctionBindingRetrievalProperties - } - case "bindingType": - if v != nil { - var bindingType BindingType - err = json.Unmarshal(*v, &bindingType) - if err != nil { - return err - } - amlwsfrddp.BindingType = bindingType - } - } - } - - return nil -} - -// AzureMachineLearningWebServiceInputColumn describes an input column for the Azure Machine Learning web -// service endpoint. -type AzureMachineLearningWebServiceInputColumn struct { - // Name - The name of the input column. - Name *string `json:"name,omitempty"` - // DataType - The (Azure Machine Learning supported) data type of the input column. A list of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . - DataType *string `json:"dataType,omitempty"` - // MapTo - The zero based index of the function parameter this input maps to. - MapTo *int32 `json:"mapTo,omitempty"` -} - -// AzureMachineLearningWebServiceInputs the inputs for the Azure Machine Learning web service endpoint. -type AzureMachineLearningWebServiceInputs struct { - // Name - The name of the input. This is the name provided while authoring the endpoint. - Name *string `json:"name,omitempty"` - // ColumnNames - A list of input columns for the Azure Machine Learning web service endpoint. - ColumnNames *[]AzureMachineLearningWebServiceInputColumn `json:"columnNames,omitempty"` -} - -// AzureMachineLearningWebServiceOutputColumn describes an output column for the Azure Machine Learning web -// service endpoint. -type AzureMachineLearningWebServiceOutputColumn struct { - // Name - The name of the output column. - Name *string `json:"name,omitempty"` - // DataType - The (Azure Machine Learning supported) data type of the output column. A list of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . - DataType *string `json:"dataType,omitempty"` -} - -// AzureSQLDatabaseDataSourceProperties the properties that are associated with an Azure SQL database data -// source. -type AzureSQLDatabaseDataSourceProperties struct { - // Server - The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. - Server *string `json:"server,omitempty"` - // Database - The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. - Database *string `json:"database,omitempty"` - // User - The user name that will be used to connect to the Azure SQL database. 
Required on PUT (CreateOrReplace) requests. - User *string `json:"user,omitempty"` - // Password - The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - Password *string `json:"password,omitempty"` - // Table - The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) requests. - Table *string `json:"table,omitempty"` - // MaxBatchCount - Max Batch count for write to Sql database, the default value is 10,000. Optional on PUT requests. - MaxBatchCount *float64 `json:"maxBatchCount,omitempty"` - // MaxWriterCount - Max Writer count, currently only 1(single writer) and 0(based on query partition) are available. Optional on PUT requests. - MaxWriterCount *float64 `json:"maxWriterCount,omitempty"` - // AuthenticationMode - Authentication Mode. Possible values include: 'AuthenticationModeMsi', 'AuthenticationModeUserToken', 'AuthenticationModeConnectionString' - AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"` -} - -// AzureSQLDatabaseOutputDataSource describes an Azure SQL database output data source. -type AzureSQLDatabaseOutputDataSource struct { - // AzureSQLDatabaseOutputDataSourceProperties - The properties that are associated with an Azure SQL database output. Required on PUT (CreateOrReplace) requests. - *AzureSQLDatabaseOutputDataSourceProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicOutputDataSourceTypeOutputDataSource', 'TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts', 'TypeBasicOutputDataSourceTypePowerBI', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue', 'TypeBasicOutputDataSourceTypeMicrosoftAzureFunction', 'TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftStorageTable', 'TypeBasicOutputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicOutputDataSource `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for AzureSQLDatabaseOutputDataSource. -func (asdods AzureSQLDatabaseOutputDataSource) MarshalJSON() ([]byte, error) { - asdods.Type = TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase - objectMap := make(map[string]interface{}) - if asdods.AzureSQLDatabaseOutputDataSourceProperties != nil { - objectMap["properties"] = asdods.AzureSQLDatabaseOutputDataSourceProperties - } - if asdods.Type != "" { - objectMap["type"] = asdods.Type - } - return json.Marshal(objectMap) -} - -// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. -func (asdods AzureSQLDatabaseOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { - return nil, false -} - -// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. -func (asdods AzureSQLDatabaseOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. 
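A sketch of the SQL database output these properties describe; MaxWriterCount only admits 0 (writer per query partition) or 1 (single writer), per the field docs above, and the connection values are illustrative:

    sqlOut := streamanalytics.AzureSQLDatabaseOutputDataSource{
        AzureSQLDatabaseOutputDataSourceProperties: &streamanalytics.AzureSQLDatabaseOutputDataSourceProperties{
            Server:             to.StringPtr("example-server"),
            Database:           to.StringPtr("exampledb"),
            User:               to.StringPtr("sqlwriter"),
            Password:           to.StringPtr("example-password"),
            Table:              to.StringPtr("Events"),
            MaxBatchCount:      to.Float64Ptr(10000), // documented default
            MaxWriterCount:     to.Float64Ptr(0),     // writer per query partition
            AuthenticationMode: streamanalytics.AuthenticationModeConnectionString,
        },
    }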
-func (asdods AzureSQLDatabaseOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. -func (asdods AzureSQLDatabaseOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { - return nil, false -} - -// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. -func (asdods AzureSQLDatabaseOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) { - return nil, false -} - -// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. -func (asdods AzureSQLDatabaseOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { - return nil, false -} - -// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. -func (asdods AzureSQLDatabaseOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) { - return nil, false -} - -// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. -func (asdods AzureSQLDatabaseOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { - return &asdods, true -} - -// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. -func (asdods AzureSQLDatabaseOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) { - return nil, false -} - -// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. -func (asdods AzureSQLDatabaseOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { - return nil, false -} - -// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. -func (asdods AzureSQLDatabaseOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { - return nil, false -} - -// AsBlobOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. -func (asdods AzureSQLDatabaseOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { - return nil, false -} - -// AsOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. -func (asdods AzureSQLDatabaseOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { - return nil, false -} - -// AsBasicOutputDataSource is the BasicOutputDataSource implementation for AzureSQLDatabaseOutputDataSource. -func (asdods AzureSQLDatabaseOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { - return &asdods, true -} - -// UnmarshalJSON is the custom unmarshaler for AzureSQLDatabaseOutputDataSource struct. 
-func (asdods *AzureSQLDatabaseOutputDataSource) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var azureSQLDatabaseOutputDataSourceProperties AzureSQLDatabaseOutputDataSourceProperties - err = json.Unmarshal(*v, &azureSQLDatabaseOutputDataSourceProperties) - if err != nil { - return err - } - asdods.AzureSQLDatabaseOutputDataSourceProperties = &azureSQLDatabaseOutputDataSourceProperties - } - case "type": - if v != nil { - var typeVar TypeBasicOutputDataSource - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - asdods.Type = typeVar - } - } - } - - return nil -} - -// AzureSQLDatabaseOutputDataSourceProperties the properties that are associated with an Azure SQL database -// output. -type AzureSQLDatabaseOutputDataSourceProperties struct { - // Server - The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. - Server *string `json:"server,omitempty"` - // Database - The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. - Database *string `json:"database,omitempty"` - // User - The user name that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - User *string `json:"user,omitempty"` - // Password - The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - Password *string `json:"password,omitempty"` - // Table - The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) requests. - Table *string `json:"table,omitempty"` - // MaxBatchCount - Max Batch count for write to Sql database, the default value is 10,000. Optional on PUT requests. - MaxBatchCount *float64 `json:"maxBatchCount,omitempty"` - // MaxWriterCount - Max Writer count, currently only 1(single writer) and 0(based on query partition) are available. Optional on PUT requests. - MaxWriterCount *float64 `json:"maxWriterCount,omitempty"` - // AuthenticationMode - Authentication Mode. Possible values include: 'AuthenticationModeMsi', 'AuthenticationModeUserToken', 'AuthenticationModeConnectionString' - AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"` -} - -// AzureSQLReferenceInputDataSource describes an Azure SQL database reference input data source. -type AzureSQLReferenceInputDataSource struct { - // AzureSQLReferenceInputDataSourceProperties - The properties that are associated with SQL DB input containing reference data. Required on PUT (CreateOrReplace) requests. - *AzureSQLReferenceInputDataSourceProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource', 'TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicReferenceInputDataSource `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for AzureSQLReferenceInputDataSource. 
-func (asrids AzureSQLReferenceInputDataSource) MarshalJSON() ([]byte, error) { - asrids.Type = TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase - objectMap := make(map[string]interface{}) - if asrids.AzureSQLReferenceInputDataSourceProperties != nil { - objectMap["properties"] = asrids.AzureSQLReferenceInputDataSourceProperties - } - if asrids.Type != "" { - objectMap["type"] = asrids.Type - } - return json.Marshal(objectMap) -} - -// AsAzureSQLReferenceInputDataSource is the BasicReferenceInputDataSource implementation for AzureSQLReferenceInputDataSource. -func (asrids AzureSQLReferenceInputDataSource) AsAzureSQLReferenceInputDataSource() (*AzureSQLReferenceInputDataSource, bool) { - return &asrids, true -} - -// AsBlobReferenceInputDataSource is the BasicReferenceInputDataSource implementation for AzureSQLReferenceInputDataSource. -func (asrids AzureSQLReferenceInputDataSource) AsBlobReferenceInputDataSource() (*BlobReferenceInputDataSource, bool) { - return nil, false -} - -// AsReferenceInputDataSource is the BasicReferenceInputDataSource implementation for AzureSQLReferenceInputDataSource. -func (asrids AzureSQLReferenceInputDataSource) AsReferenceInputDataSource() (*ReferenceInputDataSource, bool) { - return nil, false -} - -// AsBasicReferenceInputDataSource is the BasicReferenceInputDataSource implementation for AzureSQLReferenceInputDataSource. -func (asrids AzureSQLReferenceInputDataSource) AsBasicReferenceInputDataSource() (BasicReferenceInputDataSource, bool) { - return &asrids, true -} - -// UnmarshalJSON is the custom unmarshaler for AzureSQLReferenceInputDataSource struct. -func (asrids *AzureSQLReferenceInputDataSource) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var azureSQLReferenceInputDataSourceProperties AzureSQLReferenceInputDataSourceProperties - err = json.Unmarshal(*v, &azureSQLReferenceInputDataSourceProperties) - if err != nil { - return err - } - asrids.AzureSQLReferenceInputDataSourceProperties = &azureSQLReferenceInputDataSourceProperties - } - case "type": - if v != nil { - var typeVar TypeBasicReferenceInputDataSource - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - asrids.Type = typeVar - } - } - } - - return nil -} - -// AzureSQLReferenceInputDataSourceProperties the properties that are associated with SQL DB input -// containing reference data. Required on PUT (CreateOrReplace) requests. -type AzureSQLReferenceInputDataSourceProperties struct { - // Server - This element is associated with the datasource element. This is the name of the server that contains the database that will be written to. - Server *string `json:"server,omitempty"` - // Database - This element is associated with the datasource element. This is the name of the database that output will be written to. - Database *string `json:"database,omitempty"` - // User - This element is associated with the datasource element. This is the user name that will be used to connect to the SQL Database instance. - User *string `json:"user,omitempty"` - // Password - This element is associated with the datasource element. This is the password that will be used to connect to the SQL Database instance. - Password *string `json:"password,omitempty"` - // Table - This element is associated with the datasource element. The name of the table in the Azure SQL database.. 
- Table *string `json:"table,omitempty"` - // RefreshType - Indicates the type of data refresh option. Possible values include: 'RefreshTypeStatic', 'RefreshTypeRefreshPeriodicallyWithFull', 'RefreshTypeRefreshPeriodicallyWithDelta' - RefreshType RefreshType `json:"refreshType,omitempty"` - // RefreshRate - This element is associated with the datasource element. This indicates how frequently the data will be fetched from the database. It is of DateTime format. - RefreshRate *string `json:"refreshRate,omitempty"` - // FullSnapshotQuery - This element is associated with the datasource element. This query is used to fetch data from the sql database. - FullSnapshotQuery *string `json:"fullSnapshotQuery,omitempty"` - // DeltaSnapshotQuery - This element is associated with the datasource element. This query is used to fetch incremental changes from the SQL database. To use this option, we recommend using temporal tables in Azure SQL Database. - DeltaSnapshotQuery *string `json:"deltaSnapshotQuery,omitempty"` -} - -// AzureSynapseDataSourceProperties the properties that are associated with an Azure SQL database data -// source. -type AzureSynapseDataSourceProperties struct { - // Server - The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. - Server *string `json:"server,omitempty"` - // Database - The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. - Database *string `json:"database,omitempty"` - // Table - The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) requests. - Table *string `json:"table,omitempty"` - // User - The user name that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - User *string `json:"user,omitempty"` - // Password - The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. - Password *string `json:"password,omitempty"` -} - -// AzureSynapseOutputDataSource describes an Azure Synapse output data source. -type AzureSynapseOutputDataSource struct { - // AzureSynapseOutputDataSourceProperties - The properties that are associated with an Azure Synapse output. Required on PUT (CreateOrReplace) requests. - *AzureSynapseOutputDataSourceProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicOutputDataSourceTypeOutputDataSource', 'TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts', 'TypeBasicOutputDataSourceTypePowerBI', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue', 'TypeBasicOutputDataSourceTypeMicrosoftAzureFunction', 'TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftStorageTable', 'TypeBasicOutputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicOutputDataSource `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for AzureSynapseOutputDataSource. 
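A sketch of a periodically refreshed SQL reference input using the fields above; RefreshRate uses the DateTime-style format the doc notes, and the delta query pairs with temporal tables. The @deltaStartTime parameter name is an assumption from the Stream Analytics delta-query convention, and all connection values are illustrative:

    refInput := streamanalytics.AzureSQLReferenceInputDataSource{
        AzureSQLReferenceInputDataSourceProperties: &streamanalytics.AzureSQLReferenceInputDataSourceProperties{
            Server:             to.StringPtr("example-server"),
            Database:           to.StringPtr("exampledb"),
            User:               to.StringPtr("sqlreader"),
            Password:           to.StringPtr("example-password"),
            Table:              to.StringPtr("DeviceCatalog"),
            RefreshType:        streamanalytics.RefreshTypeRefreshPeriodicallyWithDelta,
            RefreshRate:        to.StringPtr("00:10:00"),
            FullSnapshotQuery:  to.StringPtr("SELECT * FROM DeviceCatalog"),
            DeltaSnapshotQuery: to.StringPtr("SELECT * FROM DeviceCatalog WHERE ValidFrom > @deltaStartTime"),
        },
    }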
-func (asods AzureSynapseOutputDataSource) MarshalJSON() ([]byte, error) {
-	asods.Type = TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse
-	objectMap := make(map[string]interface{})
-	if asods.AzureSynapseOutputDataSourceProperties != nil {
-		objectMap["properties"] = asods.AzureSynapseOutputDataSourceProperties
-	}
-	if asods.Type != "" {
-		objectMap["type"] = asods.Type
-	}
-	return json.Marshal(objectMap)
-}
-
-// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource.
-func (asods AzureSynapseOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource.
-func (asods AzureSynapseOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource.
-func (asods AzureSynapseOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource.
-func (asods AzureSynapseOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource.
-func (asods AzureSynapseOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource.
-func (asods AzureSynapseOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource.
-func (asods AzureSynapseOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) {
-	return &asods, true
-}
-
-// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource.
-func (asods AzureSynapseOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource.
-func (asods AzureSynapseOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) {
-	return nil, false
-}
-
-// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource.
-func (asods AzureSynapseOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource.
-func (asods AzureSynapseOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsBlobOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource.
-func (asods AzureSynapseOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource.
-func (asods AzureSynapseOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) {
-	return nil, false
-}
-
-// AsBasicOutputDataSource is the BasicOutputDataSource implementation for AzureSynapseOutputDataSource.
-func (asods AzureSynapseOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) {
-	return &asods, true
-}
-
-// UnmarshalJSON is the custom unmarshaler for AzureSynapseOutputDataSource struct.
-func (asods *AzureSynapseOutputDataSource) UnmarshalJSON(body []byte) error {
-	var m map[string]*json.RawMessage
-	err := json.Unmarshal(body, &m)
-	if err != nil {
-		return err
-	}
-	for k, v := range m {
-		switch k {
-		case "properties":
-			if v != nil {
-				var azureSynapseOutputDataSourceProperties AzureSynapseOutputDataSourceProperties
-				err = json.Unmarshal(*v, &azureSynapseOutputDataSourceProperties)
-				if err != nil {
-					return err
-				}
-				asods.AzureSynapseOutputDataSourceProperties = &azureSynapseOutputDataSourceProperties
-			}
-		case "type":
-			if v != nil {
-				var typeVar TypeBasicOutputDataSource
-				err = json.Unmarshal(*v, &typeVar)
-				if err != nil {
-					return err
-				}
-				asods.Type = typeVar
-			}
-		}
-	}
-
-	return nil
-}
-
-// AzureSynapseOutputDataSourceProperties the properties that are associated with an Azure Synapse output.
-type AzureSynapseOutputDataSourceProperties struct {
-	// Server - The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests.
-	Server *string `json:"server,omitempty"`
-	// Database - The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests.
-	Database *string `json:"database,omitempty"`
-	// Table - The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) requests.
-	Table *string `json:"table,omitempty"`
-	// User - The user name that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests.
-	User *string `json:"user,omitempty"`
-	// Password - The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests.
-	Password *string `json:"password,omitempty"`
-}
-
-// AzureTableOutputDataSource describes an Azure Table output data source.
-type AzureTableOutputDataSource struct {
-	// AzureTableOutputDataSourceProperties - The properties that are associated with an Azure Table output. Required on PUT (CreateOrReplace) requests.
-	*AzureTableOutputDataSourceProperties `json:"properties,omitempty"`
-	// Type - Possible values include: 'TypeBasicOutputDataSourceTypeOutputDataSource', 'TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts', 'TypeBasicOutputDataSourceTypePowerBI', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue', 'TypeBasicOutputDataSourceTypeMicrosoftAzureFunction', 'TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftStorageTable', 'TypeBasicOutputDataSourceTypeMicrosoftStorageBlob'
-	Type TypeBasicOutputDataSource `json:"type,omitempty"`
-}
-
-// MarshalJSON is the custom marshaler for AzureTableOutputDataSource.
-func (atods AzureTableOutputDataSource) MarshalJSON() ([]byte, error) {
-	atods.Type = TypeBasicOutputDataSourceTypeMicrosoftStorageTable
-	objectMap := make(map[string]interface{})
-	if atods.AzureTableOutputDataSourceProperties != nil {
-		objectMap["properties"] = atods.AzureTableOutputDataSourceProperties
-	}
-	if atods.Type != "" {
-		objectMap["type"] = atods.Type
-	}
-	return json.Marshal(objectMap)
-}
-
-// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource.
-func (atods AzureTableOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource.
-func (atods AzureTableOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource.
-func (atods AzureTableOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource.
-func (atods AzureTableOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource.
-func (atods AzureTableOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource.
-func (atods AzureTableOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource.
-func (atods AzureTableOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource.
-func (atods AzureTableOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource.
-func (atods AzureTableOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) {
-	return nil, false
-}
-
-// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource.
-func (atods AzureTableOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource.
-func (atods AzureTableOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) {
-	return &atods, true
-}
-
-// AsBlobOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource.
-func (atods AzureTableOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource.
-func (atods AzureTableOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) {
-	return nil, false
-}
-
-// AsBasicOutputDataSource is the BasicOutputDataSource implementation for AzureTableOutputDataSource.
-func (atods AzureTableOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) {
-	return &atods, true
-}
-
-// UnmarshalJSON is the custom unmarshaler for AzureTableOutputDataSource struct.
-func (atods *AzureTableOutputDataSource) UnmarshalJSON(body []byte) error {
-	var m map[string]*json.RawMessage
-	err := json.Unmarshal(body, &m)
-	if err != nil {
-		return err
-	}
-	for k, v := range m {
-		switch k {
-		case "properties":
-			if v != nil {
-				var azureTableOutputDataSourceProperties AzureTableOutputDataSourceProperties
-				err = json.Unmarshal(*v, &azureTableOutputDataSourceProperties)
-				if err != nil {
-					return err
-				}
-				atods.AzureTableOutputDataSourceProperties = &azureTableOutputDataSourceProperties
-			}
-		case "type":
-			if v != nil {
-				var typeVar TypeBasicOutputDataSource
-				err = json.Unmarshal(*v, &typeVar)
-				if err != nil {
-					return err
-				}
-				atods.Type = typeVar
-			}
-		}
-	}
-
-	return nil
-}
-
-// AzureTableOutputDataSourceProperties the properties that are associated with an Azure Table output.
-type AzureTableOutputDataSourceProperties struct {
-	// AccountName - The name of the Azure Storage account. Required on PUT (CreateOrReplace) requests.
-	AccountName *string `json:"accountName,omitempty"`
-	// AccountKey - The account key for the Azure Storage account. Required on PUT (CreateOrReplace) requests.
-	AccountKey *string `json:"accountKey,omitempty"`
-	// Table - The name of the Azure Table. Required on PUT (CreateOrReplace) requests.
-	Table *string `json:"table,omitempty"`
-	// PartitionKey - This element indicates the name of a column from the SELECT statement in the query that will be used as the partition key for the Azure Table. Required on PUT (CreateOrReplace) requests.
-	PartitionKey *string `json:"partitionKey,omitempty"`
-	// RowKey - This element indicates the name of a column from the SELECT statement in the query that will be used as the row key for the Azure Table. Required on PUT (CreateOrReplace) requests.
-	RowKey *string `json:"rowKey,omitempty"`
-	// ColumnsToRemove - If specified, each item in the array is the name of a column to remove (if present) from output event entities.
-	ColumnsToRemove *[]string `json:"columnsToRemove,omitempty"`
-	// BatchSize - The number of rows to write to the Azure Table at a time.
-	BatchSize *int32 `json:"batchSize,omitempty"`
-}
-
-// BlobDataSourceProperties the properties that are associated with a blob data source.
-type BlobDataSourceProperties struct {
-	// StorageAccounts - A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests.
-	StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"`
-	// Container - The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests.
-	Container *string `json:"container,omitempty"`
-	// PathPattern - The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and example.
-	PathPattern *string `json:"pathPattern,omitempty"`
-	// DateFormat - The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead.
-	DateFormat *string `json:"dateFormat,omitempty"`
-	// TimeFormat - The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead.
-	TimeFormat *string `json:"timeFormat,omitempty"`
-	// AuthenticationMode - Authentication Mode. Possible values include: 'AuthenticationModeMsi', 'AuthenticationModeUserToken', 'AuthenticationModeConnectionString'
-	AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"`
-}
-
-// BlobOutputDataSource describes a blob output data source.
-type BlobOutputDataSource struct {
-	// BlobOutputDataSourceProperties - The properties that are associated with a blob output. Required on PUT (CreateOrReplace) requests.
-	*BlobOutputDataSourceProperties `json:"properties,omitempty"`
-	// Type - Possible values include: 'TypeBasicOutputDataSourceTypeOutputDataSource', 'TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts', 'TypeBasicOutputDataSourceTypePowerBI', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue', 'TypeBasicOutputDataSourceTypeMicrosoftAzureFunction', 'TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftStorageTable', 'TypeBasicOutputDataSourceTypeMicrosoftStorageBlob'
-	Type TypeBasicOutputDataSource `json:"type,omitempty"`
-}
-
-// MarshalJSON is the custom marshaler for BlobOutputDataSource.
-func (bods BlobOutputDataSource) MarshalJSON() ([]byte, error) {
-	bods.Type = TypeBasicOutputDataSourceTypeMicrosoftStorageBlob
-	objectMap := make(map[string]interface{})
-	if bods.BlobOutputDataSourceProperties != nil {
-		objectMap["properties"] = bods.BlobOutputDataSourceProperties
-	}
-	if bods.Type != "" {
-		objectMap["type"] = bods.Type
-	}
-	return json.Marshal(objectMap)
-}
-
-// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
-func (bods BlobOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
-func (bods BlobOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
-func (bods BlobOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
-func (bods BlobOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
-func (bods BlobOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
-func (bods BlobOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
-func (bods BlobOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
-func (bods BlobOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
-func (bods BlobOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) {
-	return nil, false
-}
-
-// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
-func (bods BlobOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
-func (bods BlobOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) {
-	return nil, false
-}
-
-// AsBlobOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
-func (bods BlobOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) {
-	return &bods, true
-}
-
-// AsOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
-func (bods BlobOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) {
-	return nil, false
-}
-
-// AsBasicOutputDataSource is the BasicOutputDataSource implementation for BlobOutputDataSource.
-func (bods BlobOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) {
-	return &bods, true
-}
-
-// UnmarshalJSON is the custom unmarshaler for BlobOutputDataSource struct.
-func (bods *BlobOutputDataSource) UnmarshalJSON(body []byte) error {
-	var m map[string]*json.RawMessage
-	err := json.Unmarshal(body, &m)
-	if err != nil {
-		return err
-	}
-	for k, v := range m {
-		switch k {
-		case "properties":
-			if v != nil {
-				var blobOutputDataSourceProperties BlobOutputDataSourceProperties
-				err = json.Unmarshal(*v, &blobOutputDataSourceProperties)
-				if err != nil {
-					return err
-				}
-				bods.BlobOutputDataSourceProperties = &blobOutputDataSourceProperties
-			}
-		case "type":
-			if v != nil {
-				var typeVar TypeBasicOutputDataSource
-				err = json.Unmarshal(*v, &typeVar)
-				if err != nil {
-					return err
-				}
-				bods.Type = typeVar
-			}
-		}
-	}
-
-	return nil
-}
-
-// BlobOutputDataSourceProperties the properties that are associated with a blob output.
-type BlobOutputDataSourceProperties struct {
-	// StorageAccounts - A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests.
-	StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"`
-	// Container - The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests.
-	Container *string `json:"container,omitempty"`
-	// PathPattern - The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and example.
-	PathPattern *string `json:"pathPattern,omitempty"`
-	// DateFormat - The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead.
-	DateFormat *string `json:"dateFormat,omitempty"`
-	// TimeFormat - The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead.
-	TimeFormat *string `json:"timeFormat,omitempty"`
-	// AuthenticationMode - Authentication Mode. Possible values include: 'AuthenticationModeMsi', 'AuthenticationModeUserToken', 'AuthenticationModeConnectionString'
-	AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"`
-}
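
The MarshalJSON/UnmarshalJSON pairs being removed here all implement the same autorest discriminated-union pattern: the marshaler stamps a "type" field, and the unmarshaler decodes the raw "properties" payload according to that discriminator. A minimal standalone sketch of that technique follows; the type string and struct names are illustrative only, not taken from either SDK:

	package main

	import (
		"encoding/json"
		"fmt"
	)

	// envelope mirrors the generated shape: a "type" discriminator plus
	// a raw "properties" payload decoded per concrete type.
	type envelope struct {
		Type       string          `json:"type"`
		Properties json.RawMessage `json:"properties"`
	}

	type blobProps struct {
		Container *string `json:"container,omitempty"`
	}

	func main() {
		body := []byte(`{"type":"Microsoft.Storage/Blob","properties":{"container":"example"}}`)

		var e envelope
		if err := json.Unmarshal(body, &e); err != nil {
			panic(err)
		}

		// Switch on the discriminator, then decode the payload into the
		// matching properties struct, as the generated unmarshalers do.
		switch e.Type {
		case "Microsoft.Storage/Blob":
			var p blobProps
			if err := json.Unmarshal(e.Properties, &p); err != nil {
				panic(err)
			}
			fmt.Println("blob output, container:", *p.Container)
		default:
			fmt.Println("unknown data source type:", e.Type)
		}
	}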
-
-// BlobReferenceInputDataSource describes a blob input data source that contains reference data.
-type BlobReferenceInputDataSource struct {
-	// BlobReferenceInputDataSourceProperties - The properties that are associated with a blob input containing reference data. Required on PUT (CreateOrReplace) requests.
-	*BlobReferenceInputDataSourceProperties `json:"properties,omitempty"`
-	// Type - Possible values include: 'TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource', 'TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob'
-	Type TypeBasicReferenceInputDataSource `json:"type,omitempty"`
-}
-
-// MarshalJSON is the custom marshaler for BlobReferenceInputDataSource.
-func (brids BlobReferenceInputDataSource) MarshalJSON() ([]byte, error) {
-	brids.Type = TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob
-	objectMap := make(map[string]interface{})
-	if brids.BlobReferenceInputDataSourceProperties != nil {
-		objectMap["properties"] = brids.BlobReferenceInputDataSourceProperties
-	}
-	if brids.Type != "" {
-		objectMap["type"] = brids.Type
-	}
-	return json.Marshal(objectMap)
-}
-
-// AsAzureSQLReferenceInputDataSource is the BasicReferenceInputDataSource implementation for BlobReferenceInputDataSource.
-func (brids BlobReferenceInputDataSource) AsAzureSQLReferenceInputDataSource() (*AzureSQLReferenceInputDataSource, bool) {
-	return nil, false
-}
-
-// AsBlobReferenceInputDataSource is the BasicReferenceInputDataSource implementation for BlobReferenceInputDataSource.
-func (brids BlobReferenceInputDataSource) AsBlobReferenceInputDataSource() (*BlobReferenceInputDataSource, bool) {
-	return &brids, true
-}
-
-// AsReferenceInputDataSource is the BasicReferenceInputDataSource implementation for BlobReferenceInputDataSource.
-func (brids BlobReferenceInputDataSource) AsReferenceInputDataSource() (*ReferenceInputDataSource, bool) {
-	return nil, false
-}
-
-// AsBasicReferenceInputDataSource is the BasicReferenceInputDataSource implementation for BlobReferenceInputDataSource.
-func (brids BlobReferenceInputDataSource) AsBasicReferenceInputDataSource() (BasicReferenceInputDataSource, bool) {
-	return &brids, true
-}
-
-// UnmarshalJSON is the custom unmarshaler for BlobReferenceInputDataSource struct.
-func (brids *BlobReferenceInputDataSource) UnmarshalJSON(body []byte) error {
-	var m map[string]*json.RawMessage
-	err := json.Unmarshal(body, &m)
-	if err != nil {
-		return err
-	}
-	for k, v := range m {
-		switch k {
-		case "properties":
-			if v != nil {
-				var blobReferenceInputDataSourceProperties BlobReferenceInputDataSourceProperties
-				err = json.Unmarshal(*v, &blobReferenceInputDataSourceProperties)
-				if err != nil {
-					return err
-				}
-				brids.BlobReferenceInputDataSourceProperties = &blobReferenceInputDataSourceProperties
-			}
-		case "type":
-			if v != nil {
-				var typeVar TypeBasicReferenceInputDataSource
-				err = json.Unmarshal(*v, &typeVar)
-				if err != nil {
-					return err
-				}
-				brids.Type = typeVar
-			}
-		}
-	}
-
-	return nil
-}
-
-// BlobReferenceInputDataSourceProperties the properties that are associated with a blob input containing
-// reference data.
-type BlobReferenceInputDataSourceProperties struct {
-	// StorageAccounts - A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests.
-	StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"`
-	// Container - The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests.
-	Container *string `json:"container,omitempty"`
-	// PathPattern - The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and example.
-	PathPattern *string `json:"pathPattern,omitempty"`
-	// DateFormat - The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead.
-	DateFormat *string `json:"dateFormat,omitempty"`
-	// TimeFormat - The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead.
-	TimeFormat *string `json:"timeFormat,omitempty"`
-	// AuthenticationMode - Authentication Mode. Possible values include: 'AuthenticationModeMsi', 'AuthenticationModeUserToken', 'AuthenticationModeConnectionString'
-	AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"`
-}
-
-// BlobStreamInputDataSource describes a blob input data source that contains stream data.
-type BlobStreamInputDataSource struct {
-	// BlobStreamInputDataSourceProperties - The properties that are associated with a blob input containing stream data. Required on PUT (CreateOrReplace) requests.
-	*BlobStreamInputDataSourceProperties `json:"properties,omitempty"`
-	// Type - Possible values include: 'TypeBasicStreamInputDataSourceTypeStreamInputDataSource', 'TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs', 'TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob'
-	Type TypeBasicStreamInputDataSource `json:"type,omitempty"`
-}
-
-// MarshalJSON is the custom marshaler for BlobStreamInputDataSource.
-func (bsids BlobStreamInputDataSource) MarshalJSON() ([]byte, error) {
-	bsids.Type = TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob
-	objectMap := make(map[string]interface{})
-	if bsids.BlobStreamInputDataSourceProperties != nil {
-		objectMap["properties"] = bsids.BlobStreamInputDataSourceProperties
-	}
-	if bsids.Type != "" {
-		objectMap["type"] = bsids.Type
-	}
-	return json.Marshal(objectMap)
-}
-
-// AsIoTHubStreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource.
-func (bsids BlobStreamInputDataSource) AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) {
-	return nil, false
-}
-
-// AsEventHubV2StreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource.
-func (bsids BlobStreamInputDataSource) AsEventHubV2StreamInputDataSource() (*EventHubV2StreamInputDataSource, bool) {
-	return nil, false
-}
-
-// AsEventHubStreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource.
-func (bsids BlobStreamInputDataSource) AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) {
-	return nil, false
-}
-
-// AsBlobStreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource.
-func (bsids BlobStreamInputDataSource) AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) {
-	return &bsids, true
-}
-
-// AsStreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource.
-func (bsids BlobStreamInputDataSource) AsStreamInputDataSource() (*StreamInputDataSource, bool) {
-	return nil, false
-}
-
-// AsBasicStreamInputDataSource is the BasicStreamInputDataSource implementation for BlobStreamInputDataSource.
-func (bsids BlobStreamInputDataSource) AsBasicStreamInputDataSource() (BasicStreamInputDataSource, bool) {
-	return &bsids, true
-}
-
-// UnmarshalJSON is the custom unmarshaler for BlobStreamInputDataSource struct.
-func (bsids *BlobStreamInputDataSource) UnmarshalJSON(body []byte) error {
-	var m map[string]*json.RawMessage
-	err := json.Unmarshal(body, &m)
-	if err != nil {
-		return err
-	}
-	for k, v := range m {
-		switch k {
-		case "properties":
-			if v != nil {
-				var blobStreamInputDataSourceProperties BlobStreamInputDataSourceProperties
-				err = json.Unmarshal(*v, &blobStreamInputDataSourceProperties)
-				if err != nil {
-					return err
-				}
-				bsids.BlobStreamInputDataSourceProperties = &blobStreamInputDataSourceProperties
-			}
-		case "type":
-			if v != nil {
-				var typeVar TypeBasicStreamInputDataSource
-				err = json.Unmarshal(*v, &typeVar)
-				if err != nil {
-					return err
-				}
-				bsids.Type = typeVar
-			}
-		}
-	}
-
-	return nil
-}
-
-// BlobStreamInputDataSourceProperties the properties that are associated with a blob input containing
-// stream data.
-type BlobStreamInputDataSourceProperties struct {
-	// SourcePartitionCount - The partition count of the blob input data source. Range 1 - 1024.
-	SourcePartitionCount *int32 `json:"sourcePartitionCount,omitempty"`
-	// StorageAccounts - A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests.
-	StorageAccounts *[]StorageAccount `json:"storageAccounts,omitempty"`
-	// Container - The name of a container within the associated Storage account. This container contains either the blob(s) to be read from or written to. Required on PUT (CreateOrReplace) requests.
-	Container *string `json:"container,omitempty"`
-	// PathPattern - The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and example.
-	PathPattern *string `json:"pathPattern,omitempty"`
-	// DateFormat - The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead.
-	DateFormat *string `json:"dateFormat,omitempty"`
-	// TimeFormat - The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead.
-	TimeFormat *string `json:"timeFormat,omitempty"`
-	// AuthenticationMode - Authentication Mode. Possible values include: 'AuthenticationModeMsi', 'AuthenticationModeUserToken', 'AuthenticationModeConnectionString'
-	AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"`
-}
-
-// Cluster a Stream Analytics Cluster object
-type Cluster struct {
-	autorest.Response `json:"-"`
-	Sku               *ClusterSku `json:"sku,omitempty"`
-	// Etag - READ-ONLY; The current entity tag for the cluster. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency.
-	Etag *string `json:"etag,omitempty"`
-	// ClusterProperties - The properties associated with a Stream Analytics cluster.
-	*ClusterProperties `json:"properties,omitempty"`
-	// Tags - Resource tags.
-	Tags map[string]*string `json:"tags"`
-	// Location - The geo-location where the resource lives
-	Location *string `json:"location,omitempty"`
-	// ID - READ-ONLY; Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
-	ID *string `json:"id,omitempty"`
-	// Name - READ-ONLY; The name of the resource
-	Name *string `json:"name,omitempty"`
-	// Type - READ-ONLY; The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
-	Type *string `json:"type,omitempty"`
-}
-
-// MarshalJSON is the custom marshaler for Cluster.
-func (c Cluster) MarshalJSON() ([]byte, error) {
-	objectMap := make(map[string]interface{})
-	if c.Sku != nil {
-		objectMap["sku"] = c.Sku
-	}
-	if c.ClusterProperties != nil {
-		objectMap["properties"] = c.ClusterProperties
-	}
-	if c.Tags != nil {
-		objectMap["tags"] = c.Tags
-	}
-	if c.Location != nil {
-		objectMap["location"] = c.Location
-	}
-	return json.Marshal(objectMap)
-}
-
-// UnmarshalJSON is the custom unmarshaler for Cluster struct.
-func (c *Cluster) UnmarshalJSON(body []byte) error {
-	var m map[string]*json.RawMessage
-	err := json.Unmarshal(body, &m)
-	if err != nil {
-		return err
-	}
-	for k, v := range m {
-		switch k {
-		case "sku":
-			if v != nil {
-				var sku ClusterSku
-				err = json.Unmarshal(*v, &sku)
-				if err != nil {
-					return err
-				}
-				c.Sku = &sku
-			}
-		case "etag":
-			if v != nil {
-				var etag string
-				err = json.Unmarshal(*v, &etag)
-				if err != nil {
-					return err
-				}
-				c.Etag = &etag
-			}
-		case "properties":
-			if v != nil {
-				var clusterProperties ClusterProperties
-				err = json.Unmarshal(*v, &clusterProperties)
-				if err != nil {
-					return err
-				}
-				c.ClusterProperties = &clusterProperties
-			}
-		case "tags":
-			if v != nil {
-				var tags map[string]*string
-				err = json.Unmarshal(*v, &tags)
-				if err != nil {
-					return err
-				}
-				c.Tags = tags
-			}
-		case "location":
-			if v != nil {
-				var location string
-				err = json.Unmarshal(*v, &location)
-				if err != nil {
-					return err
-				}
-				c.Location = &location
-			}
-		case "id":
-			if v != nil {
-				var ID string
-				err = json.Unmarshal(*v, &ID)
-				if err != nil {
-					return err
-				}
-				c.ID = &ID
-			}
-		case "name":
-			if v != nil {
-				var name string
-				err = json.Unmarshal(*v, &name)
-				if err != nil {
-					return err
-				}
-				c.Name = &name
-			}
-		case "type":
-			if v != nil {
-				var typeVar string
-				err = json.Unmarshal(*v, &typeVar)
-				if err != nil {
-					return err
-				}
-				c.Type = &typeVar
-			}
-		}
-	}
-
-	return nil
-}
-
-// ClusterInfo the properties associated with a Stream Analytics cluster.
-type ClusterInfo struct {
-	// ID - The resource id of cluster.
-	ID *string `json:"id,omitempty"`
-}
-
-// ClusterJob a streaming job.
-type ClusterJob struct {
-	// ID - READ-ONLY; Resource ID of the streaming job.
-	ID *string `json:"id,omitempty"`
-	// StreamingUnits - READ-ONLY; The number of streaming units that are used by the streaming job.
-	StreamingUnits *int32 `json:"streamingUnits,omitempty"`
-	// JobState - Possible values include: 'JobStateCreated', 'JobStateStarting', 'JobStateRunning', 'JobStateStopping', 'JobStateStopped', 'JobStateDeleting', 'JobStateFailed', 'JobStateDegraded', 'JobStateRestarting', 'JobStateScaling'
-	JobState JobState `json:"jobState,omitempty"`
-}
-
-// MarshalJSON is the custom marshaler for ClusterJob.
-func (cj ClusterJob) MarshalJSON() ([]byte, error) {
-	objectMap := make(map[string]interface{})
-	if cj.JobState != "" {
-		objectMap["jobState"] = cj.JobState
-	}
-	return json.Marshal(objectMap)
-}
-
-// ClusterJobListResult a list of streaming jobs. Populated by a List operation.
-type ClusterJobListResult struct {
-	autorest.Response `json:"-"`
-	// Value - READ-ONLY; A list of streaming jobs.
-	Value *[]ClusterJob `json:"value,omitempty"`
-	// NextLink - READ-ONLY; The URL to fetch the next set of streaming jobs.
-	NextLink *string `json:"nextLink,omitempty"`
-}
-
-// MarshalJSON is the custom marshaler for ClusterJobListResult.
-func (cjlr ClusterJobListResult) MarshalJSON() ([]byte, error) {
-	objectMap := make(map[string]interface{})
-	return json.Marshal(objectMap)
-}
-
-// ClusterJobListResultIterator provides access to a complete listing of ClusterJob values.
-type ClusterJobListResultIterator struct {
-	i    int
-	page ClusterJobListResultPage
-}
-
-// NextWithContext advances to the next value. If there was an error making
-// the request the iterator does not advance and the error is returned.
-func (iter *ClusterJobListResultIterator) NextWithContext(ctx context.Context) (err error) {
-	if tracing.IsEnabled() {
-		ctx = tracing.StartSpan(ctx, fqdn+"/ClusterJobListResultIterator.NextWithContext")
-		defer func() {
-			sc := -1
-			if iter.Response().Response.Response != nil {
-				sc = iter.Response().Response.Response.StatusCode
-			}
-			tracing.EndSpan(ctx, sc, err)
-		}()
-	}
-	iter.i++
-	if iter.i < len(iter.page.Values()) {
-		return nil
-	}
-	err = iter.page.NextWithContext(ctx)
-	if err != nil {
-		iter.i--
-		return err
-	}
-	iter.i = 0
-	return nil
-}
-
-// Next advances to the next value. If there was an error making
-// the request the iterator does not advance and the error is returned.
-// Deprecated: Use NextWithContext() instead.
-func (iter *ClusterJobListResultIterator) Next() error {
-	return iter.NextWithContext(context.Background())
-}
-
-// NotDone returns true if the enumeration should be started or is not yet complete.
-func (iter ClusterJobListResultIterator) NotDone() bool {
-	return iter.page.NotDone() && iter.i < len(iter.page.Values())
-}
-
-// Response returns the raw server response from the last page request.
-func (iter ClusterJobListResultIterator) Response() ClusterJobListResult {
-	return iter.page.Response()
-}
-
-// Value returns the current value or a zero-initialized value if the
-// iterator has advanced beyond the end of the collection.
-func (iter ClusterJobListResultIterator) Value() ClusterJob {
-	if !iter.page.NotDone() {
-		return ClusterJob{}
-	}
-	return iter.page.Values()[iter.i]
-}
-
-// Creates a new instance of the ClusterJobListResultIterator type.
-func NewClusterJobListResultIterator(page ClusterJobListResultPage) ClusterJobListResultIterator {
-	return ClusterJobListResultIterator{page: page}
-}
-
-// IsEmpty returns true if the ListResult contains no values.
-func (cjlr ClusterJobListResult) IsEmpty() bool {
-	return cjlr.Value == nil || len(*cjlr.Value) == 0
-}
-
-// hasNextLink returns true if the NextLink is not empty.
-func (cjlr ClusterJobListResult) hasNextLink() bool {
-	return cjlr.NextLink != nil && len(*cjlr.NextLink) != 0
-}
-
-// clusterJobListResultPreparer prepares a request to retrieve the next set of results.
-// It returns nil if no more results exist.
-func (cjlr ClusterJobListResult) clusterJobListResultPreparer(ctx context.Context) (*http.Request, error) {
-	if !cjlr.hasNextLink() {
-		return nil, nil
-	}
-	return autorest.Prepare((&http.Request{}).WithContext(ctx),
-		autorest.AsJSON(),
-		autorest.AsGet(),
-		autorest.WithBaseURL(to.String(cjlr.NextLink)))
-}
-
-// ClusterJobListResultPage contains a page of ClusterJob values.
-type ClusterJobListResultPage struct {
-	fn   func(context.Context, ClusterJobListResult) (ClusterJobListResult, error)
-	cjlr ClusterJobListResult
-}
-
-// NextWithContext advances to the next page of values. If there was an error making
-// the request the page does not advance and the error is returned.
-func (page *ClusterJobListResultPage) NextWithContext(ctx context.Context) (err error) {
-	if tracing.IsEnabled() {
-		ctx = tracing.StartSpan(ctx, fqdn+"/ClusterJobListResultPage.NextWithContext")
-		defer func() {
-			sc := -1
-			if page.Response().Response.Response != nil {
-				sc = page.Response().Response.Response.StatusCode
-			}
-			tracing.EndSpan(ctx, sc, err)
-		}()
-	}
-	for {
-		next, err := page.fn(ctx, page.cjlr)
-		if err != nil {
-			return err
-		}
-		page.cjlr = next
-		if !next.hasNextLink() || !next.IsEmpty() {
-			break
-		}
-	}
-	return nil
-}
-
-// Next advances to the next page of values. If there was an error making
-// the request the page does not advance and the error is returned.
-// Deprecated: Use NextWithContext() instead.
-func (page *ClusterJobListResultPage) Next() error {
-	return page.NextWithContext(context.Background())
-}
-
-// NotDone returns true if the page enumeration should be started or is not yet complete.
-func (page ClusterJobListResultPage) NotDone() bool {
-	return !page.cjlr.IsEmpty()
-}
-
-// Response returns the raw server response from the last page request.
-func (page ClusterJobListResultPage) Response() ClusterJobListResult {
-	return page.cjlr
-}
-
-// Values returns the slice of values for the current page or nil if there are no values.
-func (page ClusterJobListResultPage) Values() []ClusterJob {
-	if page.cjlr.IsEmpty() {
-		return nil
-	}
-	return *page.cjlr.Value
-}
-
-// Creates a new instance of the ClusterJobListResultPage type.
-func NewClusterJobListResultPage(cur ClusterJobListResult, getNextPage func(context.Context, ClusterJobListResult) (ClusterJobListResult, error)) ClusterJobListResultPage {
-	return ClusterJobListResultPage{
-		fn:   getNextPage,
-		cjlr: cur,
-	}
-}
-
-// ClusterListResult a list of clusters populated by a 'list' operation.
-type ClusterListResult struct {
-	autorest.Response `json:"-"`
-	// Value - READ-ONLY; A list of clusters.
-	Value *[]Cluster `json:"value,omitempty"`
-	// NextLink - READ-ONLY; The URL to fetch the next set of clusters.
-	NextLink *string `json:"nextLink,omitempty"`
-}
-
-// MarshalJSON is the custom marshaler for ClusterListResult.
-func (clr ClusterListResult) MarshalJSON() ([]byte, error) {
-	objectMap := make(map[string]interface{})
-	return json.Marshal(objectMap)
-}
-
-// ClusterListResultIterator provides access to a complete listing of Cluster values.
-type ClusterListResultIterator struct {
-	i    int
-	page ClusterListResultPage
-}
-
-// NextWithContext advances to the next value. If there was an error making
-// the request the iterator does not advance and the error is returned.
-func (iter *ClusterListResultIterator) NextWithContext(ctx context.Context) (err error) {
-	if tracing.IsEnabled() {
-		ctx = tracing.StartSpan(ctx, fqdn+"/ClusterListResultIterator.NextWithContext")
-		defer func() {
-			sc := -1
-			if iter.Response().Response.Response != nil {
-				sc = iter.Response().Response.Response.StatusCode
-			}
-			tracing.EndSpan(ctx, sc, err)
-		}()
-	}
-	iter.i++
-	if iter.i < len(iter.page.Values()) {
-		return nil
-	}
-	err = iter.page.NextWithContext(ctx)
-	if err != nil {
-		iter.i--
-		return err
-	}
-	iter.i = 0
-	return nil
-}
-
-// Next advances to the next value. If there was an error making
-// the request the iterator does not advance and the error is returned.
-// Deprecated: Use NextWithContext() instead.
-func (iter *ClusterListResultIterator) Next() error {
-	return iter.NextWithContext(context.Background())
-}
-
-// NotDone returns true if the enumeration should be started or is not yet complete.
-func (iter ClusterListResultIterator) NotDone() bool {
-	return iter.page.NotDone() && iter.i < len(iter.page.Values())
-}
-
-// Response returns the raw server response from the last page request.
-func (iter ClusterListResultIterator) Response() ClusterListResult {
-	return iter.page.Response()
-}
-
-// Value returns the current value or a zero-initialized value if the
-// iterator has advanced beyond the end of the collection.
-func (iter ClusterListResultIterator) Value() Cluster {
-	if !iter.page.NotDone() {
-		return Cluster{}
-	}
-	return iter.page.Values()[iter.i]
-}
-
-// Creates a new instance of the ClusterListResultIterator type.
-func NewClusterListResultIterator(page ClusterListResultPage) ClusterListResultIterator {
-	return ClusterListResultIterator{page: page}
-}
-
-// IsEmpty returns true if the ListResult contains no values.
-func (clr ClusterListResult) IsEmpty() bool {
-	return clr.Value == nil || len(*clr.Value) == 0
-}
-
-// hasNextLink returns true if the NextLink is not empty.
-func (clr ClusterListResult) hasNextLink() bool {
-	return clr.NextLink != nil && len(*clr.NextLink) != 0
-}
-
-// clusterListResultPreparer prepares a request to retrieve the next set of results.
-// It returns nil if no more results exist.
-func (clr ClusterListResult) clusterListResultPreparer(ctx context.Context) (*http.Request, error) {
-	if !clr.hasNextLink() {
-		return nil, nil
-	}
-	return autorest.Prepare((&http.Request{}).WithContext(ctx),
-		autorest.AsJSON(),
-		autorest.AsGet(),
-		autorest.WithBaseURL(to.String(clr.NextLink)))
-}
-
-// ClusterListResultPage contains a page of Cluster values.
-type ClusterListResultPage struct {
-	fn  func(context.Context, ClusterListResult) (ClusterListResult, error)
-	clr ClusterListResult
-}
-
-// NextWithContext advances to the next page of values. If there was an error making
-// the request the page does not advance and the error is returned.
-func (page *ClusterListResultPage) NextWithContext(ctx context.Context) (err error) {
-	if tracing.IsEnabled() {
-		ctx = tracing.StartSpan(ctx, fqdn+"/ClusterListResultPage.NextWithContext")
-		defer func() {
-			sc := -1
-			if page.Response().Response.Response != nil {
-				sc = page.Response().Response.Response.StatusCode
-			}
-			tracing.EndSpan(ctx, sc, err)
-		}()
-	}
-	for {
-		next, err := page.fn(ctx, page.clr)
-		if err != nil {
-			return err
-		}
-		page.clr = next
-		if !next.hasNextLink() || !next.IsEmpty() {
-			break
-		}
-	}
-	return nil
-}
-
-// Next advances to the next page of values. If there was an error making
-// the request the page does not advance and the error is returned.
-// Deprecated: Use NextWithContext() instead.
-func (page *ClusterListResultPage) Next() error {
-	return page.NextWithContext(context.Background())
-}
-
-// NotDone returns true if the page enumeration should be started or is not yet complete.
-func (page ClusterListResultPage) NotDone() bool {
-	return !page.clr.IsEmpty()
-}
-
-// Response returns the raw server response from the last page request.
-func (page ClusterListResultPage) Response() ClusterListResult {
-	return page.clr
-}
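
These generated iterator/page pairs all share one consumption shape: loop while NotDone, read the current Values, then advance. A trimmed-down, self-contained sketch of that shape follows; the page type here is a stand-in, not the SDK's:

	package main

	import "fmt"

	// page is a minimal stand-in for the generated *ListResultPage types:
	// it exposes the current page's values and a way to advance.
	type page struct {
		values [][]string
		i      int
	}

	func (p *page) NotDone() bool    { return p.i < len(p.values) }
	func (p *page) Values() []string { return p.values[p.i] }
	func (p *page) Next() error      { p.i++; return nil }

	func main() {
		p := &page{values: [][]string{{"job-a", "job-b"}, {"job-c"}}}

		// The same loop shape used with a generated ClusterListResultPage:
		// while NotDone, consume Values, then advance with Next.
		for ; p.NotDone(); _ = p.Next() {
			for _, v := range p.Values() {
				fmt.Println(v)
			}
		}
	}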
-
-// Values returns the slice of values for the current page or nil if there are no values.
-func (page ClusterListResultPage) Values() []Cluster {
-	if page.clr.IsEmpty() {
-		return nil
-	}
-	return *page.clr.Value
-}
-
-// Creates a new instance of the ClusterListResultPage type.
-func NewClusterListResultPage(cur ClusterListResult, getNextPage func(context.Context, ClusterListResult) (ClusterListResult, error)) ClusterListResultPage {
-	return ClusterListResultPage{
-		fn:  getNextPage,
-		clr: cur,
-	}
-}
-
-// ClusterProperties the properties associated with a Stream Analytics cluster.
-type ClusterProperties struct {
-	// CreatedDate - READ-ONLY; The date this cluster was created.
-	CreatedDate *date.Time `json:"createdDate,omitempty"`
-	// ClusterID - READ-ONLY; Unique identifier for the cluster.
-	ClusterID *string `json:"clusterId,omitempty"`
-	// ProvisioningState - Possible values include: 'ClusterProvisioningStateSucceeded', 'ClusterProvisioningStateFailed', 'ClusterProvisioningStateCanceled', 'ClusterProvisioningStateInProgress'
-	ProvisioningState ClusterProvisioningState `json:"provisioningState,omitempty"`
-	// CapacityAllocated - READ-ONLY; Represents the number of streaming units currently being used on the cluster.
-	CapacityAllocated *int32 `json:"capacityAllocated,omitempty"`
-	// CapacityAssigned - READ-ONLY; Represents the sum of the SUs of all streaming jobs associated with the cluster. If all of the jobs were running, this would be the capacity allocated.
-	CapacityAssigned *int32 `json:"capacityAssigned,omitempty"`
-}
-
-// MarshalJSON is the custom marshaler for ClusterProperties.
-func (cp ClusterProperties) MarshalJSON() ([]byte, error) {
-	objectMap := make(map[string]interface{})
-	if cp.ProvisioningState != "" {
-		objectMap["provisioningState"] = cp.ProvisioningState
-	}
-	return json.Marshal(objectMap)
-}
-
-// ClustersCreateOrUpdateFuture an abstraction for monitoring and retrieving the results of a long-running
-// operation.
-type ClustersCreateOrUpdateFuture struct {
-	azure.FutureAPI
-	// Result returns the result of the asynchronous operation.
-	// If the operation has not completed it will return an error.
-	Result func(ClustersClient) (Cluster, error)
-}
-
-// UnmarshalJSON is the custom unmarshaller for CreateFuture.
-func (future *ClustersCreateOrUpdateFuture) UnmarshalJSON(body []byte) error {
-	var azFuture azure.Future
-	if err := json.Unmarshal(body, &azFuture); err != nil {
-		return err
-	}
-	future.FutureAPI = &azFuture
-	future.Result = future.result
-	return nil
-}
-
-// result is the default implementation for ClustersCreateOrUpdateFuture.Result.
-func (future *ClustersCreateOrUpdateFuture) result(client ClustersClient) (c Cluster, err error) {
-	var done bool
-	done, err = future.DoneWithContext(context.Background(), client)
-	if err != nil {
-		err = autorest.NewErrorWithError(err, "streamanalytics.ClustersCreateOrUpdateFuture", "Result", future.Response(), "Polling failure")
-		return
-	}
-	if !done {
-		c.Response.Response = future.Response()
-		err = azure.NewAsyncOpIncompleteError("streamanalytics.ClustersCreateOrUpdateFuture")
-		return
-	}
-	sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
-	if c.Response.Response, err = future.GetResult(sender); err == nil && c.Response.Response.StatusCode != http.StatusNoContent {
-		c, err = client.CreateOrUpdateResponder(c.Response.Response)
-		if err != nil {
-			err = autorest.NewErrorWithError(err, "streamanalytics.ClustersCreateOrUpdateFuture", "Result", c.Response.Response, "Failure responding to request")
-		}
-	}
-	return
-}
-
-// ClustersDeleteFuture an abstraction for monitoring and retrieving the results of a long-running
-// operation.
-type ClustersDeleteFuture struct {
-	azure.FutureAPI
-	// Result returns the result of the asynchronous operation.
-	// If the operation has not completed it will return an error.
-	Result func(ClustersClient) (autorest.Response, error)
-}
-
-// UnmarshalJSON is the custom unmarshaller for CreateFuture.
-func (future *ClustersDeleteFuture) UnmarshalJSON(body []byte) error {
-	var azFuture azure.Future
-	if err := json.Unmarshal(body, &azFuture); err != nil {
-		return err
-	}
-	future.FutureAPI = &azFuture
-	future.Result = future.result
-	return nil
-}
-
-// result is the default implementation for ClustersDeleteFuture.Result.
-func (future *ClustersDeleteFuture) result(client ClustersClient) (ar autorest.Response, err error) {
-	var done bool
-	done, err = future.DoneWithContext(context.Background(), client)
-	if err != nil {
-		err = autorest.NewErrorWithError(err, "streamanalytics.ClustersDeleteFuture", "Result", future.Response(), "Polling failure")
-		return
-	}
-	if !done {
-		ar.Response = future.Response()
-		err = azure.NewAsyncOpIncompleteError("streamanalytics.ClustersDeleteFuture")
-		return
-	}
-	ar.Response = future.Response()
-	return
-}
-
-// ClusterSku the SKU of the cluster. This determines the size/capacity of the cluster. Required on PUT
-// (CreateOrUpdate) requests.
-type ClusterSku struct {
-	// Name - Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests. Possible values include: 'ClusterSkuNameDefault'
-	Name ClusterSkuName `json:"name,omitempty"`
-	// Capacity - Denotes the number of streaming units the cluster can support. Valid values for this property are multiples of 36 with a minimum value of 36 and maximum value of 216. Required on PUT (CreateOrUpdate) requests.
-	Capacity *int32 `json:"capacity,omitempty"`
-}
-
-// ClustersUpdateFuture an abstraction for monitoring and retrieving the results of a long-running
-// operation.
-type ClustersUpdateFuture struct {
-	azure.FutureAPI
-	// Result returns the result of the asynchronous operation.
-	// If the operation has not completed it will return an error.
-	Result func(ClustersClient) (Cluster, error)
-}
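
The futures above wrap the long-running-operation contract: poll until the operation reports done, then read a typed result. A stripped-down, self-contained sketch of that contract follows; the types and the three-poll completion are invented for illustration and are not the SDK's implementation:

	package main

	import (
		"context"
		"fmt"
	)

	// future is a minimal analogue of the generated *Future types:
	// callers poll DoneWithContext until the operation finishes,
	// then read the typed result.
	type future struct {
		polls int
	}

	func (f *future) DoneWithContext(ctx context.Context) (bool, error) {
		f.polls++
		return f.polls >= 3, nil // pretend the service completes on the third poll
	}

	func (f *future) result() (string, error) {
		if f.polls < 3 {
			return "", fmt.Errorf("async operation has not completed")
		}
		return "cluster ready", nil
	}

	func main() {
		ctx := context.Background()
		f := &future{}
		// Poll to completion, mirroring DoneWithContext + result above.
		for {
			done, err := f.DoneWithContext(ctx)
			if err != nil {
				panic(err)
			}
			if done {
				break
			}
		}
		fmt.Println(f.result())
	}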
-
-// UnmarshalJSON is the custom unmarshaller for CreateFuture.
-func (future *ClustersUpdateFuture) UnmarshalJSON(body []byte) error {
-	var azFuture azure.Future
-	if err := json.Unmarshal(body, &azFuture); err != nil {
-		return err
-	}
-	future.FutureAPI = &azFuture
-	future.Result = future.result
-	return nil
-}
-
-// result is the default implementation for ClustersUpdateFuture.Result.
-func (future *ClustersUpdateFuture) result(client ClustersClient) (c Cluster, err error) {
-	var done bool
-	done, err = future.DoneWithContext(context.Background(), client)
-	if err != nil {
-		err = autorest.NewErrorWithError(err, "streamanalytics.ClustersUpdateFuture", "Result", future.Response(), "Polling failure")
-		return
-	}
-	if !done {
-		c.Response.Response = future.Response()
-		err = azure.NewAsyncOpIncompleteError("streamanalytics.ClustersUpdateFuture")
-		return
-	}
-	sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
-	if c.Response.Response, err = future.GetResult(sender); err == nil && c.Response.Response.StatusCode != http.StatusNoContent {
-		c, err = client.UpdateResponder(c.Response.Response)
-		if err != nil {
-			err = autorest.NewErrorWithError(err, "streamanalytics.ClustersUpdateFuture", "Result", c.Response.Response, "Failure responding to request")
-		}
-	}
-	return
-}
-
-// Compression describes how input data is compressed
-type Compression struct {
-	// Type - Indicates the type of compression that the input uses. Required on PUT (CreateOrReplace) requests. Possible values include: 'CompressionTypeNone', 'CompressionTypeGZip', 'CompressionTypeDeflate'
-	Type CompressionType `json:"type,omitempty"`
-}
-
-// CsvSerialization describes how data from an input is serialized or how data is serialized when written
-// to an output in CSV format.
-type CsvSerialization struct {
-	// CsvSerializationProperties - The properties that are associated with the CSV serialization type. Required on PUT (CreateOrReplace) requests.
-	*CsvSerializationProperties `json:"properties,omitempty"`
-	// Type - Possible values include: 'TypeSerialization', 'TypeAvro', 'TypeJSON', 'TypeCsv', 'TypeParquet'
-	Type Type `json:"type,omitempty"`
-}
-
-// MarshalJSON is the custom marshaler for CsvSerialization.
-func (cs CsvSerialization) MarshalJSON() ([]byte, error) {
-	cs.Type = TypeCsv
-	objectMap := make(map[string]interface{})
-	if cs.CsvSerializationProperties != nil {
-		objectMap["properties"] = cs.CsvSerializationProperties
-	}
-	if cs.Type != "" {
-		objectMap["type"] = cs.Type
-	}
-	return json.Marshal(objectMap)
-}
-
-// AsAvroSerialization is the BasicSerialization implementation for CsvSerialization.
-func (cs CsvSerialization) AsAvroSerialization() (*AvroSerialization, bool) {
-	return nil, false
-}
-
-// AsJSONSerialization is the BasicSerialization implementation for CsvSerialization.
-func (cs CsvSerialization) AsJSONSerialization() (*JSONSerialization, bool) {
-	return nil, false
-}
-
-// AsCsvSerialization is the BasicSerialization implementation for CsvSerialization.
-func (cs CsvSerialization) AsCsvSerialization() (*CsvSerialization, bool) {
-	return &cs, true
-}
-
-// AsParquetSerialization is the BasicSerialization implementation for CsvSerialization.
-func (cs CsvSerialization) AsParquetSerialization() (*ParquetSerialization, bool) {
-	return nil, false
-}
-
-// AsSerialization is the BasicSerialization implementation for CsvSerialization.
-func (cs CsvSerialization) AsSerialization() (*Serialization, bool) {
-	return nil, false
-}
-
-// AsBasicSerialization is the BasicSerialization implementation for CsvSerialization.
-func (cs CsvSerialization) AsBasicSerialization() (BasicSerialization, bool) {
-	return &cs, true
-}
-
-// UnmarshalJSON is the custom unmarshaler for CsvSerialization struct.
-func (cs *CsvSerialization) UnmarshalJSON(body []byte) error {
-	var m map[string]*json.RawMessage
-	err := json.Unmarshal(body, &m)
-	if err != nil {
-		return err
-	}
-	for k, v := range m {
-		switch k {
-		case "properties":
-			if v != nil {
-				var csvSerializationProperties CsvSerializationProperties
-				err = json.Unmarshal(*v, &csvSerializationProperties)
-				if err != nil {
-					return err
-				}
-				cs.CsvSerializationProperties = &csvSerializationProperties
-			}
-		case "type":
-			if v != nil {
-				var typeVar Type
-				err = json.Unmarshal(*v, &typeVar)
-				if err != nil {
-					return err
-				}
-				cs.Type = typeVar
-			}
-		}
-	}
-
-	return nil
-}
-
-// CsvSerializationProperties the properties that are associated with the CSV serialization type.
-type CsvSerializationProperties struct {
-	// FieldDelimiter - Specifies the delimiter that will be used to separate comma-separated value (CSV) records. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a list of supported values. Required on PUT (CreateOrReplace) requests.
-	FieldDelimiter *string `json:"fieldDelimiter,omitempty"`
-	// Encoding - Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. Possible values include: 'EncodingUTF8'
-	Encoding Encoding `json:"encoding,omitempty"`
-}
-
-// DiagnosticCondition condition applicable to the resource, or to the job overall, that warrant customer
-// attention.
-type DiagnosticCondition struct {
-	// Since - READ-ONLY; The UTC timestamp of when the condition started. Customers should be able to find a corresponding event in the ops log around this time.
-	Since *string `json:"since,omitempty"`
-	// Code - READ-ONLY; The opaque diagnostic code.
-	Code *string `json:"code,omitempty"`
-	// Message - READ-ONLY; The human-readable message describing the condition in detail. Localized in the Accept-Language of the client request.
-	Message *string `json:"message,omitempty"`
-}
-
-// MarshalJSON is the custom marshaler for DiagnosticCondition.
-func (dc DiagnosticCondition) MarshalJSON() ([]byte, error) {
-	objectMap := make(map[string]interface{})
-	return json.Marshal(objectMap)
-}
-
-// Diagnostics describes conditions applicable to the Input, Output, or the job overall, that warrant
-// customer attention.
-type Diagnostics struct {
-	// Conditions - READ-ONLY; A collection of zero or more conditions applicable to the resource, or to the job overall, that warrant customer attention.
-	Conditions *[]DiagnosticCondition `json:"conditions,omitempty"`
-}
-
-// MarshalJSON is the custom marshaler for Diagnostics.
-func (d Diagnostics) MarshalJSON() ([]byte, error) {
-	objectMap := make(map[string]interface{})
-	return json.Marshal(objectMap)
-}
-
-// DocumentDbOutputDataSource describes a DocumentDB output data source.
-type DocumentDbOutputDataSource struct {
-	// DocumentDbOutputDataSourceProperties - The properties that are associated with a DocumentDB output. Required on PUT (CreateOrReplace) requests.
- *DocumentDbOutputDataSourceProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicOutputDataSourceTypeOutputDataSource', 'TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts', 'TypeBasicOutputDataSourceTypePowerBI', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue', 'TypeBasicOutputDataSourceTypeMicrosoftAzureFunction', 'TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftStorageTable', 'TypeBasicOutputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicOutputDataSource `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for DocumentDbOutputDataSource. -func (ddods DocumentDbOutputDataSource) MarshalJSON() ([]byte, error) { - ddods.Type = TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB - objectMap := make(map[string]interface{}) - if ddods.DocumentDbOutputDataSourceProperties != nil { - objectMap["properties"] = ddods.DocumentDbOutputDataSourceProperties - } - if ddods.Type != "" { - objectMap["type"] = ddods.Type - } - return json.Marshal(objectMap) -} - -// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. -func (ddods DocumentDbOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { - return nil, false -} - -// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. -func (ddods DocumentDbOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. -func (ddods DocumentDbOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. -func (ddods DocumentDbOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { - return nil, false -} - -// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. -func (ddods DocumentDbOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) { - return nil, false -} - -// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. -func (ddods DocumentDbOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { - return &ddods, true -} - -// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. -func (ddods DocumentDbOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) { - return nil, false -} - -// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. -func (ddods DocumentDbOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { - return nil, false -} - -// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. 
-func (ddods DocumentDbOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) { - return nil, false -} - -// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. -func (ddods DocumentDbOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { - return nil, false -} - -// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. -func (ddods DocumentDbOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { - return nil, false -} - -// AsBlobOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. -func (ddods DocumentDbOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { - return nil, false -} - -// AsOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. -func (ddods DocumentDbOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { - return nil, false -} - -// AsBasicOutputDataSource is the BasicOutputDataSource implementation for DocumentDbOutputDataSource. -func (ddods DocumentDbOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { - return &ddods, true -} - -// UnmarshalJSON is the custom unmarshaler for DocumentDbOutputDataSource struct. -func (ddods *DocumentDbOutputDataSource) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var documentDbOutputDataSourceProperties DocumentDbOutputDataSourceProperties - err = json.Unmarshal(*v, &documentDbOutputDataSourceProperties) - if err != nil { - return err - } - ddods.DocumentDbOutputDataSourceProperties = &documentDbOutputDataSourceProperties - } - case "type": - if v != nil { - var typeVar TypeBasicOutputDataSource - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - ddods.Type = typeVar - } - } - } - - return nil -} - -// DocumentDbOutputDataSourceProperties the properties that are associated with a DocumentDB output. -type DocumentDbOutputDataSourceProperties struct { - // AccountID - The DocumentDB account name or ID. Required on PUT (CreateOrReplace) requests. - AccountID *string `json:"accountId,omitempty"` - // AccountKey - The account key for the DocumentDB account. Required on PUT (CreateOrReplace) requests. - AccountKey *string `json:"accountKey,omitempty"` - // Database - The name of the DocumentDB database. Required on PUT (CreateOrReplace) requests. - Database *string `json:"database,omitempty"` - // CollectionNamePattern - The collection name pattern for the collections to be used. The collection name format can be constructed using the optional {partition} token, where partitions start from 0. See the DocumentDB section of https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for more information. Required on PUT (CreateOrReplace) requests. - CollectionNamePattern *string `json:"collectionNamePattern,omitempty"` - // PartitionKey - The name of the field in output events used to specify the key for partitioning output across collections. If 'collectionNamePattern' contains the {partition} token, this property is required to be specified. 
- PartitionKey *string `json:"partitionKey,omitempty"` - // DocumentID - The name of the field in output events used to specify the primary key which insert or update operations are based on. - DocumentID *string `json:"documentId,omitempty"` -} - -// Error common error representation. -type Error struct { - // Error - Error definition properties. - Error *ErrorError `json:"error,omitempty"` -} - -// ErrorDetails common error details representation. -type ErrorDetails struct { - // Code - Error code. - Code *string `json:"code,omitempty"` - // Target - Error target. - Target *string `json:"target,omitempty"` - // Message - Error message. - Message *string `json:"message,omitempty"` -} - -// ErrorError error definition properties. -type ErrorError struct { - // Code - Error code. - Code *string `json:"code,omitempty"` - // Message - Error message. - Message *string `json:"message,omitempty"` - // Target - Error target. - Target *string `json:"target,omitempty"` - // Details - Error details. - Details *[]ErrorDetails `json:"details,omitempty"` -} - -// ErrorResponse describes the error that occurred. -type ErrorResponse struct { - // Code - READ-ONLY; Error code associated with the error that occurred. - Code *string `json:"code,omitempty"` - // Message - READ-ONLY; Describes the error in detail. - Message *string `json:"message,omitempty"` -} - -// MarshalJSON is the custom marshaler for ErrorResponse. -func (er ErrorResponse) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - return json.Marshal(objectMap) -} - -// EventHubDataSourceProperties the common properties that are associated with Event Hub data sources. -type EventHubDataSourceProperties struct { - // EventHubName - The name of the Event Hub. Required on PUT (CreateOrReplace) requests. - EventHubName *string `json:"eventHubName,omitempty"` - // ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` - // SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` - // SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` - // AuthenticationMode - Authentication Mode. Possible values include: 'AuthenticationModeMsi', 'AuthenticationModeUserToken', 'AuthenticationModeConnectionString' - AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"` -} - -// EventHubOutputDataSource describes an Event Hub output data source. -type EventHubOutputDataSource struct { - // EventHubOutputDataSourceProperties - The properties that are associated with an Event Hub output. Required on PUT (CreateOrReplace) requests. 
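The custom marshalers removed in this hunk all share one shape: the embedded *Properties pointer is nested under a "properties" key only when set, and the Type discriminator is force-pinned to its single legal value at encode time so a caller can never send a mismatched type. A toy sketch of that shape (types and the discriminator string are illustrative):

package main

import (
	"encoding/json"
	"fmt"
)

type docDBProps struct {
	AccountID *string `json:"accountId,omitempty"`
	Database  *string `json:"database,omitempty"`
}

type docDBOutput struct {
	Props *docDBProps
}

// MarshalJSON mirrors the removed marshalers: pin the discriminator, nest the
// properties struct under "properties" only when present.
func (d docDBOutput) MarshalJSON() ([]byte, error) {
	m := map[string]interface{}{
		"type": "Microsoft.Storage/DocumentDB",
	}
	if d.Props != nil {
		m["properties"] = d.Props
	}
	return json.Marshal(m)
}

func main() {
	acct := "example-account"
	b, _ := json.Marshal(docDBOutput{Props: &docDBProps{AccountID: &acct}})
	fmt.Println(string(b))
}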
- *EventHubOutputDataSourceProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicOutputDataSourceTypeOutputDataSource', 'TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts', 'TypeBasicOutputDataSourceTypePowerBI', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue', 'TypeBasicOutputDataSourceTypeMicrosoftAzureFunction', 'TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftStorageTable', 'TypeBasicOutputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicOutputDataSource `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for EventHubOutputDataSource. -func (ehods EventHubOutputDataSource) MarshalJSON() ([]byte, error) { - ehods.Type = TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub - objectMap := make(map[string]interface{}) - if ehods.EventHubOutputDataSourceProperties != nil { - objectMap["properties"] = ehods.EventHubOutputDataSourceProperties - } - if ehods.Type != "" { - objectMap["type"] = ehods.Type - } - return json.Marshal(objectMap) -} - -// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. -func (ehods EventHubOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { - return nil, false -} - -// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. -func (ehods EventHubOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. -func (ehods EventHubOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. -func (ehods EventHubOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { - return nil, false -} - -// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. -func (ehods EventHubOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) { - return nil, false -} - -// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. -func (ehods EventHubOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { - return nil, false -} - -// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. -func (ehods EventHubOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) { - return nil, false -} - -// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. -func (ehods EventHubOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { - return nil, false -} - -// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. 
-func (ehods EventHubOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) { - return nil, false -} - -// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. -func (ehods EventHubOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { - return &ehods, true -} - -// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. -func (ehods EventHubOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { - return nil, false -} - -// AsBlobOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. -func (ehods EventHubOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { - return nil, false -} - -// AsOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. -func (ehods EventHubOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { - return nil, false -} - -// AsBasicOutputDataSource is the BasicOutputDataSource implementation for EventHubOutputDataSource. -func (ehods EventHubOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { - return &ehods, true -} - -// UnmarshalJSON is the custom unmarshaler for EventHubOutputDataSource struct. -func (ehods *EventHubOutputDataSource) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var eventHubOutputDataSourceProperties EventHubOutputDataSourceProperties - err = json.Unmarshal(*v, &eventHubOutputDataSourceProperties) - if err != nil { - return err - } - ehods.EventHubOutputDataSourceProperties = &eventHubOutputDataSourceProperties - } - case "type": - if v != nil { - var typeVar TypeBasicOutputDataSource - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - ehods.Type = typeVar - } - } - } - - return nil -} - -// EventHubOutputDataSourceProperties the properties that are associated with an Event Hub output. -type EventHubOutputDataSourceProperties struct { - // PartitionKey - The key/column that is used to determine to which partition to send event data. - PartitionKey *string `json:"partitionKey,omitempty"` - // PropertyColumns - The properties associated with this Event Hub output. - PropertyColumns *[]string `json:"propertyColumns,omitempty"` - // EventHubName - The name of the Event Hub. Required on PUT (CreateOrReplace) requests. - EventHubName *string `json:"eventHubName,omitempty"` - // ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` - // SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` - // SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` - // AuthenticationMode - Authentication Mode. 
Possible values include: 'AuthenticationModeMsi', 'AuthenticationModeUserToken', 'AuthenticationModeConnectionString' - AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"` -} - -// EventHubStreamInputDataSource describes an Event Hub input data source that contains stream data. -type EventHubStreamInputDataSource struct { - // EventHubStreamInputDataSourceProperties - The properties that are associated with an Event Hub input containing stream data. Required on PUT (CreateOrReplace) requests. - *EventHubStreamInputDataSourceProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicStreamInputDataSourceTypeStreamInputDataSource', 'TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs', 'TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicStreamInputDataSource `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for EventHubStreamInputDataSource. -func (ehsids EventHubStreamInputDataSource) MarshalJSON() ([]byte, error) { - ehsids.Type = TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub - objectMap := make(map[string]interface{}) - if ehsids.EventHubStreamInputDataSourceProperties != nil { - objectMap["properties"] = ehsids.EventHubStreamInputDataSourceProperties - } - if ehsids.Type != "" { - objectMap["type"] = ehsids.Type - } - return json.Marshal(objectMap) -} - -// AsIoTHubStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource. -func (ehsids EventHubStreamInputDataSource) AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) { - return nil, false -} - -// AsEventHubV2StreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource. -func (ehsids EventHubStreamInputDataSource) AsEventHubV2StreamInputDataSource() (*EventHubV2StreamInputDataSource, bool) { - return nil, false -} - -// AsEventHubStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource. -func (ehsids EventHubStreamInputDataSource) AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) { - return &ehsids, true -} - -// AsBlobStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource. -func (ehsids EventHubStreamInputDataSource) AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) { - return nil, false -} - -// AsStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource. -func (ehsids EventHubStreamInputDataSource) AsStreamInputDataSource() (*StreamInputDataSource, bool) { - return nil, false -} - -// AsBasicStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubStreamInputDataSource. -func (ehsids EventHubStreamInputDataSource) AsBasicStreamInputDataSource() (BasicStreamInputDataSource, bool) { - return &ehsids, true -} - -// UnmarshalJSON is the custom unmarshaler for EventHubStreamInputDataSource struct. 
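The long runs of AsXxx() accessor methods being deleted here exist because Go has no sum types: each concrete data source answers true for itself and its base interface, false for every sibling. The replacement SDK drops the generated accessors in favor of plain type assertions (compare the input.(inputs.CsvSerialization) checks in helpers_input.go earlier in this patch). A sketch of that newer style with invented toy types:

package main

import "fmt"

type inputSource interface{ isInputSource() }

type eventHubSource struct{ ConsumerGroup string }
type blobSource struct{ Container string }

func (eventHubSource) isInputSource() {}
func (blobSource) isInputSource()     {}

// describe uses a plain type switch instead of generated
// AsEventHubStreamInputDataSource()-style accessors; the compiler keeps the
// cases honest and siblings need no boilerplate about each other.
func describe(s inputSource) string {
	switch v := s.(type) {
	case eventHubSource:
		return "event hub, consumer group " + v.ConsumerGroup
	case blobSource:
		return "blob, container " + v.Container
	default:
		return "unknown input source"
	}
}

func main() {
	fmt.Println(describe(eventHubSource{ConsumerGroup: "$Default"}))
	fmt.Println(describe(blobSource{Container: "example"}))
}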
-func (ehsids *EventHubStreamInputDataSource) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var eventHubStreamInputDataSourceProperties EventHubStreamInputDataSourceProperties - err = json.Unmarshal(*v, &eventHubStreamInputDataSourceProperties) - if err != nil { - return err - } - ehsids.EventHubStreamInputDataSourceProperties = &eventHubStreamInputDataSourceProperties - } - case "type": - if v != nil { - var typeVar TypeBasicStreamInputDataSource - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - ehsids.Type = typeVar - } - } - } - - return nil -} - -// EventHubStreamInputDataSourceProperties the properties that are associated with a Event Hub input -// containing stream data. -type EventHubStreamInputDataSourceProperties struct { - // ConsumerGroupName - The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. - ConsumerGroupName *string `json:"consumerGroupName,omitempty"` - // EventHubName - The name of the Event Hub. Required on PUT (CreateOrReplace) requests. - EventHubName *string `json:"eventHubName,omitempty"` - // ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` - // SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` - // SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` - // AuthenticationMode - Authentication Mode. Possible values include: 'AuthenticationModeMsi', 'AuthenticationModeUserToken', 'AuthenticationModeConnectionString' - AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"` -} - -// EventHubV2OutputDataSource describes an Event Hub output data source. -type EventHubV2OutputDataSource struct { - // EventHubOutputDataSourceProperties - The properties that are associated with an Event Hub output. Required on PUT (CreateOrReplace) requests. 
- *EventHubOutputDataSourceProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicOutputDataSourceTypeOutputDataSource', 'TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts', 'TypeBasicOutputDataSourceTypePowerBI', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue', 'TypeBasicOutputDataSourceTypeMicrosoftAzureFunction', 'TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftStorageTable', 'TypeBasicOutputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicOutputDataSource `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for EventHubV2OutputDataSource. -func (ehvods EventHubV2OutputDataSource) MarshalJSON() ([]byte, error) { - ehvods.Type = TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub - objectMap := make(map[string]interface{}) - if ehvods.EventHubOutputDataSourceProperties != nil { - objectMap["properties"] = ehvods.EventHubOutputDataSourceProperties - } - if ehvods.Type != "" { - objectMap["type"] = ehvods.Type - } - return json.Marshal(objectMap) -} - -// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource. -func (ehvods EventHubV2OutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { - return nil, false -} - -// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource. -func (ehvods EventHubV2OutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource. -func (ehvods EventHubV2OutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource. -func (ehvods EventHubV2OutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { - return nil, false -} - -// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource. -func (ehvods EventHubV2OutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) { - return nil, false -} - -// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource. -func (ehvods EventHubV2OutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { - return nil, false -} - -// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource. -func (ehvods EventHubV2OutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) { - return nil, false -} - -// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource. -func (ehvods EventHubV2OutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { - return nil, false -} - -// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource. 
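EventHubOutputDataSource and EventHubV2OutputDataSource embed the same EventHubOutputDataSourceProperties and differ only in the pinned discriminator constant. A toy demo of two wrappers sharing one properties struct; the wire strings below are what the ARM discriminators are generally understood to be, but treat them as an assumption rather than a quote from the SDK:

package main

import (
	"encoding/json"
	"fmt"
)

type eventHubProps struct {
	EventHubName string `json:"eventHubName"`
}

// v1 and v2 wrap identical properties; only the pinned discriminator differs,
// matching the removed EventHubOutputDataSource / EventHubV2OutputDataSource.
type eventHubOut struct{ eventHubProps }
type eventHubV2Out struct{ eventHubProps }

func (e eventHubOut) MarshalJSON() ([]byte, error) {
	return json.Marshal(map[string]interface{}{
		"type": "Microsoft.ServiceBus/EventHub", "properties": e.eventHubProps,
	})
}

func (e eventHubV2Out) MarshalJSON() ([]byte, error) {
	return json.Marshal(map[string]interface{}{
		"type": "Microsoft.EventHub/EventHub", "properties": e.eventHubProps,
	})
}

func main() {
	p := eventHubProps{EventHubName: "example-hub"}
	b1, _ := json.Marshal(eventHubOut{p})
	b2, _ := json.Marshal(eventHubV2Out{p})
	fmt.Println(string(b1))
	fmt.Println(string(b2))
}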
-func (ehvods EventHubV2OutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) { - return &ehvods, true -} - -// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource. -func (ehvods EventHubV2OutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { - return nil, false -} - -// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource. -func (ehvods EventHubV2OutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { - return nil, false -} - -// AsBlobOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource. -func (ehvods EventHubV2OutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { - return nil, false -} - -// AsOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource. -func (ehvods EventHubV2OutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { - return nil, false -} - -// AsBasicOutputDataSource is the BasicOutputDataSource implementation for EventHubV2OutputDataSource. -func (ehvods EventHubV2OutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { - return &ehvods, true -} - -// UnmarshalJSON is the custom unmarshaler for EventHubV2OutputDataSource struct. -func (ehvods *EventHubV2OutputDataSource) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var eventHubOutputDataSourceProperties EventHubOutputDataSourceProperties - err = json.Unmarshal(*v, &eventHubOutputDataSourceProperties) - if err != nil { - return err - } - ehvods.EventHubOutputDataSourceProperties = &eventHubOutputDataSourceProperties - } - case "type": - if v != nil { - var typeVar TypeBasicOutputDataSource - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - ehvods.Type = typeVar - } - } - } - - return nil -} - -// EventHubV2StreamInputDataSource describes an Event Hub input data source that contains stream data. -type EventHubV2StreamInputDataSource struct { - // EventHubStreamInputDataSourceProperties - The properties that are associated with an Event Hub input containing stream data. Required on PUT (CreateOrReplace) requests. - *EventHubStreamInputDataSourceProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicStreamInputDataSourceTypeStreamInputDataSource', 'TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs', 'TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicStreamInputDataSource `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for EventHubV2StreamInputDataSource. -func (ehvsids EventHubV2StreamInputDataSource) MarshalJSON() ([]byte, error) { - ehvsids.Type = TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub - objectMap := make(map[string]interface{}) - if ehvsids.EventHubStreamInputDataSourceProperties != nil { - objectMap["properties"] = ehvsids.EventHubStreamInputDataSourceProperties - } - if ehvsids.Type != "" { - objectMap["type"] = ehvsids.Type - } - return json.Marshal(objectMap) -} - -// AsIoTHubStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubV2StreamInputDataSource. 
-func (ehvsids EventHubV2StreamInputDataSource) AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) { - return nil, false -} - -// AsEventHubV2StreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubV2StreamInputDataSource. -func (ehvsids EventHubV2StreamInputDataSource) AsEventHubV2StreamInputDataSource() (*EventHubV2StreamInputDataSource, bool) { - return &ehvsids, true -} - -// AsEventHubStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubV2StreamInputDataSource. -func (ehvsids EventHubV2StreamInputDataSource) AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) { - return nil, false -} - -// AsBlobStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubV2StreamInputDataSource. -func (ehvsids EventHubV2StreamInputDataSource) AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) { - return nil, false -} - -// AsStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubV2StreamInputDataSource. -func (ehvsids EventHubV2StreamInputDataSource) AsStreamInputDataSource() (*StreamInputDataSource, bool) { - return nil, false -} - -// AsBasicStreamInputDataSource is the BasicStreamInputDataSource implementation for EventHubV2StreamInputDataSource. -func (ehvsids EventHubV2StreamInputDataSource) AsBasicStreamInputDataSource() (BasicStreamInputDataSource, bool) { - return &ehvsids, true -} - -// UnmarshalJSON is the custom unmarshaler for EventHubV2StreamInputDataSource struct. -func (ehvsids *EventHubV2StreamInputDataSource) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var eventHubStreamInputDataSourceProperties EventHubStreamInputDataSourceProperties - err = json.Unmarshal(*v, &eventHubStreamInputDataSourceProperties) - if err != nil { - return err - } - ehvsids.EventHubStreamInputDataSourceProperties = &eventHubStreamInputDataSourceProperties - } - case "type": - if v != nil { - var typeVar TypeBasicStreamInputDataSource - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - ehvsids.Type = typeVar - } - } - } - - return nil -} - -// Function a function object, containing all information associated with the named function. All functions -// are contained under a streaming job. -type Function struct { - autorest.Response `json:"-"` - // Properties - The properties that are associated with a function. - Properties BasicFunctionProperties `json:"properties,omitempty"` - // ID - READ-ONLY; Resource Id - ID *string `json:"id,omitempty"` - // Name - Resource name - Name *string `json:"name,omitempty"` - // Type - READ-ONLY; Resource type - Type *string `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for Function. -func (f Function) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - objectMap["properties"] = f.Properties - if f.Name != nil { - objectMap["name"] = f.Name - } - return json.Marshal(objectMap) -} - -// UnmarshalJSON is the custom unmarshaler for Function struct. 
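Function.Properties is itself a polymorphic interface, so the Function unmarshaler below decodes field-by-field through a map[string]*json.RawMessage and defers "properties" to unmarshalBasicFunctionProperties; list payloads get the same treatment element-by-element via the ...Array helpers. A sketch of that element-wise dispatch over a raw-message slice, with invented types:

package main

import (
	"encoding/json"
	"fmt"
)

type funcProps interface{ kind() string }

type scalarProps struct {
	Query string `json:"query"`
}
type aggregateProps struct {
	Query string `json:"query"`
}

func (scalarProps) kind() string    { return "Scalar" }
func (aggregateProps) kind() string { return "Aggregate" }

// unmarshalPropsArray mirrors the removed unmarshalBasic...Array helpers:
// split the payload into raw elements first, then dispatch each element on
// its own discriminator.
func unmarshalPropsArray(body []byte) ([]funcProps, error) {
	var raws []json.RawMessage
	if err := json.Unmarshal(body, &raws); err != nil {
		return nil, err
	}
	out := make([]funcProps, 0, len(raws))
	for _, raw := range raws {
		var env struct {
			Type string `json:"type"`
		}
		if err := json.Unmarshal(raw, &env); err != nil {
			return nil, err
		}
		switch env.Type {
		case "Aggregate":
			var p aggregateProps
			if err := json.Unmarshal(raw, &p); err != nil {
				return nil, err
			}
			out = append(out, p)
		default: // the generated code also falls back to the base type
			var p scalarProps
			if err := json.Unmarshal(raw, &p); err != nil {
				return nil, err
			}
			out = append(out, p)
		}
	}
	return out, nil
}

func main() {
	ps, err := unmarshalPropsArray([]byte(`[{"type":"Scalar","query":"x"},{"type":"Aggregate","query":"y"}]`))
	for _, p := range ps {
		fmt.Println(p.kind())
	}
	fmt.Println(err)
}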
-func (f *Function) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - properties, err := unmarshalBasicFunctionProperties(*v) - if err != nil { - return err - } - f.Properties = properties - } - case "id": - if v != nil { - var ID string - err = json.Unmarshal(*v, &ID) - if err != nil { - return err - } - f.ID = &ID - } - case "name": - if v != nil { - var name string - err = json.Unmarshal(*v, &name) - if err != nil { - return err - } - f.Name = &name - } - case "type": - if v != nil { - var typeVar string - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - f.Type = &typeVar - } - } - } - - return nil -} - -// BasicFunctionBinding the physical binding of the function. For example, in the Azure Machine Learning web service’s -// case, this describes the endpoint. -type BasicFunctionBinding interface { - AsJavaScriptFunctionBinding() (*JavaScriptFunctionBinding, bool) - AsAzureMachineLearningWebServiceFunctionBinding() (*AzureMachineLearningWebServiceFunctionBinding, bool) - AsFunctionBinding() (*FunctionBinding, bool) -} - -// FunctionBinding the physical binding of the function. For example, in the Azure Machine Learning web -// service’s case, this describes the endpoint. -type FunctionBinding struct { - // Type - Possible values include: 'TypeBasicFunctionBindingTypeFunctionBinding', 'TypeBasicFunctionBindingTypeMicrosoftStreamAnalyticsJavascriptUdf', 'TypeBasicFunctionBindingTypeMicrosoftMachineLearningWebService' - Type TypeBasicFunctionBinding `json:"type,omitempty"` -} - -func unmarshalBasicFunctionBinding(body []byte) (BasicFunctionBinding, error) { - var m map[string]interface{} - err := json.Unmarshal(body, &m) - if err != nil { - return nil, err - } - - switch m["type"] { - case string(TypeBasicFunctionBindingTypeMicrosoftStreamAnalyticsJavascriptUdf): - var jsfb JavaScriptFunctionBinding - err := json.Unmarshal(body, &jsfb) - return jsfb, err - case string(TypeBasicFunctionBindingTypeMicrosoftMachineLearningWebService): - var amlwsfb AzureMachineLearningWebServiceFunctionBinding - err := json.Unmarshal(body, &amlwsfb) - return amlwsfb, err - default: - var fb FunctionBinding - err := json.Unmarshal(body, &fb) - return fb, err - } -} -func unmarshalBasicFunctionBindingArray(body []byte) ([]BasicFunctionBinding, error) { - var rawMessages []*json.RawMessage - err := json.Unmarshal(body, &rawMessages) - if err != nil { - return nil, err - } - - fbArray := make([]BasicFunctionBinding, len(rawMessages)) - - for index, rawMessage := range rawMessages { - fb, err := unmarshalBasicFunctionBinding(*rawMessage) - if err != nil { - return nil, err - } - fbArray[index] = fb - } - return fbArray, nil -} - -// MarshalJSON is the custom marshaler for FunctionBinding. -func (fb FunctionBinding) MarshalJSON() ([]byte, error) { - fb.Type = TypeBasicFunctionBindingTypeFunctionBinding - objectMap := make(map[string]interface{}) - if fb.Type != "" { - objectMap["type"] = fb.Type - } - return json.Marshal(objectMap) -} - -// AsJavaScriptFunctionBinding is the BasicFunctionBinding implementation for FunctionBinding. -func (fb FunctionBinding) AsJavaScriptFunctionBinding() (*JavaScriptFunctionBinding, bool) { - return nil, false -} - -// AsAzureMachineLearningWebServiceFunctionBinding is the BasicFunctionBinding implementation for FunctionBinding. 
-func (fb FunctionBinding) AsAzureMachineLearningWebServiceFunctionBinding() (*AzureMachineLearningWebServiceFunctionBinding, bool) { - return nil, false -} - -// AsFunctionBinding is the BasicFunctionBinding implementation for FunctionBinding. -func (fb FunctionBinding) AsFunctionBinding() (*FunctionBinding, bool) { - return &fb, true -} - -// AsBasicFunctionBinding is the BasicFunctionBinding implementation for FunctionBinding. -func (fb FunctionBinding) AsBasicFunctionBinding() (BasicFunctionBinding, bool) { - return &fb, true -} - -// FunctionConfiguration ... -type FunctionConfiguration struct { - Inputs *[]FunctionInput `json:"inputs,omitempty"` - Output *FunctionOutput `json:"output,omitempty"` - Binding BasicFunctionBinding `json:"binding,omitempty"` -} - -// UnmarshalJSON is the custom unmarshaler for FunctionConfiguration struct. -func (fc *FunctionConfiguration) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "inputs": - if v != nil { - var inputs []FunctionInput - err = json.Unmarshal(*v, &inputs) - if err != nil { - return err - } - fc.Inputs = &inputs - } - case "output": - if v != nil { - var output FunctionOutput - err = json.Unmarshal(*v, &output) - if err != nil { - return err - } - fc.Output = &output - } - case "binding": - if v != nil { - binding, err := unmarshalBasicFunctionBinding(*v) - if err != nil { - return err - } - fc.Binding = binding - } - } - } - - return nil -} - -// FunctionInput describes one input parameter of a function. -type FunctionInput struct { - // DataType - The (Azure Stream Analytics supported) data type of the function input parameter. A list of valid Azure Stream Analytics data types are described at https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx - DataType *string `json:"dataType,omitempty"` - // IsConfigurationParameter - A flag indicating if the parameter is a configuration parameter. True if this input parameter is expected to be a constant. Default is false. - IsConfigurationParameter *bool `json:"isConfigurationParameter,omitempty"` -} - -// FunctionListResult object containing a list of functions under a streaming job. -type FunctionListResult struct { - autorest.Response `json:"-"` - // Value - READ-ONLY; A list of functions under a streaming job. Populated by a 'List' operation. - Value *[]Function `json:"value,omitempty"` - // NextLink - READ-ONLY; The link (url) to the next page of results. - NextLink *string `json:"nextLink,omitempty"` -} - -// MarshalJSON is the custom marshaler for FunctionListResult. -func (flr FunctionListResult) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - return json.Marshal(objectMap) -} - -// FunctionListResultIterator provides access to a complete listing of Function values. -type FunctionListResultIterator struct { - i int - page FunctionListResultPage -} - -// NextWithContext advances to the next value. If there was an error making -// the request the iterator does not advance and the error is returned. 
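The FunctionListResultIterator/Page machinery being deleted is nextLink pagination: fetch a page, surface its values, follow NextLink until it is empty. A simplified, HTTP-free sketch of that contract; fetch and the in-memory page map stand in for the round trip the removed preparer performs:

package main

import "fmt"

type page struct {
	Values   []string
	NextLink string
}

// fetch stands in for the HTTP round trip the removed
// functionListResultPreparer/NextWithContext pair performs; here it serves
// pages from a map keyed by link.
func fetch(link string, pages map[string]page) (page, bool) {
	p, ok := pages[link]
	return p, ok
}

// collectAll mirrors the iterator contract: keep following NextLink until it
// is empty, flattening every page's values.
func collectAll(start string, pages map[string]page) []string {
	var all []string
	for link := start; link != ""; {
		p, ok := fetch(link, pages)
		if !ok {
			break
		}
		all = append(all, p.Values...)
		link = p.NextLink
	}
	return all
}

func main() {
	pages := map[string]page{
		"p1": {Values: []string{"udf1", "udf2"}, NextLink: "p2"},
		"p2": {Values: []string{"udf3"}},
	}
	fmt.Println(collectAll("p1", pages))
}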
-func (iter *FunctionListResultIterator) NextWithContext(ctx context.Context) (err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/FunctionListResultIterator.NextWithContext") - defer func() { - sc := -1 - if iter.Response().Response.Response != nil { - sc = iter.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - iter.i++ - if iter.i < len(iter.page.Values()) { - return nil - } - err = iter.page.NextWithContext(ctx) - if err != nil { - iter.i-- - return err - } - iter.i = 0 - return nil -} - -// Next advances to the next value. If there was an error making -// the request the iterator does not advance and the error is returned. -// Deprecated: Use NextWithContext() instead. -func (iter *FunctionListResultIterator) Next() error { - return iter.NextWithContext(context.Background()) -} - -// NotDone returns true if the enumeration should be started or is not yet complete. -func (iter FunctionListResultIterator) NotDone() bool { - return iter.page.NotDone() && iter.i < len(iter.page.Values()) -} - -// Response returns the raw server response from the last page request. -func (iter FunctionListResultIterator) Response() FunctionListResult { - return iter.page.Response() -} - -// Value returns the current value or a zero-initialized value if the -// iterator has advanced beyond the end of the collection. -func (iter FunctionListResultIterator) Value() Function { - if !iter.page.NotDone() { - return Function{} - } - return iter.page.Values()[iter.i] -} - -// Creates a new instance of the FunctionListResultIterator type. -func NewFunctionListResultIterator(page FunctionListResultPage) FunctionListResultIterator { - return FunctionListResultIterator{page: page} -} - -// IsEmpty returns true if the ListResult contains no values. -func (flr FunctionListResult) IsEmpty() bool { - return flr.Value == nil || len(*flr.Value) == 0 -} - -// hasNextLink returns true if the NextLink is not empty. -func (flr FunctionListResult) hasNextLink() bool { - return flr.NextLink != nil && len(*flr.NextLink) != 0 -} - -// functionListResultPreparer prepares a request to retrieve the next set of results. -// It returns nil if no more results exist. -func (flr FunctionListResult) functionListResultPreparer(ctx context.Context) (*http.Request, error) { - if !flr.hasNextLink() { - return nil, nil - } - return autorest.Prepare((&http.Request{}).WithContext(ctx), - autorest.AsJSON(), - autorest.AsGet(), - autorest.WithBaseURL(to.String(flr.NextLink))) -} - -// FunctionListResultPage contains a page of Function values. -type FunctionListResultPage struct { - fn func(context.Context, FunctionListResult) (FunctionListResult, error) - flr FunctionListResult -} - -// NextWithContext advances to the next page of values. If there was an error making -// the request the page does not advance and the error is returned. -func (page *FunctionListResultPage) NextWithContext(ctx context.Context) (err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/FunctionListResultPage.NextWithContext") - defer func() { - sc := -1 - if page.Response().Response.Response != nil { - sc = page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - for { - next, err := page.fn(ctx, page.flr) - if err != nil { - return err - } - page.flr = next - if !next.hasNextLink() || !next.IsEmpty() { - break - } - } - return nil -} - -// Next advances to the next page of values. 
If there was an error making -// the request the page does not advance and the error is returned. -// Deprecated: Use NextWithContext() instead. -func (page *FunctionListResultPage) Next() error { - return page.NextWithContext(context.Background()) -} - -// NotDone returns true if the page enumeration should be started or is not yet complete. -func (page FunctionListResultPage) NotDone() bool { - return !page.flr.IsEmpty() -} - -// Response returns the raw server response from the last page request. -func (page FunctionListResultPage) Response() FunctionListResult { - return page.flr -} - -// Values returns the slice of values for the current page or nil if there are no values. -func (page FunctionListResultPage) Values() []Function { - if page.flr.IsEmpty() { - return nil - } - return *page.flr.Value -} - -// Creates a new instance of the FunctionListResultPage type. -func NewFunctionListResultPage(cur FunctionListResult, getNextPage func(context.Context, FunctionListResult) (FunctionListResult, error)) FunctionListResultPage { - return FunctionListResultPage{ - fn: getNextPage, - flr: cur, - } -} - -// FunctionOutput describes the output of a function. -type FunctionOutput struct { - // DataType - The (Azure Stream Analytics supported) data type of the function output. A list of valid Azure Stream Analytics data types are described at https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx - DataType *string `json:"dataType,omitempty"` -} - -// BasicFunctionProperties the properties that are associated with a function. -type BasicFunctionProperties interface { - AsAggregateFunctionProperties() (*AggregateFunctionProperties, bool) - AsScalarFunctionProperties() (*ScalarFunctionProperties, bool) - AsFunctionProperties() (*FunctionProperties, bool) -} - -// FunctionProperties the properties that are associated with a function. -type FunctionProperties struct { - // Etag - READ-ONLY; The current entity tag for the function. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. 
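The Etag field documented above exists for optimistic concurrency: read the etag from a GET, replay it in If-Match on the write, and let the service reject the request if the resource changed in between (conventionally with 412 Precondition Failed; that status is the usual ARM behaviour, not something this file states). A sketch with net/http; the URL is a placeholder:

package main

import (
	"fmt"
	"net/http"
	"strings"
)

// updateWithEtag sketches the handshake the Etag doc comment describes:
// replay the etag from the last GET as If-Match so a conflicting
// intermediate write makes this request fail instead of clobbering it.
func updateWithEtag(url, etag, body string) (*http.Request, error) {
	req, err := http.NewRequest(http.MethodPut, url, strings.NewReader(body))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("If-Match", etag) // omit (or use "*") to overwrite unconditionally
	return req, nil
}

func main() {
	// Placeholder endpoint, not a real Stream Analytics URL.
	req, _ := updateWithEtag("https://example.invalid/functions/f1", `"abc123"`, `{}`)
	fmt.Println(req.Method, req.Header.Get("If-Match"))
}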
- Etag *string `json:"etag,omitempty"` - *FunctionConfiguration `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicFunctionPropertiesTypeFunctionProperties', 'TypeBasicFunctionPropertiesTypeAggregate', 'TypeBasicFunctionPropertiesTypeScalar' - Type TypeBasicFunctionProperties `json:"type,omitempty"` -} - -func unmarshalBasicFunctionProperties(body []byte) (BasicFunctionProperties, error) { - var m map[string]interface{} - err := json.Unmarshal(body, &m) - if err != nil { - return nil, err - } - - switch m["type"] { - case string(TypeBasicFunctionPropertiesTypeAggregate): - var afp AggregateFunctionProperties - err := json.Unmarshal(body, &afp) - return afp, err - case string(TypeBasicFunctionPropertiesTypeScalar): - var sfp ScalarFunctionProperties - err := json.Unmarshal(body, &sfp) - return sfp, err - default: - var fp FunctionProperties - err := json.Unmarshal(body, &fp) - return fp, err - } -} -func unmarshalBasicFunctionPropertiesArray(body []byte) ([]BasicFunctionProperties, error) { - var rawMessages []*json.RawMessage - err := json.Unmarshal(body, &rawMessages) - if err != nil { - return nil, err - } - - fpArray := make([]BasicFunctionProperties, len(rawMessages)) - - for index, rawMessage := range rawMessages { - fp, err := unmarshalBasicFunctionProperties(*rawMessage) - if err != nil { - return nil, err - } - fpArray[index] = fp - } - return fpArray, nil -} - -// MarshalJSON is the custom marshaler for FunctionProperties. -func (fp FunctionProperties) MarshalJSON() ([]byte, error) { - fp.Type = TypeBasicFunctionPropertiesTypeFunctionProperties - objectMap := make(map[string]interface{}) - if fp.FunctionConfiguration != nil { - objectMap["properties"] = fp.FunctionConfiguration - } - if fp.Type != "" { - objectMap["type"] = fp.Type - } - return json.Marshal(objectMap) -} - -// AsAggregateFunctionProperties is the BasicFunctionProperties implementation for FunctionProperties. -func (fp FunctionProperties) AsAggregateFunctionProperties() (*AggregateFunctionProperties, bool) { - return nil, false -} - -// AsScalarFunctionProperties is the BasicFunctionProperties implementation for FunctionProperties. -func (fp FunctionProperties) AsScalarFunctionProperties() (*ScalarFunctionProperties, bool) { - return nil, false -} - -// AsFunctionProperties is the BasicFunctionProperties implementation for FunctionProperties. -func (fp FunctionProperties) AsFunctionProperties() (*FunctionProperties, bool) { - return &fp, true -} - -// AsBasicFunctionProperties is the BasicFunctionProperties implementation for FunctionProperties. -func (fp FunctionProperties) AsBasicFunctionProperties() (BasicFunctionProperties, bool) { - return &fp, true -} - -// UnmarshalJSON is the custom unmarshaler for FunctionProperties struct. 
-func (fp *FunctionProperties) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "etag": - if v != nil { - var etag string - err = json.Unmarshal(*v, &etag) - if err != nil { - return err - } - fp.Etag = &etag - } - case "properties": - if v != nil { - var functionConfiguration FunctionConfiguration - err = json.Unmarshal(*v, &functionConfiguration) - if err != nil { - return err - } - fp.FunctionConfiguration = &functionConfiguration - } - case "type": - if v != nil { - var typeVar TypeBasicFunctionProperties - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - fp.Type = typeVar - } - } - } - - return nil -} - -// BasicFunctionRetrieveDefaultDefinitionParameters parameters used to specify the type of function to retrieve the -// default definition for. -type BasicFunctionRetrieveDefaultDefinitionParameters interface { - AsAzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, bool) - AsJavaScriptFunctionRetrieveDefaultDefinitionParameters() (*JavaScriptFunctionRetrieveDefaultDefinitionParameters, bool) - AsFunctionRetrieveDefaultDefinitionParameters() (*FunctionRetrieveDefaultDefinitionParameters, bool) -} - -// FunctionRetrieveDefaultDefinitionParameters parameters used to specify the type of function to retrieve the -// default definition for. -type FunctionRetrieveDefaultDefinitionParameters struct { - // BindingType - Possible values include: 'BindingTypeFunctionRetrieveDefaultDefinitionParameters', 'BindingTypeMicrosoftMachineLearningWebService', 'BindingTypeMicrosoftStreamAnalyticsJavascriptUdf' - BindingType BindingType `json:"bindingType,omitempty"` -} - -func unmarshalBasicFunctionRetrieveDefaultDefinitionParameters(body []byte) (BasicFunctionRetrieveDefaultDefinitionParameters, error) { - var m map[string]interface{} - err := json.Unmarshal(body, &m) - if err != nil { - return nil, err - } - - switch m["bindingType"] { - case string(BindingTypeMicrosoftMachineLearningWebService): - var amlwsfrddp AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters - err := json.Unmarshal(body, &amlwsfrddp) - return amlwsfrddp, err - case string(BindingTypeMicrosoftStreamAnalyticsJavascriptUdf): - var jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters - err := json.Unmarshal(body, &jsfrddp) - return jsfrddp, err - default: - var frddp FunctionRetrieveDefaultDefinitionParameters - err := json.Unmarshal(body, &frddp) - return frddp, err - } -} -func unmarshalBasicFunctionRetrieveDefaultDefinitionParametersArray(body []byte) ([]BasicFunctionRetrieveDefaultDefinitionParameters, error) { - var rawMessages []*json.RawMessage - err := json.Unmarshal(body, &rawMessages) - if err != nil { - return nil, err - } - - frddpArray := make([]BasicFunctionRetrieveDefaultDefinitionParameters, len(rawMessages)) - - for index, rawMessage := range rawMessages { - frddp, err := unmarshalBasicFunctionRetrieveDefaultDefinitionParameters(*rawMessage) - if err != nil { - return nil, err - } - frddpArray[index] = frddp - } - return frddpArray, nil -} - -// MarshalJSON is the custom marshaler for FunctionRetrieveDefaultDefinitionParameters. 
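Not every union in this file keys on "type": the retrieve-default-definition parameters above switch on "bindingType" instead. A tiny helper that extracts whichever discriminator key a union uses, purely illustrative:

package main

import (
	"encoding/json"
	"fmt"
)

// discriminator pulls the named key out of a JSON object, covering both the
// "type" unions and the "bindingType" union seen in
// unmarshalBasicFunctionRetrieveDefaultDefinitionParameters.
func discriminator(body []byte, key string) (string, error) {
	var m map[string]interface{}
	if err := json.Unmarshal(body, &m); err != nil {
		return "", err
	}
	s, _ := m[key].(string) // a missing or non-string key falls through to ""
	return s, nil
}

func main() {
	body := []byte(`{"bindingType":"Microsoft.StreamAnalytics/JavascriptUdf"}`)
	k, err := discriminator(body, "bindingType")
	fmt.Println(k, err)
}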
-func (frddp FunctionRetrieveDefaultDefinitionParameters) MarshalJSON() ([]byte, error) { - frddp.BindingType = BindingTypeFunctionRetrieveDefaultDefinitionParameters - objectMap := make(map[string]interface{}) - if frddp.BindingType != "" { - objectMap["bindingType"] = frddp.BindingType - } - return json.Marshal(objectMap) -} - -// AsAzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for FunctionRetrieveDefaultDefinitionParameters. -func (frddp FunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, bool) { - return nil, false -} - -// AsJavaScriptFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for FunctionRetrieveDefaultDefinitionParameters. -func (frddp FunctionRetrieveDefaultDefinitionParameters) AsJavaScriptFunctionRetrieveDefaultDefinitionParameters() (*JavaScriptFunctionRetrieveDefaultDefinitionParameters, bool) { - return nil, false -} - -// AsFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for FunctionRetrieveDefaultDefinitionParameters. -func (frddp FunctionRetrieveDefaultDefinitionParameters) AsFunctionRetrieveDefaultDefinitionParameters() (*FunctionRetrieveDefaultDefinitionParameters, bool) { - return &frddp, true -} - -// AsBasicFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for FunctionRetrieveDefaultDefinitionParameters. -func (frddp FunctionRetrieveDefaultDefinitionParameters) AsBasicFunctionRetrieveDefaultDefinitionParameters() (BasicFunctionRetrieveDefaultDefinitionParameters, bool) { - return &frddp, true -} - -// FunctionsTestFuture an abstraction for monitoring and retrieving the results of a long-running -// operation. -type FunctionsTestFuture struct { - azure.FutureAPI - // Result returns the result of the asynchronous operation. - // If the operation has not completed it will return an error. - Result func(FunctionsClient) (ResourceTestStatus, error) -} - -// UnmarshalJSON is the custom unmarshaller for CreateFuture. -func (future *FunctionsTestFuture) UnmarshalJSON(body []byte) error { - var azFuture azure.Future - if err := json.Unmarshal(body, &azFuture); err != nil { - return err - } - future.FutureAPI = &azFuture - future.Result = future.result - return nil -} - -// result is the default implementation for FunctionsTestFuture.Result. 
-func (future *FunctionsTestFuture) result(client FunctionsClient) (rts ResourceTestStatus, err error) { - var done bool - done, err = future.DoneWithContext(context.Background(), client) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsTestFuture", "Result", future.Response(), "Polling failure") - return - } - if !done { - rts.Response.Response = future.Response() - err = azure.NewAsyncOpIncompleteError("streamanalytics.FunctionsTestFuture") - return - } - sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) - if rts.Response.Response, err = future.GetResult(sender); err == nil && rts.Response.Response.StatusCode != http.StatusNoContent { - rts, err = client.TestResponder(rts.Response.Response) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.FunctionsTestFuture", "Result", rts.Response.Response, "Failure responding to request") - } - } - return -} - -// Identity describes how identity is verified -type Identity struct { - // TenantID - The identity tenantId - TenantID *string `json:"tenantId,omitempty"` - // PrincipalID - The identity principal ID - PrincipalID *string `json:"principalId,omitempty"` - // Type - The identity type - Type *string `json:"type,omitempty"` -} - -// Input an input object, containing all information associated with the named input. All inputs are -// contained under a streaming job. -type Input struct { - autorest.Response `json:"-"` - // Properties - The properties that are associated with an input. Required on PUT (CreateOrReplace) requests. - Properties BasicInputProperties `json:"properties,omitempty"` - // ID - READ-ONLY; Resource Id - ID *string `json:"id,omitempty"` - // Name - Resource name - Name *string `json:"name,omitempty"` - // Type - READ-ONLY; Resource type - Type *string `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for Input. -func (i Input) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - objectMap["properties"] = i.Properties - if i.Name != nil { - objectMap["name"] = i.Name - } - return json.Marshal(objectMap) -} - -// UnmarshalJSON is the custom unmarshaler for Input struct. -func (i *Input) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - properties, err := unmarshalBasicInputProperties(*v) - if err != nil { - return err - } - i.Properties = properties - } - case "id": - if v != nil { - var ID string - err = json.Unmarshal(*v, &ID) - if err != nil { - return err - } - i.ID = &ID - } - case "name": - if v != nil { - var name string - err = json.Unmarshal(*v, &name) - if err != nil { - return err - } - i.Name = &name - } - case "type": - if v != nil { - var typeVar string - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - i.Type = &typeVar - } - } - } - - return nil -} - -// InputListResult object containing a list of inputs under a streaming job. -type InputListResult struct { - autorest.Response `json:"-"` - // Value - READ-ONLY; A list of inputs under a streaming job. Populated by a 'List' operation. - Value *[]Input `json:"value,omitempty"` - // NextLink - READ-ONLY; The link (url) to the next page of results. - NextLink *string `json:"nextLink,omitempty"` -} - -// MarshalJSON is the custom marshaler for InputListResult. 
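Several marshalers in this hunk (Diagnostics, ErrorResponse, the ListResult types) deliberately serialize to an empty object: every field is READ-ONLY, server-populated state that must never be echoed back on a PUT. A minimal sketch of that convention with an invented type:

package main

import (
	"encoding/json"
	"fmt"
)

type listResult struct {
	// Both fields are server-populated; clients must not send them back.
	Value    []string `json:"value,omitempty"`
	NextLink string   `json:"nextLink,omitempty"`
}

// MarshalJSON mirrors the removed InputListResult/Diagnostics marshalers:
// encoding a read-only type always yields "{}", keeping PUT bodies clean.
func (listResult) MarshalJSON() ([]byte, error) {
	return json.Marshal(map[string]interface{}{})
}

func main() {
	b, _ := json.Marshal(listResult{Value: []string{"in1"}, NextLink: "p2"})
	fmt.Println(string(b)) // prints {}
}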
-func (ilr InputListResult) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - return json.Marshal(objectMap) -} - -// InputListResultIterator provides access to a complete listing of Input values. -type InputListResultIterator struct { - i int - page InputListResultPage -} - -// NextWithContext advances to the next value. If there was an error making -// the request the iterator does not advance and the error is returned. -func (iter *InputListResultIterator) NextWithContext(ctx context.Context) (err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/InputListResultIterator.NextWithContext") - defer func() { - sc := -1 - if iter.Response().Response.Response != nil { - sc = iter.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - iter.i++ - if iter.i < len(iter.page.Values()) { - return nil - } - err = iter.page.NextWithContext(ctx) - if err != nil { - iter.i-- - return err - } - iter.i = 0 - return nil -} - -// Next advances to the next value. If there was an error making -// the request the iterator does not advance and the error is returned. -// Deprecated: Use NextWithContext() instead. -func (iter *InputListResultIterator) Next() error { - return iter.NextWithContext(context.Background()) -} - -// NotDone returns true if the enumeration should be started or is not yet complete. -func (iter InputListResultIterator) NotDone() bool { - return iter.page.NotDone() && iter.i < len(iter.page.Values()) -} - -// Response returns the raw server response from the last page request. -func (iter InputListResultIterator) Response() InputListResult { - return iter.page.Response() -} - -// Value returns the current value or a zero-initialized value if the -// iterator has advanced beyond the end of the collection. -func (iter InputListResultIterator) Value() Input { - if !iter.page.NotDone() { - return Input{} - } - return iter.page.Values()[iter.i] -} - -// Creates a new instance of the InputListResultIterator type. -func NewInputListResultIterator(page InputListResultPage) InputListResultIterator { - return InputListResultIterator{page: page} -} - -// IsEmpty returns true if the ListResult contains no values. -func (ilr InputListResult) IsEmpty() bool { - return ilr.Value == nil || len(*ilr.Value) == 0 -} - -// hasNextLink returns true if the NextLink is not empty. -func (ilr InputListResult) hasNextLink() bool { - return ilr.NextLink != nil && len(*ilr.NextLink) != 0 -} - -// inputListResultPreparer prepares a request to retrieve the next set of results. -// It returns nil if no more results exist. -func (ilr InputListResult) inputListResultPreparer(ctx context.Context) (*http.Request, error) { - if !ilr.hasNextLink() { - return nil, nil - } - return autorest.Prepare((&http.Request{}).WithContext(ctx), - autorest.AsJSON(), - autorest.AsGet(), - autorest.WithBaseURL(to.String(ilr.NextLink))) -} - -// InputListResultPage contains a page of Input values. -type InputListResultPage struct { - fn func(context.Context, InputListResult) (InputListResult, error) - ilr InputListResult -} - -// NextWithContext advances to the next page of values. If there was an error making -// the request the page does not advance and the error is returned. 
-func (page *InputListResultPage) NextWithContext(ctx context.Context) (err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/InputListResultPage.NextWithContext") - defer func() { - sc := -1 - if page.Response().Response.Response != nil { - sc = page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - for { - next, err := page.fn(ctx, page.ilr) - if err != nil { - return err - } - page.ilr = next - if !next.hasNextLink() || !next.IsEmpty() { - break - } - } - return nil -} - -// Next advances to the next page of values. If there was an error making -// the request the page does not advance and the error is returned. -// Deprecated: Use NextWithContext() instead. -func (page *InputListResultPage) Next() error { - return page.NextWithContext(context.Background()) -} - -// NotDone returns true if the page enumeration should be started or is not yet complete. -func (page InputListResultPage) NotDone() bool { - return !page.ilr.IsEmpty() -} - -// Response returns the raw server response from the last page request. -func (page InputListResultPage) Response() InputListResult { - return page.ilr -} - -// Values returns the slice of values for the current page or nil if there are no values. -func (page InputListResultPage) Values() []Input { - if page.ilr.IsEmpty() { - return nil - } - return *page.ilr.Value -} - -// Creates a new instance of the InputListResultPage type. -func NewInputListResultPage(cur InputListResult, getNextPage func(context.Context, InputListResult) (InputListResult, error)) InputListResultPage { - return InputListResultPage{ - fn: getNextPage, - ilr: cur, - } -} - -// BasicInputProperties the properties that are associated with an input. -type BasicInputProperties interface { - AsReferenceInputProperties() (*ReferenceInputProperties, bool) - AsStreamInputProperties() (*StreamInputProperties, bool) - AsInputProperties() (*InputProperties, bool) -} - -// InputProperties the properties that are associated with an input. -type InputProperties struct { - // Serialization - Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. - Serialization BasicSerialization `json:"serialization,omitempty"` - // Diagnostics - READ-ONLY; Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. - Diagnostics *Diagnostics `json:"diagnostics,omitempty"` - // Etag - READ-ONLY; The current entity tag for the input. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. 
- Etag *string `json:"etag,omitempty"` - // Compression - Describes how input data is compressed - Compression *Compression `json:"compression,omitempty"` - // PartitionKey - partitionKey Describes a key in the input data which is used for partitioning the input data - PartitionKey *string `json:"partitionKey,omitempty"` - // Type - Possible values include: 'TypeBasicInputPropertiesTypeInputProperties', 'TypeBasicInputPropertiesTypeReference', 'TypeBasicInputPropertiesTypeStream' - Type TypeBasicInputProperties `json:"type,omitempty"` -} - -func unmarshalBasicInputProperties(body []byte) (BasicInputProperties, error) { - var m map[string]interface{} - err := json.Unmarshal(body, &m) - if err != nil { - return nil, err - } - - switch m["type"] { - case string(TypeBasicInputPropertiesTypeReference): - var rip ReferenceInputProperties - err := json.Unmarshal(body, &rip) - return rip, err - case string(TypeBasicInputPropertiesTypeStream): - var sip StreamInputProperties - err := json.Unmarshal(body, &sip) - return sip, err - default: - var IP InputProperties - err := json.Unmarshal(body, &IP) - return IP, err - } -} -func unmarshalBasicInputPropertiesArray(body []byte) ([]BasicInputProperties, error) { - var rawMessages []*json.RawMessage - err := json.Unmarshal(body, &rawMessages) - if err != nil { - return nil, err - } - - IPArray := make([]BasicInputProperties, len(rawMessages)) - - for index, rawMessage := range rawMessages { - IP, err := unmarshalBasicInputProperties(*rawMessage) - if err != nil { - return nil, err - } - IPArray[index] = IP - } - return IPArray, nil -} - -// MarshalJSON is the custom marshaler for InputProperties. -func (IP InputProperties) MarshalJSON() ([]byte, error) { - IP.Type = TypeBasicInputPropertiesTypeInputProperties - objectMap := make(map[string]interface{}) - objectMap["serialization"] = IP.Serialization - if IP.Compression != nil { - objectMap["compression"] = IP.Compression - } - if IP.PartitionKey != nil { - objectMap["partitionKey"] = IP.PartitionKey - } - if IP.Type != "" { - objectMap["type"] = IP.Type - } - return json.Marshal(objectMap) -} - -// AsReferenceInputProperties is the BasicInputProperties implementation for InputProperties. -func (IP InputProperties) AsReferenceInputProperties() (*ReferenceInputProperties, bool) { - return nil, false -} - -// AsStreamInputProperties is the BasicInputProperties implementation for InputProperties. -func (IP InputProperties) AsStreamInputProperties() (*StreamInputProperties, bool) { - return nil, false -} - -// AsInputProperties is the BasicInputProperties implementation for InputProperties. -func (IP InputProperties) AsInputProperties() (*InputProperties, bool) { - return &IP, true -} - -// AsBasicInputProperties is the BasicInputProperties implementation for InputProperties. -func (IP InputProperties) AsBasicInputProperties() (BasicInputProperties, bool) { - return &IP, true -} - -// UnmarshalJSON is the custom unmarshaler for InputProperties struct. 
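unmarshalBasicInputProperties above is the generated recipe for decoding a polymorphic payload: decode into a generic map, read the "type" discriminator, then re-decode the whole body into the matching concrete struct. The runnable sketch below shows the same technique in miniature; the type names and the "Stream"/"Reference" discriminator strings are simplified stand-ins rather than the SDK's actual constants.

package main

import (
	"encoding/json"
	"fmt"
)

// inputProps plays the role of BasicInputProperties; streamProps and
// referenceProps stand in for the two concrete implementations.
type inputProps interface{ isInputProps() }

type streamProps struct {
	Type string `json:"type"`
}

func (streamProps) isInputProps() {}

type referenceProps struct {
	Type string `json:"type"`
}

func (referenceProps) isInputProps() {}

// unmarshalInputProps mirrors unmarshalBasicInputProperties: peek at the
// discriminator, then unmarshal the full body into the selected type.
func unmarshalInputProps(body []byte) (inputProps, error) {
	var m map[string]interface{}
	if err := json.Unmarshal(body, &m); err != nil {
		return nil, err
	}
	switch m["type"] {
	case "Stream":
		var sp streamProps
		err := json.Unmarshal(body, &sp)
		return sp, err
	case "Reference":
		var rp referenceProps
		err := json.Unmarshal(body, &rp)
		return rp, err
	default:
		return nil, fmt.Errorf("unknown input properties type %q", m["type"])
	}
}

func main() {
	p, err := unmarshalInputProps([]byte(`{"type":"Stream"}`))
	if err != nil {
		panic(err)
	}
	fmt.Printf("%T\n", p) // main.streamProps
}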
-func (IP *InputProperties) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "serialization": - if v != nil { - serialization, err := unmarshalBasicSerialization(*v) - if err != nil { - return err - } - IP.Serialization = serialization - } - case "diagnostics": - if v != nil { - var diagnostics Diagnostics - err = json.Unmarshal(*v, &diagnostics) - if err != nil { - return err - } - IP.Diagnostics = &diagnostics - } - case "etag": - if v != nil { - var etag string - err = json.Unmarshal(*v, &etag) - if err != nil { - return err - } - IP.Etag = &etag - } - case "compression": - if v != nil { - var compression Compression - err = json.Unmarshal(*v, &compression) - if err != nil { - return err - } - IP.Compression = &compression - } - case "partitionKey": - if v != nil { - var partitionKey string - err = json.Unmarshal(*v, &partitionKey) - if err != nil { - return err - } - IP.PartitionKey = &partitionKey - } - case "type": - if v != nil { - var typeVar TypeBasicInputProperties - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - IP.Type = typeVar - } - } - } - - return nil -} - -// InputsTestFuture an abstraction for monitoring and retrieving the results of a long-running operation. -type InputsTestFuture struct { - azure.FutureAPI - // Result returns the result of the asynchronous operation. - // If the operation has not completed it will return an error. - Result func(InputsClient) (ResourceTestStatus, error) -} - -// UnmarshalJSON is the custom unmarshaller for CreateFuture. -func (future *InputsTestFuture) UnmarshalJSON(body []byte) error { - var azFuture azure.Future - if err := json.Unmarshal(body, &azFuture); err != nil { - return err - } - future.FutureAPI = &azFuture - future.Result = future.result - return nil -} - -// result is the default implementation for InputsTestFuture.Result. -func (future *InputsTestFuture) result(client InputsClient) (rts ResourceTestStatus, err error) { - var done bool - done, err = future.DoneWithContext(context.Background(), client) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.InputsTestFuture", "Result", future.Response(), "Polling failure") - return - } - if !done { - rts.Response.Response = future.Response() - err = azure.NewAsyncOpIncompleteError("streamanalytics.InputsTestFuture") - return - } - sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) - if rts.Response.Response, err = future.GetResult(sender); err == nil && rts.Response.Response.StatusCode != http.StatusNoContent { - rts, err = client.TestResponder(rts.Response.Response) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.InputsTestFuture", "Result", rts.Response.Response, "Failure responding to request") - } - } - return -} - -// IoTHubStreamInputDataSource describes an IoT Hub input data source that contains stream data. -type IoTHubStreamInputDataSource struct { - // IoTHubStreamInputDataSourceProperties - The properties that are associated with an IoT Hub input containing stream data. Required on PUT (CreateOrReplace) requests. 
- *IoTHubStreamInputDataSourceProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicStreamInputDataSourceTypeStreamInputDataSource', 'TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs', 'TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicStreamInputDataSource `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for IoTHubStreamInputDataSource. -func (ithsids IoTHubStreamInputDataSource) MarshalJSON() ([]byte, error) { - ithsids.Type = TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs - objectMap := make(map[string]interface{}) - if ithsids.IoTHubStreamInputDataSourceProperties != nil { - objectMap["properties"] = ithsids.IoTHubStreamInputDataSourceProperties - } - if ithsids.Type != "" { - objectMap["type"] = ithsids.Type - } - return json.Marshal(objectMap) -} - -// AsIoTHubStreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource. -func (ithsids IoTHubStreamInputDataSource) AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) { - return &ithsids, true -} - -// AsEventHubV2StreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource. -func (ithsids IoTHubStreamInputDataSource) AsEventHubV2StreamInputDataSource() (*EventHubV2StreamInputDataSource, bool) { - return nil, false -} - -// AsEventHubStreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource. -func (ithsids IoTHubStreamInputDataSource) AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) { - return nil, false -} - -// AsBlobStreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource. -func (ithsids IoTHubStreamInputDataSource) AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) { - return nil, false -} - -// AsStreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource. -func (ithsids IoTHubStreamInputDataSource) AsStreamInputDataSource() (*StreamInputDataSource, bool) { - return nil, false -} - -// AsBasicStreamInputDataSource is the BasicStreamInputDataSource implementation for IoTHubStreamInputDataSource. -func (ithsids IoTHubStreamInputDataSource) AsBasicStreamInputDataSource() (BasicStreamInputDataSource, bool) { - return &ithsids, true -} - -// UnmarshalJSON is the custom unmarshaler for IoTHubStreamInputDataSource struct. -func (ithsids *IoTHubStreamInputDataSource) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var ioTHubStreamInputDataSourceProperties IoTHubStreamInputDataSourceProperties - err = json.Unmarshal(*v, &ioTHubStreamInputDataSourceProperties) - if err != nil { - return err - } - ithsids.IoTHubStreamInputDataSourceProperties = &ioTHubStreamInputDataSourceProperties - } - case "type": - if v != nil { - var typeVar TypeBasicStreamInputDataSource - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - ithsids.Type = typeVar - } - } - } - - return nil -} - -// IoTHubStreamInputDataSourceProperties the properties that are associated with a IoT Hub input containing -// stream data. 
-type IoTHubStreamInputDataSourceProperties struct { - // IotHubNamespace - The name or the URI of the IoT Hub. Required on PUT (CreateOrReplace) requests. - IotHubNamespace *string `json:"iotHubNamespace,omitempty"` - // SharedAccessPolicyName - The shared access policy name for the IoT Hub. This policy must contain at least the Service connect permission. Required on PUT (CreateOrReplace) requests. - SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` - // SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` - // ConsumerGroupName - The name of an IoT Hub Consumer Group that should be used to read events from the IoT Hub. If not specified, the input uses the Iot Hub’s default consumer group. - ConsumerGroupName *string `json:"consumerGroupName,omitempty"` - // Endpoint - The IoT Hub endpoint to connect to (ie. messages/events, messages/operationsMonitoringEvents, etc.). - Endpoint *string `json:"endpoint,omitempty"` -} - -// JavaScriptFunctionBinding the binding to a JavaScript function. -type JavaScriptFunctionBinding struct { - // JavaScriptFunctionBindingProperties - The binding properties associated with a JavaScript function. - *JavaScriptFunctionBindingProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicFunctionBindingTypeFunctionBinding', 'TypeBasicFunctionBindingTypeMicrosoftStreamAnalyticsJavascriptUdf', 'TypeBasicFunctionBindingTypeMicrosoftMachineLearningWebService' - Type TypeBasicFunctionBinding `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for JavaScriptFunctionBinding. -func (jsfb JavaScriptFunctionBinding) MarshalJSON() ([]byte, error) { - jsfb.Type = TypeBasicFunctionBindingTypeMicrosoftStreamAnalyticsJavascriptUdf - objectMap := make(map[string]interface{}) - if jsfb.JavaScriptFunctionBindingProperties != nil { - objectMap["properties"] = jsfb.JavaScriptFunctionBindingProperties - } - if jsfb.Type != "" { - objectMap["type"] = jsfb.Type - } - return json.Marshal(objectMap) -} - -// AsJavaScriptFunctionBinding is the BasicFunctionBinding implementation for JavaScriptFunctionBinding. -func (jsfb JavaScriptFunctionBinding) AsJavaScriptFunctionBinding() (*JavaScriptFunctionBinding, bool) { - return &jsfb, true -} - -// AsAzureMachineLearningWebServiceFunctionBinding is the BasicFunctionBinding implementation for JavaScriptFunctionBinding. -func (jsfb JavaScriptFunctionBinding) AsAzureMachineLearningWebServiceFunctionBinding() (*AzureMachineLearningWebServiceFunctionBinding, bool) { - return nil, false -} - -// AsFunctionBinding is the BasicFunctionBinding implementation for JavaScriptFunctionBinding. -func (jsfb JavaScriptFunctionBinding) AsFunctionBinding() (*FunctionBinding, bool) { - return nil, false -} - -// AsBasicFunctionBinding is the BasicFunctionBinding implementation for JavaScriptFunctionBinding. -func (jsfb JavaScriptFunctionBinding) AsBasicFunctionBinding() (BasicFunctionBinding, bool) { - return &jsfb, true -} - -// UnmarshalJSON is the custom unmarshaler for JavaScriptFunctionBinding struct. 
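JavaScriptFunctionBinding.MarshalJSON above is the write-side half of the discriminator pattern: it overwrites Type with the single legal value before encoding, so callers cannot emit a binding with a missing or wrong "type". A trimmed sketch, assuming the discriminator string 'Microsoft.StreamAnalytics/JavascriptUdf' that the constant names above document; the struct itself is invented for the demo.

package main

import (
	"encoding/json"
	"fmt"
)

// jsBinding is a cut-down stand-in for JavaScriptFunctionBinding; only the
// script and the discriminator survive the trim.
type jsBinding struct {
	Script string
}

// MarshalJSON pins the type discriminator on the way out, as the generated
// MarshalJSON methods above do, regardless of what the caller set.
func (b jsBinding) MarshalJSON() ([]byte, error) {
	objectMap := map[string]interface{}{
		"type": "Microsoft.StreamAnalytics/JavascriptUdf", // assumed value
		"properties": map[string]interface{}{
			"script": b.Script,
		},
	}
	return json.Marshal(objectMap)
}

func main() {
	out, err := json.Marshal(jsBinding{Script: "function (x, y) { return x + y; }"})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}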
-func (jsfb *JavaScriptFunctionBinding) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var javaScriptFunctionBindingProperties JavaScriptFunctionBindingProperties - err = json.Unmarshal(*v, &javaScriptFunctionBindingProperties) - if err != nil { - return err - } - jsfb.JavaScriptFunctionBindingProperties = &javaScriptFunctionBindingProperties - } - case "type": - if v != nil { - var typeVar TypeBasicFunctionBinding - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - jsfb.Type = typeVar - } - } - } - - return nil -} - -// JavaScriptFunctionBindingProperties the binding properties associated with a JavaScript function. -type JavaScriptFunctionBindingProperties struct { - // Script - The JavaScript code containing a single function definition. For example: 'function (x, y) { return x + y; }' - Script *string `json:"script,omitempty"` -} - -// JavaScriptFunctionBindingRetrievalProperties the binding retrieval properties associated with a -// JavaScript function. -type JavaScriptFunctionBindingRetrievalProperties struct { - // Script - The JavaScript code containing a single function definition. For example: 'function (x, y) { return x + y; }'. - Script *string `json:"script,omitempty"` - // UdfType - The function type. Possible values include: 'UdfTypeScalar' - UdfType UdfType `json:"udfType,omitempty"` -} - -// JavaScriptFunctionRetrieveDefaultDefinitionParameters the parameters needed to retrieve the default -// function definition for a JavaScript function. -type JavaScriptFunctionRetrieveDefaultDefinitionParameters struct { - // JavaScriptFunctionBindingRetrievalProperties - The binding retrieval properties associated with a JavaScript function. - *JavaScriptFunctionBindingRetrievalProperties `json:"bindingRetrievalProperties,omitempty"` - // BindingType - Possible values include: 'BindingTypeFunctionRetrieveDefaultDefinitionParameters', 'BindingTypeMicrosoftMachineLearningWebService', 'BindingTypeMicrosoftStreamAnalyticsJavascriptUdf' - BindingType BindingType `json:"bindingType,omitempty"` -} - -// MarshalJSON is the custom marshaler for JavaScriptFunctionRetrieveDefaultDefinitionParameters. -func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) MarshalJSON() ([]byte, error) { - jsfrddp.BindingType = BindingTypeMicrosoftStreamAnalyticsJavascriptUdf - objectMap := make(map[string]interface{}) - if jsfrddp.JavaScriptFunctionBindingRetrievalProperties != nil { - objectMap["bindingRetrievalProperties"] = jsfrddp.JavaScriptFunctionBindingRetrievalProperties - } - if jsfrddp.BindingType != "" { - objectMap["bindingType"] = jsfrddp.BindingType - } - return json.Marshal(objectMap) -} - -// AsAzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for JavaScriptFunctionRetrieveDefaultDefinitionParameters. -func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) AsAzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters() (*AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, bool) { - return nil, false -} - -// AsJavaScriptFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for JavaScriptFunctionRetrieveDefaultDefinitionParameters. 
-func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) AsJavaScriptFunctionRetrieveDefaultDefinitionParameters() (*JavaScriptFunctionRetrieveDefaultDefinitionParameters, bool) { - return &jsfrddp, true -} - -// AsFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for JavaScriptFunctionRetrieveDefaultDefinitionParameters. -func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) AsFunctionRetrieveDefaultDefinitionParameters() (*FunctionRetrieveDefaultDefinitionParameters, bool) { - return nil, false -} - -// AsBasicFunctionRetrieveDefaultDefinitionParameters is the BasicFunctionRetrieveDefaultDefinitionParameters implementation for JavaScriptFunctionRetrieveDefaultDefinitionParameters. -func (jsfrddp JavaScriptFunctionRetrieveDefaultDefinitionParameters) AsBasicFunctionRetrieveDefaultDefinitionParameters() (BasicFunctionRetrieveDefaultDefinitionParameters, bool) { - return &jsfrddp, true -} - -// UnmarshalJSON is the custom unmarshaler for JavaScriptFunctionRetrieveDefaultDefinitionParameters struct. -func (jsfrddp *JavaScriptFunctionRetrieveDefaultDefinitionParameters) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "bindingRetrievalProperties": - if v != nil { - var javaScriptFunctionBindingRetrievalProperties JavaScriptFunctionBindingRetrievalProperties - err = json.Unmarshal(*v, &javaScriptFunctionBindingRetrievalProperties) - if err != nil { - return err - } - jsfrddp.JavaScriptFunctionBindingRetrievalProperties = &javaScriptFunctionBindingRetrievalProperties - } - case "bindingType": - if v != nil { - var bindingType BindingType - err = json.Unmarshal(*v, &bindingType) - if err != nil { - return err - } - jsfrddp.BindingType = bindingType - } - } - } - - return nil -} - -// JobStorageAccount the properties that are associated with an Azure Storage account with MSI -type JobStorageAccount struct { - // AuthenticationMode - Authentication Mode. Possible values include: 'AuthenticationModeMsi', 'AuthenticationModeUserToken', 'AuthenticationModeConnectionString' - AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"` - // AccountName - The name of the Azure Storage account. Required on PUT (CreateOrReplace) requests. - AccountName *string `json:"accountName,omitempty"` - // AccountKey - The account key for the Azure Storage account. Required on PUT (CreateOrReplace) requests. - AccountKey *string `json:"accountKey,omitempty"` -} - -// JSONSerialization describes how data from an input is serialized or how data is serialized when written -// to an output in JSON format. -type JSONSerialization struct { - // JSONSerializationProperties - The properties that are associated with the JSON serialization type. Required on PUT (CreateOrReplace) requests. - *JSONSerializationProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeSerialization', 'TypeAvro', 'TypeJSON', 'TypeCsv', 'TypeParquet' - Type Type `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for JSONSerialization. 
-func (js JSONSerialization) MarshalJSON() ([]byte, error) { - js.Type = TypeJSON - objectMap := make(map[string]interface{}) - if js.JSONSerializationProperties != nil { - objectMap["properties"] = js.JSONSerializationProperties - } - if js.Type != "" { - objectMap["type"] = js.Type - } - return json.Marshal(objectMap) -} - -// AsAvroSerialization is the BasicSerialization implementation for JSONSerialization. -func (js JSONSerialization) AsAvroSerialization() (*AvroSerialization, bool) { - return nil, false -} - -// AsJSONSerialization is the BasicSerialization implementation for JSONSerialization. -func (js JSONSerialization) AsJSONSerialization() (*JSONSerialization, bool) { - return &js, true -} - -// AsCsvSerialization is the BasicSerialization implementation for JSONSerialization. -func (js JSONSerialization) AsCsvSerialization() (*CsvSerialization, bool) { - return nil, false -} - -// AsParquetSerialization is the BasicSerialization implementation for JSONSerialization. -func (js JSONSerialization) AsParquetSerialization() (*ParquetSerialization, bool) { - return nil, false -} - -// AsSerialization is the BasicSerialization implementation for JSONSerialization. -func (js JSONSerialization) AsSerialization() (*Serialization, bool) { - return nil, false -} - -// AsBasicSerialization is the BasicSerialization implementation for JSONSerialization. -func (js JSONSerialization) AsBasicSerialization() (BasicSerialization, bool) { - return &js, true -} - -// UnmarshalJSON is the custom unmarshaler for JSONSerialization struct. -func (js *JSONSerialization) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var JSONSerializationProperties JSONSerializationProperties - err = json.Unmarshal(*v, &JSONSerializationProperties) - if err != nil { - return err - } - js.JSONSerializationProperties = &JSONSerializationProperties - } - case "type": - if v != nil { - var typeVar Type - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - js.Type = typeVar - } - } - } - - return nil -} - -// JSONSerializationProperties the properties that are associated with the JSON serialization type. -type JSONSerializationProperties struct { - // Encoding - Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. Possible values include: 'EncodingUTF8' - Encoding Encoding `json:"encoding,omitempty"` - // Format - This property only applies to JSON serialization of outputs only. It is not applicable to inputs. This property specifies the format of the JSON the output will be written in. The currently supported values are 'lineSeparated' indicating the output will be formatted by having each JSON object separated by a new line and 'array' indicating the output will be formatted as an array of JSON objects. Default value is 'lineSeparated' if left null. Possible values include: 'JSONOutputSerializationFormatLineSeparated', 'JSONOutputSerializationFormatArray' - Format JSONOutputSerializationFormat `json:"format,omitempty"` -} - -// OAuthBasedDataSourceProperties the properties that are associated with data sources that use OAuth as -// their authentication model. 
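The Format field on JSONSerializationProperties is easier to review with the two wire shapes side by side: 'lineSeparated' writes one JSON object per line, while 'array' wraps the run of objects in a single JSON array. A small sketch that prints both shapes; the event struct is invented.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

type event struct {
	ID int `json:"id"`
}

func main() {
	events := []event{{1}, {2}}

	// "lineSeparated": one JSON object per line (the default when Format is unset).
	var lines []string
	for _, e := range events {
		b, _ := json.Marshal(e)
		lines = append(lines, string(b))
	}
	fmt.Println(strings.Join(lines, "\n")) // {"id":1} then {"id":2}

	// "array": a single JSON array of objects.
	b, _ := json.Marshal(events)
	fmt.Println(string(b)) // [{"id":1},{"id":2}]
}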
-type OAuthBasedDataSourceProperties struct { - // RefreshToken - A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only obtainable via the Azure Portal. It is recommended to put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests. - RefreshToken *string `json:"refreshToken,omitempty"` - // TokenUserPrincipalName - The user principal name (UPN) of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. - TokenUserPrincipalName *string `json:"tokenUserPrincipalName,omitempty"` - // TokenUserDisplayName - The user display name of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. - TokenUserDisplayName *string `json:"tokenUserDisplayName,omitempty"` -} - -// Operation a Stream Analytics REST API operation -type Operation struct { - // Name - READ-ONLY; The name of the operation being performed on this particular object. - Name *string `json:"name,omitempty"` - // IsDataAction - Indicates whether the operation is a data action - IsDataAction *bool `json:"isDataAction,omitempty"` - // Display - READ-ONLY; Contains the localized display information for this particular operation / action. - Display *OperationDisplay `json:"display,omitempty"` -} - -// MarshalJSON is the custom marshaler for Operation. -func (o Operation) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - if o.IsDataAction != nil { - objectMap["isDataAction"] = o.IsDataAction - } - return json.Marshal(objectMap) -} - -// OperationDisplay contains the localized display information for this particular operation / action. -type OperationDisplay struct { - // Provider - READ-ONLY; The localized friendly form of the resource provider name. - Provider *string `json:"provider,omitempty"` - // Resource - READ-ONLY; The localized friendly form of the resource type related to this action/operation. - Resource *string `json:"resource,omitempty"` - // Operation - READ-ONLY; The localized friendly name for the operation. - Operation *string `json:"operation,omitempty"` - // Description - READ-ONLY; The localized friendly description for the operation. - Description *string `json:"description,omitempty"` -} - -// MarshalJSON is the custom marshaler for OperationDisplay. -func (o OperationDisplay) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - return json.Marshal(objectMap) -} - -// OperationListResult result of the request to list Stream Analytics operations. It contains a list of -// operations and a URL link to get the next set of results. -type OperationListResult struct { - autorest.Response `json:"-"` - // Value - READ-ONLY; List of Stream Analytics operations supported by the Microsoft.StreamAnalytics resource provider. - Value *[]Operation `json:"value,omitempty"` - // NextLink - READ-ONLY; URL to get the next set of operation list results if there are any. - NextLink *string `json:"nextLink,omitempty"` -} - -// MarshalJSON is the custom marshaler for OperationListResult. 
-func (olr OperationListResult) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - return json.Marshal(objectMap) -} - -// OperationListResultIterator provides access to a complete listing of Operation values. -type OperationListResultIterator struct { - i int - page OperationListResultPage -} - -// NextWithContext advances to the next value. If there was an error making -// the request the iterator does not advance and the error is returned. -func (iter *OperationListResultIterator) NextWithContext(ctx context.Context) (err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/OperationListResultIterator.NextWithContext") - defer func() { - sc := -1 - if iter.Response().Response.Response != nil { - sc = iter.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - iter.i++ - if iter.i < len(iter.page.Values()) { - return nil - } - err = iter.page.NextWithContext(ctx) - if err != nil { - iter.i-- - return err - } - iter.i = 0 - return nil -} - -// Next advances to the next value. If there was an error making -// the request the iterator does not advance and the error is returned. -// Deprecated: Use NextWithContext() instead. -func (iter *OperationListResultIterator) Next() error { - return iter.NextWithContext(context.Background()) -} - -// NotDone returns true if the enumeration should be started or is not yet complete. -func (iter OperationListResultIterator) NotDone() bool { - return iter.page.NotDone() && iter.i < len(iter.page.Values()) -} - -// Response returns the raw server response from the last page request. -func (iter OperationListResultIterator) Response() OperationListResult { - return iter.page.Response() -} - -// Value returns the current value or a zero-initialized value if the -// iterator has advanced beyond the end of the collection. -func (iter OperationListResultIterator) Value() Operation { - if !iter.page.NotDone() { - return Operation{} - } - return iter.page.Values()[iter.i] -} - -// Creates a new instance of the OperationListResultIterator type. -func NewOperationListResultIterator(page OperationListResultPage) OperationListResultIterator { - return OperationListResultIterator{page: page} -} - -// IsEmpty returns true if the ListResult contains no values. -func (olr OperationListResult) IsEmpty() bool { - return olr.Value == nil || len(*olr.Value) == 0 -} - -// hasNextLink returns true if the NextLink is not empty. -func (olr OperationListResult) hasNextLink() bool { - return olr.NextLink != nil && len(*olr.NextLink) != 0 -} - -// operationListResultPreparer prepares a request to retrieve the next set of results. -// It returns nil if no more results exist. -func (olr OperationListResult) operationListResultPreparer(ctx context.Context) (*http.Request, error) { - if !olr.hasNextLink() { - return nil, nil - } - return autorest.Prepare((&http.Request{}).WithContext(ctx), - autorest.AsJSON(), - autorest.AsGet(), - autorest.WithBaseURL(to.String(olr.NextLink))) -} - -// OperationListResultPage contains a page of Operation values. -type OperationListResultPage struct { - fn func(context.Context, OperationListResult) (OperationListResult, error) - olr OperationListResult -} - -// NextWithContext advances to the next page of values. If there was an error making -// the request the page does not advance and the error is returned. 
-func (page *OperationListResultPage) NextWithContext(ctx context.Context) (err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/OperationListResultPage.NextWithContext") - defer func() { - sc := -1 - if page.Response().Response.Response != nil { - sc = page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - for { - next, err := page.fn(ctx, page.olr) - if err != nil { - return err - } - page.olr = next - if !next.hasNextLink() || !next.IsEmpty() { - break - } - } - return nil -} - -// Next advances to the next page of values. If there was an error making -// the request the page does not advance and the error is returned. -// Deprecated: Use NextWithContext() instead. -func (page *OperationListResultPage) Next() error { - return page.NextWithContext(context.Background()) -} - -// NotDone returns true if the page enumeration should be started or is not yet complete. -func (page OperationListResultPage) NotDone() bool { - return !page.olr.IsEmpty() -} - -// Response returns the raw server response from the last page request. -func (page OperationListResultPage) Response() OperationListResult { - return page.olr -} - -// Values returns the slice of values for the current page or nil if there are no values. -func (page OperationListResultPage) Values() []Operation { - if page.olr.IsEmpty() { - return nil - } - return *page.olr.Value -} - -// Creates a new instance of the OperationListResultPage type. -func NewOperationListResultPage(cur OperationListResult, getNextPage func(context.Context, OperationListResult) (OperationListResult, error)) OperationListResultPage { - return OperationListResultPage{ - fn: getNextPage, - olr: cur, - } -} - -// Output an output object, containing all information associated with the named output. All outputs are -// contained under a streaming job. -type Output struct { - autorest.Response `json:"-"` - // OutputProperties - The properties that are associated with an output. Required on PUT (CreateOrReplace) requests. - *OutputProperties `json:"properties,omitempty"` - // ID - READ-ONLY; Resource Id - ID *string `json:"id,omitempty"` - // Name - Resource name - Name *string `json:"name,omitempty"` - // Type - READ-ONLY; Resource type - Type *string `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for Output. -func (o Output) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - if o.OutputProperties != nil { - objectMap["properties"] = o.OutputProperties - } - if o.Name != nil { - objectMap["name"] = o.Name - } - return json.Marshal(objectMap) -} - -// UnmarshalJSON is the custom unmarshaler for Output struct. 
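The Iterator/Page pairs in this file (InputListResult above, OperationListResult here, OutputListResult below) all expose the same consumption surface: NotDone, Values, and NextWithContext. The sketch below shows the canonical loop against an in-memory pager; fakePager and its pages are invented, and a real page advances by following nextLink over HTTP rather than bumping an index.

package main

import (
	"context"
	"fmt"
)

// fakePager mimics the NotDone/Values/NextWithContext surface of the
// generated *ListResultPage types, backed by a slice of pages.
type fakePager struct {
	pages [][]string
	i     int
}

func (p *fakePager) NotDone() bool    { return p.i < len(p.pages) }
func (p *fakePager) Values() []string { return p.pages[p.i] }

// NextWithContext advances to the next in-memory page; the real method
// issues the nextLink request here instead.
func (p *fakePager) NextWithContext(ctx context.Context) error {
	p.i++
	return ctx.Err()
}

func main() {
	pager := &fakePager{pages: [][]string{{"input-a", "input-b"}, {"input-c"}}}
	ctx := context.Background()

	// The canonical consumption loop for these generated pagers.
	for pager.NotDone() {
		for _, v := range pager.Values() {
			fmt.Println(v)
		}
		if err := pager.NextWithContext(ctx); err != nil {
			panic(err)
		}
	}
}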
-func (o *Output) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var outputProperties OutputProperties - err = json.Unmarshal(*v, &outputProperties) - if err != nil { - return err - } - o.OutputProperties = &outputProperties - } - case "id": - if v != nil { - var ID string - err = json.Unmarshal(*v, &ID) - if err != nil { - return err - } - o.ID = &ID - } - case "name": - if v != nil { - var name string - err = json.Unmarshal(*v, &name) - if err != nil { - return err - } - o.Name = &name - } - case "type": - if v != nil { - var typeVar string - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - o.Type = &typeVar - } - } - } - - return nil -} - -// BasicOutputDataSource describes the data source that output will be written to. -type BasicOutputDataSource interface { - AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) - AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) - AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) - AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) - AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) - AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) - AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) - AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) - AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) - AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) - AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) - AsBlobOutputDataSource() (*BlobOutputDataSource, bool) - AsOutputDataSource() (*OutputDataSource, bool) -} - -// OutputDataSource describes the data source that output will be written to. 
-type OutputDataSource struct { - // Type - Possible values include: 'TypeBasicOutputDataSourceTypeOutputDataSource', 'TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts', 'TypeBasicOutputDataSourceTypePowerBI', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue', 'TypeBasicOutputDataSourceTypeMicrosoftAzureFunction', 'TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftStorageTable', 'TypeBasicOutputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicOutputDataSource `json:"type,omitempty"` -} - -func unmarshalBasicOutputDataSource(body []byte) (BasicOutputDataSource, error) { - var m map[string]interface{} - err := json.Unmarshal(body, &m) - if err != nil { - return nil, err - } - - switch m["type"] { - case string(TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts): - var adlsods AzureDataLakeStoreOutputDataSource - err := json.Unmarshal(body, &adlsods) - return adlsods, err - case string(TypeBasicOutputDataSourceTypePowerBI): - var pbods PowerBIOutputDataSource - err := json.Unmarshal(body, &pbods) - return pbods, err - case string(TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic): - var sbtods ServiceBusTopicOutputDataSource - err := json.Unmarshal(body, &sbtods) - return sbtods, err - case string(TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue): - var sbqods ServiceBusQueueOutputDataSource - err := json.Unmarshal(body, &sbqods) - return sbqods, err - case string(TypeBasicOutputDataSourceTypeMicrosoftAzureFunction): - var afods AzureFunctionOutputDataSource - err := json.Unmarshal(body, &afods) - return afods, err - case string(TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB): - var ddods DocumentDbOutputDataSource - err := json.Unmarshal(body, &ddods) - return ddods, err - case string(TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse): - var asods AzureSynapseOutputDataSource - err := json.Unmarshal(body, &asods) - return asods, err - case string(TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase): - var asdods AzureSQLDatabaseOutputDataSource - err := json.Unmarshal(body, &asdods) - return asdods, err - case string(TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub): - var ehvods EventHubV2OutputDataSource - err := json.Unmarshal(body, &ehvods) - return ehvods, err - case string(TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub): - var ehods EventHubOutputDataSource - err := json.Unmarshal(body, &ehods) - return ehods, err - case string(TypeBasicOutputDataSourceTypeMicrosoftStorageTable): - var atods AzureTableOutputDataSource - err := json.Unmarshal(body, &atods) - return atods, err - case string(TypeBasicOutputDataSourceTypeMicrosoftStorageBlob): - var bods BlobOutputDataSource - err := json.Unmarshal(body, &bods) - return bods, err - default: - var ods OutputDataSource - err := json.Unmarshal(body, &ods) - return ods, err - } -} -func unmarshalBasicOutputDataSourceArray(body []byte) ([]BasicOutputDataSource, error) { - var rawMessages []*json.RawMessage - err := json.Unmarshal(body, &rawMessages) - if err != nil { - return nil, err - } - - odsArray := make([]BasicOutputDataSource, len(rawMessages)) - - for index, rawMessage := range rawMessages { - ods, err := unmarshalBasicOutputDataSource(*rawMessage) - if err != nil { - return nil, err - } - odsArray[index] = ods - } - return odsArray, nil -} - -// MarshalJSON is the custom marshaler for OutputDataSource. -func (ods OutputDataSource) MarshalJSON() ([]byte, error) { - ods.Type = TypeBasicOutputDataSourceTypeOutputDataSource - objectMap := make(map[string]interface{}) - if ods.Type != "" { - objectMap["type"] = ods.Type - } - return json.Marshal(objectMap) -} - -// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. -func (ods OutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { - return nil, false -} - -// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. -func (ods OutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. -func (ods OutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. -func (ods OutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { - return nil, false -} - -// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. -func (ods OutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) { - return nil, false -} - -// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. -func (ods OutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { - return nil, false -} - -// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. -func (ods OutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) { - return nil, false -} - -// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. -func (ods OutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { - return nil, false -} - -// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. -func (ods OutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) { - return nil, false -} - -// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. -func (ods OutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { - return nil, false -} - -// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. -func (ods OutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { - return nil, false -} - -// AsBlobOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. -func (ods OutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { - return nil, false -} - -// AsOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource. -func (ods OutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { - return &ods, true -} - -// AsBasicOutputDataSource is the BasicOutputDataSource implementation for OutputDataSource.
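Every concrete data source carries this full set of As* probes, each returning (value, true) only for its own type, which is how calling code recovers the concrete type from a BasicOutputDataSource without knowing the whole generated hierarchy. A two-type sketch of how a flatten-style consumer uses the pattern; all names here are invented.

package main

import "fmt"

// outputDataSource keeps just one probe from the As* surface; two concrete
// types are enough to show the shape.
type outputDataSource interface {
	AsBlobOutputDataSource() (*blobOutput, bool)
}

type blobOutput struct{ Container string }

// Only the matching type answers (value, true).
func (b blobOutput) AsBlobOutputDataSource() (*blobOutput, bool) { return &b, true }

type powerBIOutput struct{ Dataset string }

func (powerBIOutput) AsBlobOutputDataSource() (*blobOutput, bool) { return nil, false }

// flattenBlob probes for the concrete type before reading its fields,
// mirroring how flatten helpers consume these methods.
func flattenBlob(ds outputDataSource) {
	if blob, ok := ds.AsBlobOutputDataSource(); ok {
		fmt.Println("blob container:", blob.Container)
		return
	}
	fmt.Println("not a blob output")
}

func main() {
	flattenBlob(blobOutput{Container: "logs"})
	flattenBlob(powerBIOutput{Dataset: "sales"})
}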
-func (ods OutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { - return &ods, true -} - -// OutputListResult object containing a list of outputs under a streaming job. -type OutputListResult struct { - autorest.Response `json:"-"` - // Value - READ-ONLY; A list of outputs under a streaming job. Populated by a 'List' operation. - Value *[]Output `json:"value,omitempty"` - // NextLink - READ-ONLY; The link (url) to the next page of results. - NextLink *string `json:"nextLink,omitempty"` -} - -// MarshalJSON is the custom marshaler for OutputListResult. -func (olr OutputListResult) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - return json.Marshal(objectMap) -} - -// OutputListResultIterator provides access to a complete listing of Output values. -type OutputListResultIterator struct { - i int - page OutputListResultPage -} - -// NextWithContext advances to the next value. If there was an error making -// the request the iterator does not advance and the error is returned. -func (iter *OutputListResultIterator) NextWithContext(ctx context.Context) (err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/OutputListResultIterator.NextWithContext") - defer func() { - sc := -1 - if iter.Response().Response.Response != nil { - sc = iter.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - iter.i++ - if iter.i < len(iter.page.Values()) { - return nil - } - err = iter.page.NextWithContext(ctx) - if err != nil { - iter.i-- - return err - } - iter.i = 0 - return nil -} - -// Next advances to the next value. If there was an error making -// the request the iterator does not advance and the error is returned. -// Deprecated: Use NextWithContext() instead. -func (iter *OutputListResultIterator) Next() error { - return iter.NextWithContext(context.Background()) -} - -// NotDone returns true if the enumeration should be started or is not yet complete. -func (iter OutputListResultIterator) NotDone() bool { - return iter.page.NotDone() && iter.i < len(iter.page.Values()) -} - -// Response returns the raw server response from the last page request. -func (iter OutputListResultIterator) Response() OutputListResult { - return iter.page.Response() -} - -// Value returns the current value or a zero-initialized value if the -// iterator has advanced beyond the end of the collection. -func (iter OutputListResultIterator) Value() Output { - if !iter.page.NotDone() { - return Output{} - } - return iter.page.Values()[iter.i] -} - -// Creates a new instance of the OutputListResultIterator type. -func NewOutputListResultIterator(page OutputListResultPage) OutputListResultIterator { - return OutputListResultIterator{page: page} -} - -// IsEmpty returns true if the ListResult contains no values. -func (olr OutputListResult) IsEmpty() bool { - return olr.Value == nil || len(*olr.Value) == 0 -} - -// hasNextLink returns true if the NextLink is not empty. -func (olr OutputListResult) hasNextLink() bool { - return olr.NextLink != nil && len(*olr.NextLink) != 0 -} - -// outputListResultPreparer prepares a request to retrieve the next set of results. -// It returns nil if no more results exist. 
-func (olr OutputListResult) outputListResultPreparer(ctx context.Context) (*http.Request, error) { - if !olr.hasNextLink() { - return nil, nil - } - return autorest.Prepare((&http.Request{}).WithContext(ctx), - autorest.AsJSON(), - autorest.AsGet(), - autorest.WithBaseURL(to.String(olr.NextLink))) -} - -// OutputListResultPage contains a page of Output values. -type OutputListResultPage struct { - fn func(context.Context, OutputListResult) (OutputListResult, error) - olr OutputListResult -} - -// NextWithContext advances to the next page of values. If there was an error making -// the request the page does not advance and the error is returned. -func (page *OutputListResultPage) NextWithContext(ctx context.Context) (err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/OutputListResultPage.NextWithContext") - defer func() { - sc := -1 - if page.Response().Response.Response != nil { - sc = page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - for { - next, err := page.fn(ctx, page.olr) - if err != nil { - return err - } - page.olr = next - if !next.hasNextLink() || !next.IsEmpty() { - break - } - } - return nil -} - -// Next advances to the next page of values. If there was an error making -// the request the page does not advance and the error is returned. -// Deprecated: Use NextWithContext() instead. -func (page *OutputListResultPage) Next() error { - return page.NextWithContext(context.Background()) -} - -// NotDone returns true if the page enumeration should be started or is not yet complete. -func (page OutputListResultPage) NotDone() bool { - return !page.olr.IsEmpty() -} - -// Response returns the raw server response from the last page request. -func (page OutputListResultPage) Response() OutputListResult { - return page.olr -} - -// Values returns the slice of values for the current page or nil if there are no values. -func (page OutputListResultPage) Values() []Output { - if page.olr.IsEmpty() { - return nil - } - return *page.olr.Value -} - -// Creates a new instance of the OutputListResultPage type. -func NewOutputListResultPage(cur OutputListResult, getNextPage func(context.Context, OutputListResult) (OutputListResult, error)) OutputListResultPage { - return OutputListResultPage{ - fn: getNextPage, - olr: cur, - } -} - -// OutputProperties the properties that are associated with an output. -type OutputProperties struct { - // Datasource - Describes the data source that output will be written to. Required on PUT (CreateOrReplace) requests. - Datasource BasicOutputDataSource `json:"datasource,omitempty"` - // TimeWindow - The time frame for filtering Stream Analytics job outputs. - TimeWindow *string `json:"timeWindow,omitempty"` - // SizeWindow - The size window to constrain a Stream Analytics output to. - SizeWindow *float64 `json:"sizeWindow,omitempty"` - // Serialization - Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. - Serialization BasicSerialization `json:"serialization,omitempty"` - // Diagnostics - READ-ONLY; Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. - Diagnostics *Diagnostics `json:"diagnostics,omitempty"` - // Etag - READ-ONLY; The current entity tag for the output. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. - Etag *string `json:"etag,omitempty"` -} - -// MarshalJSON is the custom marshaler for OutputProperties. -func (op OutputProperties) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - objectMap["datasource"] = op.Datasource - if op.TimeWindow != nil { - objectMap["timeWindow"] = op.TimeWindow - } - if op.SizeWindow != nil { - objectMap["sizeWindow"] = op.SizeWindow - } - objectMap["serialization"] = op.Serialization - return json.Marshal(objectMap) -} - -// UnmarshalJSON is the custom unmarshaler for OutputProperties struct. -func (op *OutputProperties) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "datasource": - if v != nil { - datasource, err := unmarshalBasicOutputDataSource(*v) - if err != nil { - return err - } - op.Datasource = datasource - } - case "timeWindow": - if v != nil { - var timeWindow string - err = json.Unmarshal(*v, &timeWindow) - if err != nil { - return err - } - op.TimeWindow = &timeWindow - } - case "sizeWindow": - if v != nil { - var sizeWindow float64 - err = json.Unmarshal(*v, &sizeWindow) - if err != nil { - return err - } - op.SizeWindow = &sizeWindow - } - case "serialization": - if v != nil { - serialization, err := unmarshalBasicSerialization(*v) - if err != nil { - return err - } - op.Serialization = serialization - } - case "diagnostics": - if v != nil { - var diagnostics Diagnostics - err = json.Unmarshal(*v, &diagnostics) - if err != nil { - return err - } - op.Diagnostics = &diagnostics - } - case "etag": - if v != nil { - var etag string - err = json.Unmarshal(*v, &etag) - if err != nil { - return err - } - op.Etag = &etag - } - } - } - - return nil -} - -// OutputsTestFuture an abstraction for monitoring and retrieving the results of a long-running operation. -type OutputsTestFuture struct { - azure.FutureAPI - // Result returns the result of the asynchronous operation. - // If the operation has not completed it will return an error. - Result func(OutputsClient) (ResourceTestStatus, error) -} - -// UnmarshalJSON is the custom unmarshaller for CreateFuture. -func (future *OutputsTestFuture) UnmarshalJSON(body []byte) error { - var azFuture azure.Future - if err := json.Unmarshal(body, &azFuture); err != nil { - return err - } - future.FutureAPI = &azFuture - future.Result = future.result - return nil -} - -// result is the default implementation for OutputsTestFuture.Result.
-func (future *OutputsTestFuture) result(client OutputsClient) (rts ResourceTestStatus, err error) { - var done bool - done, err = future.DoneWithContext(context.Background(), client) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsTestFuture", "Result", future.Response(), "Polling failure") - return - } - if !done { - rts.Response.Response = future.Response() - err = azure.NewAsyncOpIncompleteError("streamanalytics.OutputsTestFuture") - return - } - sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) - if rts.Response.Response, err = future.GetResult(sender); err == nil && rts.Response.Response.StatusCode != http.StatusNoContent { - rts, err = client.TestResponder(rts.Response.Response) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsTestFuture", "Result", rts.Response.Response, "Failure responding to request") - } - } - return -} - -// ParquetSerialization describes how data from an input is serialized or how data is serialized when -// written to an output in Parquet format. -type ParquetSerialization struct { - // Properties - The properties that are associated with the Parquet serialization type. Required on PUT (CreateOrReplace) requests. - Properties interface{} `json:"properties,omitempty"` - // Type - Possible values include: 'TypeSerialization', 'TypeAvro', 'TypeJSON', 'TypeCsv', 'TypeParquet' - Type Type `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for ParquetSerialization. -func (ps ParquetSerialization) MarshalJSON() ([]byte, error) { - ps.Type = TypeParquet - objectMap := make(map[string]interface{}) - if ps.Properties != nil { - objectMap["properties"] = ps.Properties - } - if ps.Type != "" { - objectMap["type"] = ps.Type - } - return json.Marshal(objectMap) -} - -// AsAvroSerialization is the BasicSerialization implementation for ParquetSerialization. -func (ps ParquetSerialization) AsAvroSerialization() (*AvroSerialization, bool) { - return nil, false -} - -// AsJSONSerialization is the BasicSerialization implementation for ParquetSerialization. -func (ps ParquetSerialization) AsJSONSerialization() (*JSONSerialization, bool) { - return nil, false -} - -// AsCsvSerialization is the BasicSerialization implementation for ParquetSerialization. -func (ps ParquetSerialization) AsCsvSerialization() (*CsvSerialization, bool) { - return nil, false -} - -// AsParquetSerialization is the BasicSerialization implementation for ParquetSerialization. -func (ps ParquetSerialization) AsParquetSerialization() (*ParquetSerialization, bool) { - return &ps, true -} - -// AsSerialization is the BasicSerialization implementation for ParquetSerialization. -func (ps ParquetSerialization) AsSerialization() (*Serialization, bool) { - return nil, false -} - -// AsBasicSerialization is the BasicSerialization implementation for ParquetSerialization. -func (ps ParquetSerialization) AsBasicSerialization() (BasicSerialization, bool) { - return &ps, true -} - -// PowerBIOutputDataSource describes a Power BI output data source. -type PowerBIOutputDataSource struct { - // PowerBIOutputDataSourceProperties - The properties that are associated with a Power BI output. Required on PUT (CreateOrReplace) requests. 
- *PowerBIOutputDataSourceProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicOutputDataSourceTypeOutputDataSource', 'TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts', 'TypeBasicOutputDataSourceTypePowerBI', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue', 'TypeBasicOutputDataSourceTypeMicrosoftAzureFunction', 'TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftStorageTable', 'TypeBasicOutputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicOutputDataSource `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for PowerBIOutputDataSource. -func (pbods PowerBIOutputDataSource) MarshalJSON() ([]byte, error) { - pbods.Type = TypeBasicOutputDataSourceTypePowerBI - objectMap := make(map[string]interface{}) - if pbods.PowerBIOutputDataSourceProperties != nil { - objectMap["properties"] = pbods.PowerBIOutputDataSourceProperties - } - if pbods.Type != "" { - objectMap["type"] = pbods.Type - } - return json.Marshal(objectMap) -} - -// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. -func (pbods PowerBIOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { - return nil, false -} - -// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. -func (pbods PowerBIOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { - return &pbods, true -} - -// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. -func (pbods PowerBIOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. -func (pbods PowerBIOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { - return nil, false -} - -// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. -func (pbods PowerBIOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) { - return nil, false -} - -// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. -func (pbods PowerBIOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { - return nil, false -} - -// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. -func (pbods PowerBIOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) { - return nil, false -} - -// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. -func (pbods PowerBIOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { - return nil, false -} - -// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. 
-func (pbods PowerBIOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) { - return nil, false -} - -// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. -func (pbods PowerBIOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { - return nil, false -} - -// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. -func (pbods PowerBIOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { - return nil, false -} - -// AsBlobOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. -func (pbods PowerBIOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { - return nil, false -} - -// AsOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. -func (pbods PowerBIOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { - return nil, false -} - -// AsBasicOutputDataSource is the BasicOutputDataSource implementation for PowerBIOutputDataSource. -func (pbods PowerBIOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { - return &pbods, true -} - -// UnmarshalJSON is the custom unmarshaler for PowerBIOutputDataSource struct. -func (pbods *PowerBIOutputDataSource) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var powerBIOutputDataSourceProperties PowerBIOutputDataSourceProperties - err = json.Unmarshal(*v, &powerBIOutputDataSourceProperties) - if err != nil { - return err - } - pbods.PowerBIOutputDataSourceProperties = &powerBIOutputDataSourceProperties - } - case "type": - if v != nil { - var typeVar TypeBasicOutputDataSource - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - pbods.Type = typeVar - } - } - } - - return nil -} - -// PowerBIOutputDataSourceProperties the properties that are associated with a Power BI output. -type PowerBIOutputDataSourceProperties struct { - // Dataset - The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. - Dataset *string `json:"dataset,omitempty"` - // Table - The name of the Power BI table under the specified dataset. Required on PUT (CreateOrReplace) requests. - Table *string `json:"table,omitempty"` - // GroupID - The ID of the Power BI group. - GroupID *string `json:"groupId,omitempty"` - // GroupName - The name of the Power BI group. Use this property to help remember which specific Power BI group id was used. - GroupName *string `json:"groupName,omitempty"` - // AuthenticationMode - Authentication Mode. Possible values include: 'AuthenticationModeMsi', 'AuthenticationModeUserToken', 'AuthenticationModeConnectionString' - AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"` - // RefreshToken - A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only obtainable via the Azure Portal. It is recommended to put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests. 
- RefreshToken *string `json:"refreshToken,omitempty"` - // TokenUserPrincipalName - The user principal name (UPN) of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. - TokenUserPrincipalName *string `json:"tokenUserPrincipalName,omitempty"` - // TokenUserDisplayName - The user display name of the user that was used to obtain the refresh token. Use this property to help remember which user was used to obtain the refresh token. - TokenUserDisplayName *string `json:"tokenUserDisplayName,omitempty"` -} - -// PrivateEndpoint complete information about the private endpoint. -type PrivateEndpoint struct { - autorest.Response `json:"-"` - // PrivateEndpointProperties - The properties associated with a private endpoint. - *PrivateEndpointProperties `json:"properties,omitempty"` - // Etag - READ-ONLY; Unique opaque string (generally a GUID) that represents the metadata state of the resource (private endpoint) and changes whenever the resource is updated. Required on PUT (CreateOrUpdate) requests. - Etag *string `json:"etag,omitempty"` - // ID - READ-ONLY; Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} - ID *string `json:"id,omitempty"` - // Name - READ-ONLY; The name of the resource - Name *string `json:"name,omitempty"` - // Type - READ-ONLY; The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. - Type *string `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for PrivateEndpoint. -func (peVar PrivateEndpoint) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - if peVar.PrivateEndpointProperties != nil { - objectMap["properties"] = peVar.PrivateEndpointProperties - } - return json.Marshal(objectMap) -} - -// UnmarshalJSON is the custom unmarshaler for PrivateEndpoint struct. -func (peVar *PrivateEndpoint) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var privateEndpointProperties PrivateEndpointProperties - err = json.Unmarshal(*v, &privateEndpointProperties) - if err != nil { - return err - } - peVar.PrivateEndpointProperties = &privateEndpointProperties - } - case "etag": - if v != nil { - var etag string - err = json.Unmarshal(*v, &etag) - if err != nil { - return err - } - peVar.Etag = &etag - } - case "id": - if v != nil { - var ID string - err = json.Unmarshal(*v, &ID) - if err != nil { - return err - } - peVar.ID = &ID - } - case "name": - if v != nil { - var name string - err = json.Unmarshal(*v, &name) - if err != nil { - return err - } - peVar.Name = &name - } - case "type": - if v != nil { - var typeVar string - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - peVar.Type = &typeVar - } - } - } - - return nil -} - -// PrivateEndpointListResult a list of private endpoints. -type PrivateEndpointListResult struct { - autorest.Response `json:"-"` - // Value - READ-ONLY; A list of private endpoints. - Value *[]PrivateEndpoint `json:"value,omitempty"` - // NextLink - READ-ONLY; The URL to fetch the next set of private endpoints. - NextLink *string `json:"nextLink,omitempty"` -} - -// MarshalJSON is the custom marshaler for PrivateEndpointListResult. 
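// Illustrative construction (an editorial sketch; names and IDs are
// placeholders): per the RefreshToken doc comment above, the token is
// typically seeded with a dummy value and later replaced by authenticating
// the data source in the Azure Portal. to.StringPtr is from
// github.com/Azure/go-autorest/autorest/to.
//
//	ds := PowerBIOutputDataSource{
//		PowerBIOutputDataSourceProperties: &PowerBIOutputDataSourceProperties{
//			Dataset:      to.StringPtr("example-dataset"),
//			Table:        to.StringPtr("example-table"),
//			GroupID:      to.StringPtr("00000000-0000-0000-0000-000000000000"),
//			RefreshToken: to.StringPtr("dummy"),
//		},
//	}
//	// MarshalJSON above stamps "type":"PowerBI" on the wire.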
-func (pelr PrivateEndpointListResult) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - return json.Marshal(objectMap) -} - -// PrivateEndpointListResultIterator provides access to a complete listing of PrivateEndpoint values. -type PrivateEndpointListResultIterator struct { - i int - page PrivateEndpointListResultPage -} - -// NextWithContext advances to the next value. If there was an error making -// the request the iterator does not advance and the error is returned. -func (iter *PrivateEndpointListResultIterator) NextWithContext(ctx context.Context) (err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/PrivateEndpointListResultIterator.NextWithContext") - defer func() { - sc := -1 - if iter.Response().Response.Response != nil { - sc = iter.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - iter.i++ - if iter.i < len(iter.page.Values()) { - return nil - } - err = iter.page.NextWithContext(ctx) - if err != nil { - iter.i-- - return err - } - iter.i = 0 - return nil -} - -// Next advances to the next value. If there was an error making -// the request the iterator does not advance and the error is returned. -// Deprecated: Use NextWithContext() instead. -func (iter *PrivateEndpointListResultIterator) Next() error { - return iter.NextWithContext(context.Background()) -} - -// NotDone returns true if the enumeration should be started or is not yet complete. -func (iter PrivateEndpointListResultIterator) NotDone() bool { - return iter.page.NotDone() && iter.i < len(iter.page.Values()) -} - -// Response returns the raw server response from the last page request. -func (iter PrivateEndpointListResultIterator) Response() PrivateEndpointListResult { - return iter.page.Response() -} - -// Value returns the current value or a zero-initialized value if the -// iterator has advanced beyond the end of the collection. -func (iter PrivateEndpointListResultIterator) Value() PrivateEndpoint { - if !iter.page.NotDone() { - return PrivateEndpoint{} - } - return iter.page.Values()[iter.i] -} - -// Creates a new instance of the PrivateEndpointListResultIterator type. -func NewPrivateEndpointListResultIterator(page PrivateEndpointListResultPage) PrivateEndpointListResultIterator { - return PrivateEndpointListResultIterator{page: page} -} - -// IsEmpty returns true if the ListResult contains no values. -func (pelr PrivateEndpointListResult) IsEmpty() bool { - return pelr.Value == nil || len(*pelr.Value) == 0 -} - -// hasNextLink returns true if the NextLink is not empty. -func (pelr PrivateEndpointListResult) hasNextLink() bool { - return pelr.NextLink != nil && len(*pelr.NextLink) != 0 -} - -// privateEndpointListResultPreparer prepares a request to retrieve the next set of results. -// It returns nil if no more results exist. -func (pelr PrivateEndpointListResult) privateEndpointListResultPreparer(ctx context.Context) (*http.Request, error) { - if !pelr.hasNextLink() { - return nil, nil - } - return autorest.Prepare((&http.Request{}).WithContext(ctx), - autorest.AsJSON(), - autorest.AsGet(), - autorest.WithBaseURL(to.String(pelr.NextLink))) -} - -// PrivateEndpointListResultPage contains a page of PrivateEndpoint values. -type PrivateEndpointListResultPage struct { - fn func(context.Context, PrivateEndpointListResult) (PrivateEndpointListResult, error) - pelr PrivateEndpointListResult -} - -// NextWithContext advances to the next page of values. 
If there was an error making -// the request the page does not advance and the error is returned. -func (page *PrivateEndpointListResultPage) NextWithContext(ctx context.Context) (err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/PrivateEndpointListResultPage.NextWithContext") - defer func() { - sc := -1 - if page.Response().Response.Response != nil { - sc = page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - for { - next, err := page.fn(ctx, page.pelr) - if err != nil { - return err - } - page.pelr = next - if !next.hasNextLink() || !next.IsEmpty() { - break - } - } - return nil -} - -// Next advances to the next page of values. If there was an error making -// the request the page does not advance and the error is returned. -// Deprecated: Use NextWithContext() instead. -func (page *PrivateEndpointListResultPage) Next() error { - return page.NextWithContext(context.Background()) -} - -// NotDone returns true if the page enumeration should be started or is not yet complete. -func (page PrivateEndpointListResultPage) NotDone() bool { - return !page.pelr.IsEmpty() -} - -// Response returns the raw server response from the last page request. -func (page PrivateEndpointListResultPage) Response() PrivateEndpointListResult { - return page.pelr -} - -// Values returns the slice of values for the current page or nil if there are no values. -func (page PrivateEndpointListResultPage) Values() []PrivateEndpoint { - if page.pelr.IsEmpty() { - return nil - } - return *page.pelr.Value -} - -// Creates a new instance of the PrivateEndpointListResultPage type. -func NewPrivateEndpointListResultPage(cur PrivateEndpointListResult, getNextPage func(context.Context, PrivateEndpointListResult) (PrivateEndpointListResult, error)) PrivateEndpointListResultPage { - return PrivateEndpointListResultPage{ - fn: getNextPage, - pelr: cur, - } -} - -// PrivateEndpointProperties the properties associated with a private endpoint. -type PrivateEndpointProperties struct { - // CreatedDate - READ-ONLY; The date when this private endpoint was created. - CreatedDate *string `json:"createdDate,omitempty"` - // ManualPrivateLinkServiceConnections - A list of connections to the remote resource. Immutable after it is set. - ManualPrivateLinkServiceConnections *[]PrivateLinkServiceConnection `json:"manualPrivateLinkServiceConnections,omitempty"` -} - -// MarshalJSON is the custom marshaler for PrivateEndpointProperties. -func (pep PrivateEndpointProperties) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - if pep.ManualPrivateLinkServiceConnections != nil { - objectMap["manualPrivateLinkServiceConnections"] = pep.ManualPrivateLinkServiceConnections - } - return json.Marshal(objectMap) -} - -// PrivateEndpointsDeleteFuture an abstraction for monitoring and retrieving the results of a long-running -// operation. -type PrivateEndpointsDeleteFuture struct { - azure.FutureAPI - // Result returns the result of the asynchronous operation. - // If the operation has not completed it will return an error. - Result func(PrivateEndpointsClient) (autorest.Response, error) -} - -// UnmarshalJSON is the custom unmarshaler for PrivateEndpointsDeleteFuture.
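// Illustrative paging (an editorial sketch): consuming the Page type defined
// above. The ListByCluster pager shape is assumed from the generated
// PrivateEndpointsClient, and the resource names are placeholders.
//
//	page, err := endpointsClient.ListByCluster(ctx, "example-rg", "example-cluster")
//	if err != nil {
//		return err
//	}
//	for page.NotDone() {
//		for _, pe := range page.Values() {
//			_ = pe // each PrivateEndpoint on the current page
//		}
//		if err := page.NextWithContext(ctx); err != nil {
//			return err
//		}
//	}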
-func (future *PrivateEndpointsDeleteFuture) UnmarshalJSON(body []byte) error { - var azFuture azure.Future - if err := json.Unmarshal(body, &azFuture); err != nil { - return err - } - future.FutureAPI = &azFuture - future.Result = future.result - return nil -} - -// result is the default implementation for PrivateEndpointsDeleteFuture.Result. -func (future *PrivateEndpointsDeleteFuture) result(client PrivateEndpointsClient) (ar autorest.Response, err error) { - var done bool - done, err = future.DoneWithContext(context.Background(), client) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsDeleteFuture", "Result", future.Response(), "Polling failure") - return - } - if !done { - ar.Response = future.Response() - err = azure.NewAsyncOpIncompleteError("streamanalytics.PrivateEndpointsDeleteFuture") - return - } - ar.Response = future.Response() - return -} - -// PrivateLinkConnectionState a collection of read-only information about the state of the connection to -// the private remote resource. -type PrivateLinkConnectionState struct { - // Status - READ-ONLY; Indicates whether the connection has been Approved/Rejected/Removed by the owner of the remote resource/service. - Status *string `json:"status,omitempty"` - // Description - READ-ONLY; The reason for approval/rejection of the connection. - Description *string `json:"description,omitempty"` - // ActionsRequired - READ-ONLY; A message indicating if changes on the service provider require any updates on the consumer. - ActionsRequired *string `json:"actionsRequired,omitempty"` -} - -// MarshalJSON is the custom marshaler for PrivateLinkConnectionState. -func (plcs PrivateLinkConnectionState) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - return json.Marshal(objectMap) -} - -// PrivateLinkServiceConnection a grouping of information about the connection to the remote resource. -type PrivateLinkServiceConnection struct { - // PrivateLinkServiceConnectionProperties - Bag of properties defining a privatelinkServiceConnection. - *PrivateLinkServiceConnectionProperties `json:"properties,omitempty"` -} - -// MarshalJSON is the custom marshaler for PrivateLinkServiceConnection. -func (plsc PrivateLinkServiceConnection) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - if plsc.PrivateLinkServiceConnectionProperties != nil { - objectMap["properties"] = plsc.PrivateLinkServiceConnectionProperties - } - return json.Marshal(objectMap) -} - -// UnmarshalJSON is the custom unmarshaler for PrivateLinkServiceConnection struct. -func (plsc *PrivateLinkServiceConnection) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var privateLinkServiceConnectionProperties PrivateLinkServiceConnectionProperties - err = json.Unmarshal(*v, &privateLinkServiceConnectionProperties) - if err != nil { - return err - } - plsc.PrivateLinkServiceConnectionProperties = &privateLinkServiceConnectionProperties - } - } - } - - return nil -} - -// PrivateLinkServiceConnectionProperties bag of properties defining a privatelinkServiceConnection. -type PrivateLinkServiceConnectionProperties struct { - // PrivateLinkServiceID - The resource id of the private link service. Required on PUT (CreateOrUpdate) requests. 
- PrivateLinkServiceID *string `json:"privateLinkServiceId,omitempty"` - // GroupIds - The ID(s) of the group(s) obtained from the remote resource that this private endpoint should connect to. Required on PUT (CreateOrUpdate) requests. - GroupIds *[]string `json:"groupIds,omitempty"` - // RequestMessage - READ-ONLY; A message passed to the owner of the remote resource with this connection request. Restricted to 140 chars. - RequestMessage *string `json:"requestMessage,omitempty"` - // PrivateLinkServiceConnectionState - A collection of read-only information about the state of the connection to the private remote resource. - PrivateLinkServiceConnectionState *PrivateLinkConnectionState `json:"privateLinkServiceConnectionState,omitempty"` -} - -// MarshalJSON is the custom marshaler for PrivateLinkServiceConnectionProperties. -func (plscp PrivateLinkServiceConnectionProperties) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - if plscp.PrivateLinkServiceID != nil { - objectMap["privateLinkServiceId"] = plscp.PrivateLinkServiceID - } - if plscp.GroupIds != nil { - objectMap["groupIds"] = plscp.GroupIds - } - if plscp.PrivateLinkServiceConnectionState != nil { - objectMap["privateLinkServiceConnectionState"] = plscp.PrivateLinkServiceConnectionState - } - return json.Marshal(objectMap) -} - -// ProxyResource the resource model definition for an ARM proxy resource. It will have everything other than -// the required location and tags. -type ProxyResource struct { - // ID - READ-ONLY; Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} - ID *string `json:"id,omitempty"` - // Name - READ-ONLY; The name of the resource - Name *string `json:"name,omitempty"` - // Type - READ-ONLY; The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. - Type *string `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for ProxyResource. -func (pr ProxyResource) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - return json.Marshal(objectMap) -} - -// BasicReferenceInputDataSource describes an input data source that contains reference data. -type BasicReferenceInputDataSource interface { - AsAzureSQLReferenceInputDataSource() (*AzureSQLReferenceInputDataSource, bool) - AsBlobReferenceInputDataSource() (*BlobReferenceInputDataSource, bool) - AsReferenceInputDataSource() (*ReferenceInputDataSource, bool) -} - -// ReferenceInputDataSource describes an input data source that contains reference data.
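// Illustrative construction (an editorial sketch; the resource ID and group
// ID are placeholders): PrivateLinkServiceID plus GroupIds is the minimal
// payload for a manual connection, matching the PUT requirements noted above.
//
//	conn := PrivateLinkServiceConnection{
//		PrivateLinkServiceConnectionProperties: &PrivateLinkServiceConnectionProperties{
//			PrivateLinkServiceID: to.StringPtr("/subscriptions/.../providers/Microsoft.Sql/servers/example"),
//			GroupIds:             &[]string{"sqlServer"},
//		},
//	}
//	pe := PrivateEndpoint{
//		PrivateEndpointProperties: &PrivateEndpointProperties{
//			ManualPrivateLinkServiceConnections: &[]PrivateLinkServiceConnection{conn},
//		},
//	}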
-type ReferenceInputDataSource struct { - // Type - Possible values include: 'TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource', 'TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicReferenceInputDataSource `json:"type,omitempty"` -} - -func unmarshalBasicReferenceInputDataSource(body []byte) (BasicReferenceInputDataSource, error) { - var m map[string]interface{} - err := json.Unmarshal(body, &m) - if err != nil { - return nil, err - } - - switch m["type"] { - case string(TypeBasicReferenceInputDataSourceTypeMicrosoftSQLServerDatabase): - var asrids AzureSQLReferenceInputDataSource - err := json.Unmarshal(body, &asrids) - return asrids, err - case string(TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob): - var brids BlobReferenceInputDataSource - err := json.Unmarshal(body, &brids) - return brids, err - default: - var rids ReferenceInputDataSource - err := json.Unmarshal(body, &rids) - return rids, err - } -} -func unmarshalBasicReferenceInputDataSourceArray(body []byte) ([]BasicReferenceInputDataSource, error) { - var rawMessages []*json.RawMessage - err := json.Unmarshal(body, &rawMessages) - if err != nil { - return nil, err - } - - ridsArray := make([]BasicReferenceInputDataSource, len(rawMessages)) - - for index, rawMessage := range rawMessages { - rids, err := unmarshalBasicReferenceInputDataSource(*rawMessage) - if err != nil { - return nil, err - } - ridsArray[index] = rids - } - return ridsArray, nil -} - -// MarshalJSON is the custom marshaler for ReferenceInputDataSource. -func (rids ReferenceInputDataSource) MarshalJSON() ([]byte, error) { - rids.Type = TypeBasicReferenceInputDataSourceTypeReferenceInputDataSource - objectMap := make(map[string]interface{}) - if rids.Type != "" { - objectMap["type"] = rids.Type - } - return json.Marshal(objectMap) -} - -// AsAzureSQLReferenceInputDataSource is the BasicReferenceInputDataSource implementation for ReferenceInputDataSource. -func (rids ReferenceInputDataSource) AsAzureSQLReferenceInputDataSource() (*AzureSQLReferenceInputDataSource, bool) { - return nil, false -} - -// AsBlobReferenceInputDataSource is the BasicReferenceInputDataSource implementation for ReferenceInputDataSource. -func (rids ReferenceInputDataSource) AsBlobReferenceInputDataSource() (*BlobReferenceInputDataSource, bool) { - return nil, false -} - -// AsReferenceInputDataSource is the BasicReferenceInputDataSource implementation for ReferenceInputDataSource. -func (rids ReferenceInputDataSource) AsReferenceInputDataSource() (*ReferenceInputDataSource, bool) { - return &rids, true -} - -// AsBasicReferenceInputDataSource is the BasicReferenceInputDataSource implementation for ReferenceInputDataSource. -func (rids ReferenceInputDataSource) AsBasicReferenceInputDataSource() (BasicReferenceInputDataSource, bool) { - return &rids, true -} - -// ReferenceInputProperties the properties that are associated with an input containing reference data. -type ReferenceInputProperties struct { - // Datasource - Describes an input data source that contains reference data. Required on PUT (CreateOrReplace) requests. - Datasource BasicReferenceInputDataSource `json:"datasource,omitempty"` - // Serialization - Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. 
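// Illustrative dispatch (an editorial sketch): unmarshalBasicReferenceInputDataSource
// above switches on the "type" discriminator, so a blob-backed payload comes
// back as the concrete BlobReferenceInputDataSource. The wire value
// "Microsoft.Storage/Blob" is assumed from the enum constants listed above.
//
//	ds, err := unmarshalBasicReferenceInputDataSource([]byte(`{"type":"Microsoft.Storage/Blob"}`))
//	if err != nil {
//		return err
//	}
//	if blob, ok := ds.AsBlobReferenceInputDataSource(); ok {
//		_ = blob // concrete blob reference input
//	}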
- Serialization BasicSerialization `json:"serialization,omitempty"` - // Diagnostics - READ-ONLY; Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. - Diagnostics *Diagnostics `json:"diagnostics,omitempty"` - // Etag - READ-ONLY; The current entity tag for the input. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. - Etag *string `json:"etag,omitempty"` - // Compression - Describes how input data is compressed. - Compression *Compression `json:"compression,omitempty"` - // PartitionKey - Describes a key in the input data which is used for partitioning the input data. - PartitionKey *string `json:"partitionKey,omitempty"` - // Type - Possible values include: 'TypeBasicInputPropertiesTypeInputProperties', 'TypeBasicInputPropertiesTypeReference', 'TypeBasicInputPropertiesTypeStream' - Type TypeBasicInputProperties `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for ReferenceInputProperties. -func (rip ReferenceInputProperties) MarshalJSON() ([]byte, error) { - rip.Type = TypeBasicInputPropertiesTypeReference - objectMap := make(map[string]interface{}) - objectMap["datasource"] = rip.Datasource - objectMap["serialization"] = rip.Serialization - if rip.Compression != nil { - objectMap["compression"] = rip.Compression - } - if rip.PartitionKey != nil { - objectMap["partitionKey"] = rip.PartitionKey - } - if rip.Type != "" { - objectMap["type"] = rip.Type - } - return json.Marshal(objectMap) -} - -// AsReferenceInputProperties is the BasicInputProperties implementation for ReferenceInputProperties. -func (rip ReferenceInputProperties) AsReferenceInputProperties() (*ReferenceInputProperties, bool) { - return &rip, true -} - -// AsStreamInputProperties is the BasicInputProperties implementation for ReferenceInputProperties. -func (rip ReferenceInputProperties) AsStreamInputProperties() (*StreamInputProperties, bool) { - return nil, false -} - -// AsInputProperties is the BasicInputProperties implementation for ReferenceInputProperties. -func (rip ReferenceInputProperties) AsInputProperties() (*InputProperties, bool) { - return nil, false -} - -// AsBasicInputProperties is the BasicInputProperties implementation for ReferenceInputProperties. -func (rip ReferenceInputProperties) AsBasicInputProperties() (BasicInputProperties, bool) { - return &rip, true -} - -// UnmarshalJSON is the custom unmarshaler for ReferenceInputProperties struct.
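// Illustrative construction (an editorial sketch; datasource and
// serialization details are elided): MarshalJSON above forces
// "type":"Reference", so callers only supply the polymorphic members.
//
//	props := ReferenceInputProperties{
//		Datasource:    BlobReferenceInputDataSource{ /* properties elided */ },
//		Serialization: JSONSerialization{ /* properties elided */ },
//	}
//	b, err := json.Marshal(props)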
-func (rip *ReferenceInputProperties) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "datasource": - if v != nil { - datasource, err := unmarshalBasicReferenceInputDataSource(*v) - if err != nil { - return err - } - rip.Datasource = datasource - } - case "serialization": - if v != nil { - serialization, err := unmarshalBasicSerialization(*v) - if err != nil { - return err - } - rip.Serialization = serialization - } - case "diagnostics": - if v != nil { - var diagnostics Diagnostics - err = json.Unmarshal(*v, &diagnostics) - if err != nil { - return err - } - rip.Diagnostics = &diagnostics - } - case "etag": - if v != nil { - var etag string - err = json.Unmarshal(*v, &etag) - if err != nil { - return err - } - rip.Etag = &etag - } - case "compression": - if v != nil { - var compression Compression - err = json.Unmarshal(*v, &compression) - if err != nil { - return err - } - rip.Compression = &compression - } - case "partitionKey": - if v != nil { - var partitionKey string - err = json.Unmarshal(*v, &partitionKey) - if err != nil { - return err - } - rip.PartitionKey = &partitionKey - } - case "type": - if v != nil { - var typeVar TypeBasicInputProperties - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - rip.Type = typeVar - } - } - } - - return nil -} - -// Resource the base resource definition -type Resource struct { - // ID - READ-ONLY; Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} - ID *string `json:"id,omitempty"` - // Name - READ-ONLY; The name of the resource - Name *string `json:"name,omitempty"` - // Type - READ-ONLY; The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. - Type *string `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for Resource. -func (r Resource) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - return json.Marshal(objectMap) -} - -// ResourceTestStatus describes the status of the test operation along with error information, if -// applicable. -type ResourceTestStatus struct { - autorest.Response `json:"-"` - // Status - READ-ONLY; The status of the test operation. - Status *string `json:"status,omitempty"` - // Error - READ-ONLY; Describes the error that occurred. - Error *ErrorResponse `json:"error,omitempty"` -} - -// MarshalJSON is the custom marshaler for ResourceTestStatus. -func (rts ResourceTestStatus) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - return json.Marshal(objectMap) -} - -// ScalarFunctionProperties the properties that are associated with a scalar function. -type ScalarFunctionProperties struct { - // Etag - READ-ONLY; The current entity tag for the function. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. 
- Etag *string `json:"etag,omitempty"` - *FunctionConfiguration `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicFunctionPropertiesTypeFunctionProperties', 'TypeBasicFunctionPropertiesTypeAggregate', 'TypeBasicFunctionPropertiesTypeScalar' - Type TypeBasicFunctionProperties `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for ScalarFunctionProperties. -func (sfp ScalarFunctionProperties) MarshalJSON() ([]byte, error) { - sfp.Type = TypeBasicFunctionPropertiesTypeScalar - objectMap := make(map[string]interface{}) - if sfp.FunctionConfiguration != nil { - objectMap["properties"] = sfp.FunctionConfiguration - } - if sfp.Type != "" { - objectMap["type"] = sfp.Type - } - return json.Marshal(objectMap) -} - -// AsAggregateFunctionProperties is the BasicFunctionProperties implementation for ScalarFunctionProperties. -func (sfp ScalarFunctionProperties) AsAggregateFunctionProperties() (*AggregateFunctionProperties, bool) { - return nil, false -} - -// AsScalarFunctionProperties is the BasicFunctionProperties implementation for ScalarFunctionProperties. -func (sfp ScalarFunctionProperties) AsScalarFunctionProperties() (*ScalarFunctionProperties, bool) { - return &sfp, true -} - -// AsFunctionProperties is the BasicFunctionProperties implementation for ScalarFunctionProperties. -func (sfp ScalarFunctionProperties) AsFunctionProperties() (*FunctionProperties, bool) { - return nil, false -} - -// AsBasicFunctionProperties is the BasicFunctionProperties implementation for ScalarFunctionProperties. -func (sfp ScalarFunctionProperties) AsBasicFunctionProperties() (BasicFunctionProperties, bool) { - return &sfp, true -} - -// UnmarshalJSON is the custom unmarshaler for ScalarFunctionProperties struct. -func (sfp *ScalarFunctionProperties) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "etag": - if v != nil { - var etag string - err = json.Unmarshal(*v, &etag) - if err != nil { - return err - } - sfp.Etag = &etag - } - case "properties": - if v != nil { - var functionConfiguration FunctionConfiguration - err = json.Unmarshal(*v, &functionConfiguration) - if err != nil { - return err - } - sfp.FunctionConfiguration = &functionConfiguration - } - case "type": - if v != nil { - var typeVar TypeBasicFunctionProperties - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - sfp.Type = typeVar - } - } - } - - return nil -} - -// ScaleStreamingJobParameters parameters supplied to the Scale Streaming Job operation. -type ScaleStreamingJobParameters struct { - // StreamingUnits - Specifies the number of streaming units that the streaming job will scale to. - StreamingUnits *int32 `json:"streamingUnits,omitempty"` -} - -// BasicSerialization describes how data from an input is serialized or how data is serialized when written to an -// output. -type BasicSerialization interface { - AsAvroSerialization() (*AvroSerialization, bool) - AsJSONSerialization() (*JSONSerialization, bool) - AsCsvSerialization() (*CsvSerialization, bool) - AsParquetSerialization() (*ParquetSerialization, bool) - AsSerialization() (*Serialization, bool) -} - -// Serialization describes how data from an input is serialized or how data is serialized when written to an -// output. 
-type Serialization struct { - // Type - Possible values include: 'TypeSerialization', 'TypeAvro', 'TypeJSON', 'TypeCsv', 'TypeParquet' - Type Type `json:"type,omitempty"` -} - -func unmarshalBasicSerialization(body []byte) (BasicSerialization, error) { - var m map[string]interface{} - err := json.Unmarshal(body, &m) - if err != nil { - return nil, err - } - - switch m["type"] { - case string(TypeAvro): - var as AvroSerialization - err := json.Unmarshal(body, &as) - return as, err - case string(TypeJSON): - var js JSONSerialization - err := json.Unmarshal(body, &js) - return js, err - case string(TypeCsv): - var cs CsvSerialization - err := json.Unmarshal(body, &cs) - return cs, err - case string(TypeParquet): - var ps ParquetSerialization - err := json.Unmarshal(body, &ps) - return ps, err - default: - var s Serialization - err := json.Unmarshal(body, &s) - return s, err - } -} -func unmarshalBasicSerializationArray(body []byte) ([]BasicSerialization, error) { - var rawMessages []*json.RawMessage - err := json.Unmarshal(body, &rawMessages) - if err != nil { - return nil, err - } - - sArray := make([]BasicSerialization, len(rawMessages)) - - for index, rawMessage := range rawMessages { - s, err := unmarshalBasicSerialization(*rawMessage) - if err != nil { - return nil, err - } - sArray[index] = s - } - return sArray, nil -} - -// MarshalJSON is the custom marshaler for Serialization. -func (s Serialization) MarshalJSON() ([]byte, error) { - s.Type = TypeSerialization - objectMap := make(map[string]interface{}) - if s.Type != "" { - objectMap["type"] = s.Type - } - return json.Marshal(objectMap) -} - -// AsAvroSerialization is the BasicSerialization implementation for Serialization. -func (s Serialization) AsAvroSerialization() (*AvroSerialization, bool) { - return nil, false -} - -// AsJSONSerialization is the BasicSerialization implementation for Serialization. -func (s Serialization) AsJSONSerialization() (*JSONSerialization, bool) { - return nil, false -} - -// AsCsvSerialization is the BasicSerialization implementation for Serialization. -func (s Serialization) AsCsvSerialization() (*CsvSerialization, bool) { - return nil, false -} - -// AsParquetSerialization is the BasicSerialization implementation for Serialization. -func (s Serialization) AsParquetSerialization() (*ParquetSerialization, bool) { - return nil, false -} - -// AsSerialization is the BasicSerialization implementation for Serialization. -func (s Serialization) AsSerialization() (*Serialization, bool) { - return &s, true -} - -// AsBasicSerialization is the BasicSerialization implementation for Serialization. -func (s Serialization) AsBasicSerialization() (BasicSerialization, bool) { - return &s, true -} - -// ServiceBusDataSourceProperties the common properties that are associated with Service Bus data sources -// (Queues, Topics, Event Hubs, etc.). -type ServiceBusDataSourceProperties struct { - // ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` - // SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` - // SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. 
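// Illustrative dispatch (an editorial sketch; the payload mirrors the
// 2020-03-01 wire format): unmarshalBasicSerialization above picks the
// concrete type from the "type" discriminator.
//
//	s, err := unmarshalBasicSerialization([]byte(`{"type":"Csv","properties":{"fieldDelimiter":",","encoding":"UTF8"}}`))
//	if err != nil {
//		return err
//	}
//	if cs, ok := s.AsCsvSerialization(); ok {
//		_ = cs // concrete CsvSerialization with delimiter and encoding populated
//	}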
- SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` - // AuthenticationMode - Authentication Mode. Possible values include: 'AuthenticationModeMsi', 'AuthenticationModeUserToken', 'AuthenticationModeConnectionString' - AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"` -} - -// ServiceBusQueueOutputDataSource describes a Service Bus Queue output data source. -type ServiceBusQueueOutputDataSource struct { - // ServiceBusQueueOutputDataSourceProperties - The properties that are associated with a Service Bus Queue output. Required on PUT (CreateOrReplace) requests. - *ServiceBusQueueOutputDataSourceProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicOutputDataSourceTypeOutputDataSource', 'TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts', 'TypeBasicOutputDataSourceTypePowerBI', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue', 'TypeBasicOutputDataSourceTypeMicrosoftAzureFunction', 'TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftStorageTable', 'TypeBasicOutputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicOutputDataSource `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for ServiceBusQueueOutputDataSource. -func (sbqods ServiceBusQueueOutputDataSource) MarshalJSON() ([]byte, error) { - sbqods.Type = TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue - objectMap := make(map[string]interface{}) - if sbqods.ServiceBusQueueOutputDataSourceProperties != nil { - objectMap["properties"] = sbqods.ServiceBusQueueOutputDataSourceProperties - } - if sbqods.Type != "" { - objectMap["type"] = sbqods.Type - } - return json.Marshal(objectMap) -} - -// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. -func (sbqods ServiceBusQueueOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { - return nil, false -} - -// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. -func (sbqods ServiceBusQueueOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. -func (sbqods ServiceBusQueueOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. -func (sbqods ServiceBusQueueOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { - return &sbqods, true -} - -// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. -func (sbqods ServiceBusQueueOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) { - return nil, false -} - -// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. 
-func (sbqods ServiceBusQueueOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { - return nil, false -} - -// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. -func (sbqods ServiceBusQueueOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) { - return nil, false -} - -// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. -func (sbqods ServiceBusQueueOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { - return nil, false -} - -// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. -func (sbqods ServiceBusQueueOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) { - return nil, false -} - -// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. -func (sbqods ServiceBusQueueOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { - return nil, false -} - -// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. -func (sbqods ServiceBusQueueOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { - return nil, false -} - -// AsBlobOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. -func (sbqods ServiceBusQueueOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { - return nil, false -} - -// AsOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. -func (sbqods ServiceBusQueueOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { - return nil, false -} - -// AsBasicOutputDataSource is the BasicOutputDataSource implementation for ServiceBusQueueOutputDataSource. -func (sbqods ServiceBusQueueOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { - return &sbqods, true -} - -// UnmarshalJSON is the custom unmarshaler for ServiceBusQueueOutputDataSource struct. -func (sbqods *ServiceBusQueueOutputDataSource) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var serviceBusQueueOutputDataSourceProperties ServiceBusQueueOutputDataSourceProperties - err = json.Unmarshal(*v, &serviceBusQueueOutputDataSourceProperties) - if err != nil { - return err - } - sbqods.ServiceBusQueueOutputDataSourceProperties = &serviceBusQueueOutputDataSourceProperties - } - case "type": - if v != nil { - var typeVar TypeBasicOutputDataSource - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - sbqods.Type = typeVar - } - } - } - - return nil -} - -// ServiceBusQueueOutputDataSourceProperties the properties that are associated with a Service Bus Queue -// output. -type ServiceBusQueueOutputDataSourceProperties struct { - // QueueName - The name of the Service Bus Queue. Required on PUT (CreateOrReplace) requests. - QueueName *string `json:"queueName,omitempty"` - // PropertyColumns - A string array of the names of output columns to be attached to Service Bus messages as custom properties. 
- PropertyColumns *[]string `json:"propertyColumns,omitempty"` - // SystemPropertyColumns - The system properties associated with the Service Bus Queue. The following system properties are supported: ReplyToSessionId, ContentType, To, Subject, CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, ReplyTo, Label, ScheduledEnqueueTimeUtc. - SystemPropertyColumns interface{} `json:"systemPropertyColumns,omitempty"` - // ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` - // SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` - // SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` - // AuthenticationMode - Authentication Mode. Possible values include: 'AuthenticationModeMsi', 'AuthenticationModeUserToken', 'AuthenticationModeConnectionString' - AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"` -} - -// ServiceBusTopicOutputDataSource describes a Service Bus Topic output data source. -type ServiceBusTopicOutputDataSource struct { - // ServiceBusTopicOutputDataSourceProperties - The properties that are associated with a Service Bus Topic output. Required on PUT (CreateOrReplace) requests. - *ServiceBusTopicOutputDataSourceProperties `json:"properties,omitempty"` - // Type - Possible values include: 'TypeBasicOutputDataSourceTypeOutputDataSource', 'TypeBasicOutputDataSourceTypeMicrosoftDataLakeAccounts', 'TypeBasicOutputDataSourceTypePowerBI', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusQueue', 'TypeBasicOutputDataSourceTypeMicrosoftAzureFunction', 'TypeBasicOutputDataSourceTypeMicrosoftStorageDocumentDB', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDataWarehouse', 'TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase', 'TypeBasicOutputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicOutputDataSourceTypeMicrosoftStorageTable', 'TypeBasicOutputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicOutputDataSource `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for ServiceBusTopicOutputDataSource. -func (sbtods ServiceBusTopicOutputDataSource) MarshalJSON() ([]byte, error) { - sbtods.Type = TypeBasicOutputDataSourceTypeMicrosoftServiceBusTopic - objectMap := make(map[string]interface{}) - if sbtods.ServiceBusTopicOutputDataSourceProperties != nil { - objectMap["properties"] = sbtods.ServiceBusTopicOutputDataSourceProperties - } - if sbtods.Type != "" { - objectMap["type"] = sbtods.Type - } - return json.Marshal(objectMap) -} - -// AsAzureDataLakeStoreOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. -func (sbtods ServiceBusTopicOutputDataSource) AsAzureDataLakeStoreOutputDataSource() (*AzureDataLakeStoreOutputDataSource, bool) { - return nil, false -} - -// AsPowerBIOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. 
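// Illustrative construction (an editorial sketch; names and the key are
// placeholders): note that SystemPropertyColumns is an untyped interface{}
// on the queue variant, so a plain map can be supplied directly.
//
//	qp := ServiceBusQueueOutputDataSourceProperties{
//		QueueName:              to.StringPtr("example-queue"),
//		PropertyColumns:        &[]string{"column1"},
//		SystemPropertyColumns:  map[string]interface{}{"MessageId": "messageIdColumn"},
//		ServiceBusNamespace:    to.StringPtr("example-namespace"),
//		SharedAccessPolicyName: to.StringPtr("RootManageSharedAccessKey"),
//		SharedAccessPolicyKey:  to.StringPtr("placeholder-key"),
//	}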
-func (sbtods ServiceBusTopicOutputDataSource) AsPowerBIOutputDataSource() (*PowerBIOutputDataSource, bool) { - return nil, false -} - -// AsServiceBusTopicOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. -func (sbtods ServiceBusTopicOutputDataSource) AsServiceBusTopicOutputDataSource() (*ServiceBusTopicOutputDataSource, bool) { - return &sbtods, true -} - -// AsServiceBusQueueOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. -func (sbtods ServiceBusTopicOutputDataSource) AsServiceBusQueueOutputDataSource() (*ServiceBusQueueOutputDataSource, bool) { - return nil, false -} - -// AsAzureFunctionOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. -func (sbtods ServiceBusTopicOutputDataSource) AsAzureFunctionOutputDataSource() (*AzureFunctionOutputDataSource, bool) { - return nil, false -} - -// AsDocumentDbOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. -func (sbtods ServiceBusTopicOutputDataSource) AsDocumentDbOutputDataSource() (*DocumentDbOutputDataSource, bool) { - return nil, false -} - -// AsAzureSynapseOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. -func (sbtods ServiceBusTopicOutputDataSource) AsAzureSynapseOutputDataSource() (*AzureSynapseOutputDataSource, bool) { - return nil, false -} - -// AsAzureSQLDatabaseOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. -func (sbtods ServiceBusTopicOutputDataSource) AsAzureSQLDatabaseOutputDataSource() (*AzureSQLDatabaseOutputDataSource, bool) { - return nil, false -} - -// AsEventHubV2OutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. -func (sbtods ServiceBusTopicOutputDataSource) AsEventHubV2OutputDataSource() (*EventHubV2OutputDataSource, bool) { - return nil, false -} - -// AsEventHubOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. -func (sbtods ServiceBusTopicOutputDataSource) AsEventHubOutputDataSource() (*EventHubOutputDataSource, bool) { - return nil, false -} - -// AsAzureTableOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. -func (sbtods ServiceBusTopicOutputDataSource) AsAzureTableOutputDataSource() (*AzureTableOutputDataSource, bool) { - return nil, false -} - -// AsBlobOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. -func (sbtods ServiceBusTopicOutputDataSource) AsBlobOutputDataSource() (*BlobOutputDataSource, bool) { - return nil, false -} - -// AsOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. -func (sbtods ServiceBusTopicOutputDataSource) AsOutputDataSource() (*OutputDataSource, bool) { - return nil, false -} - -// AsBasicOutputDataSource is the BasicOutputDataSource implementation for ServiceBusTopicOutputDataSource. -func (sbtods ServiceBusTopicOutputDataSource) AsBasicOutputDataSource() (BasicOutputDataSource, bool) { - return &sbtods, true -} - -// UnmarshalJSON is the custom unmarshaler for ServiceBusTopicOutputDataSource struct. 
-func (sbtods *ServiceBusTopicOutputDataSource) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var serviceBusTopicOutputDataSourceProperties ServiceBusTopicOutputDataSourceProperties - err = json.Unmarshal(*v, &serviceBusTopicOutputDataSourceProperties) - if err != nil { - return err - } - sbtods.ServiceBusTopicOutputDataSourceProperties = &serviceBusTopicOutputDataSourceProperties - } - case "type": - if v != nil { - var typeVar TypeBasicOutputDataSource - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - sbtods.Type = typeVar - } - } - } - - return nil -} - -// ServiceBusTopicOutputDataSourceProperties the properties that are associated with a Service Bus Topic -// output. -type ServiceBusTopicOutputDataSourceProperties struct { - // TopicName - The name of the Service Bus Topic. Required on PUT (CreateOrReplace) requests. - TopicName *string `json:"topicName,omitempty"` - // PropertyColumns - A string array of the names of output columns to be attached to Service Bus messages as custom properties. - PropertyColumns *[]string `json:"propertyColumns,omitempty"` - // SystemPropertyColumns - The system properties associated with the Service Bus Topic Output. The following system properties are supported: ReplyToSessionId, ContentType, To, Subject, CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, ReplyTo, Label, ScheduledEnqueueTimeUtc. - SystemPropertyColumns map[string]*string `json:"systemPropertyColumns"` - // ServiceBusNamespace - The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - ServiceBusNamespace *string `json:"serviceBusNamespace,omitempty"` - // SharedAccessPolicyName - The shared access policy name for the Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. - SharedAccessPolicyName *string `json:"sharedAccessPolicyName,omitempty"` - // SharedAccessPolicyKey - The shared access policy key for the specified shared access policy. Required on PUT (CreateOrReplace) requests. - SharedAccessPolicyKey *string `json:"sharedAccessPolicyKey,omitempty"` - // AuthenticationMode - Authentication Mode. Possible values include: 'AuthenticationModeMsi', 'AuthenticationModeUserToken', 'AuthenticationModeConnectionString' - AuthenticationMode AuthenticationMode `json:"authenticationMode,omitempty"` -} - -// MarshalJSON is the custom marshaler for ServiceBusTopicOutputDataSourceProperties. 
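// Illustrative construction (an editorial sketch; names are placeholders):
// unlike the queue variant's interface{} field, the topic variant types
// SystemPropertyColumns as map[string]*string, as declared above.
//
//	tp := ServiceBusTopicOutputDataSourceProperties{
//		TopicName: to.StringPtr("example-topic"),
//		SystemPropertyColumns: map[string]*string{
//			"MessageId": to.StringPtr("messageIdColumn"),
//		},
//	}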
-func (sbtodsp ServiceBusTopicOutputDataSourceProperties) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - if sbtodsp.TopicName != nil { - objectMap["topicName"] = sbtodsp.TopicName - } - if sbtodsp.PropertyColumns != nil { - objectMap["propertyColumns"] = sbtodsp.PropertyColumns - } - if sbtodsp.SystemPropertyColumns != nil { - objectMap["systemPropertyColumns"] = sbtodsp.SystemPropertyColumns - } - if sbtodsp.ServiceBusNamespace != nil { - objectMap["serviceBusNamespace"] = sbtodsp.ServiceBusNamespace - } - if sbtodsp.SharedAccessPolicyName != nil { - objectMap["sharedAccessPolicyName"] = sbtodsp.SharedAccessPolicyName - } - if sbtodsp.SharedAccessPolicyKey != nil { - objectMap["sharedAccessPolicyKey"] = sbtodsp.SharedAccessPolicyKey - } - if sbtodsp.AuthenticationMode != "" { - objectMap["authenticationMode"] = sbtodsp.AuthenticationMode - } - return json.Marshal(objectMap) -} - -// Sku the properties that are associated with a SKU. -type Sku struct { - // Name - The name of the SKU. Required on PUT (CreateOrReplace) requests. Possible values include: 'SkuNameStandard' - Name SkuName `json:"name,omitempty"` -} - -// StartStreamingJobParameters parameters supplied to the Start Streaming Job operation. -type StartStreamingJobParameters struct { - // OutputStartMode - Value may be JobStartTime, CustomTime, or LastOutputEventTime to indicate whether the starting point of the output event stream should start whenever the job is started, start at a custom user time stamp specified via the outputStartTime property, or start from the last event output time. Possible values include: 'OutputStartModeJobStartTime', 'OutputStartModeCustomTime', 'OutputStartModeLastOutputEventTime' - OutputStartMode OutputStartMode `json:"outputStartMode,omitempty"` - // OutputStartTime - Value is either an ISO-8601 formatted time stamp that indicates the starting point of the output event stream, or null to indicate that the output event stream will start whenever the streaming job is started. This property must have a value if outputStartMode is set to CustomTime. - OutputStartTime *date.Time `json:"outputStartTime,omitempty"` -} - -// StorageAccount the properties that are associated with an Azure Storage account -type StorageAccount struct { - // AccountName - The name of the Azure Storage account. Required on PUT (CreateOrReplace) requests. - AccountName *string `json:"accountName,omitempty"` - // AccountKey - The account key for the Azure Storage account. Required on PUT (CreateOrReplace) requests. - AccountKey *string `json:"accountKey,omitempty"` -} - -// StreamingJob a streaming job object, containing all information associated with the named streaming job. -type StreamingJob struct { - autorest.Response `json:"-"` - // StreamingJobProperties - The properties that are associated with a streaming job. Required on PUT (CreateOrReplace) requests. - *StreamingJobProperties `json:"properties,omitempty"` - // Identity - Describes the system-assigned managed identity assigned to this job that can be used to authenticate with inputs and outputs. - Identity *Identity `json:"identity,omitempty"` - // Tags - Resource tags. - Tags map[string]*string `json:"tags"` - // Location - The geo-location where the resource lives - Location *string `json:"location,omitempty"` - // ID - READ-ONLY; Fully qualified resource Id for the resource. 
Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} - ID *string `json:"id,omitempty"` - // Name - READ-ONLY; The name of the resource - Name *string `json:"name,omitempty"` - // Type - READ-ONLY; The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. - Type *string `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for StreamingJob. -func (sj StreamingJob) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - if sj.StreamingJobProperties != nil { - objectMap["properties"] = sj.StreamingJobProperties - } - if sj.Identity != nil { - objectMap["identity"] = sj.Identity - } - if sj.Tags != nil { - objectMap["tags"] = sj.Tags - } - if sj.Location != nil { - objectMap["location"] = sj.Location - } - return json.Marshal(objectMap) -} - -// UnmarshalJSON is the custom unmarshaler for StreamingJob struct. -func (sj *StreamingJob) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var streamingJobProperties StreamingJobProperties - err = json.Unmarshal(*v, &streamingJobProperties) - if err != nil { - return err - } - sj.StreamingJobProperties = &streamingJobProperties - } - case "identity": - if v != nil { - var identity Identity - err = json.Unmarshal(*v, &identity) - if err != nil { - return err - } - sj.Identity = &identity - } - case "tags": - if v != nil { - var tags map[string]*string - err = json.Unmarshal(*v, &tags) - if err != nil { - return err - } - sj.Tags = tags - } - case "location": - if v != nil { - var location string - err = json.Unmarshal(*v, &location) - if err != nil { - return err - } - sj.Location = &location - } - case "id": - if v != nil { - var ID string - err = json.Unmarshal(*v, &ID) - if err != nil { - return err - } - sj.ID = &ID - } - case "name": - if v != nil { - var name string - err = json.Unmarshal(*v, &name) - if err != nil { - return err - } - sj.Name = &name - } - case "type": - if v != nil { - var typeVar string - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - sj.Type = &typeVar - } - } - } - - return nil -} - -// StreamingJobListResult object containing a list of streaming jobs. -type StreamingJobListResult struct { - autorest.Response `json:"-"` - // Value - READ-ONLY; A list of streaming jobs. Populated by a 'List' operation. - Value *[]StreamingJob `json:"value,omitempty"` - // NextLink - READ-ONLY; The link (url) to the next page of results. - NextLink *string `json:"nextLink,omitempty"` -} - -// MarshalJSON is the custom marshaler for StreamingJobListResult. -func (sjlr StreamingJobListResult) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - return json.Marshal(objectMap) -} - -// StreamingJobListResultIterator provides access to a complete listing of StreamingJob values. -type StreamingJobListResultIterator struct { - i int - page StreamingJobListResultPage -} - -// NextWithContext advances to the next value. If there was an error making -// the request the iterator does not advance and the error is returned. 
-func (iter *StreamingJobListResultIterator) NextWithContext(ctx context.Context) (err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobListResultIterator.NextWithContext") - defer func() { - sc := -1 - if iter.Response().Response.Response != nil { - sc = iter.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - iter.i++ - if iter.i < len(iter.page.Values()) { - return nil - } - err = iter.page.NextWithContext(ctx) - if err != nil { - iter.i-- - return err - } - iter.i = 0 - return nil -} - -// Next advances to the next value. If there was an error making -// the request the iterator does not advance and the error is returned. -// Deprecated: Use NextWithContext() instead. -func (iter *StreamingJobListResultIterator) Next() error { - return iter.NextWithContext(context.Background()) -} - -// NotDone returns true if the enumeration should be started or is not yet complete. -func (iter StreamingJobListResultIterator) NotDone() bool { - return iter.page.NotDone() && iter.i < len(iter.page.Values()) -} - -// Response returns the raw server response from the last page request. -func (iter StreamingJobListResultIterator) Response() StreamingJobListResult { - return iter.page.Response() -} - -// Value returns the current value or a zero-initialized value if the -// iterator has advanced beyond the end of the collection. -func (iter StreamingJobListResultIterator) Value() StreamingJob { - if !iter.page.NotDone() { - return StreamingJob{} - } - return iter.page.Values()[iter.i] -} - -// Creates a new instance of the StreamingJobListResultIterator type. -func NewStreamingJobListResultIterator(page StreamingJobListResultPage) StreamingJobListResultIterator { - return StreamingJobListResultIterator{page: page} -} - -// IsEmpty returns true if the ListResult contains no values. -func (sjlr StreamingJobListResult) IsEmpty() bool { - return sjlr.Value == nil || len(*sjlr.Value) == 0 -} - -// hasNextLink returns true if the NextLink is not empty. -func (sjlr StreamingJobListResult) hasNextLink() bool { - return sjlr.NextLink != nil && len(*sjlr.NextLink) != 0 -} - -// streamingJobListResultPreparer prepares a request to retrieve the next set of results. -// It returns nil if no more results exist. -func (sjlr StreamingJobListResult) streamingJobListResultPreparer(ctx context.Context) (*http.Request, error) { - if !sjlr.hasNextLink() { - return nil, nil - } - return autorest.Prepare((&http.Request{}).WithContext(ctx), - autorest.AsJSON(), - autorest.AsGet(), - autorest.WithBaseURL(to.String(sjlr.NextLink))) -} - -// StreamingJobListResultPage contains a page of StreamingJob values. -type StreamingJobListResultPage struct { - fn func(context.Context, StreamingJobListResult) (StreamingJobListResult, error) - sjlr StreamingJobListResult -} - -// NextWithContext advances to the next page of values. If there was an error making -// the request the page does not advance and the error is returned. 
-func (page *StreamingJobListResultPage) NextWithContext(ctx context.Context) (err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobListResultPage.NextWithContext") - defer func() { - sc := -1 - if page.Response().Response.Response != nil { - sc = page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - for { - next, err := page.fn(ctx, page.sjlr) - if err != nil { - return err - } - page.sjlr = next - if !next.hasNextLink() || !next.IsEmpty() { - break - } - } - return nil -} - -// Next advances to the next page of values. If there was an error making -// the request the page does not advance and the error is returned. -// Deprecated: Use NextWithContext() instead. -func (page *StreamingJobListResultPage) Next() error { - return page.NextWithContext(context.Background()) -} - -// NotDone returns true if the page enumeration should be started or is not yet complete. -func (page StreamingJobListResultPage) NotDone() bool { - return !page.sjlr.IsEmpty() -} - -// Response returns the raw server response from the last page request. -func (page StreamingJobListResultPage) Response() StreamingJobListResult { - return page.sjlr -} - -// Values returns the slice of values for the current page or nil if there are no values. -func (page StreamingJobListResultPage) Values() []StreamingJob { - if page.sjlr.IsEmpty() { - return nil - } - return *page.sjlr.Value -} - -// Creates a new instance of the StreamingJobListResultPage type. -func NewStreamingJobListResultPage(cur StreamingJobListResult, getNextPage func(context.Context, StreamingJobListResult) (StreamingJobListResult, error)) StreamingJobListResultPage { - return StreamingJobListResultPage{ - fn: getNextPage, - sjlr: cur, - } -} - -// StreamingJobProperties the properties that are associated with a streaming job. -type StreamingJobProperties struct { - // Sku - Describes the SKU of the streaming job. Required on PUT (CreateOrReplace) requests. - Sku *Sku `json:"sku,omitempty"` - // JobID - READ-ONLY; A GUID uniquely identifying the streaming job. This GUID is generated upon creation of the streaming job. - JobID *string `json:"jobId,omitempty"` - // ProvisioningState - READ-ONLY; Describes the provisioning status of the streaming job. - ProvisioningState *string `json:"provisioningState,omitempty"` - // JobState - READ-ONLY; Describes the state of the streaming job. - JobState *string `json:"jobState,omitempty"` - // JobType - Describes the type of the job. Valid modes are `Cloud` and 'Edge'. Possible values include: 'JobTypeCloud', 'JobTypeEdge' - JobType JobType `json:"jobType,omitempty"` - // OutputStartMode - This property should only be utilized when it is desired that the job be started immediately upon creation. Value may be JobStartTime, CustomTime, or LastOutputEventTime to indicate whether the starting point of the output event stream should start whenever the job is started, start at a custom user time stamp specified via the outputStartTime property, or start from the last event output time. Possible values include: 'OutputStartModeJobStartTime', 'OutputStartModeCustomTime', 'OutputStartModeLastOutputEventTime' - OutputStartMode OutputStartMode `json:"outputStartMode,omitempty"` - // OutputStartTime - Value is either an ISO-8601 formatted time stamp that indicates the starting point of the output event stream, or null to indicate that the output event stream will start whenever the streaming job is started. 
This property must have a value if outputStartMode is set to CustomTime. - OutputStartTime *date.Time `json:"outputStartTime,omitempty"` - // LastOutputEventTime - READ-ONLY; Value is either an ISO-8601 formatted timestamp indicating the last output event time of the streaming job or null indicating that output has not yet been produced. In case of multiple outputs or multiple streams, this shows the latest value in that set. - LastOutputEventTime *date.Time `json:"lastOutputEventTime,omitempty"` - // EventsOutOfOrderPolicy - Indicates the policy to apply to events that arrive out of order in the input event stream. Possible values include: 'EventsOutOfOrderPolicyAdjust', 'EventsOutOfOrderPolicyDrop' - EventsOutOfOrderPolicy EventsOutOfOrderPolicy `json:"eventsOutOfOrderPolicy,omitempty"` - // OutputErrorPolicy - Indicates the policy to apply to events that arrive at the output and cannot be written to the external storage due to being malformed (missing column values, column values of wrong type or size). Possible values include: 'OutputErrorPolicyStop', 'OutputErrorPolicyDrop' - OutputErrorPolicy OutputErrorPolicy `json:"outputErrorPolicy,omitempty"` - // EventsOutOfOrderMaxDelayInSeconds - The maximum tolerable delay in seconds where out-of-order events can be adjusted to be back in order. - EventsOutOfOrderMaxDelayInSeconds *int32 `json:"eventsOutOfOrderMaxDelayInSeconds,omitempty"` - // EventsLateArrivalMaxDelayInSeconds - The maximum tolerable delay in seconds where events arriving late could be included. Supported range is -1 to 1814399 (20.23:59:59 days) and -1 is used to specify wait indefinitely. If the property is absent, it is interpreted to have a value of -1. - EventsLateArrivalMaxDelayInSeconds *int32 `json:"eventsLateArrivalMaxDelayInSeconds,omitempty"` - // DataLocale - The data locale of the stream analytics job. Value should be the name of a supported .NET Culture from the set https://msdn.microsoft.com/en-us/library/system.globalization.culturetypes(v=vs.110).aspx. Defaults to 'en-US' if none specified. - DataLocale *string `json:"dataLocale,omitempty"` - // CompatibilityLevel - Controls certain runtime behaviors of the streaming job. Possible values include: 'CompatibilityLevelOneFullStopZero', 'CompatibilityLevelOneFullStopTwo' - CompatibilityLevel CompatibilityLevel `json:"compatibilityLevel,omitempty"` - // CreatedDate - READ-ONLY; Value is an ISO-8601 formatted UTC timestamp indicating when the streaming job was created. - CreatedDate *date.Time `json:"createdDate,omitempty"` - // Inputs - A list of one or more inputs to the streaming job. The name property for each input is required when specifying this property in a PUT request. This property cannot be modify via a PATCH operation. You must use the PATCH API available for the individual input. - Inputs *[]Input `json:"inputs,omitempty"` - // Transformation - Indicates the query and the number of streaming units to use for the streaming job. The name property of the transformation is required when specifying this property in a PUT request. This property cannot be modify via a PATCH operation. You must use the PATCH API available for the individual transformation. - Transformation *Transformation `json:"transformation,omitempty"` - // Outputs - A list of one or more outputs for the streaming job. The name property for each output is required when specifying this property in a PUT request. This property cannot be modify via a PATCH operation. You must use the PATCH API available for the individual output. 
- Outputs *[]Output `json:"outputs,omitempty"` - // Functions - A list of one or more functions for the streaming job. The name property for each function is required when specifying this property in a PUT request. This property cannot be modify via a PATCH operation. You must use the PATCH API available for the individual transformation. - Functions *[]Function `json:"functions,omitempty"` - // Etag - READ-ONLY; The current entity tag for the streaming job. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. - Etag *string `json:"etag,omitempty"` - // JobStorageAccount - The properties that are associated with an Azure Storage account with MSI - JobStorageAccount *JobStorageAccount `json:"jobStorageAccount,omitempty"` - // ContentStoragePolicy - Valid values are JobStorageAccount and SystemAccount. If set to JobStorageAccount, this requires the user to also specify jobStorageAccount property. Possible values include: 'ContentStoragePolicySystemAccount', 'ContentStoragePolicyJobStorageAccount' - ContentStoragePolicy ContentStoragePolicy `json:"contentStoragePolicy,omitempty"` - // Cluster - The cluster which streaming jobs will run on. - Cluster *ClusterInfo `json:"cluster,omitempty"` -} - -// MarshalJSON is the custom marshaler for StreamingJobProperties. -func (sjp StreamingJobProperties) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - if sjp.Sku != nil { - objectMap["sku"] = sjp.Sku - } - if sjp.JobType != "" { - objectMap["jobType"] = sjp.JobType - } - if sjp.OutputStartMode != "" { - objectMap["outputStartMode"] = sjp.OutputStartMode - } - if sjp.OutputStartTime != nil { - objectMap["outputStartTime"] = sjp.OutputStartTime - } - if sjp.EventsOutOfOrderPolicy != "" { - objectMap["eventsOutOfOrderPolicy"] = sjp.EventsOutOfOrderPolicy - } - if sjp.OutputErrorPolicy != "" { - objectMap["outputErrorPolicy"] = sjp.OutputErrorPolicy - } - if sjp.EventsOutOfOrderMaxDelayInSeconds != nil { - objectMap["eventsOutOfOrderMaxDelayInSeconds"] = sjp.EventsOutOfOrderMaxDelayInSeconds - } - if sjp.EventsLateArrivalMaxDelayInSeconds != nil { - objectMap["eventsLateArrivalMaxDelayInSeconds"] = sjp.EventsLateArrivalMaxDelayInSeconds - } - if sjp.DataLocale != nil { - objectMap["dataLocale"] = sjp.DataLocale - } - if sjp.CompatibilityLevel != "" { - objectMap["compatibilityLevel"] = sjp.CompatibilityLevel - } - if sjp.Inputs != nil { - objectMap["inputs"] = sjp.Inputs - } - if sjp.Transformation != nil { - objectMap["transformation"] = sjp.Transformation - } - if sjp.Outputs != nil { - objectMap["outputs"] = sjp.Outputs - } - if sjp.Functions != nil { - objectMap["functions"] = sjp.Functions - } - if sjp.JobStorageAccount != nil { - objectMap["jobStorageAccount"] = sjp.JobStorageAccount - } - if sjp.ContentStoragePolicy != "" { - objectMap["contentStoragePolicy"] = sjp.ContentStoragePolicy - } - if sjp.Cluster != nil { - objectMap["cluster"] = sjp.Cluster - } - return json.Marshal(objectMap) -} - -// StreamingJobsCreateOrReplaceFuture an abstraction for monitoring and retrieving the results of a -// long-running operation. -type StreamingJobsCreateOrReplaceFuture struct { - azure.FutureAPI - // Result returns the result of the asynchronous operation. - // If the operation has not completed it will return an error. 
- Result func(StreamingJobsClient) (StreamingJob, error) -} - -// UnmarshalJSON is the custom unmarshaller for CreateFuture. -func (future *StreamingJobsCreateOrReplaceFuture) UnmarshalJSON(body []byte) error { - var azFuture azure.Future - if err := json.Unmarshal(body, &azFuture); err != nil { - return err - } - future.FutureAPI = &azFuture - future.Result = future.result - return nil -} - -// result is the default implementation for StreamingJobsCreateOrReplaceFuture.Result. -func (future *StreamingJobsCreateOrReplaceFuture) result(client StreamingJobsClient) (sj StreamingJob, err error) { - var done bool - done, err = future.DoneWithContext(context.Background(), client) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsCreateOrReplaceFuture", "Result", future.Response(), "Polling failure") - return - } - if !done { - sj.Response.Response = future.Response() - err = azure.NewAsyncOpIncompleteError("streamanalytics.StreamingJobsCreateOrReplaceFuture") - return - } - sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) - if sj.Response.Response, err = future.GetResult(sender); err == nil && sj.Response.Response.StatusCode != http.StatusNoContent { - sj, err = client.CreateOrReplaceResponder(sj.Response.Response) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsCreateOrReplaceFuture", "Result", sj.Response.Response, "Failure responding to request") - } - } - return -} - -// StreamingJobsDeleteFuture an abstraction for monitoring and retrieving the results of a long-running -// operation. -type StreamingJobsDeleteFuture struct { - azure.FutureAPI - // Result returns the result of the asynchronous operation. - // If the operation has not completed it will return an error. - Result func(StreamingJobsClient) (autorest.Response, error) -} - -// UnmarshalJSON is the custom unmarshaller for CreateFuture. -func (future *StreamingJobsDeleteFuture) UnmarshalJSON(body []byte) error { - var azFuture azure.Future - if err := json.Unmarshal(body, &azFuture); err != nil { - return err - } - future.FutureAPI = &azFuture - future.Result = future.result - return nil -} - -// result is the default implementation for StreamingJobsDeleteFuture.Result. -func (future *StreamingJobsDeleteFuture) result(client StreamingJobsClient) (ar autorest.Response, err error) { - var done bool - done, err = future.DoneWithContext(context.Background(), client) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsDeleteFuture", "Result", future.Response(), "Polling failure") - return - } - if !done { - ar.Response = future.Response() - err = azure.NewAsyncOpIncompleteError("streamanalytics.StreamingJobsDeleteFuture") - return - } - ar.Response = future.Response() - return -} - -// StreamingJobsScaleFuture an abstraction for monitoring and retrieving the results of a long-running -// operation. -type StreamingJobsScaleFuture struct { - azure.FutureAPI - // Result returns the result of the asynchronous operation. - // If the operation has not completed it will return an error. - Result func(StreamingJobsClient) (autorest.Response, error) -} - -// UnmarshalJSON is the custom unmarshaller for CreateFuture. 
-func (future *StreamingJobsScaleFuture) UnmarshalJSON(body []byte) error { - var azFuture azure.Future - if err := json.Unmarshal(body, &azFuture); err != nil { - return err - } - future.FutureAPI = &azFuture - future.Result = future.result - return nil -} - -// result is the default implementation for StreamingJobsScaleFuture.Result. -func (future *StreamingJobsScaleFuture) result(client StreamingJobsClient) (ar autorest.Response, err error) { - var done bool - done, err = future.DoneWithContext(context.Background(), client) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsScaleFuture", "Result", future.Response(), "Polling failure") - return - } - if !done { - ar.Response = future.Response() - err = azure.NewAsyncOpIncompleteError("streamanalytics.StreamingJobsScaleFuture") - return - } - ar.Response = future.Response() - return -} - -// StreamingJobsStartFuture an abstraction for monitoring and retrieving the results of a long-running -// operation. -type StreamingJobsStartFuture struct { - azure.FutureAPI - // Result returns the result of the asynchronous operation. - // If the operation has not completed it will return an error. - Result func(StreamingJobsClient) (autorest.Response, error) -} - -// UnmarshalJSON is the custom unmarshaller for CreateFuture. -func (future *StreamingJobsStartFuture) UnmarshalJSON(body []byte) error { - var azFuture azure.Future - if err := json.Unmarshal(body, &azFuture); err != nil { - return err - } - future.FutureAPI = &azFuture - future.Result = future.result - return nil -} - -// result is the default implementation for StreamingJobsStartFuture.Result. -func (future *StreamingJobsStartFuture) result(client StreamingJobsClient) (ar autorest.Response, err error) { - var done bool - done, err = future.DoneWithContext(context.Background(), client) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsStartFuture", "Result", future.Response(), "Polling failure") - return - } - if !done { - ar.Response = future.Response() - err = azure.NewAsyncOpIncompleteError("streamanalytics.StreamingJobsStartFuture") - return - } - ar.Response = future.Response() - return -} - -// StreamingJobsStopFuture an abstraction for monitoring and retrieving the results of a long-running -// operation. -type StreamingJobsStopFuture struct { - azure.FutureAPI - // Result returns the result of the asynchronous operation. - // If the operation has not completed it will return an error. - Result func(StreamingJobsClient) (autorest.Response, error) -} - -// UnmarshalJSON is the custom unmarshaller for CreateFuture. -func (future *StreamingJobsStopFuture) UnmarshalJSON(body []byte) error { - var azFuture azure.Future - if err := json.Unmarshal(body, &azFuture); err != nil { - return err - } - future.FutureAPI = &azFuture - future.Result = future.result - return nil -} - -// result is the default implementation for StreamingJobsStopFuture.Result. 
-func (future *StreamingJobsStopFuture) result(client StreamingJobsClient) (ar autorest.Response, err error) { - var done bool - done, err = future.DoneWithContext(context.Background(), client) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsStopFuture", "Result", future.Response(), "Polling failure") - return - } - if !done { - ar.Response = future.Response() - err = azure.NewAsyncOpIncompleteError("streamanalytics.StreamingJobsStopFuture") - return - } - ar.Response = future.Response() - return -} - -// BasicStreamInputDataSource describes an input data source that contains stream data. -type BasicStreamInputDataSource interface { - AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) - AsEventHubV2StreamInputDataSource() (*EventHubV2StreamInputDataSource, bool) - AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) - AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) - AsStreamInputDataSource() (*StreamInputDataSource, bool) -} - -// StreamInputDataSource describes an input data source that contains stream data. -type StreamInputDataSource struct { - // Type - Possible values include: 'TypeBasicStreamInputDataSourceTypeStreamInputDataSource', 'TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs', 'TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub', 'TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob' - Type TypeBasicStreamInputDataSource `json:"type,omitempty"` -} - -func unmarshalBasicStreamInputDataSource(body []byte) (BasicStreamInputDataSource, error) { - var m map[string]interface{} - err := json.Unmarshal(body, &m) - if err != nil { - return nil, err - } - - switch m["type"] { - case string(TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs): - var ithsids IoTHubStreamInputDataSource - err := json.Unmarshal(body, &ithsids) - return ithsids, err - case string(TypeBasicStreamInputDataSourceTypeMicrosoftEventHubEventHub): - var ehvsids EventHubV2StreamInputDataSource - err := json.Unmarshal(body, &ehvsids) - return ehvsids, err - case string(TypeBasicStreamInputDataSourceTypeMicrosoftServiceBusEventHub): - var ehsids EventHubStreamInputDataSource - err := json.Unmarshal(body, &ehsids) - return ehsids, err - case string(TypeBasicStreamInputDataSourceTypeMicrosoftStorageBlob): - var bsids BlobStreamInputDataSource - err := json.Unmarshal(body, &bsids) - return bsids, err - default: - var sids StreamInputDataSource - err := json.Unmarshal(body, &sids) - return sids, err - } -} -func unmarshalBasicStreamInputDataSourceArray(body []byte) ([]BasicStreamInputDataSource, error) { - var rawMessages []*json.RawMessage - err := json.Unmarshal(body, &rawMessages) - if err != nil { - return nil, err - } - - sidsArray := make([]BasicStreamInputDataSource, len(rawMessages)) - - for index, rawMessage := range rawMessages { - sids, err := unmarshalBasicStreamInputDataSource(*rawMessage) - if err != nil { - return nil, err - } - sidsArray[index] = sids - } - return sidsArray, nil -} - -// MarshalJSON is the custom marshaler for StreamInputDataSource. 
-func (sids StreamInputDataSource) MarshalJSON() ([]byte, error) { - sids.Type = TypeBasicStreamInputDataSourceTypeStreamInputDataSource - objectMap := make(map[string]interface{}) - if sids.Type != "" { - objectMap["type"] = sids.Type - } - return json.Marshal(objectMap) -} - -// AsIoTHubStreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. -func (sids StreamInputDataSource) AsIoTHubStreamInputDataSource() (*IoTHubStreamInputDataSource, bool) { - return nil, false -} - -// AsEventHubV2StreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. -func (sids StreamInputDataSource) AsEventHubV2StreamInputDataSource() (*EventHubV2StreamInputDataSource, bool) { - return nil, false -} - -// AsEventHubStreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. -func (sids StreamInputDataSource) AsEventHubStreamInputDataSource() (*EventHubStreamInputDataSource, bool) { - return nil, false -} - -// AsBlobStreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. -func (sids StreamInputDataSource) AsBlobStreamInputDataSource() (*BlobStreamInputDataSource, bool) { - return nil, false -} - -// AsStreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. -func (sids StreamInputDataSource) AsStreamInputDataSource() (*StreamInputDataSource, bool) { - return &sids, true -} - -// AsBasicStreamInputDataSource is the BasicStreamInputDataSource implementation for StreamInputDataSource. -func (sids StreamInputDataSource) AsBasicStreamInputDataSource() (BasicStreamInputDataSource, bool) { - return &sids, true -} - -// StreamInputProperties the properties that are associated with an input containing stream data. -type StreamInputProperties struct { - // Datasource - Describes an input data source that contains stream data. Required on PUT (CreateOrReplace) requests. - Datasource BasicStreamInputDataSource `json:"datasource,omitempty"` - // Serialization - Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. - Serialization BasicSerialization `json:"serialization,omitempty"` - // Diagnostics - READ-ONLY; Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. - Diagnostics *Diagnostics `json:"diagnostics,omitempty"` - // Etag - READ-ONLY; The current entity tag for the input. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. - Etag *string `json:"etag,omitempty"` - // Compression - Describes how input data is compressed - Compression *Compression `json:"compression,omitempty"` - // PartitionKey - partitionKey Describes a key in the input data which is used for partitioning the input data - PartitionKey *string `json:"partitionKey,omitempty"` - // Type - Possible values include: 'TypeBasicInputPropertiesTypeInputProperties', 'TypeBasicInputPropertiesTypeReference', 'TypeBasicInputPropertiesTypeStream' - Type TypeBasicInputProperties `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for StreamInputProperties. 
-func (sip StreamInputProperties) MarshalJSON() ([]byte, error) { - sip.Type = TypeBasicInputPropertiesTypeStream - objectMap := make(map[string]interface{}) - objectMap["datasource"] = sip.Datasource - objectMap["serialization"] = sip.Serialization - if sip.Compression != nil { - objectMap["compression"] = sip.Compression - } - if sip.PartitionKey != nil { - objectMap["partitionKey"] = sip.PartitionKey - } - if sip.Type != "" { - objectMap["type"] = sip.Type - } - return json.Marshal(objectMap) -} - -// AsReferenceInputProperties is the BasicInputProperties implementation for StreamInputProperties. -func (sip StreamInputProperties) AsReferenceInputProperties() (*ReferenceInputProperties, bool) { - return nil, false -} - -// AsStreamInputProperties is the BasicInputProperties implementation for StreamInputProperties. -func (sip StreamInputProperties) AsStreamInputProperties() (*StreamInputProperties, bool) { - return &sip, true -} - -// AsInputProperties is the BasicInputProperties implementation for StreamInputProperties. -func (sip StreamInputProperties) AsInputProperties() (*InputProperties, bool) { - return nil, false -} - -// AsBasicInputProperties is the BasicInputProperties implementation for StreamInputProperties. -func (sip StreamInputProperties) AsBasicInputProperties() (BasicInputProperties, bool) { - return &sip, true -} - -// UnmarshalJSON is the custom unmarshaler for StreamInputProperties struct. -func (sip *StreamInputProperties) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "datasource": - if v != nil { - datasource, err := unmarshalBasicStreamInputDataSource(*v) - if err != nil { - return err - } - sip.Datasource = datasource - } - case "serialization": - if v != nil { - serialization, err := unmarshalBasicSerialization(*v) - if err != nil { - return err - } - sip.Serialization = serialization - } - case "diagnostics": - if v != nil { - var diagnostics Diagnostics - err = json.Unmarshal(*v, &diagnostics) - if err != nil { - return err - } - sip.Diagnostics = &diagnostics - } - case "etag": - if v != nil { - var etag string - err = json.Unmarshal(*v, &etag) - if err != nil { - return err - } - sip.Etag = &etag - } - case "compression": - if v != nil { - var compression Compression - err = json.Unmarshal(*v, &compression) - if err != nil { - return err - } - sip.Compression = &compression - } - case "partitionKey": - if v != nil { - var partitionKey string - err = json.Unmarshal(*v, &partitionKey) - if err != nil { - return err - } - sip.PartitionKey = &partitionKey - } - case "type": - if v != nil { - var typeVar TypeBasicInputProperties - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - sip.Type = typeVar - } - } - } - - return nil -} - -// SubResource the base sub-resource model definition. -type SubResource struct { - // ID - READ-ONLY; Resource Id - ID *string `json:"id,omitempty"` - // Name - Resource name - Name *string `json:"name,omitempty"` - // Type - READ-ONLY; Resource type - Type *string `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for SubResource. -func (sr SubResource) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - if sr.Name != nil { - objectMap["name"] = sr.Name - } - return json.Marshal(objectMap) -} - -// SubscriptionQuota describes the current quota for the subscription. 
-type SubscriptionQuota struct { - // SubscriptionQuotaProperties - READ-ONLY; Describes the properties of the quota. - *SubscriptionQuotaProperties `json:"properties,omitempty"` - // ID - READ-ONLY; Resource Id - ID *string `json:"id,omitempty"` - // Name - Resource name - Name *string `json:"name,omitempty"` - // Type - READ-ONLY; Resource type - Type *string `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for SubscriptionQuota. -func (sq SubscriptionQuota) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - if sq.Name != nil { - objectMap["name"] = sq.Name - } - return json.Marshal(objectMap) -} - -// UnmarshalJSON is the custom unmarshaler for SubscriptionQuota struct. -func (sq *SubscriptionQuota) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var subscriptionQuotaProperties SubscriptionQuotaProperties - err = json.Unmarshal(*v, &subscriptionQuotaProperties) - if err != nil { - return err - } - sq.SubscriptionQuotaProperties = &subscriptionQuotaProperties - } - case "id": - if v != nil { - var ID string - err = json.Unmarshal(*v, &ID) - if err != nil { - return err - } - sq.ID = &ID - } - case "name": - if v != nil { - var name string - err = json.Unmarshal(*v, &name) - if err != nil { - return err - } - sq.Name = &name - } - case "type": - if v != nil { - var typeVar string - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - sq.Type = &typeVar - } - } - } - - return nil -} - -// SubscriptionQuotaProperties describes the properties of the quota. -type SubscriptionQuotaProperties struct { - // MaxCount - READ-ONLY; The max permitted usage of this resource. - MaxCount *int32 `json:"maxCount,omitempty"` - // CurrentCount - READ-ONLY; The current usage of this resource. - CurrentCount *int32 `json:"currentCount,omitempty"` -} - -// MarshalJSON is the custom marshaler for SubscriptionQuotaProperties. -func (sq SubscriptionQuotaProperties) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - return json.Marshal(objectMap) -} - -// SubscriptionQuotasListResult result of the GetQuotas operation. It contains a list of quotas for the -// subscription in a particular region. -type SubscriptionQuotasListResult struct { - autorest.Response `json:"-"` - // Value - READ-ONLY; List of quotas for the subscription in a particular region. - Value *[]SubscriptionQuota `json:"value,omitempty"` -} - -// MarshalJSON is the custom marshaler for SubscriptionQuotasListResult. -func (sqlr SubscriptionQuotasListResult) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - return json.Marshal(objectMap) -} - -// TrackedResource the resource model definition for a ARM tracked top level resource -type TrackedResource struct { - // Tags - Resource tags. - Tags map[string]*string `json:"tags"` - // Location - The geo-location where the resource lives - Location *string `json:"location,omitempty"` - // ID - READ-ONLY; Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} - ID *string `json:"id,omitempty"` - // Name - READ-ONLY; The name of the resource - Name *string `json:"name,omitempty"` - // Type - READ-ONLY; The type of the resource. 
Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. - Type *string `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for TrackedResource. -func (tr TrackedResource) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - if tr.Tags != nil { - objectMap["tags"] = tr.Tags - } - if tr.Location != nil { - objectMap["location"] = tr.Location - } - return json.Marshal(objectMap) -} - -// Transformation a transformation object, containing all information associated with the named -// transformation. All transformations are contained under a streaming job. -type Transformation struct { - autorest.Response `json:"-"` - // TransformationProperties - The properties that are associated with a transformation. Required on PUT (CreateOrReplace) requests. - *TransformationProperties `json:"properties,omitempty"` - // ID - READ-ONLY; Resource Id - ID *string `json:"id,omitempty"` - // Name - Resource name - Name *string `json:"name,omitempty"` - // Type - READ-ONLY; Resource type - Type *string `json:"type,omitempty"` -} - -// MarshalJSON is the custom marshaler for Transformation. -func (t Transformation) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - if t.TransformationProperties != nil { - objectMap["properties"] = t.TransformationProperties - } - if t.Name != nil { - objectMap["name"] = t.Name - } - return json.Marshal(objectMap) -} - -// UnmarshalJSON is the custom unmarshaler for Transformation struct. -func (t *Transformation) UnmarshalJSON(body []byte) error { - var m map[string]*json.RawMessage - err := json.Unmarshal(body, &m) - if err != nil { - return err - } - for k, v := range m { - switch k { - case "properties": - if v != nil { - var transformationProperties TransformationProperties - err = json.Unmarshal(*v, &transformationProperties) - if err != nil { - return err - } - t.TransformationProperties = &transformationProperties - } - case "id": - if v != nil { - var ID string - err = json.Unmarshal(*v, &ID) - if err != nil { - return err - } - t.ID = &ID - } - case "name": - if v != nil { - var name string - err = json.Unmarshal(*v, &name) - if err != nil { - return err - } - t.Name = &name - } - case "type": - if v != nil { - var typeVar string - err = json.Unmarshal(*v, &typeVar) - if err != nil { - return err - } - t.Type = &typeVar - } - } - } - - return nil -} - -// TransformationProperties the properties that are associated with a transformation. -type TransformationProperties struct { - // StreamingUnits - Specifies the number of streaming units that the streaming job uses. - StreamingUnits *int32 `json:"streamingUnits,omitempty"` - // ValidStreamingUnits - Specifies the valid streaming units a streaming job can scale to. - ValidStreamingUnits *[]int32 `json:"validStreamingUnits,omitempty"` - // Query - Specifies the query that will be run in the streaming job. You can learn more about the Stream Analytics Query Language (SAQL) here: https://msdn.microsoft.com/library/azure/dn834998 . Required on PUT (CreateOrReplace) requests. - Query *string `json:"query,omitempty"` - // Etag - READ-ONLY; The current entity tag for the transformation. This is an opaque string. You can use it to detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. - Etag *string `json:"etag,omitempty"` -} - -// MarshalJSON is the custom marshaler for TransformationProperties. 
-func (tp TransformationProperties) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]interface{}) - if tp.StreamingUnits != nil { - objectMap["streamingUnits"] = tp.StreamingUnits - } - if tp.ValidStreamingUnits != nil { - objectMap["validStreamingUnits"] = tp.ValidStreamingUnits - } - if tp.Query != nil { - objectMap["query"] = tp.Query - } - return json.Marshal(objectMap) -} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/operations.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/operations.go deleted file mode 100644 index c15a5b378f98..000000000000 --- a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/operations.go +++ /dev/null @@ -1,140 +0,0 @@ -package streamanalytics - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. - -import ( - "context" - "github.com/Azure/go-autorest/autorest" - "github.com/Azure/go-autorest/autorest/azure" - "github.com/Azure/go-autorest/tracing" - "net/http" -) - -// OperationsClient is the stream Analytics Client -type OperationsClient struct { - BaseClient -} - -// NewOperationsClient creates an instance of the OperationsClient client. -func NewOperationsClient(subscriptionID string) OperationsClient { - return NewOperationsClientWithBaseURI(DefaultBaseURI, subscriptionID) -} - -// NewOperationsClientWithBaseURI creates an instance of the OperationsClient client using a custom endpoint. Use this -// when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). -func NewOperationsClientWithBaseURI(baseURI string, subscriptionID string) OperationsClient { - return OperationsClient{NewWithBaseURI(baseURI, subscriptionID)} -} - -// List lists all of the available Stream Analytics related operations. -func (client OperationsClient) List(ctx context.Context) (result OperationListResultPage, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/OperationsClient.List") - defer func() { - sc := -1 - if result.olr.Response.Response != nil { - sc = result.olr.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - result.fn = client.listNextResults - req, err := client.ListPreparer(ctx) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "List", nil, "Failure preparing request") - return - } - - resp, err := client.ListSender(req) - if err != nil { - result.olr.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "List", resp, "Failure sending request") - return - } - - result.olr, err = client.ListResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "List", resp, "Failure responding to request") - return - } - if result.olr.hasNextLink() && result.olr.IsEmpty() { - err = result.NextWithContext(ctx) - return - } - - return -} - -// ListPreparer prepares the List request. 
-func (client OperationsClient) ListPreparer(ctx context.Context) (*http.Request, error) { - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPath("/providers/Microsoft.StreamAnalytics/operations"), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// ListSender sends the List request. The method will close the -// http.Response Body if it receives an error. -func (client OperationsClient) ListSender(req *http.Request) (*http.Response, error) { - return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) -} - -// ListResponder handles the response to the List request. The method always -// closes the http.Response Body. -func (client OperationsClient) ListResponder(resp *http.Response) (result OperationListResult, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// listNextResults retrieves the next set of results, if any. -func (client OperationsClient) listNextResults(ctx context.Context, lastResults OperationListResult) (result OperationListResult, err error) { - req, err := lastResults.operationListResultPreparer(ctx) - if err != nil { - return result, autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "listNextResults", nil, "Failure preparing next results request") - } - if req == nil { - return - } - resp, err := client.ListSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - return result, autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "listNextResults", resp, "Failure sending next results request") - } - result, err = client.ListResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OperationsClient", "listNextResults", resp, "Failure responding to next results request") - } - return -} - -// ListComplete enumerates all values, automatically crossing page boundaries as required. -func (client OperationsClient) ListComplete(ctx context.Context) (result OperationListResultIterator, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/OperationsClient.List") - defer func() { - sc := -1 - if result.Response().Response.Response != nil { - sc = result.page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - result.page, err = client.List(ctx) - return -} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/outputs.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/outputs.go deleted file mode 100644 index 39500f66c3ab..000000000000 --- a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/outputs.go +++ /dev/null @@ -1,646 +0,0 @@ -package streamanalytics - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. 
- -import ( - "context" - "github.com/Azure/go-autorest/autorest" - "github.com/Azure/go-autorest/autorest/azure" - "github.com/Azure/go-autorest/autorest/validation" - "github.com/Azure/go-autorest/tracing" - "net/http" -) - -// OutputsClient is the stream Analytics Client -type OutputsClient struct { - BaseClient -} - -// NewOutputsClient creates an instance of the OutputsClient client. -func NewOutputsClient(subscriptionID string) OutputsClient { - return NewOutputsClientWithBaseURI(DefaultBaseURI, subscriptionID) -} - -// NewOutputsClientWithBaseURI creates an instance of the OutputsClient client using a custom endpoint. Use this when -// interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). -func NewOutputsClientWithBaseURI(baseURI string, subscriptionID string) OutputsClient { - return OutputsClient{NewWithBaseURI(baseURI, subscriptionID)} -} - -// CreateOrReplace creates an output or replaces an already existing output under an existing streaming job. -// Parameters: -// output - the definition of the output that will be used to create a new output or replace the existing one -// under the streaming job. -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// outputName - the name of the output. -// ifMatch - the ETag of the output. Omit this value to always overwrite the current output. Specify the -// last-seen ETag value to prevent accidentally overwriting concurrent changes. -// ifNoneMatch - set to '*' to allow a new output to be created, but to prevent updating an existing output. -// Other values will result in a 412 Pre-condition Failed response. -func (client OutputsClient) CreateOrReplace(ctx context.Context, output Output, resourceGroupName string, jobName string, outputName string, ifMatch string, ifNoneMatch string) (result Output, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.CreateOrReplace") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.OutputsClient", "CreateOrReplace", err.Error()) - } - - req, err := client.CreateOrReplacePreparer(ctx, output, resourceGroupName, jobName, outputName, ifMatch, ifNoneMatch) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "CreateOrReplace", nil, "Failure preparing request") - return - } - - resp, err := client.CreateOrReplaceSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "CreateOrReplace", resp, "Failure sending request") - return - } - - result, err = client.CreateOrReplaceResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "CreateOrReplace", 
resp, "Failure responding to request") - return - } - - return -} - -// CreateOrReplacePreparer prepares the CreateOrReplace request. -func (client OutputsClient) CreateOrReplacePreparer(ctx context.Context, output Output, resourceGroupName string, jobName string, outputName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "outputName": autorest.Encode("path", outputName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPut(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}", pathParameters), - autorest.WithJSON(output), - autorest.WithQueryParameters(queryParameters)) - if len(ifMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-Match", autorest.String(ifMatch))) - } - if len(ifNoneMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// CreateOrReplaceSender sends the CreateOrReplace request. The method will close the -// http.Response Body if it receives an error. -func (client OutputsClient) CreateOrReplaceSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// CreateOrReplaceResponder handles the response to the CreateOrReplace request. The method always -// closes the http.Response Body. -func (client OutputsClient) CreateOrReplaceResponder(resp *http.Response) (result Output, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// Delete deletes an output from the streaming job. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// outputName - the name of the output. 
-func (client OutputsClient) Delete(ctx context.Context, resourceGroupName string, jobName string, outputName string) (result autorest.Response, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.Delete") - defer func() { - sc := -1 - if result.Response != nil { - sc = result.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.OutputsClient", "Delete", err.Error()) - } - - req, err := client.DeletePreparer(ctx, resourceGroupName, jobName, outputName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Delete", nil, "Failure preparing request") - return - } - - resp, err := client.DeleteSender(req) - if err != nil { - result.Response = resp - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Delete", resp, "Failure sending request") - return - } - - result, err = client.DeleteResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Delete", resp, "Failure responding to request") - return - } - - return -} - -// DeletePreparer prepares the Delete request. -func (client OutputsClient) DeletePreparer(ctx context.Context, resourceGroupName string, jobName string, outputName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "outputName": autorest.Encode("path", outputName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsDelete(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// DeleteSender sends the Delete request. The method will close the -// http.Response Body if it receives an error. -func (client OutputsClient) DeleteSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// DeleteResponder handles the response to the Delete request. The method always -// closes the http.Response Body. -func (client OutputsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent), - autorest.ByClosing()) - result.Response = resp - return -} - -// Get gets details about the specified output. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. 
-// jobName - the name of the streaming job. -// outputName - the name of the output. -func (client OutputsClient) Get(ctx context.Context, resourceGroupName string, jobName string, outputName string) (result Output, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.Get") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.OutputsClient", "Get", err.Error()) - } - - req, err := client.GetPreparer(ctx, resourceGroupName, jobName, outputName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Get", nil, "Failure preparing request") - return - } - - resp, err := client.GetSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Get", resp, "Failure sending request") - return - } - - result, err = client.GetResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Get", resp, "Failure responding to request") - return - } - - return -} - -// GetPreparer prepares the Get request. -func (client OutputsClient) GetPreparer(ctx context.Context, resourceGroupName string, jobName string, outputName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "outputName": autorest.Encode("path", outputName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// GetSender sends the Get request. The method will close the -// http.Response Body if it receives an error. -func (client OutputsClient) GetSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// GetResponder handles the response to the Get request. The method always -// closes the http.Response Body. 
-func (client OutputsClient) GetResponder(resp *http.Response) (result Output, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// ListByStreamingJob lists all of the outputs under the specified streaming job. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// selectParameter - the $select OData query parameter. This is a comma-separated list of structural properties -// to include in the response, or "*" to include all properties. By default, all properties are returned except -// diagnostics. Currently only accepts '*' as a valid value. -func (client OutputsClient) ListByStreamingJob(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result OutputListResultPage, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.ListByStreamingJob") - defer func() { - sc := -1 - if result.olr.Response.Response != nil { - sc = result.olr.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.OutputsClient", "ListByStreamingJob", err.Error()) - } - - result.fn = client.listByStreamingJobNextResults - req, err := client.ListByStreamingJobPreparer(ctx, resourceGroupName, jobName, selectParameter) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "ListByStreamingJob", nil, "Failure preparing request") - return - } - - resp, err := client.ListByStreamingJobSender(req) - if err != nil { - result.olr.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "ListByStreamingJob", resp, "Failure sending request") - return - } - - result.olr, err = client.ListByStreamingJobResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "ListByStreamingJob", resp, "Failure responding to request") - return - } - if result.olr.hasNextLink() && result.olr.IsEmpty() { - err = result.NextWithContext(ctx) - return - } - - return -} - -// ListByStreamingJobPreparer prepares the ListByStreamingJob request. 
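From the caller's side, the paging machinery above reduces to the iterator returned by the ListByStreamingJobComplete helper defined further below. A minimal sketch, assuming hypothetical resource-group and job names; per the doc comment, "*" is currently the only accepted $select value and is what brings diagnostics back on each output:

package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics"
)

func listOutputsWithDiagnostics(ctx context.Context, client streamanalytics.OutputsClient) error {
	// "*" selects every property, including the diagnostics the service
	// omits from the default property set.
	iter, err := client.ListByStreamingJobComplete(ctx, "example-resources", "example-job", "*")
	if err != nil {
		return err
	}
	for iter.NotDone() {
		output := iter.Value()
		_ = output // inspect each output here
		// NextWithContext transparently fetches the next page when needed.
		if err := iter.NextWithContext(ctx); err != nil {
			return err
		}
	}
	return nil
}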
-func (client OutputsClient) ListByStreamingJobPreparer(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - if len(selectParameter) > 0 { - queryParameters["$select"] = autorest.Encode("query", selectParameter) - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// ListByStreamingJobSender sends the ListByStreamingJob request. The method will close the -// http.Response Body if it receives an error. -func (client OutputsClient) ListByStreamingJobSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// ListByStreamingJobResponder handles the response to the ListByStreamingJob request. The method always -// closes the http.Response Body. -func (client OutputsClient) ListByStreamingJobResponder(resp *http.Response) (result OutputListResult, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// listByStreamingJobNextResults retrieves the next set of results, if any. -func (client OutputsClient) listByStreamingJobNextResults(ctx context.Context, lastResults OutputListResult) (result OutputListResult, err error) { - req, err := lastResults.outputListResultPreparer(ctx) - if err != nil { - return result, autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "listByStreamingJobNextResults", nil, "Failure preparing next results request") - } - if req == nil { - return - } - resp, err := client.ListByStreamingJobSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - return result, autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "listByStreamingJobNextResults", resp, "Failure sending next results request") - } - result, err = client.ListByStreamingJobResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "listByStreamingJobNextResults", resp, "Failure responding to next results request") - } - return -} - -// ListByStreamingJobComplete enumerates all values, automatically crossing page boundaries as required. 
-func (client OutputsClient) ListByStreamingJobComplete(ctx context.Context, resourceGroupName string, jobName string, selectParameter string) (result OutputListResultIterator, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.ListByStreamingJob") - defer func() { - sc := -1 - if result.Response().Response.Response != nil { - sc = result.page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - result.page, err = client.ListByStreamingJob(ctx, resourceGroupName, jobName, selectParameter) - return -} - -// Test tests whether an output’s datasource is reachable and usable by the Azure Stream Analytics service. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// outputName - the name of the output. -// output - if the output specified does not already exist, this parameter must contain the full output -// definition intended to be tested. If the output specified already exists, this parameter can be left null to -// test the existing output as is or if specified, the properties specified will overwrite the corresponding -// properties in the existing output (exactly like a PATCH operation) and the resulting output will be tested. -func (client OutputsClient) Test(ctx context.Context, resourceGroupName string, jobName string, outputName string, output *Output) (result OutputsTestFuture, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.Test") - defer func() { - sc := -1 - if result.FutureAPI != nil && result.FutureAPI.Response() != nil { - sc = result.FutureAPI.Response().StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.OutputsClient", "Test", err.Error()) - } - - req, err := client.TestPreparer(ctx, resourceGroupName, jobName, outputName, output) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Test", nil, "Failure preparing request") - return - } - - result, err = client.TestSender(req) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Test", result.Response(), "Failure sending request") - return - } - - return -} - -// TestPreparer prepares the Test request. 
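The nil-vs-non-nil contract described above makes the simplest call the one that tests the stored output as-is. A minimal sketch with hypothetical names; WaitForCompletionRef is the shared azure.Future helper, and future.Result is the hook the sender below wires up:

package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics"
)

func testExistingOutput(ctx context.Context, client streamanalytics.OutputsClient) error {
	// nil means: test the output definition exactly as stored; a non-nil
	// *Output would be merged onto it PATCH-style first.
	future, err := client.Test(ctx, "example-resources", "example-job", "example-output", nil)
	if err != nil {
		return err
	}
	// Block until the long-running test reaches a terminal state.
	if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
		return err
	}
	status, err := future.Result(client)
	if err != nil {
		return err
	}
	_ = status // ResourceTestStatus reports whether the datasource was usable
	return nil
}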
-func (client OutputsClient) TestPreparer(ctx context.Context, resourceGroupName string, jobName string, outputName string, output *Output) (*http.Request, error) {
-	pathParameters := map[string]interface{}{
-		"jobName":           autorest.Encode("path", jobName),
-		"outputName":        autorest.Encode("path", outputName),
-		"resourceGroupName": autorest.Encode("path", resourceGroupName),
-		"subscriptionId":    autorest.Encode("path", client.SubscriptionID),
-	}
-
-	const APIVersion = "2020-03-01"
-	queryParameters := map[string]interface{}{
-		"api-version": APIVersion,
-	}
-
-	preparer := autorest.CreatePreparer(
-		autorest.AsContentType("application/json; charset=utf-8"),
-		autorest.AsPost(),
-		autorest.WithBaseURL(client.BaseURI),
-		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test", pathParameters),
-		autorest.WithQueryParameters(queryParameters))
-	if output != nil {
-		preparer = autorest.DecoratePreparer(preparer,
-			autorest.WithJSON(output))
-	}
-	return preparer.Prepare((&http.Request{}).WithContext(ctx))
-}
-
-// TestSender sends the Test request. The method will close the
-// http.Response Body if it receives an error.
-func (client OutputsClient) TestSender(req *http.Request) (future OutputsTestFuture, err error) {
-	var resp *http.Response
-	future.FutureAPI = &azure.Future{}
-	resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client))
-	if err != nil {
-		return
-	}
-	var azf azure.Future
-	azf, err = azure.NewFutureFromResponse(resp)
-	future.FutureAPI = &azf
-	future.Result = future.result
-	return
-}
-
-// TestResponder handles the response to the Test request. The method always
-// closes the http.Response Body.
-func (client OutputsClient) TestResponder(resp *http.Response) (result ResourceTestStatus, err error) {
-	err = autorest.Respond(
-		resp,
-		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
-		autorest.ByUnmarshallingJSON(&result),
-		autorest.ByClosing())
-	result.Response = autorest.Response{Response: resp}
-	return
-}
-
-// Update updates an existing output under an existing streaming job. This can be used to partially update (ie. update
-// one or two properties) an output without affecting the rest of the job or output definition.
-// Parameters:
-// output - an Output object. The properties specified here will overwrite the corresponding properties in the
-// existing output (ie. Those properties will be updated). Any properties that are set to null here will mean
-// that the corresponding property in the existing output will remain the same and not change as a result of
-// this PATCH operation.
-// resourceGroupName - the name of the resource group. The name is case insensitive.
-// jobName - the name of the streaming job.
-// outputName - the name of the output.
-// ifMatch - the ETag of the output. Omit this value to always overwrite the current output. Specify the
-// last-seen ETag value to prevent accidentally overwriting concurrent changes.
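The If-Match parameter documented above is what turns this PATCH into an optimistic-concurrency write. A minimal sketch, with hypothetical names and the sparse payload elided:

package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics"
)

func patchOutput(ctx context.Context, client streamanalytics.OutputsClient, etag string) error {
	// Only the properties set on this payload change; everything else on the
	// existing output is preserved, per the PATCH contract above.
	payload := streamanalytics.Output{
		// OutputProperties elided: populate just the fields being changed.
	}
	// Passing the last-seen ETag makes the service reject the write if the
	// output changed concurrently; pass "" to overwrite unconditionally.
	_, err := client.Update(ctx, payload, "example-resources", "example-job", "example-output", etag)
	return err
}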
-func (client OutputsClient) Update(ctx context.Context, output Output, resourceGroupName string, jobName string, outputName string, ifMatch string) (result Output, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/OutputsClient.Update") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.OutputsClient", "Update", err.Error()) - } - - req, err := client.UpdatePreparer(ctx, output, resourceGroupName, jobName, outputName, ifMatch) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Update", nil, "Failure preparing request") - return - } - - resp, err := client.UpdateSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Update", resp, "Failure sending request") - return - } - - result, err = client.UpdateResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.OutputsClient", "Update", resp, "Failure responding to request") - return - } - - return -} - -// UpdatePreparer prepares the Update request. -func (client OutputsClient) UpdatePreparer(ctx context.Context, output Output, resourceGroupName string, jobName string, outputName string, ifMatch string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "outputName": autorest.Encode("path", outputName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPatch(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}", pathParameters), - autorest.WithJSON(output), - autorest.WithQueryParameters(queryParameters)) - if len(ifMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-Match", autorest.String(ifMatch))) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// UpdateSender sends the Update request. The method will close the -// http.Response Body if it receives an error. -func (client OutputsClient) UpdateSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// UpdateResponder handles the response to the Update request. The method always -// closes the http.Response Body. 
-func (client OutputsClient) UpdateResponder(resp *http.Response) (result Output, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/privateendpoints.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/privateendpoints.go deleted file mode 100644 index 2bf145f36507..000000000000 --- a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/privateendpoints.go +++ /dev/null @@ -1,444 +0,0 @@ -package streamanalytics - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. - -import ( - "context" - "github.com/Azure/go-autorest/autorest" - "github.com/Azure/go-autorest/autorest/azure" - "github.com/Azure/go-autorest/autorest/validation" - "github.com/Azure/go-autorest/tracing" - "net/http" -) - -// PrivateEndpointsClient is the stream Analytics Client -type PrivateEndpointsClient struct { - BaseClient -} - -// NewPrivateEndpointsClient creates an instance of the PrivateEndpointsClient client. -func NewPrivateEndpointsClient(subscriptionID string) PrivateEndpointsClient { - return NewPrivateEndpointsClientWithBaseURI(DefaultBaseURI, subscriptionID) -} - -// NewPrivateEndpointsClientWithBaseURI creates an instance of the PrivateEndpointsClient client using a custom -// endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure -// stack). -func NewPrivateEndpointsClientWithBaseURI(baseURI string, subscriptionID string) PrivateEndpointsClient { - return PrivateEndpointsClient{NewWithBaseURI(baseURI, subscriptionID)} -} - -// CreateOrUpdate creates a Stream Analytics Private Endpoint or replaces an already existing Private Endpoint. -// Parameters: -// privateEndpoint - the definition of the private endpoint that will be used to create a new cluster or -// replace the existing one. -// resourceGroupName - the name of the resource group. The name is case insensitive. -// clusterName - the name of the cluster. -// privateEndpointName - the name of the private endpoint. -// ifMatch - the ETag of the resource. Omit this value to always overwrite the current record set. Specify the -// last-seen ETag value to prevent accidentally overwriting concurrent changes. -// ifNoneMatch - set to '*' to allow a new resource to be created, but to prevent updating an existing record -// set. Other values will result in a 412 Pre-condition Failed response. 
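A minimal sketch of the create-only variant of the call documented above: an empty ifMatch sets no precondition, while an ifNoneMatch of "*" refuses to replace an existing endpoint (hypothetical names, payload elided):

package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics"
)

func createPrivateEndpoint(ctx context.Context, client streamanalytics.PrivateEndpointsClient) error {
	pe := streamanalytics.PrivateEndpoint{
		// Connection properties elided; the preparer below strips Etag from
		// the payload in any case.
	}
	// "" = no If-Match precondition; "*" = strict create, 412 if it exists.
	_, err := client.CreateOrUpdate(ctx, pe, "example-resources", "example-cluster", "example-endpoint", "", "*")
	return err
}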
-func (client PrivateEndpointsClient) CreateOrUpdate(ctx context.Context, privateEndpoint PrivateEndpoint, resourceGroupName string, clusterName string, privateEndpointName string, ifMatch string, ifNoneMatch string) (result PrivateEndpoint, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/PrivateEndpointsClient.CreateOrUpdate") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.PrivateEndpointsClient", "CreateOrUpdate", err.Error()) - } - - req, err := client.CreateOrUpdatePreparer(ctx, privateEndpoint, resourceGroupName, clusterName, privateEndpointName, ifMatch, ifNoneMatch) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "CreateOrUpdate", nil, "Failure preparing request") - return - } - - resp, err := client.CreateOrUpdateSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "CreateOrUpdate", resp, "Failure sending request") - return - } - - result, err = client.CreateOrUpdateResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "CreateOrUpdate", resp, "Failure responding to request") - return - } - - return -} - -// CreateOrUpdatePreparer prepares the CreateOrUpdate request. 
-func (client PrivateEndpointsClient) CreateOrUpdatePreparer(ctx context.Context, privateEndpoint PrivateEndpoint, resourceGroupName string, clusterName string, privateEndpointName string, ifMatch string, ifNoneMatch string) (*http.Request, error) {
-	pathParameters := map[string]interface{}{
-		"clusterName":         autorest.Encode("path", clusterName),
-		"privateEndpointName": autorest.Encode("path", privateEndpointName),
-		"resourceGroupName":   autorest.Encode("path", resourceGroupName),
-		"subscriptionId":      autorest.Encode("path", client.SubscriptionID),
-	}
-
-	const APIVersion = "2020-03-01"
-	queryParameters := map[string]interface{}{
-		"api-version": APIVersion,
-	}
-
-	privateEndpoint.Etag = nil
-	preparer := autorest.CreatePreparer(
-		autorest.AsContentType("application/json; charset=utf-8"),
-		autorest.AsPut(),
-		autorest.WithBaseURL(client.BaseURI),
-		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}", pathParameters),
-		autorest.WithJSON(privateEndpoint),
-		autorest.WithQueryParameters(queryParameters))
-	if len(ifMatch) > 0 {
-		preparer = autorest.DecoratePreparer(preparer,
-			autorest.WithHeader("If-Match", autorest.String(ifMatch)))
-	}
-	if len(ifNoneMatch) > 0 {
-		preparer = autorest.DecoratePreparer(preparer,
-			autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch)))
-	}
-	return preparer.Prepare((&http.Request{}).WithContext(ctx))
-}
-
-// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
-// http.Response Body if it receives an error.
-func (client PrivateEndpointsClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) {
-	return client.Send(req, azure.DoRetryWithRegistration(client.Client))
-}
-
-// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
-// closes the http.Response Body.
-func (client PrivateEndpointsClient) CreateOrUpdateResponder(resp *http.Response) (result PrivateEndpoint, err error) {
-	err = autorest.Respond(
-		resp,
-		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
-		autorest.ByUnmarshallingJSON(&result),
-		autorest.ByClosing())
-	result.Response = autorest.Response{Response: resp}
-	return
-}
-
-// Delete deletes the specified private endpoint.
-// Parameters:
-// resourceGroupName - the name of the resource group. The name is case insensitive.
-// clusterName - the name of the cluster.
-// privateEndpointName - the name of the private endpoint.
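Delete is a long-running operation surfaced as a future, as the sender below shows. A minimal sketch of the usual blocking pattern (hypothetical names):

package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics"
)

func deletePrivateEndpoint(ctx context.Context, client streamanalytics.PrivateEndpointsClient) error {
	future, err := client.Delete(ctx, "example-resources", "example-cluster", "example-endpoint")
	if err != nil {
		return err
	}
	// Poll until the operation reaches a terminal state, reusing the
	// client's retry and authorization configuration.
	return future.WaitForCompletionRef(ctx, client.Client)
}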
-func (client PrivateEndpointsClient) Delete(ctx context.Context, resourceGroupName string, clusterName string, privateEndpointName string) (result PrivateEndpointsDeleteFuture, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/PrivateEndpointsClient.Delete") - defer func() { - sc := -1 - if result.FutureAPI != nil && result.FutureAPI.Response() != nil { - sc = result.FutureAPI.Response().StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.PrivateEndpointsClient", "Delete", err.Error()) - } - - req, err := client.DeletePreparer(ctx, resourceGroupName, clusterName, privateEndpointName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "Delete", nil, "Failure preparing request") - return - } - - result, err = client.DeleteSender(req) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "Delete", result.Response(), "Failure sending request") - return - } - - return -} - -// DeletePreparer prepares the Delete request. -func (client PrivateEndpointsClient) DeletePreparer(ctx context.Context, resourceGroupName string, clusterName string, privateEndpointName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "clusterName": autorest.Encode("path", clusterName), - "privateEndpointName": autorest.Encode("path", privateEndpointName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsDelete(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// DeleteSender sends the Delete request. The method will close the -// http.Response Body if it receives an error. -func (client PrivateEndpointsClient) DeleteSender(req *http.Request) (future PrivateEndpointsDeleteFuture, err error) { - var resp *http.Response - future.FutureAPI = &azure.Future{} - resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) - if err != nil { - return - } - var azf azure.Future - azf, err = azure.NewFutureFromResponse(resp) - future.FutureAPI = &azf - future.Result = future.result - return -} - -// DeleteResponder handles the response to the Delete request. The method always -// closes the http.Response Body. 
-func (client PrivateEndpointsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent), - autorest.ByClosing()) - result.Response = resp - return -} - -// Get gets information about the specified Private Endpoint. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// clusterName - the name of the cluster. -// privateEndpointName - the name of the private endpoint. -func (client PrivateEndpointsClient) Get(ctx context.Context, resourceGroupName string, clusterName string, privateEndpointName string) (result PrivateEndpoint, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/PrivateEndpointsClient.Get") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.PrivateEndpointsClient", "Get", err.Error()) - } - - req, err := client.GetPreparer(ctx, resourceGroupName, clusterName, privateEndpointName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "Get", nil, "Failure preparing request") - return - } - - resp, err := client.GetSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "Get", resp, "Failure sending request") - return - } - - result, err = client.GetResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "Get", resp, "Failure responding to request") - return - } - - return -} - -// GetPreparer prepares the Get request. -func (client PrivateEndpointsClient) GetPreparer(ctx context.Context, resourceGroupName string, clusterName string, privateEndpointName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "clusterName": autorest.Encode("path", clusterName), - "privateEndpointName": autorest.Encode("path", privateEndpointName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// GetSender sends the Get request. 
The method will close the -// http.Response Body if it receives an error. -func (client PrivateEndpointsClient) GetSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// GetResponder handles the response to the Get request. The method always -// closes the http.Response Body. -func (client PrivateEndpointsClient) GetResponder(resp *http.Response) (result PrivateEndpoint, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// ListByCluster lists the private endpoints in the cluster. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// clusterName - the name of the cluster. -func (client PrivateEndpointsClient) ListByCluster(ctx context.Context, resourceGroupName string, clusterName string) (result PrivateEndpointListResultPage, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/PrivateEndpointsClient.ListByCluster") - defer func() { - sc := -1 - if result.pelr.Response.Response != nil { - sc = result.pelr.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.PrivateEndpointsClient", "ListByCluster", err.Error()) - } - - result.fn = client.listByClusterNextResults - req, err := client.ListByClusterPreparer(ctx, resourceGroupName, clusterName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "ListByCluster", nil, "Failure preparing request") - return - } - - resp, err := client.ListByClusterSender(req) - if err != nil { - result.pelr.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "ListByCluster", resp, "Failure sending request") - return - } - - result.pelr, err = client.ListByClusterResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "ListByCluster", resp, "Failure responding to request") - return - } - if result.pelr.hasNextLink() && result.pelr.IsEmpty() { - err = result.NextWithContext(ctx) - return - } - - return -} - -// ListByClusterPreparer prepares the ListByCluster request. 
-func (client PrivateEndpointsClient) ListByClusterPreparer(ctx context.Context, resourceGroupName string, clusterName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "clusterName": autorest.Encode("path", clusterName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// ListByClusterSender sends the ListByCluster request. The method will close the -// http.Response Body if it receives an error. -func (client PrivateEndpointsClient) ListByClusterSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// ListByClusterResponder handles the response to the ListByCluster request. The method always -// closes the http.Response Body. -func (client PrivateEndpointsClient) ListByClusterResponder(resp *http.Response) (result PrivateEndpointListResult, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// listByClusterNextResults retrieves the next set of results, if any. -func (client PrivateEndpointsClient) listByClusterNextResults(ctx context.Context, lastResults PrivateEndpointListResult) (result PrivateEndpointListResult, err error) { - req, err := lastResults.privateEndpointListResultPreparer(ctx) - if err != nil { - return result, autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "listByClusterNextResults", nil, "Failure preparing next results request") - } - if req == nil { - return - } - resp, err := client.ListByClusterSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - return result, autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "listByClusterNextResults", resp, "Failure sending next results request") - } - result, err = client.ListByClusterResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.PrivateEndpointsClient", "listByClusterNextResults", resp, "Failure responding to next results request") - } - return -} - -// ListByClusterComplete enumerates all values, automatically crossing page boundaries as required. 
-func (client PrivateEndpointsClient) ListByClusterComplete(ctx context.Context, resourceGroupName string, clusterName string) (result PrivateEndpointListResultIterator, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/PrivateEndpointsClient.ListByCluster") - defer func() { - sc := -1 - if result.Response().Response.Response != nil { - sc = result.page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - result.page, err = client.ListByCluster(ctx, resourceGroupName, clusterName) - return -} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/streamingjobs.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/streamingjobs.go deleted file mode 100644 index 4d8ffbfb9c9b..000000000000 --- a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/streamingjobs.go +++ /dev/null @@ -1,954 +0,0 @@ -package streamanalytics - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. - -import ( - "context" - "github.com/Azure/go-autorest/autorest" - "github.com/Azure/go-autorest/autorest/azure" - "github.com/Azure/go-autorest/autorest/validation" - "github.com/Azure/go-autorest/tracing" - "net/http" -) - -// StreamingJobsClient is the stream Analytics Client -type StreamingJobsClient struct { - BaseClient -} - -// NewStreamingJobsClient creates an instance of the StreamingJobsClient client. -func NewStreamingJobsClient(subscriptionID string) StreamingJobsClient { - return NewStreamingJobsClientWithBaseURI(DefaultBaseURI, subscriptionID) -} - -// NewStreamingJobsClientWithBaseURI creates an instance of the StreamingJobsClient client using a custom endpoint. -// Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). -func NewStreamingJobsClientWithBaseURI(baseURI string, subscriptionID string) StreamingJobsClient { - return StreamingJobsClient{NewWithBaseURI(baseURI, subscriptionID)} -} - -// CreateOrReplace creates a streaming job or replaces an already existing streaming job. -// Parameters: -// streamingJob - the definition of the streaming job that will be used to create a new streaming job or -// replace the existing one. -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// ifMatch - the ETag of the streaming job. Omit this value to always overwrite the current record set. Specify -// the last-seen ETag value to prevent accidentally overwriting concurrent changes. -// ifNoneMatch - set to '*' to allow a new streaming job to be created, but to prevent updating an existing -// record set. Other values will result in a 412 Pre-condition Failed response. 
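A minimal sketch combining the ETag contract described above with the future that CreateOrReplace returns (hypothetical names, job definition elided):

package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics"
)

func createStreamingJob(ctx context.Context, client streamanalytics.StreamingJobsClient) error {
	job := streamanalytics.StreamingJob{
		// Location, Sku and job properties elided.
	}
	// ifNoneMatch "*" allows creation but refuses to replace an existing job
	// with a 412 Pre-condition Failed, exactly as documented above.
	future, err := client.CreateOrReplace(ctx, job, "example-resources", "example-job", "", "*")
	if err != nil {
		return err
	}
	return future.WaitForCompletionRef(ctx, client.Client)
}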
-func (client StreamingJobsClient) CreateOrReplace(ctx context.Context, streamingJob StreamingJob, resourceGroupName string, jobName string, ifMatch string, ifNoneMatch string) (result StreamingJobsCreateOrReplaceFuture, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.CreateOrReplace") - defer func() { - sc := -1 - if result.FutureAPI != nil && result.FutureAPI.Response() != nil { - sc = result.FutureAPI.Response().StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.StreamingJobsClient", "CreateOrReplace", err.Error()) - } - - req, err := client.CreateOrReplacePreparer(ctx, streamingJob, resourceGroupName, jobName, ifMatch, ifNoneMatch) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "CreateOrReplace", nil, "Failure preparing request") - return - } - - result, err = client.CreateOrReplaceSender(req) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "CreateOrReplace", result.Response(), "Failure sending request") - return - } - - return -} - -// CreateOrReplacePreparer prepares the CreateOrReplace request. -func (client StreamingJobsClient) CreateOrReplacePreparer(ctx context.Context, streamingJob StreamingJob, resourceGroupName string, jobName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPut(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}", pathParameters), - autorest.WithJSON(streamingJob), - autorest.WithQueryParameters(queryParameters)) - if len(ifMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-Match", autorest.String(ifMatch))) - } - if len(ifNoneMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// CreateOrReplaceSender sends the CreateOrReplace request. The method will close the -// http.Response Body if it receives an error. 
-func (client StreamingJobsClient) CreateOrReplaceSender(req *http.Request) (future StreamingJobsCreateOrReplaceFuture, err error) { - var resp *http.Response - future.FutureAPI = &azure.Future{} - resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) - if err != nil { - return - } - var azf azure.Future - azf, err = azure.NewFutureFromResponse(resp) - future.FutureAPI = &azf - future.Result = future.result - return -} - -// CreateOrReplaceResponder handles the response to the CreateOrReplace request. The method always -// closes the http.Response Body. -func (client StreamingJobsClient) CreateOrReplaceResponder(resp *http.Response) (result StreamingJob, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// Delete deletes a streaming job. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -func (client StreamingJobsClient) Delete(ctx context.Context, resourceGroupName string, jobName string) (result StreamingJobsDeleteFuture, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Delete") - defer func() { - sc := -1 - if result.FutureAPI != nil && result.FutureAPI.Response() != nil { - sc = result.FutureAPI.Response().StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.StreamingJobsClient", "Delete", err.Error()) - } - - req, err := client.DeletePreparer(ctx, resourceGroupName, jobName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Delete", nil, "Failure preparing request") - return - } - - result, err = client.DeleteSender(req) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Delete", result.Response(), "Failure sending request") - return - } - - return -} - -// DeletePreparer prepares the Delete request. 
-func (client StreamingJobsClient) DeletePreparer(ctx context.Context, resourceGroupName string, jobName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsDelete(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// DeleteSender sends the Delete request. The method will close the -// http.Response Body if it receives an error. -func (client StreamingJobsClient) DeleteSender(req *http.Request) (future StreamingJobsDeleteFuture, err error) { - var resp *http.Response - future.FutureAPI = &azure.Future{} - resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) - if err != nil { - return - } - var azf azure.Future - azf, err = azure.NewFutureFromResponse(resp) - future.FutureAPI = &azf - future.Result = future.result - return -} - -// DeleteResponder handles the response to the Delete request. The method always -// closes the http.Response Body. -func (client StreamingJobsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent), - autorest.ByClosing()) - result.Response = resp - return -} - -// Get gets details about the specified streaming job. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// expand - the $expand OData query parameter. This is a comma-separated list of additional streaming job -// properties to include in the response, beyond the default set returned when this parameter is absent. The -// default set is all streaming job properties other than 'inputs', 'transformation', 'outputs', and -// 'functions'. 
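Because the default property set excludes the nested resources, reading a complete job definition means opting in through $expand. A minimal sketch (hypothetical names):

package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics"
)

func getFullJobDefinition(ctx context.Context, client streamanalytics.StreamingJobsClient) (streamanalytics.StreamingJob, error) {
	// Without $expand the response omits inputs, transformation, outputs and
	// functions; listing them here returns the whole definition in one call.
	return client.Get(ctx, "example-resources", "example-job", "inputs,transformation,outputs,functions")
}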
-func (client StreamingJobsClient) Get(ctx context.Context, resourceGroupName string, jobName string, expand string) (result StreamingJob, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Get") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.StreamingJobsClient", "Get", err.Error()) - } - - req, err := client.GetPreparer(ctx, resourceGroupName, jobName, expand) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Get", nil, "Failure preparing request") - return - } - - resp, err := client.GetSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Get", resp, "Failure sending request") - return - } - - result, err = client.GetResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Get", resp, "Failure responding to request") - return - } - - return -} - -// GetPreparer prepares the Get request. -func (client StreamingJobsClient) GetPreparer(ctx context.Context, resourceGroupName string, jobName string, expand string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - if len(expand) > 0 { - queryParameters["$expand"] = autorest.Encode("query", expand) - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// GetSender sends the Get request. The method will close the -// http.Response Body if it receives an error. -func (client StreamingJobsClient) GetSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// GetResponder handles the response to the Get request. The method always -// closes the http.Response Body. -func (client StreamingJobsClient) GetResponder(resp *http.Response) (result StreamingJob, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// List lists all of the streaming jobs in the given subscription. 
-// Parameters: -// expand - the $expand OData query parameter. This is a comma-separated list of additional streaming job -// properties to include in the response, beyond the default set returned when this parameter is absent. The -// default set is all streaming job properties other than 'inputs', 'transformation', 'outputs', and -// 'functions'. -func (client StreamingJobsClient) List(ctx context.Context, expand string) (result StreamingJobListResultPage, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.List") - defer func() { - sc := -1 - if result.sjlr.Response.Response != nil { - sc = result.sjlr.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.StreamingJobsClient", "List", err.Error()) - } - - result.fn = client.listNextResults - req, err := client.ListPreparer(ctx, expand) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "List", nil, "Failure preparing request") - return - } - - resp, err := client.ListSender(req) - if err != nil { - result.sjlr.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "List", resp, "Failure sending request") - return - } - - result.sjlr, err = client.ListResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "List", resp, "Failure responding to request") - return - } - if result.sjlr.hasNextLink() && result.sjlr.IsEmpty() { - err = result.NextWithContext(ctx) - return - } - - return -} - -// ListPreparer prepares the List request. -func (client StreamingJobsClient) ListPreparer(ctx context.Context, expand string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - if len(expand) > 0 { - queryParameters["$expand"] = autorest.Encode("query", expand) - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/streamingjobs", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// ListSender sends the List request. The method will close the -// http.Response Body if it receives an error. -func (client StreamingJobsClient) ListSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// ListResponder handles the response to the List request. The method always -// closes the http.Response Body. -func (client StreamingJobsClient) ListResponder(resp *http.Response) (result StreamingJobListResult, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// listNextResults retrieves the next set of results, if any. 
-func (client StreamingJobsClient) listNextResults(ctx context.Context, lastResults StreamingJobListResult) (result StreamingJobListResult, err error) { - req, err := lastResults.streamingJobListResultPreparer(ctx) - if err != nil { - return result, autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listNextResults", nil, "Failure preparing next results request") - } - if req == nil { - return - } - resp, err := client.ListSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - return result, autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listNextResults", resp, "Failure sending next results request") - } - result, err = client.ListResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listNextResults", resp, "Failure responding to next results request") - } - return -} - -// ListComplete enumerates all values, automatically crossing page boundaries as required. -func (client StreamingJobsClient) ListComplete(ctx context.Context, expand string) (result StreamingJobListResultIterator, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.List") - defer func() { - sc := -1 - if result.Response().Response.Response != nil { - sc = result.page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - result.page, err = client.List(ctx, expand) - return -} - -// ListByResourceGroup lists all of the streaming jobs in the specified resource group. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// expand - the $expand OData query parameter. This is a comma-separated list of additional streaming job -// properties to include in the response, beyond the default set returned when this parameter is absent. The -// default set is all streaming job properties other than 'inputs', 'transformation', 'outputs', and -// 'functions'. 
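From the caller's side, the listNextResults plumbing above drives a page type; a minimal sketch of walking every job in the subscription (no $expand):

package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics"
)

func walkAllJobs(ctx context.Context, client streamanalytics.StreamingJobsClient) error {
	page, err := client.List(ctx, "")
	if err != nil {
		return err
	}
	for page.NotDone() {
		for _, job := range page.Values() {
			_ = job // inspect each streaming job here
		}
		// NextWithContext invokes listNextResults to fetch the next page.
		if err := page.NextWithContext(ctx); err != nil {
			return err
		}
	}
	return nil
}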
-func (client StreamingJobsClient) ListByResourceGroup(ctx context.Context, resourceGroupName string, expand string) (result StreamingJobListResultPage, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.ListByResourceGroup") - defer func() { - sc := -1 - if result.sjlr.Response.Response != nil { - sc = result.sjlr.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.StreamingJobsClient", "ListByResourceGroup", err.Error()) - } - - result.fn = client.listByResourceGroupNextResults - req, err := client.ListByResourceGroupPreparer(ctx, resourceGroupName, expand) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "ListByResourceGroup", nil, "Failure preparing request") - return - } - - resp, err := client.ListByResourceGroupSender(req) - if err != nil { - result.sjlr.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "ListByResourceGroup", resp, "Failure sending request") - return - } - - result.sjlr, err = client.ListByResourceGroupResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "ListByResourceGroup", resp, "Failure responding to request") - return - } - if result.sjlr.hasNextLink() && result.sjlr.IsEmpty() { - err = result.NextWithContext(ctx) - return - } - - return -} - -// ListByResourceGroupPreparer prepares the ListByResourceGroup request. -func (client StreamingJobsClient) ListByResourceGroupPreparer(ctx context.Context, resourceGroupName string, expand string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - if len(expand) > 0 { - queryParameters["$expand"] = autorest.Encode("query", expand) - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// ListByResourceGroupSender sends the ListByResourceGroup request. The method will close the -// http.Response Body if it receives an error. -func (client StreamingJobsClient) ListByResourceGroupSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// ListByResourceGroupResponder handles the response to the ListByResourceGroup request. The method always -// closes the http.Response Body. 
-func (client StreamingJobsClient) ListByResourceGroupResponder(resp *http.Response) (result StreamingJobListResult, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// listByResourceGroupNextResults retrieves the next set of results, if any. -func (client StreamingJobsClient) listByResourceGroupNextResults(ctx context.Context, lastResults StreamingJobListResult) (result StreamingJobListResult, err error) { - req, err := lastResults.streamingJobListResultPreparer(ctx) - if err != nil { - return result, autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listByResourceGroupNextResults", nil, "Failure preparing next results request") - } - if req == nil { - return - } - resp, err := client.ListByResourceGroupSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - return result, autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listByResourceGroupNextResults", resp, "Failure sending next results request") - } - result, err = client.ListByResourceGroupResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "listByResourceGroupNextResults", resp, "Failure responding to next results request") - } - return -} - -// ListByResourceGroupComplete enumerates all values, automatically crossing page boundaries as required. -func (client StreamingJobsClient) ListByResourceGroupComplete(ctx context.Context, resourceGroupName string, expand string) (result StreamingJobListResultIterator, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.ListByResourceGroup") - defer func() { - sc := -1 - if result.Response().Response.Response != nil { - sc = result.page.Response().Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - result.page, err = client.ListByResourceGroup(ctx, resourceGroupName, expand) - return -} - -// Scale scales a streaming job when the job is running. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// scaleJobParameters - parameters applicable to a scale streaming job operation. 
-func (client StreamingJobsClient) Scale(ctx context.Context, resourceGroupName string, jobName string, scaleJobParameters *ScaleStreamingJobParameters) (result StreamingJobsScaleFuture, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Scale") - defer func() { - sc := -1 - if result.FutureAPI != nil && result.FutureAPI.Response() != nil { - sc = result.FutureAPI.Response().StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.StreamingJobsClient", "Scale", err.Error()) - } - - req, err := client.ScalePreparer(ctx, resourceGroupName, jobName, scaleJobParameters) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Scale", nil, "Failure preparing request") - return - } - - result, err = client.ScaleSender(req) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Scale", result.Response(), "Failure sending request") - return - } - - return -} - -// ScalePreparer prepares the Scale request. -func (client StreamingJobsClient) ScalePreparer(ctx context.Context, resourceGroupName string, jobName string, scaleJobParameters *ScaleStreamingJobParameters) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPost(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale", pathParameters), - autorest.WithQueryParameters(queryParameters)) - if scaleJobParameters != nil { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithJSON(scaleJobParameters)) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// ScaleSender sends the Scale request. The method will close the -// http.Response Body if it receives an error. -func (client StreamingJobsClient) ScaleSender(req *http.Request) (future StreamingJobsScaleFuture, err error) { - var resp *http.Response - future.FutureAPI = &azure.Future{} - resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) - if err != nil { - return - } - var azf azure.Future - azf, err = azure.NewFutureFromResponse(resp) - future.FutureAPI = &azf - future.Result = future.result - return -} - -// ScaleResponder handles the response to the Scale request. The method always -// closes the http.Response Body. 
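Scale returns an autorest future rather than a final result, so callers block on completion explicitly. A sketch of that pattern; the six-unit payload is illustrative and utils.Int32 is assumed to be the provider's int32-pointer helper:

func scaleStreamingJob(ctx context.Context, client streamanalytics.StreamingJobsClient, resourceGroup, jobName string) error {
	future, err := client.Scale(ctx, resourceGroup, jobName, &streamanalytics.ScaleStreamingJobParameters{
		StreamingUnits: utils.Int32(6), // illustrative target SU count
	})
	if err != nil {
		return err
	}
	// Block until the long-running operation behind the future finishes.
	return future.WaitForCompletionRef(ctx, client.Client)
}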
-func (client StreamingJobsClient) ScaleResponder(resp *http.Response) (result autorest.Response, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), - autorest.ByClosing()) - result.Response = resp - return -} - -// Start starts a streaming job. Once a job is started it will start processing input events and produce output. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// startJobParameters - parameters applicable to a start streaming job operation. -func (client StreamingJobsClient) Start(ctx context.Context, resourceGroupName string, jobName string, startJobParameters *StartStreamingJobParameters) (result StreamingJobsStartFuture, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Start") - defer func() { - sc := -1 - if result.FutureAPI != nil && result.FutureAPI.Response() != nil { - sc = result.FutureAPI.Response().StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.StreamingJobsClient", "Start", err.Error()) - } - - req, err := client.StartPreparer(ctx, resourceGroupName, jobName, startJobParameters) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Start", nil, "Failure preparing request") - return - } - - result, err = client.StartSender(req) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Start", result.Response(), "Failure sending request") - return - } - - return -} - -// StartPreparer prepares the Start request. -func (client StreamingJobsClient) StartPreparer(ctx context.Context, resourceGroupName string, jobName string, startJobParameters *StartStreamingJobParameters) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPost(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start", pathParameters), - autorest.WithQueryParameters(queryParameters)) - if startJobParameters != nil { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithJSON(startJobParameters)) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// StartSender sends the Start request. The method will close the -// http.Response Body if it receives an error. 
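Start follows the same future-based pattern, with optional parameters controlling where output resumes. A sketch assuming the documented "LastOutputEventTime" mode; the enum is built by string conversion so the example does not depend on the generated constant names:

func startStreamingJob(ctx context.Context, client streamanalytics.StreamingJobsClient, resourceGroup, jobName string) error {
	params := &streamanalytics.StartStreamingJobParameters{
		// Resume output from the job's last emitted event.
		OutputStartMode: streamanalytics.OutputStartMode("LastOutputEventTime"),
	}
	future, err := client.Start(ctx, resourceGroup, jobName, params)
	if err != nil {
		return err
	}
	return future.WaitForCompletionRef(ctx, client.Client)
}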
-func (client StreamingJobsClient) StartSender(req *http.Request) (future StreamingJobsStartFuture, err error) { - var resp *http.Response - future.FutureAPI = &azure.Future{} - resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) - if err != nil { - return - } - var azf azure.Future - azf, err = azure.NewFutureFromResponse(resp) - future.FutureAPI = &azf - future.Result = future.result - return -} - -// StartResponder handles the response to the Start request. The method always -// closes the http.Response Body. -func (client StreamingJobsClient) StartResponder(resp *http.Response) (result autorest.Response, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), - autorest.ByClosing()) - result.Response = resp - return -} - -// Stop stops a running streaming job. This will cause a running streaming job to stop processing input events and -// producing output. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -func (client StreamingJobsClient) Stop(ctx context.Context, resourceGroupName string, jobName string) (result StreamingJobsStopFuture, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Stop") - defer func() { - sc := -1 - if result.FutureAPI != nil && result.FutureAPI.Response() != nil { - sc = result.FutureAPI.Response().StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.StreamingJobsClient", "Stop", err.Error()) - } - - req, err := client.StopPreparer(ctx, resourceGroupName, jobName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Stop", nil, "Failure preparing request") - return - } - - result, err = client.StopSender(req) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Stop", result.Response(), "Failure sending request") - return - } - - return -} - -// StopPreparer prepares the Stop request. 
-func (client StreamingJobsClient) StopPreparer(ctx context.Context, resourceGroupName string, jobName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsPost(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// StopSender sends the Stop request. The method will close the -// http.Response Body if it receives an error. -func (client StreamingJobsClient) StopSender(req *http.Request) (future StreamingJobsStopFuture, err error) { - var resp *http.Response - future.FutureAPI = &azure.Future{} - resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) - if err != nil { - return - } - var azf azure.Future - azf, err = azure.NewFutureFromResponse(resp) - future.FutureAPI = &azf - future.Result = future.result - return -} - -// StopResponder handles the response to the Stop request. The method always -// closes the http.Response Body. -func (client StreamingJobsClient) StopResponder(resp *http.Response) (result autorest.Response, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), - autorest.ByClosing()) - result.Response = resp - return -} - -// Update updates an existing streaming job. This can be used to partially update (ie. update one or two properties) a -// streaming job without affecting the rest of the job definition. -// Parameters: -// streamingJob - a streaming job object. The properties specified here will overwrite the corresponding -// properties in the existing streaming job (ie. Those properties will be updated). Any properties that are set -// to null here will mean that the corresponding property in the existing streaming job will remain the same and not -// change as a result of this PATCH operation. -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// ifMatch - the ETag of the streaming job. Omit this value to always overwrite the current record set. Specify -// the last-seen ETag value to prevent accidentally overwriting concurrent changes. 
-func (client StreamingJobsClient) Update(ctx context.Context, streamingJob StreamingJob, resourceGroupName string, jobName string, ifMatch string) (result StreamingJob, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/StreamingJobsClient.Update") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.StreamingJobsClient", "Update", err.Error()) - } - - req, err := client.UpdatePreparer(ctx, streamingJob, resourceGroupName, jobName, ifMatch) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Update", nil, "Failure preparing request") - return - } - - resp, err := client.UpdateSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Update", resp, "Failure sending request") - return - } - - result, err = client.UpdateResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.StreamingJobsClient", "Update", resp, "Failure responding to request") - return - } - - return -} - -// UpdatePreparer prepares the Update request. -func (client StreamingJobsClient) UpdatePreparer(ctx context.Context, streamingJob StreamingJob, resourceGroupName string, jobName string, ifMatch string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPatch(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}", pathParameters), - autorest.WithJSON(streamingJob), - autorest.WithQueryParameters(queryParameters)) - if len(ifMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-Match", autorest.String(ifMatch))) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// UpdateSender sends the Update request. The method will close the -// http.Response Body if it receives an error. -func (client StreamingJobsClient) UpdateSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// UpdateResponder handles the response to the Update request. The method always -// closes the http.Response Body. 
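Because Update issues a PATCH, only the non-nil fields of the payload are changed. A sketch pairing that with the If-Match behaviour described above; the delay value and helper names are illustrative:

func patchStreamingJob(ctx context.Context, client streamanalytics.StreamingJobsClient, resourceGroup, jobName, etag string) error {
	patch := streamanalytics.StreamingJob{
		StreamingJobProperties: &streamanalytics.StreamingJobProperties{
			// Only this field is sent; everything left nil stays untouched.
			EventsLateArrivalMaxDelayInSeconds: utils.Int32(60),
		},
	}
	// Passing the last-seen ETag makes a concurrent edit fail with 412
	// instead of being silently overwritten; "" skips the check.
	_, err := client.Update(ctx, patch, resourceGroup, jobName, etag)
	return err
}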
-func (client StreamingJobsClient) UpdateResponder(resp *http.Response) (result StreamingJob, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/subscriptions.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/subscriptions.go deleted file mode 100644 index 13f52ef9e8cc..000000000000 --- a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/subscriptions.go +++ /dev/null @@ -1,113 +0,0 @@ -package streamanalytics - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. - -import ( - "context" - "github.com/Azure/go-autorest/autorest" - "github.com/Azure/go-autorest/autorest/azure" - "github.com/Azure/go-autorest/autorest/validation" - "github.com/Azure/go-autorest/tracing" - "net/http" -) - -// SubscriptionsClient is the stream Analytics Client -type SubscriptionsClient struct { - BaseClient -} - -// NewSubscriptionsClient creates an instance of the SubscriptionsClient client. -func NewSubscriptionsClient(subscriptionID string) SubscriptionsClient { - return NewSubscriptionsClientWithBaseURI(DefaultBaseURI, subscriptionID) -} - -// NewSubscriptionsClientWithBaseURI creates an instance of the SubscriptionsClient client using a custom endpoint. -// Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). -func NewSubscriptionsClientWithBaseURI(baseURI string, subscriptionID string) SubscriptionsClient { - return SubscriptionsClient{NewWithBaseURI(baseURI, subscriptionID)} -} - -// ListQuotas retrieves the subscription's current quota information in a particular region. -// Parameters: -// location - the region in which to retrieve the subscription's quota information. 
You can find out which -// regions Azure Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/ -func (client SubscriptionsClient) ListQuotas(ctx context.Context, location string) (result SubscriptionQuotasListResult, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/SubscriptionsClient.ListQuotas") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.SubscriptionsClient", "ListQuotas", err.Error()) - } - - req, err := client.ListQuotasPreparer(ctx, location) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.SubscriptionsClient", "ListQuotas", nil, "Failure preparing request") - return - } - - resp, err := client.ListQuotasSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.SubscriptionsClient", "ListQuotas", resp, "Failure sending request") - return - } - - result, err = client.ListQuotasResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.SubscriptionsClient", "ListQuotas", resp, "Failure responding to request") - return - } - - return -} - -// ListQuotasPreparer prepares the ListQuotas request. -func (client SubscriptionsClient) ListQuotasPreparer(ctx context.Context, location string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "location": autorest.Encode("path", location), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// ListQuotasSender sends the ListQuotas request. The method will close the -// http.Response Body if it receives an error. -func (client SubscriptionsClient) ListQuotasSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// ListQuotasResponder handles the response to the ListQuotas request. The method always -// closes the http.Response Body. 
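ListQuotas is a single unpaged call. A brief consumption sketch; the Value field follows the usual autorest list-result shape and is an assumption here, as is the region:

func printStreamAnalyticsQuotas(ctx context.Context, client streamanalytics.SubscriptionsClient) error {
	result, err := client.ListQuotas(ctx, "westeurope") // any supported region
	if err != nil {
		return err
	}
	if result.Value != nil {
		for _, quota := range *result.Value {
			log.Printf("quota: %+v", quota) // fields left opaque on purpose
		}
	}
	return nil
}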
-func (client SubscriptionsClient) ListQuotasResponder(resp *http.Response) (result SubscriptionQuotasListResult, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/transformations.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/transformations.go deleted file mode 100644 index 1a80cd882cc0..000000000000 --- a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/transformations.go +++ /dev/null @@ -1,326 +0,0 @@ -package streamanalytics - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. - -import ( - "context" - "github.com/Azure/go-autorest/autorest" - "github.com/Azure/go-autorest/autorest/azure" - "github.com/Azure/go-autorest/autorest/validation" - "github.com/Azure/go-autorest/tracing" - "net/http" -) - -// TransformationsClient is the stream Analytics Client -type TransformationsClient struct { - BaseClient -} - -// NewTransformationsClient creates an instance of the TransformationsClient client. -func NewTransformationsClient(subscriptionID string) TransformationsClient { - return NewTransformationsClientWithBaseURI(DefaultBaseURI, subscriptionID) -} - -// NewTransformationsClientWithBaseURI creates an instance of the TransformationsClient client using a custom endpoint. -// Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). -func NewTransformationsClientWithBaseURI(baseURI string, subscriptionID string) TransformationsClient { - return TransformationsClient{NewWithBaseURI(baseURI, subscriptionID)} -} - -// CreateOrReplace creates a transformation or replaces an already existing transformation under an existing streaming -// job. -// Parameters: -// transformation - the definition of the transformation that will be used to create a new transformation or -// replace the existing one under the streaming job. -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// transformationName - the name of the transformation. -// ifMatch - the ETag of the transformation. Omit this value to always overwrite the current transformation. -// Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. -// ifNoneMatch - set to '*' to allow a new transformation to be created, but to prevent updating an existing -// transformation. Other values will result in a 412 Pre-condition Failed response. 
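The ifMatch/ifNoneMatch pair described above separates replace-if-unchanged from create-only semantics. A sketch of the create-only case; the query, streaming-unit count, and transformation name are all assumed illustrative values:

func createTransformationOnly(ctx context.Context, client streamanalytics.TransformationsClient, resourceGroup, jobName string) error {
	t := streamanalytics.Transformation{
		TransformationProperties: &streamanalytics.TransformationProperties{
			Query:          utils.String("SELECT * INTO [output] FROM [input]"),
			StreamingUnits: utils.Int32(3),
		},
	}
	// ifMatch "" (no concurrency check) plus ifNoneMatch "*" fails with 412
	// when the transformation already exists, instead of replacing it.
	_, err := client.CreateOrReplace(ctx, t, resourceGroup, jobName, "Transformation", "", "*")
	return err
}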
-func (client TransformationsClient) CreateOrReplace(ctx context.Context, transformation Transformation, resourceGroupName string, jobName string, transformationName string, ifMatch string, ifNoneMatch string) (result Transformation, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/TransformationsClient.CreateOrReplace") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.TransformationsClient", "CreateOrReplace", err.Error()) - } - - req, err := client.CreateOrReplacePreparer(ctx, transformation, resourceGroupName, jobName, transformationName, ifMatch, ifNoneMatch) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "CreateOrReplace", nil, "Failure preparing request") - return - } - - resp, err := client.CreateOrReplaceSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "CreateOrReplace", resp, "Failure sending request") - return - } - - result, err = client.CreateOrReplaceResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "CreateOrReplace", resp, "Failure responding to request") - return - } - - return -} - -// CreateOrReplacePreparer prepares the CreateOrReplace request. 
-func (client TransformationsClient) CreateOrReplacePreparer(ctx context.Context, transformation Transformation, resourceGroupName string, jobName string, transformationName string, ifMatch string, ifNoneMatch string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - "transformationName": autorest.Encode("path", transformationName), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPut(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}", pathParameters), - autorest.WithJSON(transformation), - autorest.WithQueryParameters(queryParameters)) - if len(ifMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-Match", autorest.String(ifMatch))) - } - if len(ifNoneMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-None-Match", autorest.String(ifNoneMatch))) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// CreateOrReplaceSender sends the CreateOrReplace request. The method will close the -// http.Response Body if it receives an error. -func (client TransformationsClient) CreateOrReplaceSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// CreateOrReplaceResponder handles the response to the CreateOrReplace request. The method always -// closes the http.Response Body. -func (client TransformationsClient) CreateOrReplaceResponder(resp *http.Response) (result Transformation, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// Get gets details about the specified transformation. -// Parameters: -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// transformationName - the name of the transformation. 
-func (client TransformationsClient) Get(ctx context.Context, resourceGroupName string, jobName string, transformationName string) (result Transformation, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/TransformationsClient.Get") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.TransformationsClient", "Get", err.Error()) - } - - req, err := client.GetPreparer(ctx, resourceGroupName, jobName, transformationName) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Get", nil, "Failure preparing request") - return - } - - resp, err := client.GetSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Get", resp, "Failure sending request") - return - } - - result, err = client.GetResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Get", resp, "Failure responding to request") - return - } - - return -} - -// GetPreparer prepares the Get request. -func (client TransformationsClient) GetPreparer(ctx context.Context, resourceGroupName string, jobName string, transformationName string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - "transformationName": autorest.Encode("path", transformationName), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsGet(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}", pathParameters), - autorest.WithQueryParameters(queryParameters)) - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// GetSender sends the Get request. The method will close the -// http.Response Body if it receives an error. -func (client TransformationsClient) GetSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// GetResponder handles the response to the Get request. The method always -// closes the http.Response Body. 
-func (client TransformationsClient) GetResponder(resp *http.Response) (result Transformation, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} - -// Update updates an existing transformation under an existing streaming job. This can be used to partially update (ie. -// update one or two properties) a transformation without affecting the rest of the job or transformation definition. -// Parameters: -// transformation - a Transformation object. The properties specified here will overwrite the corresponding -// properties in the existing transformation (ie. Those properties will be updated). Any properties that are -// set to null here will mean that the corresponding property in the existing transformation will remain the -// same and not change as a result of this PATCH operation. -// resourceGroupName - the name of the resource group. The name is case insensitive. -// jobName - the name of the streaming job. -// transformationName - the name of the transformation. -// ifMatch - the ETag of the transformation. Omit this value to always overwrite the current transformation. -// Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. -func (client TransformationsClient) Update(ctx context.Context, transformation Transformation, resourceGroupName string, jobName string, transformationName string, ifMatch string) (result Transformation, err error) { - if tracing.IsEnabled() { - ctx = tracing.StartSpan(ctx, fqdn+"/TransformationsClient.Update") - defer func() { - sc := -1 - if result.Response.Response != nil { - sc = result.Response.Response.StatusCode - } - tracing.EndSpan(ctx, sc, err) - }() - } - if err := validation.Validate([]validation.Validation{ - {TargetValue: client.SubscriptionID, - Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}}, - {TargetValue: resourceGroupName, - Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil}, - {Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil}, - {Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil { - return result, validation.NewError("streamanalytics.TransformationsClient", "Update", err.Error()) - } - - req, err := client.UpdatePreparer(ctx, transformation, resourceGroupName, jobName, transformationName, ifMatch) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Update", nil, "Failure preparing request") - return - } - - resp, err := client.UpdateSender(req) - if err != nil { - result.Response = autorest.Response{Response: resp} - err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Update", resp, "Failure sending request") - return - } - - result, err = client.UpdateResponder(resp) - if err != nil { - err = autorest.NewErrorWithError(err, "streamanalytics.TransformationsClient", "Update", resp, "Failure responding to request") - return - } - - return -} - -// UpdatePreparer prepares the Update request. 
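As with the streaming-job Update earlier, this PATCH only touches non-nil properties. A sketch that rewrites the query while leaving the streaming-unit count unchanged; the query text and names are illustrative:

func updateTransformationQuery(ctx context.Context, client streamanalytics.TransformationsClient, resourceGroup, jobName string) error {
	t := streamanalytics.Transformation{
		TransformationProperties: &streamanalytics.TransformationProperties{
			Query: utils.String("SELECT AVG(value) INTO [output] FROM [input] GROUP BY TumblingWindow(second, 30)"),
			// StreamingUnits is left nil, so the existing value is preserved.
		},
	}
	_, err := client.Update(ctx, t, resourceGroup, jobName, "Transformation", "")
	return err
}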
-func (client TransformationsClient) UpdatePreparer(ctx context.Context, transformation Transformation, resourceGroupName string, jobName string, transformationName string, ifMatch string) (*http.Request, error) { - pathParameters := map[string]interface{}{ - "jobName": autorest.Encode("path", jobName), - "resourceGroupName": autorest.Encode("path", resourceGroupName), - "subscriptionId": autorest.Encode("path", client.SubscriptionID), - "transformationName": autorest.Encode("path", transformationName), - } - - const APIVersion = "2020-03-01" - queryParameters := map[string]interface{}{ - "api-version": APIVersion, - } - - preparer := autorest.CreatePreparer( - autorest.AsContentType("application/json; charset=utf-8"), - autorest.AsPatch(), - autorest.WithBaseURL(client.BaseURI), - autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}", pathParameters), - autorest.WithJSON(transformation), - autorest.WithQueryParameters(queryParameters)) - if len(ifMatch) > 0 { - preparer = autorest.DecoratePreparer(preparer, - autorest.WithHeader("If-Match", autorest.String(ifMatch))) - } - return preparer.Prepare((&http.Request{}).WithContext(ctx)) -} - -// UpdateSender sends the Update request. The method will close the -// http.Response Body if it receives an error. -func (client TransformationsClient) UpdateSender(req *http.Request) (*http.Response, error) { - return client.Send(req, azure.DoRetryWithRegistration(client.Client)) -} - -// UpdateResponder handles the response to the Update request. The method always -// closes the http.Response Body. -func (client TransformationsClient) UpdateResponder(resp *http.Response) (result Transformation, err error) { - err = autorest.Respond( - resp, - azure.WithErrorUnlessStatusCode(http.StatusOK), - autorest.ByUnmarshallingJSON(&result), - autorest.ByClosing()) - result.Response = autorest.Response{Response: resp} - return -} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/version.go b/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/version.go deleted file mode 100644 index e14484ec2646..000000000000 --- a/vendor/github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics/version.go +++ /dev/null @@ -1,19 +0,0 @@ -package streamanalytics - -import "github.com/Azure/azure-sdk-for-go/version" - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. - -// UserAgent returns the UserAgent string to use when sending http.Requests. -func UserAgent() string { - return "Azure-SDK-For-Go/" + Version() + " streamanalytics/2020-03-01" -} - -// Version returns the semantic version (see http://semver.org) of the client. 
-func Version() string { - return version.Number -} diff --git a/vendor/modules.txt b/vendor/modules.txt index 72be104f0aa2..3297aa6b8648 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -71,7 +71,6 @@ github.com/Azure/azure-sdk-for-go/services/servicefabric/mgmt/2021-06-01/service github.com/Azure/azure-sdk-for-go/services/storage/mgmt/2021-09-01/storage github.com/Azure/azure-sdk-for-go/services/storagecache/mgmt/2021-09-01/storagecache github.com/Azure/azure-sdk-for-go/services/storagesync/mgmt/2020-03-01/storagesync -github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics github.com/Azure/azure-sdk-for-go/services/subscription/mgmt/2020-09-01/subscription github.com/Azure/azure-sdk-for-go/services/timeseriesinsights/mgmt/2020-05-15/timeseriesinsights github.com/Azure/azure-sdk-for-go/services/web/mgmt/2021-02-01/web From 33d7731087edcb5d44348b02be4429edfb890e3b Mon Sep 17 00:00:00 2001 From: Steph Date: Tue, 22 Nov 2022 18:41:40 +0100 Subject: [PATCH 07/14] update get opts for streaming jobs --- .../stream_analytics_job_resource.go | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/internal/services/streamanalytics/stream_analytics_job_resource.go b/internal/services/streamanalytics/stream_analytics_job_resource.go index d76a531fa1fe..795cc791d11d 100644 --- a/internal/services/streamanalytics/stream_analytics_job_resource.go +++ b/internal/services/streamanalytics/stream_analytics_job_resource.go @@ -253,8 +253,8 @@ func resourceStreamAnalyticsJobCreateUpdate(d *pluginsdk.ResourceData, meta inte Sku: &streamingjobs.Sku{ Name: utils.ToPtr(streamingjobs.SkuNameStandard), }, - ContentStoragePolicy: utils.ToPtr(streamingjobs.ContentStoragePolicy(contentStoragePolicy)), - CompatibilityLevel: utils.ToPtr(streamingjobs.CompatibilityLevel(d.Get("compatibility_level").(string))), + ContentStoragePolicy: utils.ToPtr(streamingjobs.ContentStoragePolicy(contentStoragePolicy)), + //CompatibilityLevel: utils.ToPtr(streamingjobs.CompatibilityLevel(d.Get("compatibility_level").(string))), EventsLateArrivalMaxDelayInSeconds: utils.Int64(int64(d.Get("events_late_arrival_max_delay_in_seconds").(int))), EventsOutOfOrderMaxDelayInSeconds: utils.Int64(int64(d.Get("events_out_of_order_max_delay_in_seconds").(int))), EventsOutOfOrderPolicy: utils.ToPtr(streamingjobs.EventsOutOfOrderPolicy(d.Get("events_out_of_order_policy").(string))), @@ -265,6 +265,11 @@ func resourceStreamAnalyticsJobCreateUpdate(d *pluginsdk.ResourceData, meta inte Tags: tags.Expand(d.Get("tags").(map[string]interface{})), } + if _, ok := d.GetOk("compatibility_level"); ok { + compatibilityLevel := d.Get("compatibility_level").(string) + props.Properties.CompatibilityLevel = utils.ToPtr(streamingjobs.CompatibilityLevel(compatibilityLevel)) + } + if contentStoragePolicy == string(streamingjobs.ContentStoragePolicyJobStorageAccount) { if v, ok := d.GetOk("job_storage_account"); ok { props.Properties.JobStorageAccount = expandJobStorageAccount(v.([]interface{})) @@ -345,7 +350,9 @@ func resourceStreamAnalyticsJobRead(d *pluginsdk.ResourceData, meta interface{}) return err } - var opts streamingjobs.GetOperationOptions + opts := streamingjobs.GetOperationOptions{ + Expand: utils.ToPtr("transformation"), + } resp, err := client.Get(ctx, *id, opts) if err != nil { if response.WasNotFound(resp.HttpResponse) { From 690cd2255db2aa27f434e8b7098d73641be615e9 Mon Sep 17 00:00:00 2001 From: Steph Date: Tue, 22 Nov 2022 18:57:03 +0100 Subject: [PATCH 08/14] fix type assertion 
for functions --- ...tream_analytics_function_javascript_uda_resource.go | 10 +++++----- ...tream_analytics_function_javascript_udf_resource.go | 6 +++--- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go index a53bbb2776d2..8cd60e3fcf79 100644 --- a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go +++ b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go @@ -193,13 +193,13 @@ func resourceStreamAnalyticsFunctionUDARead(d *pluginsdk.ResourceData, meta inte return fmt.Errorf("converting to an Aggregate Function") } - binding, ok := function.Properties.Binding.(functions.JavaScriptFunctionBindingProperties) - if !ok { - return fmt.Errorf("converting Binding to a JavaScript Function Binding") - } + binding := function.Properties.Binding.(functions.JavaScriptFunctionBinding) + //if !ok { + // return fmt.Errorf("converting Binding to a JavaScript Function Binding") + //} script := "" - if v := binding.Script; v != nil { + if v := binding.Properties.Script; v != nil { script = *v } d.Set("script", script) diff --git a/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go b/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go index e09a4f2c4e11..ac7c38d0a3d8 100644 --- a/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go +++ b/internal/services/streamanalytics/stream_analytics_function_javascript_udf_resource.go @@ -186,7 +186,7 @@ func resourceStreamAnalyticsFunctionUDFRead(d *pluginsdk.ResourceData, meta inte return fmt.Errorf("retrieving %s: %+v", id, err) } - d.Set("name", id.JobName) + d.Set("name", id.FunctionName) d.Set("stream_analytics_job_name", id.JobName) d.Set("resource_group_name", id.ResourceGroupName) @@ -197,13 +197,13 @@ func resourceStreamAnalyticsFunctionUDFRead(d *pluginsdk.ResourceData, meta inte return fmt.Errorf("converting to Scalar Function") } - binding, ok := function.Properties.Binding.(functions.JavaScriptFunctionBindingProperties) + binding, ok := function.Properties.Binding.(functions.JavaScriptFunctionBinding) if !ok { return fmt.Errorf("converting to Binding") } script := "" - if v := binding.Script; v != nil { + if v := binding.Properties.Script; v != nil { script = *v } d.Set("script", script) From 3725a2862ac1b147d6a1e947900b3e21d21bbba9 Mon Sep 17 00:00:00 2001 From: Steph Date: Wed, 23 Nov 2022 07:03:33 +0100 Subject: [PATCH 09/14] tflint --- ...lytics_function_javascript_uda_resource.go | 3 - .../stream_analytics_job_data_source.go | 5 +- ...tics_reference_input_blob_resource_test.go | 234 +++++++------- ...ics_reference_input_mssql_resource_test.go | 130 ++++---- ...ics_stream_input_eventhub_resource_test.go | 302 +++++++++--------- ..._stream_input_eventhub_v2_resource_test.go | 286 ++++++++--------- ...ytics_stream_input_iothub_resource_test.go | 190 +++++------ 7 files changed, 575 insertions(+), 575 deletions(-) diff --git a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go index 8cd60e3fcf79..12532a71a456 100644 --- a/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go +++ 
b/internal/services/streamanalytics/stream_analytics_function_javascript_uda_resource.go @@ -194,9 +194,6 @@ func resourceStreamAnalyticsFunctionUDARead(d *pluginsdk.ResourceData, meta inte } binding := function.Properties.Binding.(functions.JavaScriptFunctionBinding) - //if !ok { - // return fmt.Errorf("converting Binding to a JavaScript Function Binding") - //} script := "" if v := binding.Properties.Script; v != nil { diff --git a/internal/services/streamanalytics/stream_analytics_job_data_source.go b/internal/services/streamanalytics/stream_analytics_job_data_source.go index 513ac760c394..993b93c8cccc 100644 --- a/internal/services/streamanalytics/stream_analytics_job_data_source.go +++ b/internal/services/streamanalytics/stream_analytics_job_data_source.go @@ -11,6 +11,7 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" + "github.com/hashicorp/terraform-provider-azurerm/utils" ) func dataSourceStreamAnalyticsJob() *pluginsdk.Resource { @@ -103,7 +104,9 @@ func dataSourceStreamAnalyticsJobRead(d *pluginsdk.ResourceData, meta interface{ defer cancel() id := streamingjobs.NewStreamingJobID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) - var opts streamingjobs.GetOperationOptions + opts := streamingjobs.GetOperationOptions{ + Expand: utils.ToPtr("transformation"), + } resp, err := client.Get(ctx, id, opts) if err != nil { if response.WasNotFound(resp.HttpResponse) { diff --git a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource_test.go b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource_test.go index f8a7dd7e6f6b..78cc2f490c5a 100644 --- a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource_test.go +++ b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource_test.go @@ -134,19 +134,19 @@ func (r StreamAnalyticsReferenceInputBlobResource) avro(data acceptance.TestData %s resource "azurerm_stream_analytics_reference_input_blob" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = azurerm_stream_analytics_job.test.resource_group_name - storage_account_name = azurerm_storage_account.test.name - storage_account_key = azurerm_storage_account.test.primary_access_key - storage_container_name = azurerm_storage_container.test.name - path_pattern = "some-random-pattern" - date_format = "yyyy/MM/dd" - time_format = "HH" - - serialization { - type = "Avro" - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + storage_account_name = azurerm_storage_account.test.name + storage_account_key = azurerm_storage_account.test.primary_access_key + storage_container_name = azurerm_storage_container.test.name + path_pattern = "some-random-pattern" + date_format = "yyyy/MM/dd" + time_format = "HH" + + serialization { + type = "Avro" + } } `, template, data.RandomInteger) } @@ -157,21 +157,21 @@ func (r StreamAnalyticsReferenceInputBlobResource) csv(data acceptance.TestData) %s resource "azurerm_stream_analytics_reference_input_blob" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = 
azurerm_stream_analytics_job.test.resource_group_name - storage_account_name = azurerm_storage_account.test.name - storage_account_key = azurerm_storage_account.test.primary_access_key - storage_container_name = azurerm_storage_container.test.name - path_pattern = "some-random-pattern" - date_format = "yyyy/MM/dd" - time_format = "HH" - - serialization { - type = "Csv" - encoding = "UTF8" - field_delimiter = "," - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + storage_account_name = azurerm_storage_account.test.name + storage_account_key = azurerm_storage_account.test.primary_access_key + storage_container_name = azurerm_storage_container.test.name + path_pattern = "some-random-pattern" + date_format = "yyyy/MM/dd" + time_format = "HH" + + serialization { + type = "Csv" + encoding = "UTF8" + field_delimiter = "," + } } `, template, data.RandomInteger) } @@ -182,20 +182,20 @@ func (r StreamAnalyticsReferenceInputBlobResource) json(data acceptance.TestData %s resource "azurerm_stream_analytics_reference_input_blob" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = azurerm_stream_analytics_job.test.resource_group_name - storage_account_name = azurerm_storage_account.test.name - storage_account_key = azurerm_storage_account.test.primary_access_key - storage_container_name = azurerm_storage_container.test.name - path_pattern = "some-random-pattern" - date_format = "yyyy/MM/dd" - time_format = "HH" - - serialization { - type = "Json" - encoding = "UTF8" - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + storage_account_name = azurerm_storage_account.test.name + storage_account_key = azurerm_storage_account.test.primary_access_key + storage_container_name = azurerm_storage_container.test.name + path_pattern = "some-random-pattern" + date_format = "yyyy/MM/dd" + time_format = "HH" + + serialization { + type = "Json" + encoding = "UTF8" + } } `, template, data.RandomInteger) } @@ -206,33 +206,33 @@ func (r StreamAnalyticsReferenceInputBlobResource) updated(data acceptance.TestD %s resource "azurerm_storage_account" "updated" { - name = "acctestsa2%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" + name = "acctestsa2%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" } resource "azurerm_storage_container" "updated" { - name = "example2" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" + name = "example2" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" } resource "azurerm_stream_analytics_reference_input_blob" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = azurerm_stream_analytics_job.test.resource_group_name - storage_account_name = azurerm_storage_account.updated.name - storage_account_key = azurerm_storage_account.updated.primary_access_key - storage_container_name = azurerm_storage_container.updated.name - path_pattern = "some-other-pattern" - 
date_format = "yyyy-MM-dd" - time_format = "HH" - - serialization { - type = "Avro" - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + storage_account_name = azurerm_storage_account.updated.name + storage_account_key = azurerm_storage_account.updated.primary_access_key + storage_container_name = azurerm_storage_container.updated.name + path_pattern = "some-other-pattern" + date_format = "yyyy-MM-dd" + time_format = "HH" + + serialization { + type = "Avro" + } } `, template, data.RandomString, data.RandomInteger) } @@ -243,21 +243,21 @@ func (r StreamAnalyticsReferenceInputBlobResource) authenticationMode(data accep %s resource "azurerm_stream_analytics_reference_input_blob" "test" { - name = "acctestinput-%d" - stream_analytics_job_name = azurerm_stream_analytics_job.test.name - resource_group_name = azurerm_stream_analytics_job.test.resource_group_name - storage_account_name = azurerm_storage_account.test.name - storage_account_key = azurerm_storage_account.test.primary_access_key - storage_container_name = azurerm_storage_container.test.name - path_pattern = "some-random-pattern" - date_format = "yyyy/MM/dd" - time_format = "HH" - authentication_mode = "Msi" - - serialization { - type = "Json" - encoding = "UTF8" - } + name = "acctestinput-%d" + stream_analytics_job_name = azurerm_stream_analytics_job.test.name + resource_group_name = azurerm_stream_analytics_job.test.resource_group_name + storage_account_name = azurerm_storage_account.test.name + storage_account_key = azurerm_storage_account.test.primary_access_key + storage_container_name = azurerm_storage_container.test.name + path_pattern = "some-random-pattern" + date_format = "yyyy/MM/dd" + time_format = "HH" + authentication_mode = "Msi" + + serialization { + type = "Json" + encoding = "UTF8" + } } `, template, data.RandomInteger) } @@ -268,22 +268,22 @@ func (r StreamAnalyticsReferenceInputBlobResource) requiresImport(data acceptanc %s resource "azurerm_stream_analytics_reference_input_blob" "import" { - name = azurerm_stream_analytics_reference_input_blob.test.name - stream_analytics_job_name = azurerm_stream_analytics_reference_input_blob.test.stream_analytics_job_name - resource_group_name = azurerm_stream_analytics_reference_input_blob.test.resource_group_name - storage_account_name = azurerm_stream_analytics_reference_input_blob.test.storage_account_name - storage_account_key = azurerm_stream_analytics_reference_input_blob.test.storage_account_key - storage_container_name = azurerm_stream_analytics_reference_input_blob.test.storage_container_name - path_pattern = azurerm_stream_analytics_reference_input_blob.test.path_pattern - date_format = azurerm_stream_analytics_reference_input_blob.test.date_format - time_format = azurerm_stream_analytics_reference_input_blob.test.time_format - dynamic "serialization" { - for_each = azurerm_stream_analytics_reference_input_blob.test.serialization - content { - encoding = lookup(serialization.value, "encoding", null) - type = serialization.value.type - } - } + name = azurerm_stream_analytics_reference_input_blob.test.name + stream_analytics_job_name = azurerm_stream_analytics_reference_input_blob.test.stream_analytics_job_name + resource_group_name = azurerm_stream_analytics_reference_input_blob.test.resource_group_name + storage_account_name = azurerm_stream_analytics_reference_input_blob.test.storage_account_name + storage_account_key = 
azurerm_stream_analytics_reference_input_blob.test.storage_account_key + storage_container_name = azurerm_stream_analytics_reference_input_blob.test.storage_container_name + path_pattern = azurerm_stream_analytics_reference_input_blob.test.path_pattern + date_format = azurerm_stream_analytics_reference_input_blob.test.date_format + time_format = azurerm_stream_analytics_reference_input_blob.test.time_format + dynamic "serialization" { + for_each = azurerm_stream_analytics_reference_input_blob.test.serialization + content { + encoding = lookup(serialization.value, "encoding", null) + type = serialization.value.type + } + } } `, template) } @@ -291,41 +291,41 @@ resource "azurerm_stream_analytics_reference_input_blob" "import" { func (r StreamAnalyticsReferenceInputBlobResource) template(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { - features {} + features {} } resource "azurerm_resource_group" "test" { - name = "acctestRG-%d" - location = "%s" + name = "acctestRG-%d" + location = "%s" } resource "azurerm_storage_account" "test" { - name = "acctestsa%s" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - account_tier = "Standard" - account_replication_type = "LRS" + name = "acctestsa%s" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + account_tier = "Standard" + account_replication_type = "LRS" } resource "azurerm_storage_container" "test" { - name = "example" - storage_account_name = azurerm_storage_account.test.name - container_access_type = "private" + name = "example" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" } resource "azurerm_stream_analytics_job" "test" { - name = "acctestjob-%d" - resource_group_name = azurerm_resource_group.test.name - location = azurerm_resource_group.test.location - compatibility_level = "1.0" - data_locale = "en-GB" - events_late_arrival_max_delay_in_seconds = 60 - events_out_of_order_max_delay_in_seconds = 50 - events_out_of_order_policy = "Adjust" - output_error_policy = "Drop" - streaming_units = 3 - - transformation_query = <<QUERY - SELECT * - INTO [YourOutputAlias] - FROM [YourInputAlias] - QUERY + name = "acctestjob-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + compatibility_level = "1.0" + data_locale = "en-GB" + events_late_arrival_max_delay_in_seconds = 60 + events_out_of_order_max_delay_in_seconds = 50 + events_out_of_order_policy = "Adjust" + output_error_policy = "Drop" + streaming_units = 3 + + transformation_query = <<QUERY + SELECT * + INTO [YourOutputAlias] + FROM [YourInputAlias] + QUERY } `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger) }
From: Steph Date: Wed, 23 Nov 2022 12:38:55 +0100 Subject: [PATCH 10/14] fix condition --- .../stream_analytics_stream_input_eventhub_v2_resource.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go index 75c2767087a9..2c80f8a3ff21 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go @@ -267,12 +267,12 @@ func (r StreamInputEventHubV2Resource) Read() sdk.ResourceFunc { if model := resp.Model; model != nil { if props := model.Properties; props != nil { - input, ok := props.(inputs.Input) + input, ok := props.(inputs.InputProperties) if !ok { return fmt.Errorf("converting to an Input") } - streamInput, ok := input.Properties.(inputs.StreamInputProperties) + streamInput, ok := input.(inputs.StreamInputProperties) if !ok { return fmt.Errorf("converting to a Stream Input") } @@ -384,7 +384,7 @@ func (r StreamInputEventHubV2Resource) CustomImporter() sdk.ResourceRunFunc { return fmt.Errorf("specified resource is not a Stream Input: %+v", err) } - if _, ok :=
streamInput.Datasource.(inputs.EventHubV2StreamInputDataSource); ok { + if _, ok := streamInput.Datasource.(inputs.EventHubV2StreamInputDataSource); !ok { return fmt.Errorf("specified input is not of type EventHubV2: %+v", err) } From 63b79d8368f362fa6d5f8b0c4d8ee5441b257535 Mon Sep 17 00:00:00 2001 From: Steph Date: Thu, 24 Nov 2022 09:31:55 +0100 Subject: [PATCH 11/14] use custom importer and standardize error message for type assertions --- .../stream_analytics_output_blob_resource.go | 6 +++--- .../stream_analytics_output_cosmosdb_resource.go | 2 +- .../stream_analytics_output_eventhub_resource.go | 6 +++--- .../stream_analytics_output_function_resource.go | 2 +- .../stream_analytics_output_mssql_resource.go | 6 +++--- .../stream_analytics_output_powerbi_resource.go | 2 +- .../stream_analytics_output_servicebus_queue_resource.go | 6 +++--- .../stream_analytics_output_servicebus_topic_resource.go | 6 +++--- .../stream_analytics_output_synapse_resource.go | 2 +- .../stream_analytics_output_table_resource.go | 2 +- .../stream_analytics_reference_input_blob_resource.go | 6 +++--- .../stream_analytics_reference_input_mssql_resource.go | 6 +++--- .../stream_analytics_stream_input_blob_resource.go | 4 ++-- .../stream_analytics_stream_input_eventhub_resource.go | 6 +++--- .../stream_analytics_stream_input_eventhub_v2_resource.go | 6 +++--- .../stream_analytics_stream_input_iothub_resource.go | 8 +++----- 16 files changed, 37 insertions(+), 39 deletions(-) diff --git a/internal/services/streamanalytics/stream_analytics_output_blob_resource.go b/internal/services/streamanalytics/stream_analytics_output_blob_resource.go index 7d183425aa88..a23e2453cba6 100644 --- a/internal/services/streamanalytics/stream_analytics_output_blob_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_blob_resource.go @@ -23,10 +23,10 @@ func resourceStreamAnalyticsOutputBlob() *pluginsdk.Resource { Read: resourceStreamAnalyticsOutputBlobRead, Update: resourceStreamAnalyticsOutputBlobCreateUpdate, Delete: resourceStreamAnalyticsOutputBlobDelete, - Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { + Importer: pluginsdk.ImporterValidatingResourceIdThen(func(id string) error { _, err := outputs.ParseOutputID(id) return err - }), + }, importStreamAnalyticsOutput(outputs.BlobOutputDataSource{})), Timeouts: &pluginsdk.ResourceTimeout{ Create: pluginsdk.DefaultTimeout(30 * time.Minute), @@ -227,7 +227,7 @@ func resourceStreamAnalyticsOutputBlobRead(d *pluginsdk.ResourceData, meta inter if props := model.Properties; props != nil { output, ok := props.Datasource.(outputs.BlobOutputDataSource) if !ok { - return fmt.Errorf("converting to Blob Output") + return fmt.Errorf("converting %s to a Blob Output", *id) } dateFormat := "" diff --git a/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go b/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go index 1b761d8fec68..cd6e3e284a15 100644 --- a/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go @@ -176,7 +176,7 @@ func (r OutputCosmosDBResource) Read() sdk.ResourceFunc { if props := model.Properties; props != nil { output, ok := props.Datasource.(outputs.DocumentDbOutputDataSource) if !ok { - return fmt.Errorf("converting to CosmosDb Output") + return fmt.Errorf("converting %s to a CosmosDb Output", *id) } streamingJobId := streamingjobs.NewStreamingJobID(id.SubscriptionId, 
id.ResourceGroupName, id.JobName)
diff --git a/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go b/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go index dfc68fe37323..260ec2073f0e 100644 --- a/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go @@ -22,10 +22,10 @@ func resourceStreamAnalyticsOutputEventHub() *pluginsdk.Resource { Read: resourceStreamAnalyticsOutputEventHubRead, Update: resourceStreamAnalyticsOutputEventHubCreateUpdate, Delete: resourceStreamAnalyticsOutputEventHubDelete, - Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { + Importer: pluginsdk.ImporterValidatingResourceIdThen(func(id string) error { _, err := outputs.ParseOutputID(id) return err - }), + }, importStreamAnalyticsOutput(outputs.EventHubOutputDataSource{})), Timeouts: &pluginsdk.ResourceTimeout{ Create: pluginsdk.DefaultTimeout(30 * time.Minute), @@ -200,7 +200,7 @@ func resourceStreamAnalyticsOutputEventHubRead(d *pluginsdk.ResourceData, meta i if props := model.Properties; props != nil { output, ok := props.Datasource.(outputs.EventHubOutputDataSource) if !ok { - return fmt.Errorf("converting to EventHub Output") + return fmt.Errorf("converting %s to an EventHub Output", *id) } eventHubName := ""
diff --git a/internal/services/streamanalytics/stream_analytics_output_function_resource.go b/internal/services/streamanalytics/stream_analytics_output_function_resource.go index 9562570899f5..c4b4348dfeec 100644 --- a/internal/services/streamanalytics/stream_analytics_output_function_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_function_resource.go @@ -169,7 +169,7 @@ func (r OutputFunctionResource) Read() sdk.ResourceFunc { if props := model.Properties; props != nil { output, ok := props.Datasource.(outputs.AzureFunctionOutputDataSource) if !ok { - return fmt.Errorf("converting to Function Output") + return fmt.Errorf("converting %s to a Function Output", *id) } if output.Properties.FunctionAppName == nil || output.Properties.FunctionName == nil || output.Properties.MaxBatchCount == nil || output.Properties.MaxBatchSize == nil {
diff --git a/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go b/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go index 9ef6b4918df9..680b57f1f744 100644 --- a/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go @@ -21,10 +21,10 @@ func resourceStreamAnalyticsOutputSql() *pluginsdk.Resource { Read: resourceStreamAnalyticsOutputSqlRead, Update: resourceStreamAnalyticsOutputSqlCreateUpdate, Delete: resourceStreamAnalyticsOutputSqlDelete, - Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { + Importer: pluginsdk.ImporterValidatingResourceIdThen(func(id string) error { _, err := outputs.ParseOutputID(id) return err - }), + }, importStreamAnalyticsOutput(outputs.AzureSqlDatabaseOutputDataSource{})), Timeouts: &pluginsdk.ResourceTimeout{ Create: pluginsdk.DefaultTimeout(30 * time.Minute), @@ -197,7 +197,7 @@ func resourceStreamAnalyticsOutputSqlRead(d *pluginsdk.ResourceData, meta interf if props := model.Properties; props != nil { output, ok := props.Datasource.(outputs.AzureSqlDatabaseOutputDataSource) if !ok { - return fmt.Errorf("converting to SQL Output") + return fmt.Errorf("converting %s to
a SQL Output", *id) } server := "" diff --git a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go index ba72546f39e0..faec28850074 100644 --- a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go @@ -256,7 +256,7 @@ func (r OutputPowerBIResource) Read() sdk.ResourceFunc { if props := model.Properties; props != nil { output, ok := props.Datasource.(outputs.PowerBIOutputDataSource) if !ok { - return fmt.Errorf("converting to PowerBI Output") + return fmt.Errorf("converting %s to a PowerBI Output", *id) } streamingJobId := streamingjobs.NewStreamingJobID(id.SubscriptionId, id.ResourceGroupName, id.JobName) diff --git a/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go b/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go index d47d6119fa43..5cedc16993d6 100644 --- a/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go @@ -22,10 +22,10 @@ func resourceStreamAnalyticsOutputServiceBusQueue() *pluginsdk.Resource { Read: resourceStreamAnalyticsOutputServiceBusQueueRead, Update: resourceStreamAnalyticsOutputServiceBusQueueCreateUpdate, Delete: resourceStreamAnalyticsOutputServiceBusQueueDelete, - Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { + Importer: pluginsdk.ImporterValidatingResourceIdThen(func(id string) error { _, err := outputs.ParseOutputID(id) return err - }), + }, importStreamAnalyticsOutput(outputs.ServiceBusQueueOutputDataSource{})), Timeouts: &pluginsdk.ResourceTimeout{ Create: pluginsdk.DefaultTimeout(30 * time.Minute), @@ -204,7 +204,7 @@ func resourceStreamAnalyticsOutputServiceBusQueueRead(d *pluginsdk.ResourceData, if props := model.Properties; props != nil { output, ok := props.Datasource.(outputs.ServiceBusQueueOutputDataSource) if !ok { - return fmt.Errorf("converting to ServiceBus Queue Output") + return fmt.Errorf("converting %s to a ServiceBus Queue Output", *id) } queue := "" diff --git a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go index 885b1ea1939c..7e9072a4fb7c 100644 --- a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go @@ -22,10 +22,10 @@ func resourceStreamAnalyticsOutputServiceBusTopic() *pluginsdk.Resource { Read: resourceStreamAnalyticsOutputServiceBusTopicRead, Update: resourceStreamAnalyticsOutputServiceBusTopicCreateUpdate, Delete: resourceStreamAnalyticsOutputServiceBusTopicDelete, - Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { + Importer: pluginsdk.ImporterValidatingResourceIdThen(func(id string) error { _, err := outputs.ParseOutputID(id) return err - }), + }, importStreamAnalyticsOutput(outputs.ServiceBusTopicOutputDataSource{})), Timeouts: &pluginsdk.ResourceTimeout{ Create: pluginsdk.DefaultTimeout(30 * time.Minute), @@ -201,7 +201,7 @@ func resourceStreamAnalyticsOutputServiceBusTopicRead(d *pluginsdk.ResourceData, if props := model.Properties; props != nil { output, ok := props.Datasource.(outputs.ServiceBusTopicOutputDataSource) 
if !ok { - return fmt.Errorf("converting to ServiceBus Topic Output") + return fmt.Errorf("converting %s to a ServiceBus Topic Output", *id) } topicName := "" diff --git a/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go b/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go index 57381830c17a..858fb1072fbc 100644 --- a/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_synapse_resource.go @@ -172,7 +172,7 @@ func resourceStreamAnalyticsOutputSynapseRead(d *pluginsdk.ResourceData, meta in if props := model.Properties; props != nil { output, ok := props.Datasource.(outputs.AzureSynapseOutputDataSource) if !ok { - return fmt.Errorf("converting to Synapse Output") + return fmt.Errorf("converting %s to a Synapse Output", *id) } server := "" diff --git a/internal/services/streamanalytics/stream_analytics_output_table_resource.go b/internal/services/streamanalytics/stream_analytics_output_table_resource.go index f761bee360e4..b51d1d58cb1b 100644 --- a/internal/services/streamanalytics/stream_analytics_output_table_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_table_resource.go @@ -192,7 +192,7 @@ func (r OutputTableResource) Read() sdk.ResourceFunc { if props := model.Properties; props != nil { output, ok := props.Datasource.(outputs.AzureTableOutputDataSource) if !ok { - return fmt.Errorf("converting to Table Output") + return fmt.Errorf("converting %s to a Table Output", *id) } if output.Properties.AccountName == nil || output.Properties.Table == nil || output.Properties.PartitionKey == nil || output.Properties.RowKey == nil || output.Properties.BatchSize == nil { diff --git a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go index c8d5847b9e53..3524b04283d5 100644 --- a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go +++ b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go @@ -239,17 +239,17 @@ func resourceStreamAnalyticsReferenceInputBlobRead(d *pluginsdk.ResourceData, me if props := model.Properties; props != nil { input, ok := props.(inputs.InputProperties) if !ok { - return fmt.Errorf("blah") + return fmt.Errorf("converting %s to an Input", *id) } dataSource, ok := input.(inputs.ReferenceInputProperties) if !ok { - return fmt.Errorf("blah2") + return fmt.Errorf("converting %s to a Reference Input", *id) } referenceInputBlob, ok := dataSource.Datasource.(inputs.BlobReferenceInputDataSource) if !ok { - return fmt.Errorf("blah3") + return fmt.Errorf("converting %s to a Blob Reference Input", *id) } dateFormat := "" diff --git a/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go b/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go index 34ddd360a46e..036b64746059 100644 --- a/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go +++ b/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go @@ -215,17 +215,17 @@ func resourceStreamAnalyticsReferenceInputMsSqlRead(d *pluginsdk.ResourceData, m if props := model.Properties; props != nil { input, ok := props.(inputs.InputProperties) if !ok { - return fmt.Errorf("failed to convert to Input") + return fmt.Errorf("converting %s to an Input", *id) } reference, ok := 
input.(inputs.ReferenceInputProperties) if !ok { - return fmt.Errorf("failed to convert to Reference Input") + return fmt.Errorf("converting %s to a Reference Input", *id) } referenceInputAzureSql, ok := reference.Datasource.(inputs.AzureSqlReferenceInputDataSource) if !ok { - return fmt.Errorf("failed to convert to Azure Sql Reference Input") + return fmt.Errorf("converting %s to an Azure Sql Reference Input", *id) } server := ""
diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource.go index 55b4fa8e8281..2a783bcf2b60 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_blob_resource.go @@ -194,12 +194,12 @@ func resourceStreamAnalyticsStreamInputBlobRead(d *pluginsdk.ResourceData, meta if props := model.Properties; props != nil { input, ok := props.(inputs.InputProperties) if !ok { - return fmt.Errorf("converting to an Input: %+v", err) + return fmt.Errorf("converting %s to an Input", *id) } streamInput, ok := input.(inputs.StreamInputProperties) if !ok { - return fmt.Errorf("converting to a Stream Input: %+v", err) + return fmt.Errorf("converting %s to a Stream Input", *id) } streamBlobInput, ok := streamInput.Datasource.(inputs.BlobStreamInputDataSource)
diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go index 67799d59750f..045c18fa78f4 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go @@ -200,17 +200,17 @@ func resourceStreamAnalyticsStreamInputEventHubRead(d *pluginsdk.ResourceData, m if props := model.Properties; props != nil { input, ok := props.(inputs.InputProperties) if !ok { - return fmt.Errorf("failed to convert to Input") + return fmt.Errorf("converting %s to an Input", *id) } streamInput, ok := input.(inputs.StreamInputProperties) if !ok { - return fmt.Errorf("failed to convert to Stream Input") + return fmt.Errorf("converting %s to a Stream Input", *id) } streamEventHubInput, ok := streamInput.Datasource.(inputs.EventHubStreamInputDataSource) if !ok { - return fmt.Errorf("failed to convert to an Event Hub Stream Input") + return fmt.Errorf("converting %s to an Event Hub Stream Input", *id) } if streamEventHubInputProps := streamEventHubInput.Properties; streamEventHubInputProps != nil {
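A note for reviewers on the shape these Read fixes share, before the EventHub V2 file below: with the hand-written Track-1 SDK gone, model.Properties is the interface inputs.InputProperties, and the concrete datasource sits behind a second interface on the StreamInputProperties implementation, so every Read performs the same two type assertions. A minimal self-contained sketch of that chain follows; the helper name and the %T-based messages are illustrative, not part of this change:

package streamanalytics

import (
	"fmt"

	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs"
)

// eventHubDatasourceFrom walks the discriminated union decoded by the SDK:
// first down to the stream-input implementation of InputProperties, then to
// the concrete datasource carried on its Datasource field.
func eventHubDatasourceFrom(props inputs.InputProperties) (*inputs.EventHubStreamInputDataSource, error) {
	streamInput, ok := props.(inputs.StreamInputProperties)
	if !ok {
		return nil, fmt.Errorf("expected a stream input, got %T", props)
	}
	ds, ok := streamInput.Datasource.(inputs.EventHubStreamInputDataSource)
	if !ok {
		return nil, fmt.Errorf("expected an Event Hub datasource, got %T", streamInput.Datasource)
	}
	return &ds, nil
}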
diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go index 2c80f8a3ff21..e3e0df955a78 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go @@ -269,17 +269,17 @@ func (r StreamInputEventHubV2Resource) Read() sdk.ResourceFunc { if props := model.Properties; props != nil { input, ok := props.(inputs.InputProperties) if !ok { - return fmt.Errorf("converting to an Input") + return fmt.Errorf("converting %s to an Input", *id) } streamInput, ok := input.(inputs.StreamInputProperties) if !ok { - return fmt.Errorf("converting to a Stream Input") + return fmt.Errorf("converting %s to a Stream Input", *id) } eventHubV2Input, ok := streamInput.Datasource.(inputs.EventHubV2StreamInputDataSource) if !ok { - return fmt.Errorf("converting to an EventHub V2 Stream Input") + return fmt.Errorf("converting %s to an EventHub V2 Stream Input", *id) } if eventHubV2InputProps := eventHubV2Input.Properties; eventHubV2InputProps != nil {
diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource.go index eff54bb78ac3..04a8106e6ec1 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_iothub_resource.go @@ -125,9 +125,7 @@ func resourceStreamAnalyticsStreamInputIoTHubCreateUpdate(d *pluginsdk.ResourceD props := inputs.Input{ Name: utils.String(id.InputName), Properties: &inputs.StreamInputProperties{ - //Type: streamanalytics.TypeBasicInputPropertiesTypeStream, Datasource: &inputs.IoTHubStreamInputDataSource{ - //Type: streamanalytics.TypeBasicStreamInputDataSourceTypeMicrosoftDevicesIotHubs, Properties: &inputs.IoTHubStreamInputDataSourceProperties{ ConsumerGroupName: utils.String(consumerGroupName), SharedAccessPolicyKey: utils.String(sharedAccessPolicyKey), @@ -184,17 +182,17 @@ func resourceStreamAnalyticsStreamInputIoTHubRead(d *pluginsdk.ResourceData, met if props := model.Properties; props != nil { input, ok := props.(inputs.InputProperties) if !ok { - return fmt.Errorf("converting to an Input: %+v", err) + return fmt.Errorf("converting %s to an Input", *id) } streamInput, ok := input.(inputs.StreamInputProperties) if !ok { - return fmt.Errorf("converting to a Stream Input: %+v", err) + return fmt.Errorf("converting %s to a Stream Input", *id) } streamIotHubInput, ok := streamInput.Datasource.(inputs.IoTHubStreamInputDataSource) if !ok { - return fmt.Errorf("converting Stream Input Blob to an Stream Input: %+v", err) + return fmt.Errorf("converting %s to an IoTHub Stream Input", *id) } if streamIotHubInputProps := streamIotHubInput.Properties; streamIotHubInputProps != nil {
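To summarize the importer changes in this patch in one place: each output resource now registers ImporterValidatingResourceIdThen with the shared importStreamAnalyticsOutput helper from stream_analytics_output.go, whose core check (its reflect-based comparison appears in the final patch below) requires the datasource returned by the API to have exactly the type the resource manages. A condensed, illustrative stand-in for that check, with the function name invented for the sketch:

package streamanalytics

import (
	"fmt"
	"reflect"

	"github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/outputs"
)

// checkOutputType mirrors the comparison inside importStreamAnalyticsOutput:
// expected is the zero value registered by the resource, e.g.
// outputs.BlobOutputDataSource{}, and actual is what the API returned, so a
// `terraform import` of, say, a ServiceBus Queue ID into the blob resource fails.
func checkOutputType(actual, expected outputs.OutputDataSource) error {
	if reflect.TypeOf(actual) != reflect.TypeOf(expected) {
		return fmt.Errorf("mismatched output type: expected %T, got %T", expected, actual)
	}
	return nil
}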
From 9e72ad665b47822e783d54c92b2dff61b80b2497 Mon Sep 17 00:00:00 2001 From: Steph Date: Thu, 24 Nov 2022 13:01:15 +0100 Subject: [PATCH 12/14] fix tests --- .../stream_analytics_job_resource.go | 42 ++++++++++--------- ...ream_analytics_output_cosmosdb_resource.go | 2 +- ...ream_analytics_output_eventhub_resource.go | 10 +++++ 3 files changed, 34 insertions(+), 20 deletions(-)
diff --git a/internal/services/streamanalytics/stream_analytics_job_resource.go b/internal/services/streamanalytics/stream_analytics_job_resource.go index 795cc791d11d..836d2e08da0c 100644 --- a/internal/services/streamanalytics/stream_analytics_job_resource.go +++ b/internal/services/streamanalytics/stream_analytics_job_resource.go @@ -253,8 +253,7 @@ func resourceStreamAnalyticsJobCreateUpdate(d *pluginsdk.ResourceD Sku: &streamingjobs.Sku{ Name: utils.ToPtr(streamingjobs.SkuNameStandard), }, - ContentStoragePolicy: utils.ToPtr(streamingjobs.ContentStoragePolicy(contentStoragePolicy)), - //CompatibilityLevel: utils.ToPtr(streamingjobs.CompatibilityLevel(d.Get("compatibility_level").(string))), + ContentStoragePolicy: utils.ToPtr(streamingjobs.ContentStoragePolicy(contentStoragePolicy)), EventsLateArrivalMaxDelayInSeconds: utils.Int64(int64(d.Get("events_late_arrival_max_delay_in_seconds").(int))), EventsOutOfOrderMaxDelayInSeconds: utils.Int64(int64(d.Get("events_out_of_order_max_delay_in_seconds").(int))), EventsOutOfOrderPolicy: utils.ToPtr(streamingjobs.EventsOutOfOrderPolicy(d.Get("events_out_of_order_policy").(string))), @@ -313,27 +312,32 @@ func resourceStreamAnalyticsJobCreateUpdate(d *pluginsdk.ResourceData, meta inte return fmt.Errorf("updating %s: %+v", id, err) } - var getOpts streamingjobs.GetOperationOptions - job, err := client.Get(ctx, id, getOpts) - if err != nil { - return err - } + if d.HasChanges("streaming_units", "transformation_query") { + transformationUpdate := transformations.Transformation{ + Name: utils.String("main"), + Properties: &transformations.TransformationProperties{ + Query: utils.String(d.Get("transformation_query").(string)), + }, + } - if job.Model != nil && job.Model.Properties != nil { - if job.Model.Properties.Transformation != nil && job.Model.Properties.Transformation.Name != nil { - transformationId := transformations.NewTransformationID(subscriptionId, id.ResourceGroupName, id.JobName, *transformation.Name) - transformation, err := transformationsClient.Get(ctx, transformationId) - if err != nil { - return fmt.Errorf("retrieving %s: %+v", transformationId, err) + if jobType == string(streamingjobs.JobTypeEdge) { + if _, ok := d.GetOk("streaming_units"); ok { + return fmt.Errorf("the job type `Edge` doesn't support `streaming_units`") } - - if transformation.Model != nil { - var updateOpts transformations.UpdateOperationOptions - if _, err := transformationsClient.Update(ctx, transformationId, *transformation.Model, updateOpts); err != nil { - return fmt.Errorf("updating transformation for %s: %+v", id, err) - } + } else { + if v, ok := d.GetOk("streaming_units"); ok { + transformationUpdate.Properties.StreamingUnits = utils.Int64(int64(v.(int))) + } else { + return fmt.Errorf("`streaming_units` must be set when `type` is `Cloud`") } } + + transformationId := transformations.NewTransformationID(subscriptionId, id.ResourceGroupName, id.JobName, *transformationUpdate.Name) + + var updateOpts transformations.UpdateOperationOptions + if _, err := transformationsClient.Update(ctx, transformationId, transformationUpdate, updateOpts); err != nil { + return fmt.Errorf("updating transformation for %s: %+v", id, err) + } } }
diff --git a/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go b/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go index cd6e3e284a15..91e6bc01fede 100644 --- a/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_cosmosdb_resource.go @@ -301,7 +301,7 @@ func (r OutputCosmosDBResource) CustomImporter() sdk.ResourceRunFunc { } props := resp.Model.Properties - if _, ok := props.Datasource.(outputs.DocumentDbOutputDataSourceProperties); !ok { + if _, ok := props.Datasource.(outputs.DocumentDbOutputDataSource); !ok { return fmt.Errorf("specified output is not of type") } return nil
diff --git a/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go b/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go index 260ec2073f0e..29bf9c364de1 100644 --- a/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go @@ -232,6 +232,16 @@ func resourceStreamAnalyticsOutputEventHubRead(d *pluginsdk.ResourceData, meta i authMode = string(*v) } d.Set("authentication_mode", authMode) + + var propertyColumns []string + if v := output.Properties.PropertyColumns; v !=
nil { + propertyColumns = *v + } + d.Set("property_columns", propertyColumns) + + if err := d.Set("serialization", flattenStreamAnalyticsOutputSerialization(props.Serialization)); err != nil { + return fmt.Errorf("setting `serialization`: %+v", err) + } } } return nil From f623cda82e07a76e9957fc0f00973c3f5f9091be Mon Sep 17 00:00:00 2001 From: Steph Date: Thu, 24 Nov 2022 15:31:25 +0100 Subject: [PATCH 13/14] fix test --- .../stream_analytics_output_table_resource.go | 35 +++++++++---------- 1 file changed, 16 insertions(+), 19 deletions(-) diff --git a/internal/services/streamanalytics/stream_analytics_output_table_resource.go b/internal/services/streamanalytics/stream_analytics_output_table_resource.go index b51d1d58cb1b..a004e98a0c4d 100644 --- a/internal/services/streamanalytics/stream_analytics_output_table_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_table_resource.go @@ -265,32 +265,29 @@ func (r OutputTableResource) Update() sdk.ResourceFunc { return fmt.Errorf("decoding: %+v", err) } - props := outputs.Output{ + props := &outputs.AzureTableOutputDataSourceProperties{ + AccountName: utils.String(state.StorageAccount), + AccountKey: utils.String(state.StorageAccountKey), + Table: utils.String(state.Table), + PartitionKey: utils.String(state.PartitionKey), + RowKey: utils.String(state.RowKey), + BatchSize: utils.Int64(state.BatchSize), + } + + if metadata.ResourceData.HasChange("columns_to_remove") { + props.ColumnsToRemove = &state.ColumnsToRemove + } + + output := outputs.Output{ Name: utils.String(state.Name), Properties: &outputs.OutputProperties{ Datasource: &outputs.AzureTableOutputDataSource{ - Properties: &outputs.AzureTableOutputDataSourceProperties{ - AccountName: utils.String(state.StorageAccount), - AccountKey: utils.String(state.StorageAccountKey), - Table: utils.String(state.Table), - PartitionKey: utils.String(state.PartitionKey), - RowKey: utils.String(state.RowKey), - BatchSize: utils.Int64(state.BatchSize), - }, + Properties: props, }, }, } - - if metadata.ResourceData.HasChange("columns_to_remove") { - tableOutput, ok := props.Properties.Datasource.(outputs.AzureTableOutputDataSourceProperties) - if !ok { - return fmt.Errorf("converting output data source to a table output: %+v", err) - } - tableOutput.ColumnsToRemove = &state.ColumnsToRemove - } - var opts outputs.UpdateOperationOptions - if _, err = client.Update(ctx, *id, props, opts); err != nil { + if _, err = client.Update(ctx, *id, output, opts); err != nil { return fmt.Errorf("updating %s: %+v", *id, err) } From 3d73ec2f377db7fe095649b7c8502d50246a6a92 Mon Sep 17 00:00:00 2001 From: Steph Date: Mon, 28 Nov 2022 11:34:45 +0100 Subject: [PATCH 14/14] add todo and nil check properties --- .../stream_analytics_output.go | 1 + ...ream_analytics_output_function_resource.go | 72 +++++++------- .../stream_analytics_output_table_resource.go | 96 ++++++++++--------- ...analytics_reference_input_blob_resource.go | 70 +++++++------- ...nalytics_reference_input_mssql_resource.go | 94 +++++++++--------- 5 files changed, 170 insertions(+), 163 deletions(-) diff --git a/internal/services/streamanalytics/stream_analytics_output.go b/internal/services/streamanalytics/stream_analytics_output.go index 9656c632008d..1eaa58fbf5aa 100644 --- a/internal/services/streamanalytics/stream_analytics_output.go +++ b/internal/services/streamanalytics/stream_analytics_output.go @@ -54,6 +54,7 @@ func importStreamAnalyticsOutput(expectType outputs.OutputDataSource) pluginsdk. 
return nil, fmt.Errorf("unable to convert output data source: %+v", props.Datasource) } + // TODO refactor to a switch if reflect.TypeOf(actualType) != reflect.TypeOf(expectType) { return nil, fmt.Errorf("stream analytics output has mismatched type, expected: %q, got %q", expectType, actualType) } diff --git a/internal/services/streamanalytics/stream_analytics_output_function_resource.go b/internal/services/streamanalytics/stream_analytics_output_function_resource.go index c4b4348dfeec..78c38e3cbccd 100644 --- a/internal/services/streamanalytics/stream_analytics_output_function_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_function_resource.go @@ -172,42 +172,44 @@ func (r OutputFunctionResource) Read() sdk.ResourceFunc { return fmt.Errorf("converting %s to a Function Output", *id) } - if output.Properties.FunctionAppName == nil || output.Properties.FunctionName == nil || output.Properties.MaxBatchCount == nil || output.Properties.MaxBatchSize == nil { - return nil + if output.Properties != nil { + if output.Properties.FunctionAppName == nil || output.Properties.FunctionName == nil || output.Properties.MaxBatchCount == nil || output.Properties.MaxBatchSize == nil { + return nil + } + + state := OutputFunctionResourceModel{ + Name: id.OutputName, + ResourceGroup: id.ResourceGroupName, + StreamAnalyticsJob: id.JobName, + ApiKey: metadata.ResourceData.Get("api_key").(string), + } + + functionApp := "" + if v := output.Properties.FunctionAppName; v != nil { + functionApp = *v + } + state.FunctionApp = functionApp + + functionName := "" + if v := output.Properties.FunctionName; v != nil { + functionName = *v + } + state.FunctionName = functionName + + batchMaxInBytes := 0 + if v := output.Properties.MaxBatchSize; v != nil { + batchMaxInBytes = int(*v) + } + state.BatchMaxInBytes = batchMaxInBytes + + batchMaxCount := 0 + if v := output.Properties.MaxBatchCount; v != nil { + batchMaxCount = int(*v) + } + state.BatchMaxCount = batchMaxCount + + return metadata.Encode(&state) } - - state := OutputFunctionResourceModel{ - Name: id.OutputName, - ResourceGroup: id.ResourceGroupName, - StreamAnalyticsJob: id.JobName, - ApiKey: metadata.ResourceData.Get("api_key").(string), - } - - functionApp := "" - if v := output.Properties.FunctionAppName; v != nil { - functionApp = *v - } - state.FunctionApp = functionApp - - functionName := "" - if v := output.Properties.FunctionName; v != nil { - functionName = *v - } - state.FunctionName = functionName - - batchMaxInBytes := 0 - if v := output.Properties.MaxBatchSize; v != nil { - batchMaxInBytes = int(*v) - } - state.BatchMaxInBytes = batchMaxInBytes - - batchMaxCount := 0 - if v := output.Properties.MaxBatchCount; v != nil { - batchMaxCount = int(*v) - } - state.BatchMaxCount = batchMaxCount - - return metadata.Encode(&state) } } return nil diff --git a/internal/services/streamanalytics/stream_analytics_output_table_resource.go b/internal/services/streamanalytics/stream_analytics_output_table_resource.go index a004e98a0c4d..54a8a9443074 100644 --- a/internal/services/streamanalytics/stream_analytics_output_table_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_table_resource.go @@ -195,54 +195,56 @@ func (r OutputTableResource) Read() sdk.ResourceFunc { return fmt.Errorf("converting %s to a Table Output", *id) } - if output.Properties.AccountName == nil || output.Properties.Table == nil || output.Properties.PartitionKey == nil || output.Properties.RowKey == nil || output.Properties.BatchSize == nil { - 
return nil + if output.Properties != nil { + if output.Properties.AccountName == nil || output.Properties.Table == nil || output.Properties.PartitionKey == nil || output.Properties.RowKey == nil || output.Properties.BatchSize == nil { + return nil + } + + state := OutputTableResourceModel{ + Name: id.OutputName, + ResourceGroup: id.ResourceGroupName, + StreamAnalyticsJob: id.JobName, + StorageAccountKey: metadata.ResourceData.Get("storage_account_key").(string), + } + + accountName := "" + if v := output.Properties.AccountName; v != nil { + accountName = *v + } + state.StorageAccount = accountName + + table := "" + if v := output.Properties.Table; v != nil { + table = *v + } + state.Table = table + + partitionKey := "" + if v := output.Properties.PartitionKey; v != nil { + partitionKey = *v + } + state.PartitionKey = partitionKey + + rowKey := "" + if v := output.Properties.RowKey; v != nil { + rowKey = *v + } + state.RowKey = rowKey + + var batchSize int64 + if v := output.Properties.BatchSize; v != nil { + batchSize = *v + } + state.BatchSize = batchSize + + var columnsToRemove []string + if columns := output.Properties.ColumnsToRemove; columns != nil && len(*columns) > 0 { + columnsToRemove = *columns + } + state.ColumnsToRemove = columnsToRemove + + return metadata.Encode(&state) } - - state := OutputTableResourceModel{ - Name: id.OutputName, - ResourceGroup: id.ResourceGroupName, - StreamAnalyticsJob: id.JobName, - StorageAccountKey: metadata.ResourceData.Get("storage_account_key").(string), - } - - accountName := "" - if v := output.Properties.AccountName; v != nil { - accountName = *v - } - state.StorageAccount = accountName - - table := "" - if v := output.Properties.Table; v != nil { - table = *v - } - state.Table = table - - partitonKey := "" - if v := output.Properties.PartitionKey; v != nil { - partitonKey = *v - } - state.PartitionKey = partitonKey - - rowKey := "" - if v := output.Properties.RowKey; v != nil { - rowKey = *v - } - state.RowKey = rowKey - - var batchSize int64 - if v := output.Properties.BatchSize; v != nil { - batchSize = *v - } - state.BatchSize = batchSize - - var columnsToRemove []string - if columns := output.Properties.ColumnsToRemove; columns != nil && len(*columns) > 0 { - columnsToRemove = *columns - } - state.ColumnsToRemove = columnsToRemove - - return metadata.Encode(&state) } } return nil
diff --git a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go index 3524b04283d5..6ed874a3a241 100644 --- a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go +++ b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go @@ -252,45 +252,45 @@ func resourceStreamAnalyticsReferenceInputBlobRead(d *pluginsdk.ResourceData, me return fmt.Errorf("converting %s to a Blob Reference Input", *id) } - dateFormat := "" - if v := referenceInputBlob.Properties.DateFormat; v != nil { - dateFormat = *v + if referenceInputBlob.Properties != nil { + dateFormat := "" + if v := referenceInputBlob.Properties.DateFormat; v != nil { + dateFormat = *v + } + d.Set("date_format", dateFormat) + + pathPattern := "" + if v := referenceInputBlob.Properties.PathPattern; v != nil { + pathPattern = *v + } + d.Set("path_pattern", pathPattern) + + containerName := "" + if v := referenceInputBlob.Properties.Container; v != nil { + containerName = *v + } + d.Set("storage_container_name", containerName) + + timeFormat := "" +
if v := referenceInputBlob.Properties.TimeFormat; v != nil { + timeFormat = *v + } + d.Set("time_format", timeFormat) + + authMode := "" + if v := referenceInputBlob.Properties.AuthenticationMode; v != nil { + authMode = string(*v) + } + d.Set("authentication_mode", authMode) + + if accounts := referenceInputBlob.Properties.StorageAccounts; accounts != nil && len(*accounts) > 0 { + account := (*accounts)[0] + d.Set("storage_account_name", account.AccountName) + } } - d.Set("date_format", dateFormat) - - pathPattern := "" - if v := referenceInputBlob.Properties.PathPattern; v != nil { - pathPattern = *v - } - d.Set("path_pattern", pathPattern) - - containerName := "" - if v := referenceInputBlob.Properties.Container; v != nil { - containerName = *v - } - d.Set("storage_container_name", containerName) - - timeFormat := "" - if v := referenceInputBlob.Properties.TimeFormat; v != nil { - timeFormat = *v - } - d.Set("time_format", timeFormat) - - authMode := "" - if v := referenceInputBlob.Properties.AuthenticationMode; v != nil { - authMode = string(*v) - } - d.Set("authentication_mode", authMode) - - if accounts := referenceInputBlob.Properties.StorageAccounts; accounts != nil && len(*accounts) > 0 { - account := (*accounts)[0] - d.Set("storage_account_name", account.AccountName) - } - if err := d.Set("serialization", flattenStreamAnalyticsStreamInputSerialization(dataSource.Serialization)); err != nil { return fmt.Errorf("setting `serialization`: %+v", err) } - } } diff --git a/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go b/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go index 036b64746059..b75c71f7da86 100644 --- a/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go +++ b/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go @@ -228,53 +228,55 @@ func resourceStreamAnalyticsReferenceInputMsSqlRead(d *pluginsdk.ResourceData, m return fmt.Errorf("converting %s to an Azure Sql Reference Input", *id) } - server := "" - if v := referenceInputAzureSql.Properties.Server; v != nil { - server = *v + if referenceInputAzureSql.Properties != nil { + server := "" + if v := referenceInputAzureSql.Properties.Server; v != nil { + server = *v + } + d.Set("server", server) + + database := "" + if v := referenceInputAzureSql.Properties.Database; v != nil { + database = *v + } + d.Set("database", database) + + username := "" + if v := referenceInputAzureSql.Properties.User; v != nil { + username = *v + } + d.Set("username", username) + + refreshType := "" + if v := referenceInputAzureSql.Properties.RefreshType; v != nil { + refreshType = string(*v) + } + d.Set("refresh_type", refreshType) + + intervalDuration := "" + if v := referenceInputAzureSql.Properties.RefreshRate; v != nil { + intervalDuration = *v + } + d.Set("refresh_interval_duration", intervalDuration) + + fullSnapshotQuery := "" + if v := referenceInputAzureSql.Properties.FullSnapshotQuery; v != nil { + fullSnapshotQuery = *v + } + d.Set("full_snapshot_query", fullSnapshotQuery) + + deltaSnapshotQuery := "" + if v := referenceInputAzureSql.Properties.DeltaSnapshotQuery; v != nil { + deltaSnapshotQuery = *v + } + d.Set("delta_snapshot_query", deltaSnapshotQuery) + + table := "" + if v := referenceInputAzureSql.Properties.Table; v != nil { + table = *v + } + d.Set("table", table) } - d.Set("server", server) - - database := "" - if v := referenceInputAzureSql.Properties.Database; v != nil { - database = *v - } - 
d.Set("database", database) - - username := "" - if v := referenceInputAzureSql.Properties.User; v != nil { - username = *v - } - d.Set("username", username) - - refreshType := "" - if v := referenceInputAzureSql.Properties.RefreshType; v != nil { - refreshType = string(*v) - } - d.Set("refresh_type", refreshType) - - intervalDuration := "" - if v := referenceInputAzureSql.Properties.RefreshRate; v != nil { - intervalDuration = *v - } - d.Set("refresh_interval_duration", intervalDuration) - - fullSnapshotQuery := "" - if v := referenceInputAzureSql.Properties.FullSnapshotQuery; v != nil { - fullSnapshotQuery = *v - } - d.Set("full_snapshot_query", fullSnapshotQuery) - - deltaSnapshotQuery := "" - if v := referenceInputAzureSql.Properties.DeltaSnapshotQuery; v != nil { - deltaSnapshotQuery = *v - } - d.Set("delta_snapshot_query", deltaSnapshotQuery) - - table := "" - if v := referenceInputAzureSql.Properties.Table; v != nil { - table = *v - } - d.Set("table", table) } }