diff --git a/azurerm/provider.go b/azurerm/provider.go
index 526627c17cfc..62e187e7a68c 100644
--- a/azurerm/provider.go
+++ b/azurerm/provider.go
@@ -474,6 +474,7 @@ func Provider() terraform.ResourceProvider {
 			"azurerm_stream_analytics_output_eventhub":          resourceArmStreamAnalyticsOutputEventHub(),
 			"azurerm_stream_analytics_output_servicebus_queue":  resourceArmStreamAnalyticsOutputServiceBusQueue(),
 			"azurerm_stream_analytics_output_servicebus_topic":  resourceArmStreamAnalyticsOutputServiceBusTopic(),
+			"azurerm_stream_analytics_reference_input_blob":     resourceArmStreamAnalyticsReferenceInputBlob(),
 			"azurerm_stream_analytics_stream_input_blob":        resourceArmStreamAnalyticsStreamInputBlob(),
 			"azurerm_stream_analytics_stream_input_eventhub":    resourceArmStreamAnalyticsStreamInputEventHub(),
 			"azurerm_stream_analytics_stream_input_iothub":      resourceArmStreamAnalyticsStreamInputIoTHub(),
diff --git a/azurerm/resource_arm_stream_analytics_reference_input_blob.go b/azurerm/resource_arm_stream_analytics_reference_input_blob.go
new file mode 100644
index 000000000000..db3bb3fcf0f5
--- /dev/null
+++ b/azurerm/resource_arm_stream_analytics_reference_input_blob.go
@@ -0,0 +1,281 @@
+package azurerm
+
+import (
+	"context"
+	"fmt"
+	"log"
+	"time"
+
+	"github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics"
+	"github.com/hashicorp/go-azure-helpers/response"
+	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/features"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
+)
+
+func resourceArmStreamAnalyticsReferenceInputBlob() *schema.Resource {
+	return &schema.Resource{
+		Create: resourceArmStreamAnalyticsReferenceInputBlobCreate,
+		Read:   resourceArmStreamAnalyticsReferenceInputBlobRead,
+		Update: resourceArmStreamAnalyticsReferenceInputBlobUpdate,
+		Delete: resourceArmStreamAnalyticsReferenceInputBlobDelete,
+		Importer: &schema.ResourceImporter{
+			State: schema.ImportStatePassthrough,
+		},
+
+		Timeouts: &schema.ResourceTimeout{
+			Create: schema.DefaultTimeout(30 * time.Minute),
+			Read:   schema.DefaultTimeout(5 * time.Minute),
+			Update: schema.DefaultTimeout(30 * time.Minute),
+			Delete: schema.DefaultTimeout(30 * time.Minute),
+		},
+
+		Schema: map[string]*schema.Schema{
+			"name": {
+				Type:         schema.TypeString,
+				Required:     true,
+				ForceNew:     true,
+				ValidateFunc: validate.NoEmptyStrings,
+			},
+
+			"stream_analytics_job_name": {
+				Type:         schema.TypeString,
+				Required:     true,
+				ForceNew:     true,
+				ValidateFunc: validate.NoEmptyStrings,
+			},
+
+			"resource_group_name": azure.SchemaResourceGroupName(),
+
+			"date_format": {
+				Type:         schema.TypeString,
+				Required:     true,
+				ValidateFunc: validate.NoEmptyStrings,
+			},
+
+			"path_pattern": {
+				Type:         schema.TypeString,
+				Required:     true,
+				ValidateFunc: validate.NoEmptyStrings,
+			},
+
+			"storage_account_key": {
+				Type:         schema.TypeString,
+				Required:     true,
+				Sensitive:    true,
+				ValidateFunc: validate.NoEmptyStrings,
+			},
+
+			"storage_account_name": {
+				Type:         schema.TypeString,
+				Required:     true,
+				ValidateFunc: validate.NoEmptyStrings,
+			},
+
+			"storage_container_name": {
+				Type:         schema.TypeString,
+				Required:     true,
+				ValidateFunc: validate.NoEmptyStrings,
+			},
+
+			"time_format": {
+				Type:         schema.TypeString,
+				Required:     true,
+				ValidateFunc: validate.NoEmptyStrings,
+			},
+
+			"serialization": azure.SchemaStreamAnalyticsStreamInputSerialization(),
+		},
+	}
+}
+
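+// getBlobReferenceInputProps builds the streamanalytics.Input payload from the
+// Terraform configuration. It is shared by Create and Update so the two code
+// paths cannot drift apart.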
+func getBlobReferenceInputProps(ctx context.Context, d *schema.ResourceData) (streamanalytics.Input, error) {
+	name := d.Get("name").(string)
+	containerName := d.Get("storage_container_name").(string)
+	dateFormat := d.Get("date_format").(string)
+	pathPattern := d.Get("path_pattern").(string)
+	storageAccountKey := d.Get("storage_account_key").(string)
+	storageAccountName := d.Get("storage_account_name").(string)
+	timeFormat := d.Get("time_format").(string)
+
+	serializationRaw := d.Get("serialization").([]interface{})
+	serialization, err := azure.ExpandStreamAnalyticsStreamInputSerialization(serializationRaw)
+	if err != nil {
+		return streamanalytics.Input{}, fmt.Errorf("Error expanding `serialization`: %+v", err)
+	}
+
+	props := streamanalytics.Input{
+		Name: utils.String(name),
+		Properties: &streamanalytics.ReferenceInputProperties{
+			Type: streamanalytics.TypeReference,
+			Datasource: &streamanalytics.BlobReferenceInputDataSource{
+				Type: streamanalytics.TypeBasicReferenceInputDataSourceTypeMicrosoftStorageBlob,
+				BlobReferenceInputDataSourceProperties: &streamanalytics.BlobReferenceInputDataSourceProperties{
+					Container:   utils.String(containerName),
+					DateFormat:  utils.String(dateFormat),
+					PathPattern: utils.String(pathPattern),
+					TimeFormat:  utils.String(timeFormat),
+					StorageAccounts: &[]streamanalytics.StorageAccount{
+						{
+							AccountName: utils.String(storageAccountName),
+							AccountKey:  utils.String(storageAccountKey),
+						},
+					},
+				},
+			},
+			Serialization: serialization,
+		},
+	}
+
+	return props, nil
+}
+
+func resourceArmStreamAnalyticsReferenceInputBlobCreate(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*ArmClient).StreamAnalytics.InputsClient
+	ctx, cancel := timeouts.ForCreateUpdate(meta.(*ArmClient).StopContext, d)
+	defer cancel()
+
+	log.Printf("[INFO] preparing arguments for Azure Stream Analytics Reference Input Blob creation.")
+	name := d.Get("name").(string)
+	jobName := d.Get("stream_analytics_job_name").(string)
+	resourceGroup := d.Get("resource_group_name").(string)
+
+	if features.ShouldResourcesBeImported() && d.IsNewResource() {
+		existing, err := client.Get(ctx, resourceGroup, jobName, name)
+		if err != nil {
+			if !utils.ResponseWasNotFound(existing.Response) {
+				return fmt.Errorf("Error checking for presence of existing Stream Analytics Reference Input %q (Job %q / Resource Group %q): %s", name, jobName, resourceGroup, err)
+			}
+		}
+
+		if existing.ID != nil && *existing.ID != "" {
+			return tf.ImportAsExistsError("azurerm_stream_analytics_reference_input_blob", *existing.ID)
+		}
+	}
+
+	props, err := getBlobReferenceInputProps(ctx, d)
+	if err != nil {
+		return fmt.Errorf("Error creating the input props for resource creation: %v", err)
+	}
+
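+	// The last two arguments to CreateOrReplace are the If-Match and
+	// If-None-Match etags; passing empty strings makes the call create or
+	// overwrite the input unconditionally.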
+	if _, err := client.CreateOrReplace(ctx, props, resourceGroup, jobName, name, "", ""); err != nil {
+		return fmt.Errorf("Error Creating Stream Analytics Reference Input Blob %q (Job %q / Resource Group %q): %+v", name, jobName, resourceGroup, err)
+	}
+
+	read, err := client.Get(ctx, resourceGroup, jobName, name)
+	if err != nil {
+		return fmt.Errorf("Error retrieving Stream Analytics Reference Input Blob %q (Job %q / Resource Group %q): %+v", name, jobName, resourceGroup, err)
+	}
+	if read.ID == nil {
+		return fmt.Errorf("Cannot read ID of Stream Analytics Reference Input Blob %q (Job %q / Resource Group %q)", name, jobName, resourceGroup)
+	}
+
+	d.SetId(*read.ID)
+
+	return resourceArmStreamAnalyticsReferenceInputBlobRead(d, meta)
+}
+
+func resourceArmStreamAnalyticsReferenceInputBlobUpdate(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*ArmClient).StreamAnalytics.InputsClient
+	ctx, cancel := timeouts.ForCreateUpdate(meta.(*ArmClient).StopContext, d)
+	defer cancel()
+
+	log.Printf("[INFO] preparing arguments for Azure Stream Analytics Reference Input Blob update.")
+	name := d.Get("name").(string)
+	jobName := d.Get("stream_analytics_job_name").(string)
+	resourceGroup := d.Get("resource_group_name").(string)
+
+	props, err := getBlobReferenceInputProps(ctx, d)
+	if err != nil {
+		return fmt.Errorf("Error creating the input props for resource update: %v", err)
+	}
+
+	if _, err := client.Update(ctx, props, resourceGroup, jobName, name, ""); err != nil {
+		return fmt.Errorf("Error Updating Stream Analytics Reference Input Blob %q (Job %q / Resource Group %q): %+v", name, jobName, resourceGroup, err)
+	}
+
+	return resourceArmStreamAnalyticsReferenceInputBlobRead(d, meta)
+}
+
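+// The resource ID has the form
+// .../providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName},
+// so Read and Delete recover the job and input names from its path segments.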
+func resourceArmStreamAnalyticsReferenceInputBlobRead(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*ArmClient).StreamAnalytics.InputsClient
+	ctx, cancel := timeouts.ForRead(meta.(*ArmClient).StopContext, d)
+	defer cancel()
+
+	id, err := azure.ParseAzureResourceID(d.Id())
+	if err != nil {
+		return err
+	}
+	resourceGroup := id.ResourceGroup
+	jobName := id.Path["streamingjobs"]
+	name := id.Path["inputs"]
+
+	resp, err := client.Get(ctx, resourceGroup, jobName, name)
+	if err != nil {
+		if utils.ResponseWasNotFound(resp.Response) {
+			log.Printf("[DEBUG] Reference Input Blob %q was not found in Stream Analytics Job %q / Resource Group %q - removing from state!", name, jobName, resourceGroup)
+			d.SetId("")
+			return nil
+		}
+
+		return fmt.Errorf("Error retrieving Reference Input Blob %q (Stream Analytics Job %q / Resource Group %q): %+v", name, jobName, resourceGroup, err)
+	}
+
+	d.Set("name", name)
+	d.Set("resource_group_name", resourceGroup)
+	d.Set("stream_analytics_job_name", jobName)
+
+	if props := resp.Properties; props != nil {
+		v, ok := props.AsReferenceInputProperties()
+		if !ok {
+			return fmt.Errorf("Error converting Input %q to Reference Input Properties", name)
+		}
+
+		blobInputDataSource, ok := v.Datasource.AsBlobReferenceInputDataSource()
+		if !ok {
+			return fmt.Errorf("Error converting Input %q to a Blob Reference Input", name)
+		}
+
+		d.Set("date_format", blobInputDataSource.DateFormat)
+		d.Set("path_pattern", blobInputDataSource.PathPattern)
+		d.Set("storage_container_name", blobInputDataSource.Container)
+		d.Set("time_format", blobInputDataSource.TimeFormat)
+
+		if accounts := blobInputDataSource.StorageAccounts; accounts != nil && len(*accounts) > 0 {
+			account := (*accounts)[0]
+			d.Set("storage_account_name", account.AccountName)
+		}
+
+		if err := d.Set("serialization", azure.FlattenStreamAnalyticsStreamInputSerialization(v.Serialization)); err != nil {
+			return fmt.Errorf("Error setting `serialization`: %+v", err)
+		}
+	}
+
+	return nil
+}
+
+func resourceArmStreamAnalyticsReferenceInputBlobDelete(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*ArmClient).StreamAnalytics.InputsClient
+	ctx, cancel := timeouts.ForDelete(meta.(*ArmClient).StopContext, d)
+	defer cancel()
+
+	id, err := azure.ParseAzureResourceID(d.Id())
+	if err != nil {
+		return err
+	}
+	resourceGroup := id.ResourceGroup
+	jobName := id.Path["streamingjobs"]
+	name := id.Path["inputs"]
+
+	if resp, err := client.Delete(ctx, resourceGroup, jobName, name); err != nil {
+		if !response.WasNotFound(resp.Response) {
+			return fmt.Errorf("Error deleting Reference Input Blob %q (Stream Analytics Job %q / Resource Group %q): %+v", name, jobName, resourceGroup, err)
+		}
+	}
+
+	return nil
+}
diff --git a/azurerm/resource_arm_stream_analytics_reference_input_blob_test.go b/azurerm/resource_arm_stream_analytics_reference_input_blob_test.go
new file mode 100644
index 000000000000..8d3df25f0a13
--- /dev/null
+++ b/azurerm/resource_arm_stream_analytics_reference_input_blob_test.go
@@ -0,0 +1,395 @@
+package azurerm
+
+import (
+	"fmt"
+	"net/http"
+	"testing"
+
+	"github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
+	"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+	"github.com/hashicorp/terraform-plugin-sdk/terraform"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/features"
+)
+
+func TestAccAzureRMStreamAnalyticsReferenceInputBlob_avro(t *testing.T) {
+	resourceName := "azurerm_stream_analytics_reference_input_blob.test"
+	ri := tf.AccRandTimeInt()
+	rs := acctest.RandString(4)
+	location := testLocation()
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:     func() { testAccPreCheck(t) },
+		Providers:    testAccProviders,
+		CheckDestroy: testCheckAzureRMStreamAnalyticsReferenceInputBlobDestroy,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccAzureRMStreamAnalyticsReferenceInputBlob_avro(ri, rs, location),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckAzureRMStreamAnalyticsReferenceInputBlobExists(resourceName),
+				),
+			},
+			{
+				ResourceName:      resourceName,
+				ImportState:       true,
+				ImportStateVerify: true,
+				ImportStateVerifyIgnore: []string{
+					// not returned from the API
+					"storage_account_key",
+				},
+			},
+		},
+	})
+}
+
+func TestAccAzureRMStreamAnalyticsReferenceInputBlob_csv(t *testing.T) {
+	resourceName := "azurerm_stream_analytics_reference_input_blob.test"
+	ri := tf.AccRandTimeInt()
+	rs := acctest.RandString(4)
+	location := testLocation()
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:     func() { testAccPreCheck(t) },
+		Providers:    testAccProviders,
+		CheckDestroy: testCheckAzureRMStreamAnalyticsReferenceInputBlobDestroy,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccAzureRMStreamAnalyticsReferenceInputBlob_csv(ri, rs, location),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckAzureRMStreamAnalyticsReferenceInputBlobExists(resourceName),
+				),
+			},
+			{
+				ResourceName:      resourceName,
+				ImportState:       true,
+				ImportStateVerify: true,
+				ImportStateVerifyIgnore: []string{
+					// not returned from the API
+					"storage_account_key",
+				},
+			},
+		},
+	})
+}
+
+func TestAccAzureRMStreamAnalyticsReferenceInputBlob_json(t *testing.T) {
+	resourceName := "azurerm_stream_analytics_reference_input_blob.test"
+	ri := tf.AccRandTimeInt()
+	rs := acctest.RandString(4)
+	location := testLocation()
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:     func() { testAccPreCheck(t) },
+		Providers:    testAccProviders,
+		CheckDestroy: testCheckAzureRMStreamAnalyticsReferenceInputBlobDestroy,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccAzureRMStreamAnalyticsReferenceInputBlob_json(ri, rs, location),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckAzureRMStreamAnalyticsReferenceInputBlobExists(resourceName),
+				),
+			},
+			{
+				ResourceName:      resourceName,
+				ImportState:       true,
+				ImportStateVerify: true,
+				ImportStateVerifyIgnore: []string{
+					// not returned from the API
+					"storage_account_key",
+				},
+			},
+		},
+	})
+}
+
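+// The update test below changes every mutable field in one step (storage
+// account, container, path pattern, date format and serialization) to
+// exercise the Update code path end-to-end.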
+func TestAccAzureRMStreamAnalyticsReferenceInputBlob_update(t *testing.T) {
+	resourceName := "azurerm_stream_analytics_reference_input_blob.test"
+	ri := tf.AccRandTimeInt()
+	rs := acctest.RandString(4)
+	location := testLocation()
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:     func() { testAccPreCheck(t) },
+		Providers:    testAccProviders,
+		CheckDestroy: testCheckAzureRMStreamAnalyticsReferenceInputBlobDestroy,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccAzureRMStreamAnalyticsReferenceInputBlob_json(ri, rs, location),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckAzureRMStreamAnalyticsReferenceInputBlobExists(resourceName),
+				),
+			},
+			{
+				Config: testAccAzureRMStreamAnalyticsReferenceInputBlob_updated(ri, rs, location),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckAzureRMStreamAnalyticsReferenceInputBlobExists(resourceName),
+				),
+			},
+			{
+				ResourceName:      resourceName,
+				ImportState:       true,
+				ImportStateVerify: true,
+				ImportStateVerifyIgnore: []string{
+					// not returned from the API
+					"storage_account_key",
+				},
+			},
+		},
+	})
+}
+
+func TestAccAzureRMStreamAnalyticsReferenceInputBlob_requiresImport(t *testing.T) {
+	if !features.ShouldResourcesBeImported() {
+		t.Skip("Skipping since resources aren't required to be imported")
+		return
+	}
+
+	resourceName := "azurerm_stream_analytics_reference_input_blob.test"
+	ri := tf.AccRandTimeInt()
+	rs := acctest.RandString(4)
+	location := testLocation()
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:     func() { testAccPreCheck(t) },
+		Providers:    testAccProviders,
+		CheckDestroy: testCheckAzureRMStreamAnalyticsReferenceInputBlobDestroy,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccAzureRMStreamAnalyticsReferenceInputBlob_json(ri, rs, location),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckAzureRMStreamAnalyticsReferenceInputBlobExists(resourceName),
+				),
+			},
+			{
+				Config:      testAccAzureRMStreamAnalyticsReferenceInputBlob_requiresImport(ri, rs, location),
+				ExpectError: testRequiresImportError("azurerm_stream_analytics_reference_input_blob"),
+			},
+		},
+	})
+}
+
+func testCheckAzureRMStreamAnalyticsReferenceInputBlobExists(resourceName string) resource.TestCheckFunc {
+	return func(s *terraform.State) error {
+		// Ensure we have enough information in state to look up in API
+		rs, ok := s.RootModule().Resources[resourceName]
+		if !ok {
+			return fmt.Errorf("Not found: %s", resourceName)
+		}
+
+		name := rs.Primary.Attributes["name"]
+		jobName := rs.Primary.Attributes["stream_analytics_job_name"]
+		resourceGroup := rs.Primary.Attributes["resource_group_name"]
+
+		conn := testAccProvider.Meta().(*ArmClient).StreamAnalytics.InputsClient
+		ctx := testAccProvider.Meta().(*ArmClient).StopContext
+		resp, err := conn.Get(ctx, resourceGroup, jobName, name)
+		if err != nil {
+			return fmt.Errorf("Bad: Get on streamAnalyticsInputsClient: %+v", err)
+		}
+
+		if resp.StatusCode == http.StatusNotFound {
+			return fmt.Errorf("Bad: Reference Input Blob %q (Stream Analytics Job %q / Resource Group %q) does not exist", name, jobName, resourceGroup)
+		}
+
+		return nil
+	}
+}
+
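+// testCheckAzureRMStreamAnalyticsReferenceInputBlobDestroy treats an error from
+// Get as confirmation of deletion, since the SDK surfaces the 404 for a removed
+// input as an error; any successful, non-404 response means the input survived.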
+func testCheckAzureRMStreamAnalyticsReferenceInputBlobDestroy(s *terraform.State) error {
+	conn := testAccProvider.Meta().(*ArmClient).StreamAnalytics.InputsClient
+
+	for _, rs := range s.RootModule().Resources {
+		if rs.Type != "azurerm_stream_analytics_reference_input_blob" {
+			continue
+		}
+
+		name := rs.Primary.Attributes["name"]
+		jobName := rs.Primary.Attributes["stream_analytics_job_name"]
+		resourceGroup := rs.Primary.Attributes["resource_group_name"]
+		ctx := testAccProvider.Meta().(*ArmClient).StopContext
+		resp, err := conn.Get(ctx, resourceGroup, jobName, name)
+		if err != nil {
+			return nil
+		}
+
+		if resp.StatusCode != http.StatusNotFound {
+			return fmt.Errorf("Stream Analytics Reference Input Blob still exists:\n%#v", resp.Properties)
+		}
+	}
+
+	return nil
+}
+
+func testAccAzureRMStreamAnalyticsReferenceInputBlob_avro(rInt int, rString string, location string) string {
+	template := testAccAzureRMStreamAnalyticsReferenceInputBlob_template(rInt, rString, location)
+	return fmt.Sprintf(`
+%s
+
+resource "azurerm_stream_analytics_reference_input_blob" "test" {
+  name                      = "acctestinput-%d"
+  stream_analytics_job_name = "${azurerm_stream_analytics_job.test.name}"
+  resource_group_name       = "${azurerm_stream_analytics_job.test.resource_group_name}"
+  storage_account_name      = "${azurerm_storage_account.test.name}"
+  storage_account_key       = "${azurerm_storage_account.test.primary_access_key}"
+  storage_container_name    = "${azurerm_storage_container.test.name}"
+  path_pattern              = "some-random-pattern"
+  date_format               = "yyyy/MM/dd"
+  time_format               = "HH"
+
+  serialization {
+    type = "Avro"
+  }
+}
+`, template, rInt)
+}
+
+func testAccAzureRMStreamAnalyticsReferenceInputBlob_csv(rInt int, rString string, location string) string {
+	template := testAccAzureRMStreamAnalyticsReferenceInputBlob_template(rInt, rString, location)
+	return fmt.Sprintf(`
+%s
+
+resource "azurerm_stream_analytics_reference_input_blob" "test" {
+  name                      = "acctestinput-%d"
+  stream_analytics_job_name = "${azurerm_stream_analytics_job.test.name}"
+  resource_group_name       = "${azurerm_stream_analytics_job.test.resource_group_name}"
+  storage_account_name      = "${azurerm_storage_account.test.name}"
+  storage_account_key       = "${azurerm_storage_account.test.primary_access_key}"
+  storage_container_name    = "${azurerm_storage_container.test.name}"
+  path_pattern              = "some-random-pattern"
+  date_format               = "yyyy/MM/dd"
+  time_format               = "HH"
+
+  serialization {
+    type            = "Csv"
+    encoding        = "UTF8"
+    field_delimiter = ","
+  }
+}
+`, template, rInt)
+}
+
+func testAccAzureRMStreamAnalyticsReferenceInputBlob_json(rInt int, rString string, location string) string {
+	template := testAccAzureRMStreamAnalyticsReferenceInputBlob_template(rInt, rString, location)
+	return fmt.Sprintf(`
+%s
+
+resource "azurerm_stream_analytics_reference_input_blob" "test" {
+  name                      = "acctestinput-%d"
+  stream_analytics_job_name = "${azurerm_stream_analytics_job.test.name}"
+  resource_group_name       = "${azurerm_stream_analytics_job.test.resource_group_name}"
+  storage_account_name      = "${azurerm_storage_account.test.name}"
+  storage_account_key       = "${azurerm_storage_account.test.primary_access_key}"
+  storage_container_name    = "${azurerm_storage_container.test.name}"
+  path_pattern              = "some-random-pattern"
+  date_format               = "yyyy/MM/dd"
+  time_format               = "HH"
+
+  serialization {
+    type     = "Json"
+    encoding = "UTF8"
+  }
+}
+`, template, rInt)
+}
+
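+// _updated provisions a second storage account and container and points the
+// input at them, switching the serialization from Json to Avro at the same time.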
+func testAccAzureRMStreamAnalyticsReferenceInputBlob_updated(rInt int, rString string, location string) string {
+	template := testAccAzureRMStreamAnalyticsReferenceInputBlob_template(rInt, rString, location)
+	return fmt.Sprintf(`
+%s
+
+resource "azurerm_storage_account" "updated" {
+  name                     = "acctestsa2%s"
+  resource_group_name      = "${azurerm_resource_group.test.name}"
+  location                 = "${azurerm_resource_group.test.location}"
+  account_tier             = "Standard"
+  account_replication_type = "LRS"
+}
+
+resource "azurerm_storage_container" "updated" {
+  name                  = "example2"
+  resource_group_name   = "${azurerm_resource_group.test.name}"
+  storage_account_name  = "${azurerm_storage_account.updated.name}"
+  container_access_type = "private"
+}
+
+resource "azurerm_stream_analytics_reference_input_blob" "test" {
+  name                      = "acctestinput-%d"
+  stream_analytics_job_name = "${azurerm_stream_analytics_job.test.name}"
+  resource_group_name       = "${azurerm_stream_analytics_job.test.resource_group_name}"
+  storage_account_name      = "${azurerm_storage_account.updated.name}"
+  storage_account_key       = "${azurerm_storage_account.updated.primary_access_key}"
+  storage_container_name    = "${azurerm_storage_container.updated.name}"
+  path_pattern              = "some-other-pattern"
+  date_format               = "yyyy-MM-dd"
+  time_format               = "HH"
+
+  serialization {
+    type = "Avro"
+  }
+}
+`, template, rString, rInt)
+}
+
+func testAccAzureRMStreamAnalyticsReferenceInputBlob_requiresImport(rInt int, rString string, location string) string {
+	template := testAccAzureRMStreamAnalyticsReferenceInputBlob_json(rInt, rString, location)
+	return fmt.Sprintf(`
+%s
+
+resource "azurerm_stream_analytics_reference_input_blob" "import" {
+  name                      = "${azurerm_stream_analytics_reference_input_blob.test.name}"
+  stream_analytics_job_name = "${azurerm_stream_analytics_reference_input_blob.test.stream_analytics_job_name}"
+  resource_group_name       = "${azurerm_stream_analytics_reference_input_blob.test.resource_group_name}"
+  storage_account_name      = "${azurerm_stream_analytics_reference_input_blob.test.storage_account_name}"
+  storage_account_key       = "${azurerm_stream_analytics_reference_input_blob.test.storage_account_key}"
+  storage_container_name    = "${azurerm_stream_analytics_reference_input_blob.test.storage_container_name}"
+  path_pattern              = "${azurerm_stream_analytics_reference_input_blob.test.path_pattern}"
+  date_format               = "${azurerm_stream_analytics_reference_input_blob.test.date_format}"
+  time_format               = "${azurerm_stream_analytics_reference_input_blob.test.time_format}"
+  serialization             = "${azurerm_stream_analytics_reference_input_blob.test.serialization}"
+}
+`, template)
+}
+
+func testAccAzureRMStreamAnalyticsReferenceInputBlob_template(rInt int, rString string, location string) string {
+	return fmt.Sprintf(`
+resource "azurerm_resource_group" "test" {
+  name     = "acctestRG-%d"
+  location = "%s"
+}
+
+resource "azurerm_storage_account" "test" {
+  name                     = "acctestsa%s"
+  resource_group_name      = "${azurerm_resource_group.test.name}"
+  location                 = "${azurerm_resource_group.test.location}"
+  account_tier             = "Standard"
+  account_replication_type = "LRS"
+}
+
+resource "azurerm_storage_container" "test" {
+  name                  = "example"
+  resource_group_name   = "${azurerm_resource_group.test.name}"
+  storage_account_name  = "${azurerm_storage_account.test.name}"
+  container_access_type = "private"
+}
+
+resource "azurerm_stream_analytics_job" "test" {
+  name                                     = "acctestjob-%d"
+  resource_group_name                      = "${azurerm_resource_group.test.name}"
+  location                                 = "${azurerm_resource_group.test.location}"
+  compatibility_level                      = "1.0"
+  data_locale                              = "en-GB"
+  events_late_arrival_max_delay_in_seconds = 60
+  events_out_of_order_max_delay_in_seconds = 50
+  events_out_of_order_policy               = "Adjust"
+  output_error_policy                      = "Drop"
+  streaming_units                          = 3
+
+  transformation_query = <<QUERY
+    SELECT *
+    INTO [YourOutputAlias]
+    FROM [YourInputAlias]
+QUERY
+}
+`, rInt, location, rString, rInt)
+}
diff --git a/website/docs/r/stream_analytics_reference_input_blob.html.markdown b/website/docs/r/stream_analytics_reference_input_blob.html.markdown
new file mode 100644
--- /dev/null
+++ b/website/docs/r/stream_analytics_reference_input_blob.html.markdown
+---
+layout: "azurerm"
+page_title: "Azure Resource Manager: azurerm_stream_analytics_reference_input_blob"
+sidebar_current: "docs-azurerm-resource-stream-analytics-reference-input-blob"
+description: |-
+  Manages a Stream Analytics Reference Input Blob.
+---
+
+# azurerm_stream_analytics_reference_input_blob
+
+Manages a Stream Analytics Reference Input Blob. Reference data (also known as a lookup table) is a finite data set that is static or slowly changing in nature, used to perform a lookup or to correlate with your data stream.
+
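+## Example Usage
+
+The following example is adapted from this resource's acceptance tests and assumes the referenced Stream Analytics Job, Storage Account and Container already exist:
+
+```hcl
+resource "azurerm_stream_analytics_reference_input_blob" "example" {
+  name                      = "blob-reference-input"
+  stream_analytics_job_name = "${azurerm_stream_analytics_job.example.name}"
+  resource_group_name       = "${azurerm_stream_analytics_job.example.resource_group_name}"
+  storage_account_name      = "${azurerm_storage_account.example.name}"
+  storage_account_key       = "${azurerm_storage_account.example.primary_access_key}"
+  storage_container_name    = "${azurerm_storage_container.example.name}"
+  path_pattern              = "some-random-pattern"
+  date_format               = "yyyy/MM/dd"
+  time_format               = "HH"
+
+  serialization {
+    type     = "Json"
+    encoding = "UTF8"
+  }
+}
+```
+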
+## Argument Reference
+
+The following arguments are supported:
+
+* `name` - (Required) The name of the Reference Input Blob. Changing this forces a new resource to be created.
+
+* `stream_analytics_job_name` - (Required) The name of the Stream Analytics Job. Changing this forces a new resource to be created.
+
+* `resource_group_name` - (Required) The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
+
+* `date_format` - (Required) The date format. Wherever `{date}` appears in `path_pattern`, the value of this property is used as the date format instead.
+
+* `path_pattern` - (Required) The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input to the job.
+
+* `storage_account_key` - (Required) The Access Key which should be used to connect to this Storage Account.
+
+* `storage_account_name` - (Required) The name of the Storage Account which holds the reference data.
+
+* `storage_container_name` - (Required) The name of the Container within the Storage Account.
+
+* `time_format` - (Required) The time format. Wherever `{time}` appears in `path_pattern`, the value of this property is used as the time format instead.
+
+* `serialization` - (Required) A `serialization` block as defined below.
+
+---
+
+A `serialization` block supports the following:
+
+* `type` - (Required) The serialization format used for the reference data. Possible values are `Avro`, `Csv` and `Json`.
+
+* `encoding` - (Optional) The encoding of the incoming data. It currently can only be set to `UTF8`.
+
+-> **NOTE:** This is required when `type` is set to `Csv` or `Json`.
+
+* `field_delimiter` - (Optional) The delimiter that will be used to separate comma-separated value (CSV) records. Possible values are ` ` (space), `,` (comma), `\t` (tab), `|` (pipe) and `;` (semicolon).
+
+-> **NOTE:** This is required when `type` is set to `Csv`.
+
+## Attributes Reference
+
+The following attributes are exported in addition to the arguments listed above:
+
+* `id` - The ID of the Stream Analytics Reference Input Blob.
+
+## Import
+
+Stream Analytics Reference Input Blobs can be imported using the `resource id`, e.g.
+
+```shell
+terraform import azurerm_stream_analytics_reference_input_blob.example /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/group1/providers/Microsoft.StreamAnalytics/streamingjobs/job1/inputs/input1
+```
diff --git a/website/docs/r/stream_analytics_stream_input_blob.html.markdown b/website/docs/r/stream_analytics_stream_input_blob.html.markdown
index 20e2c43a8821..825f7e248af8 100644
--- a/website/docs/r/stream_analytics_stream_input_blob.html.markdown
+++ b/website/docs/r/stream_analytics_stream_input_blob.html.markdown
@@ -39,15 +39,15 @@ resource "azurerm_storage_container" "example" {
 }
 
 resource "azurerm_stream_analytics_stream_input_blob" "example" {
-  name = "eventhub-stream-input"
-  stream_analytics_job_name = "${data.azurerm_stream_analytics_job.example.name}"
-  resource_group_name = "${data.azurerm_stream_analytics_job.example.resource_group_name}"
-  storage_account_name = "${azurerm_storage_account.example.name}"
-  storage_account_key = "${azurerm_storage_account.example.primary_access_key}"
-  storage_container_name = "${azurerm_storage_container.example.name}"
-  path_pattern = "some-random-pattern"
-  date_format = "yyyy/MM/dd"
-  time_format = "HH"
+  name                      = "blob-stream-input"
+  stream_analytics_job_name = "${data.azurerm_stream_analytics_job.example.name}"
+  resource_group_name       = "${data.azurerm_stream_analytics_job.example.resource_group_name}"
+  storage_account_name      = "${azurerm_storage_account.example.name}"
+  storage_account_key       = "${azurerm_storage_account.example.primary_access_key}"
+  storage_container_name    = "${azurerm_storage_container.example.name}"
+  path_pattern              = "some-random-pattern"
+  date_format               = "yyyy/MM/dd"
+  time_format               = "HH"
 
   serialization {
     type = "Json"