From 5c7dd38d98e01108eb55172bd2396a7028201cb4 Mon Sep 17 00:00:00 2001 From: Tom Harvey Date: Sat, 1 Sep 2018 14:31:15 +0100 Subject: [PATCH] Data Lake Store File: updating the ID (#1856) * Data Lake: exposing the `endpoint` field * Data Lake Store File: switching the ID ``` $ acctests azurerm TestAccAzureRMDataLakeStoreFileMigrateState === RUN TestAccAzureRMDataLakeStoreFileMigrateState 2018/08/31 18:59:29 [INFO] Found AzureRM Data Lake Store File State v0; migrating to v1 2018/08/31 18:59:29 [DEBUG] ARM Data Lake Store File Attributes before Migration: map[string]string{"account_name":"example", "remote_file_path":"/test/blob.vhd"} 2018/08/31 18:59:29 [DEBUG] ARM Data Lake Store File Attributes after State Migration: map[string]string{"remote_file_path":"/test/blob.vhd", "account_name":"example", "id":"example.azuredatalakestore.net/test/blob.vhd"} --- PASS: TestAccAzureRMDataLakeStoreFileMigrateState (0.00s) PASS ok github.com/terraform-providers/terraform-provider-azurerm/azurerm 0.411s ``` * Data Lake Store File: import support * Parsing the URI correctly --- azurerm/resource_arm_data_lake_store.go | 7 ++ azurerm/resource_arm_data_lake_store_file.go | 73 +++++++++++++++---- ...urce_arm_data_lake_store_file_migration.go | 39 ++++++++++ ...arm_data_lake_store_file_migration_test.go | 66 +++++++++++++++++ .../resource_arm_data_lake_store_file_test.go | 8 ++ website/docs/r/data_lake_store.html.markdown | 2 + .../docs/r/data_lake_store_file.html.markdown | 8 ++ 7 files changed, 187 insertions(+), 16 deletions(-) create mode 100644 azurerm/resource_arm_data_lake_store_file_migration.go create mode 100644 azurerm/resource_arm_data_lake_store_file_migration_test.go diff --git a/azurerm/resource_arm_data_lake_store.go b/azurerm/resource_arm_data_lake_store.go index 19498d7be8d8..687ba8cc671a 100644 --- a/azurerm/resource_arm_data_lake_store.go +++ b/azurerm/resource_arm_data_lake_store.go @@ -97,6 +97,11 @@ func resourceArmDataLakeStore() *schema.Resource { 
DiffSuppressFunc: suppress.CaseDifference, }, + "endpoint": { + Type: schema.TypeString, + Computed: true, + }, + "tags": tagsSchema(), }, } @@ -227,6 +232,8 @@ func resourceArmDateLakeStoreRead(d *schema.ResourceData, meta interface{}) erro if config := properties.EncryptionConfig; config != nil { d.Set("encryption_type", string(config.Type)) } + + d.Set("endpoint", properties.Endpoint) } flattenAndSetTags(d, resp.Tags) diff --git a/azurerm/resource_arm_data_lake_store_file.go b/azurerm/resource_arm_data_lake_store_file.go index 41659b9e6387..84172915c922 100644 --- a/azurerm/resource_arm_data_lake_store_file.go +++ b/azurerm/resource_arm_data_lake_store_file.go @@ -5,7 +5,9 @@ import ( "fmt" "io/ioutil" "log" + "net/url" "os" + "strings" "github.com/Azure/azure-sdk-for-go/services/datalake/store/2016-11-01/filesystem" "github.com/hashicorp/terraform/helper/schema" @@ -15,9 +17,14 @@ import ( func resourceArmDataLakeStoreFile() *schema.Resource { return &schema.Resource{ - Create: resourceArmDataLakeStoreFileCreate, - Read: resourceArmDataLakeStoreFileRead, - Delete: resourceArmDataLakeStoreFileDelete, + Create: resourceArmDataLakeStoreFileCreate, + Read: resourceArmDataLakeStoreFileRead, + Delete: resourceArmDataLakeStoreFileDelete, + MigrateState: resourceDataLakeStoreFileMigrateState, + SchemaVersion: 1, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, Schema: map[string]*schema.Schema{ "account_name": { @@ -69,8 +76,9 @@ func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{} return fmt.Errorf("Error issuing create request for Data Lake Store File %q : %+v", remoteFilePath, err) } - d.SetId(remoteFilePath) - + // example.azuredatalakestore.net/test/example.txt + id := fmt.Sprintf("%s.%s%s", accountName, client.AdlsFileSystemDNSSuffix, remoteFilePath) + d.SetId(id) return resourceArmDataLakeStoreFileRead(d, meta) } @@ -78,19 +86,25 @@ func resourceArmDataLakeStoreFileRead(d *schema.ResourceData, meta 
interface{}) client := meta.(*ArmClient).dataLakeStoreFilesClient ctx := meta.(*ArmClient).StopContext - accountName := d.Get("account_name").(string) - remoteFilePath := d.Id() + id, err := parseDataLakeStoreFileId(d.Id(), client.AdlsFileSystemDNSSuffix) + if err != nil { + return err + } - resp, err := client.GetFileStatus(ctx, accountName, remoteFilePath, utils.Bool(true)) + resp, err := client.GetFileStatus(ctx, id.storageAccountName, id.filePath, utils.Bool(true)) if err != nil { if utils.ResponseWasNotFound(resp.Response) { - log.Printf("[WARN] Data Lake Store File %q was not found (Account %q)", remoteFilePath, accountName) + log.Printf("[WARN] Data Lake Store File %q was not found (Account %q)", id.filePath, id.storageAccountName) d.SetId("") return nil } - return fmt.Errorf("Error making Read request on Azure Data Lake Store File %q (Account %q): %+v", remoteFilePath, accountName, err) + + return fmt.Errorf("Error making Read request on Azure Data Lake Store File %q (Account %q): %+v", id.filePath, id.storageAccountName, err) } + d.Set("account_name", id.storageAccountName) + d.Set("remote_file_path", id.filePath) + return nil } @@ -98,15 +112,42 @@ func resourceArmDataLakeStoreFileDelete(d *schema.ResourceData, meta interface{} client := meta.(*ArmClient).dataLakeStoreFilesClient ctx := meta.(*ArmClient).StopContext - accountName := d.Get("account_name").(string) - remoteFilePath := d.Id() - resp, err := client.Delete(ctx, accountName, remoteFilePath, utils.Bool(false)) + id, err := parseDataLakeStoreFileId(d.Id(), client.AdlsFileSystemDNSSuffix) if err != nil { - if response.WasNotFound(resp.Response.Response) { - return nil + return err + } + + resp, err := client.Delete(ctx, id.storageAccountName, id.filePath, utils.Bool(false)) + if err != nil { + if !response.WasNotFound(resp.Response.Response) { + return fmt.Errorf("Error issuing delete request for Data Lake Store File %q (Account %q): %+v", id.filePath, id.storageAccountName, err) } - return 
fmt.Errorf("Error issuing delete request for Data Lake Store File %q (Account %q): %+v", remoteFilePath, accountName, err) } return nil } + +type dataLakeStoreFileId struct { + storageAccountName string + filePath string +} + +func parseDataLakeStoreFileId(input string, suffix string) (*dataLakeStoreFileId, error) { + // Example: tomdevdls1.azuredatalakestore.net/test/example.txt + // we add a scheme to the start of this so it parses correctly + uri, err := url.Parse(fmt.Sprintf("https://%s", input)) + if err != nil { + return nil, fmt.Errorf("Error parsing %q as URI: %+v", input, err) + } + + // TODO: switch to pulling this from the Environment when it's available there + // BUG: https://github.com/Azure/go-autorest/issues/312 + replacement := fmt.Sprintf(".%s", suffix) + accountName := strings.Replace(uri.Host, replacement, "", -1) + + file := dataLakeStoreFileId{ + storageAccountName: accountName, + filePath: uri.Path, + } + return &file, nil +} diff --git a/azurerm/resource_arm_data_lake_store_file_migration.go b/azurerm/resource_arm_data_lake_store_file_migration.go new file mode 100644 index 000000000000..61d6fd25c87e --- /dev/null +++ b/azurerm/resource_arm_data_lake_store_file_migration.go @@ -0,0 +1,39 @@ +package azurerm + +import ( + "fmt" + "log" + + "github.com/hashicorp/terraform/terraform" +) + +func resourceDataLakeStoreFileMigrateState(v int, is *terraform.InstanceState, meta interface{}) (*terraform.InstanceState, error) { + switch v { + case 0: + log.Println("[INFO] Found AzureRM Data Lake Store File State v0; migrating to v1") + return resourceDataLakeStoreFileStateV0toV1(is, meta) + default: + return is, fmt.Errorf("Unexpected schema version: %d", v) + } +} + +func resourceDataLakeStoreFileStateV0toV1(is *terraform.InstanceState, meta interface{}) (*terraform.InstanceState, error) { + if is.Empty() { + log.Println("[DEBUG] Empty InstanceState; nothing to migrate.") + return is, nil + } + + log.Printf("[DEBUG] ARM Data Lake Store File Attributes 
before Migration: %#v", is.Attributes) + + client := meta.(*ArmClient).dataLakeStoreFilesClient + + storageAccountName := is.Attributes["account_name"] + filePath := is.Attributes["remote_file_path"] + newID := fmt.Sprintf("%s.%s%s", storageAccountName, client.AdlsFileSystemDNSSuffix, filePath) + is.Attributes["id"] = newID + is.ID = newID + + log.Printf("[DEBUG] ARM Data Lake Store File Attributes after State Migration: %#v", is.Attributes) + + return is, nil +} diff --git a/azurerm/resource_arm_data_lake_store_file_migration_test.go b/azurerm/resource_arm_data_lake_store_file_migration_test.go new file mode 100644 index 000000000000..7fcedf0811bc --- /dev/null +++ b/azurerm/resource_arm_data_lake_store_file_migration_test.go @@ -0,0 +1,66 @@ +package azurerm + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/terraform" +) + +// NOTE: this is intentionally an acceptance test (and we're not explicitly setting the env) +// as we want to run this depending on the cloud we're in. 
+func TestAccAzureRMDataLakeStoreFileMigrateState(t *testing.T) { + config := testGetAzureConfig(t) + if config == nil { + t.SkipNow() + return + } + + client, err := getArmClient(config) + if err != nil { + t.Fatal(fmt.Errorf("Error building ARM Client: %+v", err)) + return + } + + client.StopContext = testAccProvider.StopContext() + + filesClient := client.dataLakeStoreFilesClient + + cases := map[string]struct { + StateVersion int + ID string + InputAttributes map[string]string + ExpectedAttributes map[string]string + }{ + "v0_1_without_value": { + StateVersion: 0, + ID: "some_id", + InputAttributes: map[string]string{ + "remote_file_path": "/test/blob.vhd", + "account_name": "example", + }, + ExpectedAttributes: map[string]string{ + "id": fmt.Sprintf("example.%s/test/blob.vhd", filesClient.AdlsFileSystemDNSSuffix), + }, + }, + } + + for tn, tc := range cases { + is := &terraform.InstanceState{ + ID: tc.ID, + Attributes: tc.InputAttributes, + } + is, err := resourceDataLakeStoreFileMigrateState(tc.StateVersion, is, client) + + if err != nil { + t.Fatalf("bad: %s, err: %#v", tn, err) + } + + for k, v := range tc.ExpectedAttributes { + actual := is.Attributes[k] + if actual != v { + t.Fatalf("Bad Data Lake Store File Migrate for %q: %q\n\n expected: %q", k, actual, v) + } + } + } +} diff --git a/azurerm/resource_arm_data_lake_store_file_test.go b/azurerm/resource_arm_data_lake_store_file_test.go index 3c45f240c712..7dbd430a8944 100644 --- a/azurerm/resource_arm_data_lake_store_file_test.go +++ b/azurerm/resource_arm_data_lake_store_file_test.go @@ -28,9 +28,16 @@ func TestAccAzureRMDataLakeStoreFile_basic(t *testing.T) { testCheckAzureRMDataLakeStoreFileExists(resourceName), ), }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"local_file_path"}, + }, }, }) } + func testCheckAzureRMDataLakeStoreFileExists(name string) resource.TestCheckFunc { return func(s *terraform.State) error { // Ensure we 
have enough information in state to look up in API @@ -96,6 +103,7 @@ resource "azurerm_data_lake_store" "test" { name = "unlikely23exst2acct%s" resource_group_name = "${azurerm_resource_group.test.name}" location = "%s" + firewall_state = "Disabled" } resource "azurerm_data_lake_store_file" "test" { diff --git a/website/docs/r/data_lake_store.html.markdown b/website/docs/r/data_lake_store.html.markdown index 489fae42da69..afb6fd05e01b 100644 --- a/website/docs/r/data_lake_store.html.markdown +++ b/website/docs/r/data_lake_store.html.markdown @@ -62,6 +62,8 @@ The following attributes are exported: * `id` - The Date Lake Store ID. +* `endpoint` - The Endpoint for the Data Lake Store. + ## Import Date Lake Store can be imported using the `resource id`, e.g. diff --git a/website/docs/r/data_lake_store_file.html.markdown b/website/docs/r/data_lake_store_file.html.markdown index 186523bb497a..660d55a65a48 100644 --- a/website/docs/r/data_lake_store_file.html.markdown +++ b/website/docs/r/data_lake_store_file.html.markdown @@ -43,3 +43,11 @@ The following arguments are supported: * `local_file_path` - (Required) The path to the local file to be added to the Data Lake Store. * `remote_file_path` - (Required) The path created for the file on the Data Lake Store. + +## Import + +Data Lake Store Files can be imported using the `resource id`, e.g. + +```shell +terraform import azurerm_data_lake_store_file.test example.azuredatalakestore.net/test/example.txt +```