Data Lake Store File: updating the ID (#1856)
* Data Lake: exposing the `endpoint` field

* Data Lake Store File: switching the ID

```
 $ acctests azurerm TestAccAzureRMDataLakeStoreFileMigrateState
=== RUN   TestAccAzureRMDataLakeStoreFileMigrateState
2018/08/31 18:59:29 [INFO] Found AzureRM Data Lake Store File State v0; migrating to v1
2018/08/31 18:59:29 [DEBUG] ARM Data Lake Store File Attributes before Migration: map[string]string{"account_name":"example", "remote_file_path":"/test/blob.vhd"}
2018/08/31 18:59:29 [DEBUG] ARM Data Lake Store File Attributes after State Migration: map[string]string{"remote_file_path":"/test/blob.vhd", "account_name":"example", "id":"example.azuredatalakestore.net/test/blob.vhd"}
--- PASS: TestAccAzureRMDataLakeStoreFileMigrateState (0.00s)
PASS
ok  	github.com/terraform-providers/terraform-provider-azurerm/azurerm	0.411s
```

* Data Lake Store File: import support

* Parsing the URI correctly
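
As a minimal sketch (not part of this commit) of what the ID switch means in practice: v0 stored the bare remote file path as the resource ID, while v1 stores a host-qualified path built from the account name and the ADLS DNS suffix. The suffix is hard-coded below as an assumption; the provider reads it from the SDK client's `AdlsFileSystemDNSSuffix`.

```go
package main

import "fmt"

func main() {
	// values match the migration log above
	accountName := "example"
	dnsSuffix := "azuredatalakestore.net" // assumed; the provider uses client.AdlsFileSystemDNSSuffix
	remoteFilePath := "/test/blob.vhd"

	oldID := remoteFilePath // v0 ID: just the remote file path
	newID := fmt.Sprintf("%s.%s%s", accountName, dnsSuffix, remoteFilePath)

	fmt.Println(oldID) // /test/blob.vhd
	fmt.Println(newID) // example.azuredatalakestore.net/test/blob.vhd
}
```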
tombuildsstuff authored Sep 1, 2018
1 parent d8653de commit 5c7dd38
Showing 7 changed files with 187 additions and 16 deletions.
7 changes: 7 additions & 0 deletions azurerm/resource_arm_data_lake_store.go
@@ -97,6 +97,11 @@ func resourceArmDataLakeStore() *schema.Resource {
DiffSuppressFunc: suppress.CaseDifference,
},

"endpoint": {
Type: schema.TypeString,
Computed: true,
},

"tags": tagsSchema(),
},
}
@@ -227,6 +232,8 @@ func resourceArmDateLakeStoreRead(d *schema.ResourceData, meta interface{}) error {
if config := properties.EncryptionConfig; config != nil {
d.Set("encryption_type", string(config.Type))
}

d.Set("endpoint", properties.Endpoint)
}

flattenAndSetTags(d, resp.Tags)
73 changes: 57 additions & 16 deletions azurerm/resource_arm_data_lake_store_file.go
@@ -5,7 +5,9 @@ import (
"fmt"
"io/ioutil"
"log"
"net/url"
"os"
"strings"

"github.com/Azure/azure-sdk-for-go/services/datalake/store/2016-11-01/filesystem"
"github.com/hashicorp/terraform/helper/schema"
@@ -15,9 +17,14 @@ import (

func resourceArmDataLakeStoreFile() *schema.Resource {
return &schema.Resource{
-Create: resourceArmDataLakeStoreFileCreate,
-Read:   resourceArmDataLakeStoreFileRead,
-Delete: resourceArmDataLakeStoreFileDelete,
+Create:        resourceArmDataLakeStoreFileCreate,
+Read:          resourceArmDataLakeStoreFileRead,
+Delete:        resourceArmDataLakeStoreFileDelete,
+MigrateState:  resourceDataLakeStoreFileMigrateState,
+SchemaVersion: 1,
+Importer: &schema.ResourceImporter{
+State: schema.ImportStatePassthrough,
+},

Schema: map[string]*schema.Schema{
"account_name": {
@@ -69,44 +76,78 @@ func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{}) error {
return fmt.Errorf("Error issuing create request for Data Lake Store File %q : %+v", remoteFilePath, err)
}

-d.SetId(remoteFilePath)

+// example.azuredatalakestore.net/test/example.txt
+id := fmt.Sprintf("%s.%s%s", accountName, client.AdlsFileSystemDNSSuffix, remoteFilePath)
+d.SetId(id)
return resourceArmDataLakeStoreFileRead(d, meta)
}

func resourceArmDataLakeStoreFileRead(d *schema.ResourceData, meta interface{}) error {
client := meta.(*ArmClient).dataLakeStoreFilesClient
ctx := meta.(*ArmClient).StopContext

-accountName := d.Get("account_name").(string)
-remoteFilePath := d.Id()
+id, err := parseDataLakeStoreFileId(d.Id(), client.AdlsFileSystemDNSSuffix)
+if err != nil {
+return err
+}

-resp, err := client.GetFileStatus(ctx, accountName, remoteFilePath, utils.Bool(true))
+resp, err := client.GetFileStatus(ctx, id.storageAccountName, id.filePath, utils.Bool(true))
if err != nil {
if utils.ResponseWasNotFound(resp.Response) {
-log.Printf("[WARN] Data Lake Store File %q was not found (Account %q)", remoteFilePath, accountName)
+log.Printf("[WARN] Data Lake Store File %q was not found (Account %q)", id.filePath, id.storageAccountName)
d.SetId("")
return nil
}
-return fmt.Errorf("Error making Read request on Azure Data Lake Store File %q (Account %q): %+v", remoteFilePath, accountName, err)
+
+return fmt.Errorf("Error making Read request on Azure Data Lake Store File %q (Account %q): %+v", id.filePath, id.storageAccountName, err)
}

+d.Set("account_name", id.storageAccountName)
+d.Set("remote_file_path", id.filePath)

return nil
}

func resourceArmDataLakeStoreFileDelete(d *schema.ResourceData, meta interface{}) error {
client := meta.(*ArmClient).dataLakeStoreFilesClient
ctx := meta.(*ArmClient).StopContext

-accountName := d.Get("account_name").(string)
-remoteFilePath := d.Id()
-resp, err := client.Delete(ctx, accountName, remoteFilePath, utils.Bool(false))
+id, err := parseDataLakeStoreFileId(d.Id(), client.AdlsFileSystemDNSSuffix)
if err != nil {
-if response.WasNotFound(resp.Response.Response) {
-return nil
+return err
}

+resp, err := client.Delete(ctx, id.storageAccountName, id.filePath, utils.Bool(false))
+if err != nil {
+if !response.WasNotFound(resp.Response.Response) {
+return fmt.Errorf("Error issuing delete request for Data Lake Store File %q (Account %q): %+v", id.filePath, id.storageAccountName, err)
+}
-return fmt.Errorf("Error issuing delete request for Data Lake Store File %q (Account %q): %+v", remoteFilePath, accountName, err)
}

return nil
}

type dataLakeStoreFileId struct {
storageAccountName string
filePath string
}

func parseDataLakeStoreFileId(input string, suffix string) (*dataLakeStoreFileId, error) {
// Example: tomdevdls1.azuredatalakestore.net/test/example.txt
// we add a scheme to the start of this so it parses correctly
uri, err := url.Parse(fmt.Sprintf("https://%s", input))
if err != nil {
return nil, fmt.Errorf("Error parsing %q as URI: %+v", input, err)
}

// TODO: switch to pulling this from the Environment when it's available there
// BUG: https://github.com/Azure/go-autorest/issues/312
replacement := fmt.Sprintf(".%s", suffix)
accountName := strings.Replace(uri.Host, replacement, "", -1)

file := dataLakeStoreFileId{
storageAccountName: accountName,
filePath: uri.Path,
}
return &file, nil
}
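
For illustration, here is a standalone sketch (not part of the diff) of the round-trip implemented by `parseDataLakeStoreFileId` above: prepend a scheme so `url.Parse` splits the host from the path, then trim the DNS suffix from the host to recover the account name. The suffix is hard-coded here as an assumption; the provider passes `client.AdlsFileSystemDNSSuffix`.

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

func main() {
	input := "example.azuredatalakestore.net/test/blob.vhd"
	suffix := "azuredatalakestore.net" // assumed value of client.AdlsFileSystemDNSSuffix

	// the ID has no scheme, so add one to make url.Parse split host/path correctly
	uri, err := url.Parse(fmt.Sprintf("https://%s", input))
	if err != nil {
		panic(err)
	}

	accountName := strings.Replace(uri.Host, fmt.Sprintf(".%s", suffix), "", -1)
	fmt.Println(accountName) // example
	fmt.Println(uri.Path)    // /test/blob.vhd
}
```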
39 changes: 39 additions & 0 deletions azurerm/resource_arm_data_lake_store_file_migration.go
@@ -0,0 +1,39 @@
package azurerm

import (
"fmt"
"log"

"github.com/hashicorp/terraform/terraform"
)

func resourceDataLakeStoreFileMigrateState(v int, is *terraform.InstanceState, meta interface{}) (*terraform.InstanceState, error) {
switch v {
case 0:
log.Println("[INFO] Found AzureRM Data Lake Store File State v0; migrating to v1")
return resourceDataLakeStoreFileStateV0toV1(is, meta)
default:
return is, fmt.Errorf("Unexpected schema version: %d", v)
}
}

func resourceDataLakeStoreFileStateV0toV1(is *terraform.InstanceState, meta interface{}) (*terraform.InstanceState, error) {
if is.Empty() {
log.Println("[DEBUG] Empty InstanceState; nothing to migrate.")
return is, nil
}

log.Printf("[DEBUG] ARM Data Lake Store File Attributes before Migration: %#v", is.Attributes)

client := meta.(*ArmClient).dataLakeStoreFilesClient

storageAccountName := is.Attributes["account_name"]
filePath := is.Attributes["remote_file_path"]
newID := fmt.Sprintf("%s.%s%s", storageAccountName, client.AdlsFileSystemDNSSuffix, filePath)
is.Attributes["id"] = newID
is.ID = newID

log.Printf("[DEBUG] ARM Data Lake Store File Attributes after State Migration: %#v", is.Attributes)

return is, nil
}
66 changes: 66 additions & 0 deletions azurerm/resource_arm_data_lake_store_file_migration_test.go
@@ -0,0 +1,66 @@
package azurerm

import (
"fmt"
"testing"

"github.com/hashicorp/terraform/terraform"
)

// NOTE: this is intentionally an acceptance test (and we're not explicitly setting the env)
// as we want to run this depending on the cloud we're in.
func TestAccAzureRMDataLakeStoreFileMigrateState(t *testing.T) {
config := testGetAzureConfig(t)
if config == nil {
t.SkipNow()
return
}

client, err := getArmClient(config)
if err != nil {
t.Fatal(fmt.Errorf("Error building ARM Client: %+v", err))
return
}

client.StopContext = testAccProvider.StopContext()

filesClient := client.dataLakeStoreFilesClient

cases := map[string]struct {
StateVersion int
ID string
InputAttributes map[string]string
ExpectedAttributes map[string]string
}{
"v0_1_without_value": {
StateVersion: 0,
ID: "some_id",
InputAttributes: map[string]string{
"remote_file_path": "/test/blob.vhd",
"account_name": "example",
},
ExpectedAttributes: map[string]string{
"id": fmt.Sprintf("example.%s/test/blob.vhd", filesClient.AdlsFileSystemDNSSuffix),
},
},
}

for tn, tc := range cases {
is := &terraform.InstanceState{
ID: tc.ID,
Attributes: tc.InputAttributes,
}
is, err := resourceDataLakeStoreFileMigrateState(tc.StateVersion, is, client)

if err != nil {
t.Fatalf("bad: %s, err: %#v", tn, err)
}

for k, v := range tc.ExpectedAttributes {
actual := is.Attributes[k]
if actual != v {
t.Fatalf("Bad Data Lake Store File Migrate for %q: %q\n\n expected: %q", k, actual, v)
}
}
}
}
8 changes: 8 additions & 0 deletions azurerm/resource_arm_data_lake_store_file_test.go
@@ -28,9 +28,16 @@ func TestAccAzureRMDataLakeStoreFile_basic(t *testing.T) {
testCheckAzureRMDataLakeStoreFileExists(resourceName),
),
},
{
ResourceName: resourceName,
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"local_file_path"},
},
},
})
}

func testCheckAzureRMDataLakeStoreFileExists(name string) resource.TestCheckFunc {
return func(s *terraform.State) error {
// Ensure we have enough information in state to look up in API
@@ -96,6 +103,7 @@ resource "azurerm_data_lake_store" "test" {
name = "unlikely23exst2acct%s"
resource_group_name = "${azurerm_resource_group.test.name}"
location = "%s"
firewall_state = "Disabled"
}
resource "azurerm_data_lake_store_file" "test" {
2 changes: 2 additions & 0 deletions website/docs/r/data_lake_store.html.markdown
@@ -62,6 +62,8 @@ The following attributes are exported:

* `id` - The Data Lake Store ID.

* `endpoint` - The Endpoint for the Data Lake Store.

## Import

Data Lake Store can be imported using the `resource id`, e.g.
8 changes: 8 additions & 0 deletions website/docs/r/data_lake_store_file.html.markdown
@@ -43,3 +43,11 @@ The following arguments are supported:
* `local_file_path` - (Required) The path to the local file to be added to the Data Lake Store.

* `remote_file_path` - (Required) The path created for the file on the Data Lake Store.

## Import

Data Lake Store Files can be imported using the `resource id`, e.g.

```shell
terraform import azurerm_data_lake_store_file.test example.azuredatalakestore.net/test/example.txt
```
