Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Data Lake Store File: updating the ID #1856

Merged
merged 4 commits into from
Sep 1, 2018
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions azurerm/resource_arm_data_lake_store.go
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,11 @@ func resourceArmDataLakeStore() *schema.Resource {
DiffSuppressFunc: suppress.CaseDifference,
},

"endpoint": {
Type: schema.TypeString,
Computed: true,
},

"tags": tagsSchema(),
},
}
Expand Down Expand Up @@ -227,6 +232,8 @@ func resourceArmDateLakeStoreRead(d *schema.ResourceData, meta interface{}) erro
if config := properties.EncryptionConfig; config != nil {
d.Set("encryption_type", string(config.Type))
}

d.Set("endpoint", properties.Endpoint)
}

flattenAndSetTags(d, resp.Tags)
Expand Down
64 changes: 48 additions & 16 deletions azurerm/resource_arm_data_lake_store_file.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,9 @@ import (
"fmt"
"io/ioutil"
"log"
"net/url"
"os"
"strings"

"github.com/Azure/azure-sdk-for-go/services/datalake/store/2016-11-01/filesystem"
"github.com/hashicorp/terraform/helper/schema"
Expand All @@ -15,9 +17,11 @@ import (

func resourceArmDataLakeStoreFile() *schema.Resource {
return &schema.Resource{
Create: resourceArmDataLakeStoreFileCreate,
Read: resourceArmDataLakeStoreFileRead,
Delete: resourceArmDataLakeStoreFileDelete,
Create: resourceArmDataLakeStoreFileCreate,
Read: resourceArmDataLakeStoreFileRead,
Delete: resourceArmDataLakeStoreFileDelete,
MigrateState: resourceDataLakeStoreFileMigrateState,
SchemaVersion: 1,

Schema: map[string]*schema.Schema{
"account_name": {
Expand Down Expand Up @@ -69,26 +73,30 @@ func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{}
return fmt.Errorf("Error issuing create request for Data Lake Store File %q : %+v", remoteFilePath, err)
}

d.SetId(remoteFilePath)

// example.azuredatalakestore.net/test/example.txt
id := fmt.Sprintf("%s.%s%s", accountName, client.AdlsFileSystemDNSSuffix, remoteFilePath)
d.SetId(id)
return resourceArmDataLakeStoreFileRead(d, meta)
}

func resourceArmDataLakeStoreFileRead(d *schema.ResourceData, meta interface{}) error {
client := meta.(*ArmClient).dataLakeStoreFilesClient
ctx := meta.(*ArmClient).StopContext

accountName := d.Get("account_name").(string)
remoteFilePath := d.Id()
id, err := parseDataLakeStoreFileId(d.Id(), client.AdlsFileSystemDNSSuffix)
if err != nil {
return err
}

resp, err := client.GetFileStatus(ctx, accountName, remoteFilePath, utils.Bool(true))
resp, err := client.GetFileStatus(ctx, id.storageAccountName, id.filePath, utils.Bool(true))
if err != nil {
if utils.ResponseWasNotFound(resp.Response) {
log.Printf("[WARN] Data Lake Store File %q was not found (Account %q)", remoteFilePath, accountName)
log.Printf("[WARN] Data Lake Store File %q was not found (Account %q)", id.filePath, id.storageAccountName)
d.SetId("")
return nil
}
return fmt.Errorf("Error making Read request on Azure Data Lake Store File %q (Account %q): %+v", remoteFilePath, accountName, err)

return fmt.Errorf("Error making Read request on Azure Data Lake Store File %q (Account %q): %+v", id.filePath, id.storageAccountName, err)
}

return nil
Expand All @@ -98,15 +106,39 @@ func resourceArmDataLakeStoreFileDelete(d *schema.ResourceData, meta interface{}
client := meta.(*ArmClient).dataLakeStoreFilesClient
ctx := meta.(*ArmClient).StopContext

accountName := d.Get("account_name").(string)
remoteFilePath := d.Id()
resp, err := client.Delete(ctx, accountName, remoteFilePath, utils.Bool(false))
id, err := parseDataLakeStoreFileId(d.Id(), client.AdlsFileSystemDNSSuffix)
if err != nil {
if response.WasNotFound(resp.Response.Response) {
return nil
return err
}

resp, err := client.Delete(ctx, id.storageAccountName, id.filePath, utils.Bool(false))
if err != nil {
if !response.WasNotFound(resp.Response.Response) {
return fmt.Errorf("Error issuing delete request for Data Lake Store File %q (Account %q): %+v", id.filePath, id.storageAccountName, err)
}
return fmt.Errorf("Error issuing delete request for Data Lake Store File %q (Account %q): %+v", remoteFilePath, accountName, err)
}

return nil
}

// dataLakeStoreFileId represents the decomposed resource ID of a Data Lake
// Store File: "<account>.<dns suffix><file path>".
type dataLakeStoreFileId struct {
	// storageAccountName is the Data Lake Store account name (the host
	// component of the ID with the DNS suffix removed).
	storageAccountName string
	// filePath is the absolute path of the file within the store,
	// e.g. "/test/example.txt".
	filePath string
}

// parseDataLakeStoreFileId parses a Data Lake Store File resource ID of the
// form "example.azuredatalakestore.net/test/example.txt" into its account
// name and file path components. `suffix` is the environment's ADLS file
// system DNS suffix (e.g. "azuredatalakestore.net").
func parseDataLakeStoreFileId(input string, suffix string) (*dataLakeStoreFileId, error) {
	// the ID carries no scheme, so prefix one before parsing - otherwise
	// url.Parse treats the entire string as the Path and leaves Host empty,
	// which previously caused the account name to include the file path
	uri, err := url.Parse(fmt.Sprintf("https://%s", input))
	if err != nil {
		return nil, fmt.Errorf("Error parsing %q as URI: %+v", input, err)
	}

	// TODO: switch to pulling this from the Environment when it's available there
	// BUG: https://github.com/Azure/go-autorest/issues/312
	// strip the DNS suffix from the host component only, leaving the account name
	accountName := strings.TrimSuffix(uri.Host, fmt.Sprintf(".%s", suffix))

	file := dataLakeStoreFileId{
		storageAccountName: accountName,
		filePath:           uri.Path,
	}
	return &file, nil
}
39 changes: 39 additions & 0 deletions azurerm/resource_arm_data_lake_store_file_migration.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
package azurerm

import (
"fmt"
"log"

"github.com/hashicorp/terraform/terraform"
)

// resourceDataLakeStoreFileMigrateState upgrades persisted Terraform state
// for the azurerm_data_lake_store_file resource from older schema versions
// to the current one. Only the v0 -> v1 migration is supported.
func resourceDataLakeStoreFileMigrateState(v int, is *terraform.InstanceState, meta interface{}) (*terraform.InstanceState, error) {
	if v == 0 {
		log.Println("[INFO] Found AzureRM Data Lake Store File State v0; migrating to v1")
		return resourceDataLakeStoreFileStateV0toV1(is, meta)
	}

	return is, fmt.Errorf("Unexpected schema version: %d", v)
}

// resourceDataLakeStoreFileStateV0toV1 rewrites a v0 resource ID (the bare
// remote file path) into the v1 format "<account>.<dns suffix><path>",
// updating both the state ID and the "id" attribute.
func resourceDataLakeStoreFileStateV0toV1(is *terraform.InstanceState, meta interface{}) (*terraform.InstanceState, error) {
	if is.Empty() {
		log.Println("[DEBUG] Empty InstanceState; nothing to migrate.")
		return is, nil
	}

	log.Printf("[DEBUG] ARM Data Lake Store File Attributes before Migration: %#v", is.Attributes)

	client := meta.(*ArmClient).dataLakeStoreFilesClient

	// rebuild the ID from the attributes persisted in the v0 state
	accountName := is.Attributes["account_name"]
	path := is.Attributes["remote_file_path"]
	updatedID := fmt.Sprintf("%s.%s%s", accountName, client.AdlsFileSystemDNSSuffix, path)

	is.ID = updatedID
	is.Attributes["id"] = updatedID

	log.Printf("[DEBUG] ARM Data Lake Store File Attributes after State Migration: %#v", is.Attributes)

	return is, nil
}
66 changes: 66 additions & 0 deletions azurerm/resource_arm_data_lake_store_file_migration_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
package azurerm

import (
"fmt"
"testing"

"github.com/hashicorp/terraform/terraform"
)

// NOTE: this is intentionally an acceptance test (and we're not explicitly setting the env)
// as we want to run this depending on the cloud we're in.
func TestAccAzureRMDataLakeStoreFileMigrateState(t *testing.T) {
	config := testGetAzureConfig(t)
	if config == nil {
		t.SkipNow()
		return
	}

	client, err := getArmClient(config)
	if err != nil {
		t.Fatal(fmt.Errorf("Error building ARM Client: %+v", err))
		return
	}

	client.StopContext = testAccProvider.StopContext()

	// the expected ID embeds the environment-specific ADLS DNS suffix
	suffix := client.dataLakeStoreFilesClient.AdlsFileSystemDNSSuffix

	testCases := map[string]struct {
		StateVersion       int
		ID                 string
		InputAttributes    map[string]string
		ExpectedAttributes map[string]string
	}{
		"v0_1_without_value": {
			StateVersion: 0,
			ID:           "some_id",
			InputAttributes: map[string]string{
				"remote_file_path": "/test/blob.vhd",
				"account_name":     "example",
			},
			ExpectedAttributes: map[string]string{
				"id": fmt.Sprintf("example.%s/test/blob.vhd", suffix),
			},
		},
	}

	for name, tc := range testCases {
		state := &terraform.InstanceState{
			ID:         tc.ID,
			Attributes: tc.InputAttributes,
		}

		migrated, err := resourceDataLakeStoreFileMigrateState(tc.StateVersion, state, client)
		if err != nil {
			t.Fatalf("bad: %s, err: %#v", name, err)
		}

		for key, expected := range tc.ExpectedAttributes {
			if actual := migrated.Attributes[key]; actual != expected {
				t.Fatalf("Bad Data Lake Store File Migrate for %q: %q\n\n expected: %q", key, actual, expected)
			}
		}
	}
}
2 changes: 2 additions & 0 deletions azurerm/resource_arm_data_lake_store_file_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ func TestAccAzureRMDataLakeStoreFile_basic(t *testing.T) {
},
})
}

func testCheckAzureRMDataLakeStoreFileExists(name string) resource.TestCheckFunc {
return func(s *terraform.State) error {
// Ensure we have enough information in state to look up in API
Expand Down Expand Up @@ -96,6 +97,7 @@ resource "azurerm_data_lake_store" "test" {
name = "unlikely23exst2acct%s"
resource_group_name = "${azurerm_resource_group.test.name}"
location = "%s"
firewall_state = "Disabled"
}

resource "azurerm_data_lake_store_file" "test" {
Expand Down
2 changes: 2 additions & 0 deletions website/docs/r/data_lake_store.html.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,8 @@ The following attributes are exported:

* `id` - The Data Lake Store ID.

* `endpoint` - The Endpoint for the Data Lake Store.

## Import

Data Lake Store can be imported using the `resource id`, e.g.
Expand Down
8 changes: 8 additions & 0 deletions website/docs/r/data_lake_store_file.html.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -43,3 +43,11 @@ The following arguments are supported:
* `local_file_path` - (Required) The path to the local file to be added to the Data Lake Store.

* `remote_file_path` - (Required) The path created for the file on the Data Lake Store.

## Import
tombuildsstuff marked this conversation as resolved.
Show resolved Hide resolved

Data Lake Store Files can be imported using the `resource id`, e.g.

```shell
terraform import azurerm_data_lake_store_file.test example.azuredatalakestore.net/test/example.txt
```