Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Requiring Imports (3 of ...) #2537

Merged
merged 25 commits into from
Dec 20, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
25 commits
Select a commit. Hold Shift and click to select a range.
9132c86
r/data_lake_analytics_account: requiring import
tombuildsstuff Dec 18, 2018
6107942
r/data_lake_analytics_firewall_rule: requiring import
tombuildsstuff Dec 18, 2018
ba588a7
removing a duplicated create only check
tombuildsstuff Dec 18, 2018
056517b
r/data_lake_store: requiring import
tombuildsstuff Dec 18, 2018
bbce5f3
r/data_lake_store_file: requiring import
tombuildsstuff Dec 18, 2018
d302d5a
r/data_lake_firewall_rule: requiring import
tombuildsstuff Dec 18, 2018
597fcc4
r/databricks_workspace: requiring import
tombuildsstuff Dec 18, 2018
7f818bb
r/dev_test_lab: requiring import
tombuildsstuff Dec 18, 2018
63ef4ad
r/dev_test_linux_virtual_machine: requiring import
tombuildsstuff Dec 18, 2018
e86ee72
r/dev_test_policy: requiring import
tombuildsstuff Dec 18, 2018
4496b41
r/dev_test_virtual_network: requiring import
tombuildsstuff Dec 18, 2018
d6fef80
r/dev_test_windows_virtual_machine: requiring import
tombuildsstuff Dec 18, 2018
be060d5
r/devspace_controller: requiring import
tombuildsstuff Dec 18, 2018
b1a76e8
r/dns_a_record: requiring import
tombuildsstuff Dec 18, 2018
6e8f134
dns: refactoring
tombuildsstuff Dec 18, 2018
a6ecaf4
r/dns_aaaa_record: requiring import
tombuildsstuff Dec 18, 2018
a2758c4
r/dns_caa_record: requiring import
tombuildsstuff Dec 18, 2018
f73abac
r/dns_cname_record: requiring import
tombuildsstuff Dec 18, 2018
1d18a5d
r/dns_mx_record: requiring import
tombuildsstuff Dec 18, 2018
1883a9b
r/dns_ns_record: requiring import
tombuildsstuff Dec 18, 2018
b1e05e1
r/dns_ptr_record: requiring import
tombuildsstuff Dec 18, 2018
1710e73
r/dns_srv_record: requiring import
tombuildsstuff Dec 18, 2018
7b5288a
r/dns_txt_record: requiring import
tombuildsstuff Dec 18, 2018
a428a95
r/dns_zone: requiring import
tombuildsstuff Dec 18, 2018
5c41300
r/eventgrid_topic: requiring import
tombuildsstuff Dec 18, 2018
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 16 additions & 1 deletion azurerm/resource_arm_data_lake_analytics_account.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import (
"log"

"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"

"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/response"
Expand Down Expand Up @@ -73,8 +74,22 @@ func resourceArmDateLakeAnalyticsAccountCreate(d *schema.ResourceData, meta inte
ctx := meta.(*ArmClient).StopContext

name := d.Get("name").(string)
location := azureRMNormalizeLocation(d.Get("location").(string))
resourceGroup := d.Get("resource_group_name").(string)

if requireResourcesToBeImported {
existing, err := client.Get(ctx, resourceGroup, name)
if err != nil {
if !utils.ResponseWasNotFound(existing.Response) {
return fmt.Errorf("Error checking for presence of existing Data Lake Analytics Account %q (Resource Group %q): %s", name, resourceGroup, err)
}
}

if existing.ID != nil && *existing.ID != "" {
return tf.ImportAsExistsError("azurerm_data_lake_analytics_account", *existing.ID)
}
}

location := azureRMNormalizeLocation(d.Get("location").(string))
storeAccountName := d.Get("default_store_account_name").(string)
tier := d.Get("tier").(string)
tags := d.Get("tags").(map[string]interface{})
Expand Down
55 changes: 51 additions & 4 deletions azurerm/resource_arm_data_lake_analytics_account_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,35 @@ func TestAccAzureRMDataLakeAnalyticsAccount_basic(t *testing.T) {
})
}

// TestAccAzureRMDataLakeAnalyticsAccount_requiresImport provisions a Data Lake
// Analytics Account and then applies a second config declaring the same account,
// expecting the provider's "requires import" error. Skipped unless the provider
// is built with import enforcement enabled.
func TestAccAzureRMDataLakeAnalyticsAccount_requiresImport(t *testing.T) {
	if !requireResourcesToBeImported {
		t.Skip("Skipping since resources aren't required to be imported")
		return
	}

	resName := "azurerm_data_lake_analytics_account.test"
	rInt := acctest.RandInt()
	loc := testLocation()

	// Step 1 creates the account; step 2 re-declares it under a new resource
	// address and must fail with the import-as-exists error.
	steps := []resource.TestStep{
		{
			Config: testAccAzureRMDataLakeAnalyticsAccount_basic(rInt, loc),
			Check: resource.ComposeTestCheckFunc(
				testCheckAzureRMDataLakeAnalyticsAccountExists(resName),
			),
		},
		{
			Config:      testAccAzureRMDataLakeAnalyticsAccount_requiresImport(rInt, loc),
			ExpectError: testRequiresImportError("azurerm_data_lake_analytics_account"),
		},
	}

	resource.ParallelTest(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testCheckAzureRMDataLakeAnalyticsAccountDestroy,
		Steps:        steps,
	})
}

func TestAccAzureRMDataLakeAnalyticsAccount_tier(t *testing.T) {
resourceName := "azurerm_data_lake_analytics_account.test"
ri := acctest.RandInt()
Expand Down Expand Up @@ -151,6 +180,7 @@ func testCheckAzureRMDataLakeAnalyticsAccountDestroy(s *terraform.State) error {
}

func testAccAzureRMDataLakeAnalyticsAccount_basic(rInt int, location string) string {
template := testAccAzureRMDataLakeStore_basic(rInt, location)
return fmt.Sprintf(`
%s

Expand All @@ -161,10 +191,25 @@ resource "azurerm_data_lake_analytics_account" "test" {

default_store_account_name = "${azurerm_data_lake_store.test.name}"
}
`, testAccAzureRMDataLakeStore_basic(rInt, location), strconv.Itoa(rInt)[0:15])
`, template, strconv.Itoa(rInt)[0:15])
}

// testAccAzureRMDataLakeAnalyticsAccount_requiresImport appends an "import"
// copy of the analytics account (all arguments interpolated from the "test"
// resource) to the basic configuration, for the requires-import acceptance test.
func testAccAzureRMDataLakeAnalyticsAccount_requiresImport(rInt int, location string) string {
	base := testAccAzureRMDataLakeAnalyticsAccount_basic(rInt, location)
	duplicate := `
resource "azurerm_data_lake_analytics_account" "import" {
  name                = "${azurerm_data_lake_analytics_account.test.name}"
  resource_group_name = "${azurerm_data_lake_analytics_account.test.resource_group_name}"
  location            = "${azurerm_data_lake_analytics_account.test.location}"
  default_store_account_name = "${azurerm_data_lake_analytics_account.test.default_store_account_name}"
}
`
	// Equivalent to fmt.Sprintf("\n%s\n" + duplicate, base).
	return "\n" + base + "\n" + duplicate
}

func testAccAzureRMDataLakeAnalyticsAccount_tier(rInt int, location string) string {
template := testAccAzureRMDataLakeStore_basic(rInt, location)
return fmt.Sprintf(`
%s

Expand All @@ -177,10 +222,11 @@ resource "azurerm_data_lake_analytics_account" "test" {

default_store_account_name = "${azurerm_data_lake_store.test.name}"
}
`, testAccAzureRMDataLakeStore_basic(rInt, location), strconv.Itoa(rInt)[0:15])
`, template, strconv.Itoa(rInt)[0:15])
}

func testAccAzureRMDataLakeAnalyticsAccount_withTags(rInt int, location string) string {
template := testAccAzureRMDataLakeStore_basic(rInt, location)
return fmt.Sprintf(`
%s

Expand All @@ -196,10 +242,11 @@ resource "azurerm_data_lake_analytics_account" "test" {
cost_center = "MSFT"
}
}
`, testAccAzureRMDataLakeStore_basic(rInt, location), strconv.Itoa(rInt)[0:15])
`, template, strconv.Itoa(rInt)[0:15])
}

func testAccAzureRMDataLakeAnalyticsAccount_withTagsUpdate(rInt int, location string) string {
template := testAccAzureRMDataLakeStore_basic(rInt, location)
return fmt.Sprintf(`
%s

Expand All @@ -214,5 +261,5 @@ resource "azurerm_data_lake_analytics_account" "test" {
environment = "staging"
}
}
`, testAccAzureRMDataLakeStore_basic(rInt, location), strconv.Itoa(rInt)[0:15])
`, template, strconv.Itoa(rInt)[0:15])
}
19 changes: 17 additions & 2 deletions azurerm/resource_arm_data_lake_analytics_firewall_rule.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import (
"log"

"github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/account"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"

"github.com/hashicorp/terraform/helper/schema"

Expand Down Expand Up @@ -64,10 +65,24 @@ func resourceArmDateLakeAnalyticsFirewallRuleCreateUpdate(d *schema.ResourceData
name := d.Get("name").(string)
accountName := d.Get("account_name").(string)
resourceGroup := d.Get("resource_group_name").(string)

if requireResourcesToBeImported && d.IsNewResource() {
existing, err := client.Get(ctx, resourceGroup, accountName, name)
if err != nil {
if !utils.ResponseWasNotFound(existing.Response) {
return fmt.Errorf("Error checking for presence of existing Data Lake Analytics Firewall Rule %q (Account %q / Resource Group %q): %s", name, accountName, resourceGroup, err)
}
}

if existing.ID != nil && *existing.ID != "" {
return tf.ImportAsExistsError("azurerm_data_lake_analytics_firewall_rule", *existing.ID)
}
}

startIPAddress := d.Get("start_ip_address").(string)
endIPAddress := d.Get("end_ip_address").(string)

log.Printf("[INFO] preparing arguments for Date Lake Analytics Firewall Rule creation %q (Resource Group %q)", name, resourceGroup)
log.Printf("[INFO] preparing arguments for Date Lake Analytics Firewall Rule creation %q (Account %q / Resource Group %q)", name, accountName, resourceGroup)

dateLakeStore := account.CreateOrUpdateFirewallRuleParameters{
CreateOrUpdateFirewallRuleProperties: &account.CreateOrUpdateFirewallRuleProperties{
Expand All @@ -85,7 +100,7 @@ func resourceArmDateLakeAnalyticsFirewallRuleCreateUpdate(d *schema.ResourceData
return fmt.Errorf("Error retrieving Data Lake Analytics Firewall Rule %q (Account %q / Resource Group %q): %+v", name, accountName, resourceGroup, err)
}
if read.ID == nil {
return fmt.Errorf("Cannot read Data Lake Analytics %q (Account %q / Resource Group %q) ID", name, accountName, resourceGroup)
return fmt.Errorf("Cannot read Data Lake Analytics Firewall Rule %q (Account %q / Resource Group %q) ID", name, accountName, resourceGroup)
}

d.SetId(*read.ID)
Expand Down
48 changes: 48 additions & 0 deletions azurerm/resource_arm_data_lake_analytics_firewall_rule_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,39 @@ func TestAccAzureRMDataLakeAnalyticsFirewallRule_basic(t *testing.T) {
})
}

// TestAccAzureRMDataLakeAnalyticsFirewallRule_requiresImport creates a firewall
// rule, verifies its IP range, then re-declares the same rule and expects the
// provider's "requires import" error. Skipped unless import enforcement is on.
func TestAccAzureRMDataLakeAnalyticsFirewallRule_requiresImport(t *testing.T) {
	if !requireResourcesToBeImported {
		t.Skip("Skipping since resources aren't required to be imported")
		return
	}

	const (
		startIP = "1.1.1.1"
		endIP   = "2.2.2.2"
	)
	resName := "azurerm_data_lake_analytics_firewall_rule.test"
	rInt := acctest.RandInt()
	loc := testLocation()

	resource.ParallelTest(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testCheckAzureRMDataLakeAnalyticsFirewallRuleDestroy,
		Steps: []resource.TestStep{
			{
				// Create the rule and confirm both ends of the IP range stuck.
				Config: testAccAzureRMDataLakeAnalyticsFirewallRule_basic(rInt, loc, startIP, endIP),
				Check: resource.ComposeTestCheckFunc(
					testCheckAzureRMDataLakeAnalyticsFirewallRuleExists(resName),
					resource.TestCheckResourceAttr(resName, "start_ip_address", startIP),
					resource.TestCheckResourceAttr(resName, "end_ip_address", endIP),
				),
			},
			{
				// Re-declaring the same rule must fail with the import error.
				Config:      testAccAzureRMDataLakeAnalyticsFirewallRule_requiresImport(rInt, loc, startIP, endIP),
				ExpectError: testRequiresImportError("azurerm_data_lake_analytics_firewall_rule"),
			},
		},
	})
}

func TestAccAzureRMDataLakeAnalyticsFirewallRule_update(t *testing.T) {
resourceName := "azurerm_data_lake_analytics_firewall_rule.test"
ri := acctest.RandInt()
Expand Down Expand Up @@ -185,3 +218,18 @@ resource "azurerm_data_lake_analytics_firewall_rule" "test" {
}
`, rInt, location, strconv.Itoa(rInt)[0:10], startIP, endIP)
}

// testAccAzureRMDataLakeAnalyticsFirewallRule_requiresImport extends the basic
// configuration with an "import" copy of the firewall rule, every argument
// interpolated from the "test" resource, for the requires-import test.
func testAccAzureRMDataLakeAnalyticsFirewallRule_requiresImport(rInt int, location, startIP, endIP string) string {
	base := testAccAzureRMDataLakeAnalyticsFirewallRule_basic(rInt, location, startIP, endIP)
	duplicate := `
resource "azurerm_data_lake_analytics_firewall_rule" "import" {
  name                = "${azurerm_data_lake_analytics_firewall_rule.test.name}"
  account_name        = "${azurerm_data_lake_analytics_firewall_rule.test.account_name}"
  resource_group_name = "${azurerm_data_lake_analytics_firewall_rule.test.resource_group_name}"
  start_ip_address    = "${azurerm_data_lake_analytics_firewall_rule.test.start_ip_address}"
  end_ip_address      = "${azurerm_data_lake_analytics_firewall_rule.test.end_ip_address}"
}
`
	// Equivalent to fmt.Sprintf("\n%s\n" + duplicate, base).
	return "\n" + base + "\n" + duplicate
}
17 changes: 16 additions & 1 deletion azurerm/resource_arm_data_lake_store.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import (
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/response"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)

Expand Down Expand Up @@ -112,8 +113,22 @@ func resourceArmDateLakeStoreCreate(d *schema.ResourceData, meta interface{}) er
ctx := meta.(*ArmClient).StopContext

name := d.Get("name").(string)
location := azureRMNormalizeLocation(d.Get("location").(string))
resourceGroup := d.Get("resource_group_name").(string)

if requireResourcesToBeImported && d.IsNewResource() {
existing, err := client.Get(ctx, resourceGroup, name)
if err != nil {
if !utils.ResponseWasNotFound(existing.Response) {
return fmt.Errorf("Error checking for presence of existing Data Lake Store %q (Resource Group %q): %s", name, resourceGroup, err)
}
}

if existing.ID != nil && *existing.ID != "" {
return tf.ImportAsExistsError("azurerm_data_lake_store", *existing.ID)
}
}

location := azureRMNormalizeLocation(d.Get("location").(string))
tier := d.Get("tier").(string)

encryptionState := account.EncryptionState(d.Get("encryption_state").(string))
Expand Down
19 changes: 17 additions & 2 deletions azurerm/resource_arm_data_lake_store_file.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ import (
"github.com/Azure/azure-sdk-for-go/services/datalake/store/2016-11-01/filesystem"
"github.com/hashicorp/terraform/helper/schema"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/response"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)

Expand Down Expand Up @@ -59,6 +60,22 @@ func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{}
remoteFilePath := d.Get("remote_file_path").(string)
localFilePath := d.Get("local_file_path").(string)

// example.azuredatalakestore.net/test/example.txt
id := fmt.Sprintf("%s.%s%s", accountName, client.AdlsFileSystemDNSSuffix, remoteFilePath)

if requireResourcesToBeImported {
existing, err := client.GetFileStatus(ctx, accountName, remoteFilePath, utils.Bool(true))
if err != nil {
if !utils.ResponseWasNotFound(existing.Response) {
return fmt.Errorf("Error checking for presence of existing Data Lake Store File %q (Account %q): %s", remoteFilePath, accountName, err)
}
}

if existing.FileStatus != nil && existing.FileStatus.ModificationTime != nil {
return tf.ImportAsExistsError("azurerm_data_lake_store_file", id)
}
}

file, err := os.Open(localFilePath)
if err != nil {
return fmt.Errorf("error opening file %q: %+v", localFilePath, err)
Expand All @@ -76,8 +93,6 @@ func resourceArmDataLakeStoreFileCreate(d *schema.ResourceData, meta interface{}
return fmt.Errorf("Error issuing create request for Data Lake Store File %q : %+v", remoteFilePath, err)
}

// example.azuredatalakestore.net/test/example.txt
id := fmt.Sprintf("%s.%s%s", accountName, client.AdlsFileSystemDNSSuffix, remoteFilePath)
d.SetId(id)
return resourceArmDataLakeStoreFileRead(d, meta)
}
Expand Down
48 changes: 46 additions & 2 deletions azurerm/resource_arm_data_lake_store_file_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,37 @@ func TestAccAzureRMDataLakeStoreFile_basic(t *testing.T) {
})
}

// TestAccAzureRMDataLakeStoreFile_requiresimport uploads a Data Lake Store file
// and then applies a config declaring the same file again, expecting the
// provider's "requires import" error. Skipped unless import enforcement is on.
//
// NOTE(review): sibling tests spell the suffix "_requiresImport" (capital I);
// consider renaming for consistency — left unchanged here to avoid altering
// the test's name.
func TestAccAzureRMDataLakeStoreFile_requiresimport(t *testing.T) {
	if !requireResourcesToBeImported {
		t.Skip("Skipping since resources aren't required to be imported")
		return
	}

	resName := "azurerm_data_lake_store_file.test"

	rInt := acctest.RandInt()
	rStr := acctest.RandString(4)
	loc := testLocation()

	// Step 1 uploads the file; step 2 re-declares it and must fail with the
	// import-as-exists error.
	steps := []resource.TestStep{
		{
			Config: testAccAzureRMDataLakeStoreFile_basic(rInt, rStr, loc),
			Check: resource.ComposeTestCheckFunc(
				testCheckAzureRMDataLakeStoreFileExists(resName),
			),
		},
		{
			Config:      testAccAzureRMDataLakeStoreFile_requiresImport(rInt, rStr, loc),
			ExpectError: testRequiresImportError("azurerm_data_lake_store_file"),
		},
	}

	resource.ParallelTest(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testCheckAzureRMDataLakeStoreFileDestroy,
		Steps:        steps,
	})
}

func testCheckAzureRMDataLakeStoreFileExists(name string) resource.TestCheckFunc {
return func(s *terraform.State) error {
// Ensure we have enough information in state to look up in API
Expand Down Expand Up @@ -92,7 +123,7 @@ func testCheckAzureRMDataLakeStoreFileDestroy(s *terraform.State) error {
return nil
}

func testAccAzureRMDataLakeStoreFile_basic(rInt int, rs, location string) string {
func testAccAzureRMDataLakeStoreFile_basic(rInt int, rString, location string) string {
return fmt.Sprintf(`
resource "azurerm_resource_group" "test" {
name = "acctestRG-%d"
Expand All @@ -111,5 +142,18 @@ resource "azurerm_data_lake_store_file" "test" {
account_name = "${azurerm_data_lake_store.test.name}"
local_file_path = "./testdata/application_gateway_test.cer"
}
`, rInt, location, rs, location)
`, rInt, location, rString, location)
}

// testAccAzureRMDataLakeStoreFile_requiresImport appends an "import" copy of
// the Data Lake Store file to the basic configuration, for the requires-import
// acceptance test. The duplicate interpolates its arguments from the "test"
// resource so both blocks always describe the same remote file.
func testAccAzureRMDataLakeStoreFile_requiresImport(rInt int, rString, location string) string {
	template := testAccAzureRMDataLakeStoreFile_basic(rInt, rString, location)
	// BUG FIX: account_name previously interpolated
	// "${azurerm_data_lake_store_file.test.name}", but the
	// azurerm_data_lake_store_file resource has no "name" attribute (its
	// schema is remote_file_path / account_name / local_file_path, as used by
	// the basic config above), so the reference could never resolve. Use the
	// resource's account_name attribute instead.
	return fmt.Sprintf(`
%s

resource "azurerm_data_lake_store_file" "import" {
  remote_file_path = "${azurerm_data_lake_store_file.test.remote_file_path}"
  account_name     = "${azurerm_data_lake_store_file.test.account_name}"
  local_file_path  = "./testdata/application_gateway_test.cer"
}
`, template)
}
Loading