Ensure oneOf condition is honored when expanding the job configs (#12378) (#20453)

[upstream:4d9d705df98f4e4ccc701178ff5e23ac5d8df011]

Signed-off-by: Modular Magician <[email protected]>
modular-magician authored Nov 22, 2024
1 parent 7cec0be commit 4aa048f
Showing 3 changed files with 79 additions and 25 deletions.
.changelog/12378.txt: 3 additions, 0 deletions

````diff
@@ -0,0 +1,3 @@
+```release-note:bug
+dataproc: ensured oneOf condition is honored when expanding the job configuration for Hive, Pig, Spark-sql, and Presto in `google_dataproc_job`.
+```
````
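All four expand functions in the diff below apply the same guard: a value read from the Terraform config map is copied onto the API struct only when it is non-empty, so the unset branch of the `query_file_uri`/`query_list` oneOf pair is never populated. Here is a minimal, self-contained sketch of that pattern; the types and the conversion loop are simplified stand-ins for the provider's `dataproc` structs and `tpgresource.ConvertStringArr`, not the actual provider code:

```go
package main

import "fmt"

// Simplified stand-ins for the dataproc API structs touched by the diff.
type QueryList struct {
	Queries []string
}

type HiveJob struct {
	QueryFileUri string
	QueryList    *QueryList
}

// expandHiveJob mirrors the guarded pattern from the commit: each oneOf
// branch is set only when its config value is non-empty.
func expandHiveJob(config map[string]interface{}) *HiveJob {
	job := &HiveJob{}
	if v, ok := config["query_file_uri"]; ok {
		if uri := v.(string); len(uri) != 0 {
			job.QueryFileUri = uri
		}
	}
	if v, ok := config["query_list"]; ok {
		if list := v.([]interface{}); len(list) != 0 {
			queries := make([]string, len(list))
			for i, q := range list {
				queries[i] = q.(string)
			}
			job.QueryList = &QueryList{Queries: queries}
		}
	}
	return job
}

func main() {
	// Both keys are present in the map (as a Terraform schema delivers
	// them), but only the non-empty branch lands on the struct.
	job := expandHiveJob(map[string]interface{}{
		"query_file_uri": "",
		"query_list":     []interface{}{"SELECT 1"},
	})
	fmt.Printf("uri=%q queries=%v\n", job.QueryFileUri, job.QueryList.Queries)
}
```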
google/services/dataproc/resource_dataproc_job.go: 41 additions, 17 deletions

```diff
@@ -931,11 +931,17 @@ func flattenHiveJob(job *dataproc.HiveJob) []map[string]interface{} {
 func expandHiveJob(config map[string]interface{}) *dataproc.HiveJob {
 	job := &dataproc.HiveJob{}
 	if v, ok := config["query_file_uri"]; ok {
-		job.QueryFileUri = v.(string)
+		queryFileUri := v.(string)
+		if len(queryFileUri) != 0 {
+			job.QueryFileUri = v.(string)
+		}
 	}
 	if v, ok := config["query_list"]; ok {
-		job.QueryList = &dataproc.QueryList{
-			Queries: tpgresource.ConvertStringArr(v.([]interface{})),
+		queryList := v.([]interface{})
+		if len(queryList) != 0 {
+			job.QueryList = &dataproc.QueryList{
+				Queries: tpgresource.ConvertStringArr(queryList),
+			}
 		}
 	}
 	if v, ok := config["continue_on_failure"]; ok {
@@ -1039,11 +1045,17 @@ func flattenPigJob(job *dataproc.PigJob) []map[string]interface{} {
 func expandPigJob(config map[string]interface{}) *dataproc.PigJob {
 	job := &dataproc.PigJob{}
 	if v, ok := config["query_file_uri"]; ok {
-		job.QueryFileUri = v.(string)
+		queryFileUri := v.(string)
+		if len(queryFileUri) != 0 {
+			job.QueryFileUri = v.(string)
+		}
 	}
 	if v, ok := config["query_list"]; ok {
-		job.QueryList = &dataproc.QueryList{
-			Queries: tpgresource.ConvertStringArr(v.([]interface{})),
+		queryList := v.([]interface{})
+		if len(queryList) != 0 {
+			job.QueryList = &dataproc.QueryList{
+				Queries: tpgresource.ConvertStringArr(queryList),
+			}
 		}
 	}
 	if v, ok := config["continue_on_failure"]; ok {
@@ -1140,11 +1152,17 @@ func flattenSparkSqlJob(job *dataproc.SparkSqlJob) []map[string]interface{} {
 func expandSparkSqlJob(config map[string]interface{}) *dataproc.SparkSqlJob {
 	job := &dataproc.SparkSqlJob{}
 	if v, ok := config["query_file_uri"]; ok {
-		job.QueryFileUri = v.(string)
+		queryFileUri := v.(string)
+		if len(queryFileUri) != 0 {
+			job.QueryFileUri = v.(string)
+		}
 	}
 	if v, ok := config["query_list"]; ok {
-		job.QueryList = &dataproc.QueryList{
-			Queries: tpgresource.ConvertStringArr(v.([]interface{})),
+		queryList := v.([]interface{})
+		if len(queryList) != 0 {
+			job.QueryList = &dataproc.QueryList{
+				Queries: tpgresource.ConvertStringArr(queryList),
+			}
 		}
 	}
 	if v, ok := config["script_variables"]; ok {
@@ -1241,20 +1259,26 @@ func flattenPrestoJob(job *dataproc.PrestoJob) []map[string]interface{} {
 
 func expandPrestoJob(config map[string]interface{}) *dataproc.PrestoJob {
 	job := &dataproc.PrestoJob{}
+	if v, ok := config["query_file_uri"]; ok {
+		queryFileUri := v.(string)
+		if len(queryFileUri) != 0 {
+			job.QueryFileUri = v.(string)
+		}
+	}
+	if v, ok := config["query_list"]; ok {
+		queryList := v.([]interface{})
+		if len(queryList) != 0 {
+			job.QueryList = &dataproc.QueryList{
+				Queries: tpgresource.ConvertStringArr(queryList),
+			}
+		}
+	}
 	if v, ok := config["client_tags"]; ok {
 		job.ClientTags = tpgresource.ConvertStringArr(v.([]interface{}))
 	}
 	if v, ok := config["continue_on_failure"]; ok {
 		job.ContinueOnFailure = v.(bool)
 	}
-	if v, ok := config["query_file_uri"]; ok {
-		job.QueryFileUri = v.(string)
-	}
-	if v, ok := config["query_list"]; ok {
-		job.QueryList = &dataproc.QueryList{
-			Queries: tpgresource.ConvertStringArr(v.([]interface{})),
-		}
-	}
 	if v, ok := config["properties"]; ok {
 		job.Properties = tpgresource.ConvertStringMap(v.(map[string]interface{}))
 	}
```
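Why leaving the empty branch nil matters: assuming the generated API structs use `omitempty` JSON tags, as Google API Go clients generally do, an empty `QueryFileUri` string is already dropped from the request body, but a non-nil `*QueryList` pointing at an empty value still serializes as `{}`, so a request could carry both oneOf branches at once. A small illustration with plain `encoding/json` and simplified stand-in types (the `gs://bucket/query.sql` URI is hypothetical, not from the provider):

```go
package main

import (
	"encoding/json"
	"fmt"
)

type QueryList struct {
	Queries []string `json:"queries,omitempty"`
}

type PrestoJob struct {
	QueryFileUri string     `json:"queryFileUri,omitempty"`
	QueryList    *QueryList `json:"queryList,omitempty"`
}

func main() {
	// Before the fix: the query_list branch was always expanded, leaving
	// a non-nil pointer that omitempty does not drop.
	before, _ := json.Marshal(&PrestoJob{
		QueryFileUri: "gs://bucket/query.sql",
		QueryList:    &QueryList{},
	})
	fmt.Println(string(before)) // {"queryFileUri":"gs://bucket/query.sql","queryList":{}}

	// After the fix: the empty branch stays nil and disappears from the
	// request body, so only one oneOf branch is sent.
	after, _ := json.Marshal(&PrestoJob{QueryFileUri: "gs://bucket/query.sql"})
	fmt.Println(string(after)) // {"queryFileUri":"gs://bucket/query.sql"}
}
```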
google/services/dataproc/resource_dataproc_job_test.go: 35 additions, 8 deletions

```diff
@@ -265,22 +265,17 @@ func TestAccDataprocJob_Pig(t *testing.T) {
 	})
 }
 
-func TestAccDataprocJob_SparkSql(t *testing.T) {
+func testAccDataprocJobSparkSql(t *testing.T, config string) {
 	t.Parallel()
 
 	var job dataproc.Job
-	rnd := acctest.RandString(t, 10)
-	networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster")
-	subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName)
-	acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName)
-
 	acctest.VcrTest(t, resource.TestCase{
 		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
 		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
 		CheckDestroy:             testAccCheckDataprocJobDestroyProducer(t),
 		Steps: []resource.TestStep{
 			{
-				Config: testAccDataprocJob_sparksql(rnd, subnetworkName),
+				Config: config,
 				Check: resource.ComposeTestCheckFunc(
 					testAccCheckDataprocJobExists(t, "google_dataproc_job.sparksql", &job),
 
@@ -301,6 +296,20 @@ func TestAccDataprocJob_SparkSql(t *testing.T) {
 	})
 }
 
+func TestAccDataprocJob_SparkSql_QueryList(t *testing.T) {
+	rnd := acctest.RandString(t, 10)
+	networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster")
+	subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName)
+	testAccDataprocJobSparkSql(t, testAccDataprocJob_SparkSql_QueryList(rnd, subnetworkName))
+}
+
+func TestAccDataprocJob_SparkSql_QueryFile(t *testing.T) {
+	rnd := acctest.RandString(t, 10)
+	networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster")
+	subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName)
+	testAccDataprocJobSparkSql(t, testAccDataprocJob_SparkSql_QueryFile(rnd, subnetworkName))
+}
+
 func TestAccDataprocJob_Presto(t *testing.T) {
 	t.Parallel()
 
@@ -833,7 +842,7 @@ resource "google_dataproc_job" "pig" {
 
 }
 
-func testAccDataprocJob_sparksql(rnd, subnetworkName string) string {
+func testAccDataprocJob_SparkSql_QueryList(rnd, subnetworkName string) string {
 	return fmt.Sprintf(
 		singleNodeClusterConfig+`
 resource "google_dataproc_job" "sparksql" {
@@ -855,6 +864,24 @@ resource "google_dataproc_job" "sparksql" {
 
 }
 
+func testAccDataprocJob_SparkSql_QueryFile(rnd, subnetworkName string) string {
+	return fmt.Sprintf(
+		singleNodeClusterConfig+`
+resource "google_dataproc_job" "sparksql" {
+  region       = google_dataproc_cluster.basic.region
+  force_delete = true
+  placement {
+    cluster_name = google_dataproc_cluster.basic.name
+  }
+
+  sparksql_config {
+    query_file_uri = "gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/spark-sql/natality/cigarette_correlations.sql"
+  }
+}
+`, rnd, subnetworkName)
+
+}
+
 func testAccDataprocJob_presto(rnd, subnetworkName string) string {
 	return fmt.Sprintf(`
 resource "google_dataproc_cluster" "basic" {
```
