[SPARK-20465][CORE] Throws a proper exception when any temp directory could not be got

## What changes were proposed in this pull request?

This PR proposes to throw an exception with a better message, rather than an `ArrayIndexOutOfBoundsException`, when no temp directory can be created. Previously, `Utils.getLocalDir` indexed the first element of the resolved local-directories array with `(0)`, which fails with `ArrayIndexOutOfBoundsException: 0` when every configured directory is unusable and the array is empty.

Running the commands below:

```bash
./bin/spark-shell --conf spark.local.dir=/NONEXISTENT_DIR_ONE,/NONEXISTENT_DIR_TWO
```

produces the following:

**Before**

```
Exception in thread "main" java.lang.ExceptionInInitializerError
        ...
Caused by: java.lang.ArrayIndexOutOfBoundsException: 0
        ...
```

**After**

```
Exception in thread "main" java.lang.ExceptionInInitializerError
        ...
Caused by: java.io.IOException: Failed to get a temp directory under [/NONEXISTENT_DIR_ONE,/NONEXISTENT_DIR_TWO].
        ...
```
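
To illustrate the mechanics, here is a minimal standalone sketch (hypothetical names such as `LocalDirSketch` and `firstDirOrFail`; not Spark's actual code). Indexing an empty `Array` with `(0)` is what produced the `ArrayIndexOutOfBoundsException`, while `headOption.getOrElse` turns the empty case into a descriptive `IOException`:

```scala
import java.io.IOException
import scala.util.Try

// A minimal sketch, not Spark code: `resolved` stands in for the result of
// getOrCreateLocalRootDirs(conf) when every configured directory is unusable.
object LocalDirSketch {
  def firstDirOrFail(resolved: Array[String], configured: Seq[String]): String =
    resolved.headOption.getOrElse {
      // Same shape as the fix: fail with a message that names the configured paths.
      throw new IOException(
        s"Failed to get a temp directory under [${configured.mkString(",")}].")
    }

  def main(args: Array[String]): Unit = {
    val configured = Seq("/NONEXISTENT_DIR_ONE", "/NONEXISTENT_DIR_TWO")
    val resolved = Array.empty[String]
    // resolved(0) would throw ArrayIndexOutOfBoundsException: 0 here;
    // firstDirOrFail fails with the descriptive IOException instead.
    println(Try(firstDirOrFail(resolved, configured)))
    // Prints: Failure(java.io.IOException: Failed to get a temp directory
    // under [/NONEXISTENT_DIR_ONE,/NONEXISTENT_DIR_TWO].)
  }
}
```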

## How was this patch tested?

Unit tests in `LocalDirsSuite.scala`.
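
The suite can also be run on its own from a Spark checkout, for example with `./build/sbt "core/testOnly org.apache.spark.storage.LocalDirsSuite"` (an illustrative invocation; the exact command depends on the local build setup).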

Author: hyukjinkwon <[email protected]>

Closes #17768 from HyukjinKwon/throws-temp-dir-exception.
HyukjinKwon authored and srowen committed Apr 28, 2017
1 parent 59e3a56 commit 8c911ad
Showing 2 changed files with 25 additions and 4 deletions.
**core/src/main/scala/org/apache/spark/util/Utils.scala** (6 changes: 5 additions & 1 deletion)

```diff
@@ -740,7 +740,11 @@ private[spark] object Utils extends Logging {
    * always return a single directory.
    */
   def getLocalDir(conf: SparkConf): String = {
-    getOrCreateLocalRootDirs(conf)(0)
+    getOrCreateLocalRootDirs(conf).headOption.getOrElse {
+      val configuredLocalDirs = getConfiguredLocalDirs(conf)
+      throw new IOException(
+        s"Failed to get a temp directory under [${configuredLocalDirs.mkString(",")}].")
+    }
   }
 
   private[spark] def isRunningInYarnContainer(conf: SparkConf): Boolean = {
```
**core/src/test/scala/org/apache/spark/storage/LocalDirsSuite.scala** (23 changes: 20 additions & 3 deletions)

```diff
@@ -17,7 +17,7 @@
 
 package org.apache.spark.storage
 
-import java.io.File
+import java.io.{File, IOException}
 
 import org.scalatest.BeforeAndAfter
 
@@ -33,22 +33,39 @@ class LocalDirsSuite extends SparkFunSuite with BeforeAndAfter {
     Utils.clearLocalRootDirs()
   }
 
+  after {
+    Utils.clearLocalRootDirs()
+  }
+
   test("Utils.getLocalDir() returns a valid directory, even if some local dirs are missing") {
     // Regression test for SPARK-2974
-    assert(!new File("/NONEXISTENT_DIR").exists())
+    assert(!new File("/NONEXISTENT_PATH").exists())
     val conf = new SparkConf(false)
       .set("spark.local.dir", s"/NONEXISTENT_PATH,${System.getProperty("java.io.tmpdir")}")
     assert(new File(Utils.getLocalDir(conf)).exists())
   }
 
   test("SPARK_LOCAL_DIRS override also affects driver") {
     // Regression test for SPARK-2975
-    assert(!new File("/NONEXISTENT_DIR").exists())
+    assert(!new File("/NONEXISTENT_PATH").exists())
     // spark.local.dir only contains invalid directories, but that's not a problem since
     // SPARK_LOCAL_DIRS will override it on both the driver and workers:
     val conf = new SparkConfWithEnv(Map("SPARK_LOCAL_DIRS" -> System.getProperty("java.io.tmpdir")))
       .set("spark.local.dir", "/NONEXISTENT_PATH")
     assert(new File(Utils.getLocalDir(conf)).exists())
   }
 
+  test("Utils.getLocalDir() throws an exception if any temporary directory cannot be retrieved") {
+    val path1 = "/NONEXISTENT_PATH_ONE"
+    val path2 = "/NONEXISTENT_PATH_TWO"
+    assert(!new File(path1).exists())
+    assert(!new File(path2).exists())
+    val conf = new SparkConf(false).set("spark.local.dir", s"$path1,$path2")
+    val message = intercept[IOException] {
+      Utils.getLocalDir(conf)
+    }.getMessage
+    // If any temporary directory could not be retrieved under the given paths above, it should
+    // throw an exception with the message that includes the paths.
+    assert(message.contains(s"$path1,$path2"))
+  }
 }
```
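
A note on the test setup: `Utils` caches the resolved local root directories, so the suite clears that cache in `before` and, with this change, also in the new `after` hook (`Utils.clearLocalRootDirs()`). Otherwise a valid directory resolved and cached by an earlier test could leak into, and mask, the failure path exercised by the new regression test.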
