diff --git a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
index 94bd5d9178929..5d10a1f84493c 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
@@ -497,23 +497,11 @@ class ExternalAppendOnlyMap[K, V, C](
     private def cleanup() {
       batchIndex = batchOffsets.length // Prevent reading any other batch
       val ds = deserializeStream
-      val fs = fileStream
       deserializeStream = null
       fileStream = null
-
-      if (ds != null) {
-        try {
-          ds.close()
-        } catch {
-          case e: IOException =>
-            // Make sure we at least close the file handle
-            if (fs != null) {
-              try { fs.close() } catch { case e2: IOException => }
-            }
-            throw e
-        }
-      }
-
+      if (ds != null) {
+        ds.close()
+      }
       file.delete()
     }
   }
diff --git a/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala b/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
index c755098f27948..b04c50bd3e196 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
@@ -618,23 +618,11 @@ private[spark] class ExternalSorter[K, V, C](
     def cleanup() {
       batchId = batchOffsets.length // Prevent reading any other batch
       val ds = deserializeStream
-      val fs = fileStream
       deserializeStream = null
       fileStream = null
-
-      if (ds != null) {
-        try {
-          ds.close()
-        } catch {
-          case e: IOException =>
-            // Make sure we at least close the file handle
-            if (fs != null) {
-              try { fs.close() } catch { case e2: IOException => }
-            }
-            throw e
-        }
-      }
-
+      if (ds != null) {
+        ds.close()
+      }
       // NOTE: We don't do file.delete() here because that is done in ExternalSorter.stop().
       // This should also be fixed in ExternalAppendOnlyMap.
     }