Skip to content
This repository has been archived by the owner on Sep 18, 2023. It is now read-only.

[NSE-481] JVM heap memory leak on memory leak tracker facilities (Arrow Allocator) #489

Merged
merged 2 commits into from
Aug 25, 2021
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ import scala.collection.JavaConverters._
import com.intel.oap.spark.sql.execution.datasources.v2.arrow._
import org.apache.arrow.dataset.jni.NativeMemoryPool
import org.apache.arrow.memory.BufferAllocator
import org.apache.arrow.memory.RootAllocator

import org.apache.spark.TaskContext
import org.apache.spark.internal.Logging
Expand All @@ -40,15 +41,12 @@ object SparkMemoryUtils extends Logging {
}

val sharedMetrics = new NativeSQLMemoryMetrics()
val defaultAllocator: BufferAllocator = {

val globalAlloc = globalAllocator()
val defaultAllocator: BufferAllocator = {
val al = new SparkManagedAllocationListener(
new NativeSQLMemoryConsumer(getTaskMemoryManager(), Spiller.NO_OP),
sharedMetrics)
val parent = globalAlloc
parent.newChildAllocator("Spark Managed Allocator - " +
UUID.randomUUID().toString, al, 0, parent.getLimit)
new RootAllocator(al, Long.MaxValue)
}

val defaultMemoryPool: NativeMemoryPoolWrapper = {
Expand Down Expand Up @@ -77,7 +75,7 @@ object SparkMemoryUtils extends Logging {
val al = new SparkManagedAllocationListener(
new NativeSQLMemoryConsumer(getTaskMemoryManager(), spiller),
sharedMetrics)
val parent = globalAllocator()
val parent = defaultAllocator
val alloc = parent.newChildAllocator("Spark Managed Allocator - " +
UUID.randomUUID().toString, al, 0, parent.getLimit).asInstanceOf[BufferAllocator]
allocators.add(alloc)
Expand Down Expand Up @@ -121,7 +119,8 @@ object SparkMemoryUtils extends Logging {
}

def release(): Unit = {
for (allocator <- allocators.asScala) {
for (allocator <- allocators.asScala.reverse) {
// iterate in reverse order: close child allocators before their parents
val allocated = allocator.getAllocatedMemory
if (allocated == 0L) {
close(allocator)
Expand Down Expand Up @@ -212,9 +211,8 @@ object SparkMemoryUtils extends Logging {
}

def contextAllocator(): BufferAllocator = {
val globalAlloc = globalAllocator()
if (!inSparkTask()) {
return globalAlloc
return globalAllocator()
}
getTaskMemoryResources().defaultAllocator
}
Expand Down