diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
index 7beeae0334ff..5f3e2b3da736 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
@@ -27,9 +27,9 @@
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
-import org.apache.hadoop.hbase.util.UnsafeAvailChecker;
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
@@ -58,7 +58,8 @@
  */
 @InterfaceAudience.Public
 public class FuzzyRowFilter extends FilterBase {
-  private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
+
+  private static final boolean UNSAFE_UNALIGNED = HBasePlatformDependent.unaligned();
 
   // the wildcard byte is 1 on the user side. but the filter converts it internally
   // in preprocessMask. This was changed in HBASE-15676 due to a bug with using 0.
diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml
index 2ce213131b8f..0ed868628e38 100644
--- a/hbase-common/pom.xml
+++ b/hbase-common/pom.xml
@@ -60,6 +60,10 @@
       <groupId>org.apache.hbase.thirdparty</groupId>
       <artifactId>hbase-shaded-netty</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase.thirdparty</groupId>
+      <artifactId>hbase-unsafe</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBuffAllocator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBuffAllocator.java
index 1ef206438558..711153b77627 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBuffAllocator.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBuffAllocator.java
@@ -29,10 +29,10 @@
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
+import org.apache.hadoop.hbase.util.UnsafeAccess;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import sun.nio.ch.DirectBuffer;
 
 import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
 
@@ -362,11 +362,8 @@ public ByteBuff allocate(int size) {
   public void clean() {
     while (!buffers.isEmpty()) {
       ByteBuffer b = buffers.poll();
-      if (b instanceof DirectBuffer) {
-        DirectBuffer db = (DirectBuffer) b;
-        if (db.cleaner() != null) {
-          db.cleaner().clean();
-        }
+      if (b.isDirect()) {
+        UnsafeAccess.freeDirectBuffer(b);
       }
     }
     this.usedBufCount.set(0);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
index 5945e4c3c819..320696ada703 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
@@ -24,12 +24,11 @@
 import java.nio.channels.FileChannel;
 import java.nio.channels.ReadableByteChannel;
 import org.apache.hadoop.hbase.io.ByteBuffAllocator.Recycler;
+import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.ObjectIntPair;
 import org.apache.hadoop.hbase.util.UnsafeAccess;
-import org.apache.hadoop.hbase.util.UnsafeAvailChecker;
 import org.apache.yetus.audience.InterfaceAudience;
-import sun.nio.ch.DirectBuffer;
 
 /**
  * An implementation of ByteBuff where a single BB backs the BBI. This just acts as a wrapper over a
@@ -38,8 +37,8 @@
 @InterfaceAudience.Private
 public class SingleByteBuff extends ByteBuff {
 
-  private static final boolean UNSAFE_AVAIL = UnsafeAvailChecker.isAvailable();
-  private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
+  private static final boolean UNSAFE_AVAIL = HBasePlatformDependent.isUnsafeAvailable();
+  private static final boolean UNSAFE_UNALIGNED = HBasePlatformDependent.unaligned();
 
   // Underlying BB
   private final ByteBuffer buf;
@@ -63,7 +62,7 @@ public SingleByteBuff(Recycler recycler, ByteBuffer buf) {
       this.unsafeOffset = UnsafeAccess.BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset();
       this.unsafeRef = buf.array();
     } else {
-      this.unsafeOffset = ((DirectBuffer) buf).address();
+      this.unsafeOffset = UnsafeAccess.directBufferAddress(buf);
     }
   }
 
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java
index 95716d661a61..82f699ab7b52 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java
@@ -31,17 +31,16 @@
 import org.apache.hadoop.hbase.io.ByteBufferWriter;
 import org.apache.hadoop.hbase.io.util.StreamUtils;
 import org.apache.hadoop.hbase.nio.ByteBuff;
+import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.yetus.audience.InterfaceAudience;
-import sun.nio.ch.DirectBuffer;
 
 /**
  * Utility functions for working with byte buffers, such as reading/writing variable-length long
  * numbers.
  * @deprecated This class will become IA.Private in HBase 3.0. Downstream folks shouldn't use it.
  */
-@SuppressWarnings("restriction")
 @Deprecated
 @InterfaceAudience.Public
 public final class ByteBufferUtils {
@@ -50,8 +49,8 @@ public final class ByteBufferUtils {
   public final static int NEXT_BIT_SHIFT = 7;
   public final static int NEXT_BIT_MASK = 1 << 7;
   @InterfaceAudience.Private
-  final static boolean UNSAFE_AVAIL = UnsafeAvailChecker.isAvailable();
-  public final static boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
+  final static boolean UNSAFE_AVAIL = HBasePlatformDependent.isUnsafeAvailable();
+  public final static boolean UNSAFE_UNALIGNED = HBasePlatformDependent.unaligned();
 
   private ByteBufferUtils() {
   }
@@ -91,11 +90,10 @@ static class ComparerHolder {
 
     static Comparer getBestComparer() {
       try {
-        Class<?> theClass = Class.forName(UNSAFE_COMPARER_NAME);
+        Class<? extends Comparer> theClass =
+          Class.forName(UNSAFE_COMPARER_NAME).asSubclass(Comparer.class);
 
-        @SuppressWarnings("unchecked")
-        Comparer comparer = (Comparer) theClass.getConstructor().newInstance();
-        return comparer;
+        return theClass.getConstructor().newInstance();
       } catch (Throwable t) { // ensure we really catch *everything*
         return PureJavaComparer.INSTANCE;
       }
@@ -152,7 +150,7 @@ public int compareTo(byte[] buf1, int o1, int l1, ByteBuffer buf2, int o2, int l
         long offset2Adj;
         Object refObj2 = null;
         if (buf2.isDirect()) {
-          offset2Adj = o2 + ((DirectBuffer) buf2).address();
+          offset2Adj = o2 + UnsafeAccess.directBufferAddress(buf2);
         } else {
           offset2Adj = o2 + buf2.arrayOffset() + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET;
           refObj2 = buf2.array();
@@ -166,13 +164,13 @@ public int compareTo(ByteBuffer buf1, int o1, int l1, ByteBuffer buf2, int o2, i
         long offset1Adj, offset2Adj;
         Object refObj1 = null, refObj2 = null;
         if (buf1.isDirect()) {
-          offset1Adj = o1 + ((DirectBuffer) buf1).address();
+          offset1Adj = o1 + UnsafeAccess.directBufferAddress(buf1);
         } else {
           offset1Adj = o1 + buf1.arrayOffset() + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET;
           refObj1 = buf1.array();
         }
         if (buf2.isDirect()) {
-          offset2Adj = o2 + ((DirectBuffer) buf2).address();
+          offset2Adj = o2 + UnsafeAccess.directBufferAddress(buf2);
         } else {
           offset2Adj = o2 + buf2.arrayOffset() + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET;
           refObj2 = buf2.array();
@@ -189,12 +187,11 @@ static class ConverterHolder {
 
     static Converter getBestConverter() {
       try {
-        Class<?> theClass = Class.forName(UNSAFE_CONVERTER_NAME);
+        Class<? extends Converter> theClass =
+          Class.forName(UNSAFE_CONVERTER_NAME).asSubclass(Converter.class);
 
         // yes, UnsafeComparer does implement Comparer<byte[]>
-        @SuppressWarnings("unchecked")
-        Converter converter = (Converter) theClass.getConstructor().newInstance();
-        return converter;
+        return theClass.getConstructor().newInstance();
       } catch (Throwable t) { // ensure we really catch *everything*
         return PureJavaConverter.INSTANCE;
       }
@@ -932,8 +929,8 @@ static int compareToUnsafe(Object obj1, long o1, int l1, Object obj2, long o2, i
      * 64-bit.
      */
     for (i = 0; i < strideLimit; i += stride) {
-      long lw = UnsafeAccess.theUnsafe.getLong(obj1, o1 + (long) i);
-      long rw = UnsafeAccess.theUnsafe.getLong(obj2, o2 + (long) i);
+      long lw = HBasePlatformDependent.getLong(obj1, o1 + (long) i);
+      long rw = HBasePlatformDependent.getLong(obj2, o2 + (long) i);
       if (lw != rw) {
         if (!UnsafeAccess.LITTLE_ENDIAN) {
           return ((lw + Long.MIN_VALUE) < (rw + Long.MIN_VALUE)) ? -1 : 1;
@@ -953,8 +950,8 @@ static int compareToUnsafe(Object obj1, long o1, int l1, Object obj2, long o2, i
 
     // The epilogue to cover the last (minLength % stride) elements.
     for (; i < minLength; i++) {
-      int il = (UnsafeAccess.theUnsafe.getByte(obj1, o1 + i) & 0xFF);
-      int ir = (UnsafeAccess.theUnsafe.getByte(obj2, o2 + i) & 0xFF);
+      int il = (HBasePlatformDependent.getByte(obj1, o1 + i) & 0xFF);
+      int ir = (HBasePlatformDependent.getByte(obj2, o2 + i) & 0xFF);
       if (il != ir) {
         return il - ir;
       }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index ef35f729d325..d405bb57b919 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -42,13 +42,13 @@
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
 import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import sun.misc.Unsafe;
 
 import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;
 
@@ -56,7 +56,6 @@
  * Utility class that handles byte arrays, conversions to/from other types, comparisons, hash code
  * generation, manufacturing keys for HashMaps or HashSets, and can be used as key in maps or trees.
  */
-@SuppressWarnings("restriction")
 @InterfaceAudience.Public
 @edu.umd.cs.findbugs.annotations.SuppressWarnings(
     value = "EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
@@ -127,7 +126,7 @@ public class Bytes implements Comparable<Bytes> {
   public static final int ESTIMATED_HEAP_TAX = 16;
 
   @InterfaceAudience.Private
-  static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
+  static final boolean UNSAFE_UNALIGNED = HBasePlatformDependent.unaligned();
 
   /**
    * Returns length of the byte array, returning 0 if the array is null. Useful for calculating
@@ -1428,22 +1427,18 @@ int putShort(byte[] bytes, int offset, short val) {
 
     protected static final class UnsafeConverter extends Converter {
 
-      static final Unsafe theUnsafe;
-
       public UnsafeConverter() {
       }
 
       static {
-        if (UNSAFE_UNALIGNED) {
-          theUnsafe = UnsafeAccess.theUnsafe;
-        } else {
+        if (!UNSAFE_UNALIGNED) {
           // It doesn't matter what we throw;
           // it's swallowed in getBestComparer().
           throw new Error();
         }
 
         // sanity check - this should never fail
-        if (theUnsafe.arrayIndexScale(byte[].class) != 1) {
+        if (HBasePlatformDependent.arrayIndexScale(byte[].class) != 1) {
           throw new AssertionError();
         }
       }
@@ -1482,7 +1477,7 @@ int putShort(byte[] bytes, int offset, short val) {
 
   /**
    * Provides a lexicographical comparer implementation; either a Java implementation or a faster
-   * implementation based on {@link Unsafe}.
+   * implementation based on {@code Unsafe}.
    * <p>
    * Uses reflection to gracefully fall back to the Java implementation if {@code Unsafe} isn't
    * available.
@@ -1539,18 +1534,15 @@ public int compareTo(byte[] buffer1, int offset1, int length1, byte[] buffer2, i
     enum UnsafeComparer implements Comparer<byte[]> {
       INSTANCE;
 
-      static final Unsafe theUnsafe;
       static {
-        if (UNSAFE_UNALIGNED) {
-          theUnsafe = UnsafeAccess.theUnsafe;
-        } else {
+        if (!UNSAFE_UNALIGNED) {
           // It doesn't matter what we throw;
           // it's swallowed in getBestComparer().
           throw new Error();
         }
 
         // sanity check - this should never fail
-        if (theUnsafe.arrayIndexScale(byte[].class) != 1) {
+        if (HBasePlatformDependent.arrayIndexScale(byte[].class) != 1) {
           throw new AssertionError();
         }
       }
@@ -1585,8 +1577,8 @@ public int compareTo(byte[] buffer1, int offset1, int length1, byte[] buffer2, i
          * than 4 bytes even on 32-bit. On the other hand, it is substantially faster on 64-bit.
          */
         for (i = 0; i < strideLimit; i += stride) {
-          long lw = theUnsafe.getLong(buffer1, offset1Adj + i);
-          long rw = theUnsafe.getLong(buffer2, offset2Adj + i);
+          long lw = HBasePlatformDependent.getLong(buffer1, offset1Adj + i);
+          long rw = HBasePlatformDependent.getLong(buffer2, offset2Adj + i);
           if (lw != rw) {
             if (!UnsafeAccess.LITTLE_ENDIAN) {
               return ((lw + Long.MIN_VALUE) < (rw + Long.MIN_VALUE)) ? -1 : 1;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
index 7371b22d95d2..6ac3edbc2d77 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
@@ -21,6 +21,7 @@
 import java.lang.reflect.Modifier;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentSkipListMap;
+import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -178,13 +179,19 @@ private static final class HeaderSize {
       private byte a;
     }
 
+    private static final int ARRAY_OBJECT_INDEX_SCALE =
+      HBasePlatformDependent.arrayIndexScale(Object[].class);
+
+    private static final int ARRAY_BYTE_INDEX_SCALE =
+      HBasePlatformDependent.arrayIndexScale(byte[].class);
+
     public UnsafeLayout() {
     }
 
     @Override
     int headerSize() {
       try {
-        return (int) UnsafeAccess.theUnsafe
+        return (int) HBasePlatformDependent
           .objectFieldOffset(HeaderSize.class.getDeclaredField("a"));
       } catch (NoSuchFieldException | SecurityException e) {
         LOG.error(e.toString(), e);
@@ -194,21 +201,19 @@ int headerSize() {
 
     @Override
     int arrayHeaderSize() {
-      return UnsafeAccess.theUnsafe.arrayBaseOffset(byte[].class);
+      return HBasePlatformDependent.arrayBaseOffset(byte[].class);
     }
 
     @Override
-    @SuppressWarnings("static-access")
     int oopSize() {
       // Unsafe.addressSize() returns 8, even with CompressedOops. This is how many bytes each
       // element is allocated in an Object[].
-      return UnsafeAccess.theUnsafe.ARRAY_OBJECT_INDEX_SCALE;
+      return ARRAY_OBJECT_INDEX_SCALE;
     }
 
     @Override
-    @SuppressWarnings("static-access")
     long sizeOfByteArray(int len) {
-      return align(ARRAY + len * UnsafeAccess.theUnsafe.ARRAY_BYTE_INDEX_SCALE);
+      return align(ARRAY + len * ARRAY_BYTE_INDEX_SCALE);
     }
   }
 
@@ -216,7 +221,10 @@ private static MemoryLayout getMemoryLayout() {
     // Have a safeguard in case Unsafe estimate is wrong. This is static context, there is
     // no configuration, so we look at System property.
     String enabled = System.getProperty("hbase.memorylayout.use.unsafe");
-    if (UnsafeAvailChecker.isAvailable() && (enabled == null || Boolean.parseBoolean(enabled))) {
+    if (
+      HBasePlatformDependent.isUnsafeAvailable()
+        && (enabled == null || Boolean.parseBoolean(enabled))
+    ) {
       LOG.debug("Using Unsafe to estimate memory layout");
       return new UnsafeLayout();
     }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
index 8e69b4ba5b69..40bd7baa4b24 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
@@ -17,26 +17,18 @@
  */
 package org.apache.hadoop.hbase.util;
 
-import java.lang.reflect.Field;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
-import java.security.AccessController;
-import java.security.PrivilegedAction;
+import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import sun.misc.Unsafe;
-import sun.nio.ch.DirectBuffer;
+
+import org.apache.hbase.thirdparty.io.netty.util.internal.PlatformDependent;
 
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public final class UnsafeAccess {
 
-  private static final Logger LOG = LoggerFactory.getLogger(UnsafeAccess.class);
-
-  public static final Unsafe theUnsafe;
-
   /** The offset to the first element in a byte array. */
   public static final long BYTE_ARRAY_BASE_OFFSET;
 
@@ -48,22 +40,8 @@ public final class UnsafeAccess {
   // during a large copy
   static final long UNSAFE_COPY_THRESHOLD = 1024L * 1024L;
   static {
-    theUnsafe = (Unsafe) AccessController.doPrivileged(new PrivilegedAction<Object>() {
-      @Override
-      public Object run() {
-        try {
-          Field f = Unsafe.class.getDeclaredField("theUnsafe");
-          f.setAccessible(true);
-          return f.get(null);
-        } catch (Throwable e) {
-          LOG.warn("sun.misc.Unsafe is not accessible", e);
-        }
-        return null;
-      }
-    });
-
-    if (theUnsafe != null) {
-      BYTE_ARRAY_BASE_OFFSET = theUnsafe.arrayBaseOffset(byte[].class);
+    if (HBasePlatformDependent.isUnsafeAvailable()) {
+      BYTE_ARRAY_BASE_OFFSET = HBasePlatformDependent.arrayBaseOffset(byte[].class);
     } else {
       BYTE_ARRAY_BASE_OFFSET = -1;
     }
@@ -81,9 +59,10 @@ private UnsafeAccess() {
    */
   public static short toShort(byte[] bytes, int offset) {
     if (LITTLE_ENDIAN) {
-      return Short.reverseBytes(theUnsafe.getShort(bytes, offset + BYTE_ARRAY_BASE_OFFSET));
+      return Short
+        .reverseBytes(HBasePlatformDependent.getShort(bytes, offset + BYTE_ARRAY_BASE_OFFSET));
     } else {
-      return theUnsafe.getShort(bytes, offset + BYTE_ARRAY_BASE_OFFSET);
+      return HBasePlatformDependent.getShort(bytes, offset + BYTE_ARRAY_BASE_OFFSET);
     }
   }
 
@@ -95,9 +74,10 @@ public static short toShort(byte[] bytes, int offset) {
    */
   public static int toInt(byte[] bytes, int offset) {
     if (LITTLE_ENDIAN) {
-      return Integer.reverseBytes(theUnsafe.getInt(bytes, offset + BYTE_ARRAY_BASE_OFFSET));
+      return Integer
+        .reverseBytes(HBasePlatformDependent.getInt(bytes, offset + BYTE_ARRAY_BASE_OFFSET));
     } else {
-      return theUnsafe.getInt(bytes, offset + BYTE_ARRAY_BASE_OFFSET);
+      return HBasePlatformDependent.getInt(bytes, offset + BYTE_ARRAY_BASE_OFFSET);
     }
   }
 
@@ -109,9 +89,10 @@ public static int toInt(byte[] bytes, int offset) {
    */
   public static long toLong(byte[] bytes, int offset) {
     if (LITTLE_ENDIAN) {
-      return Long.reverseBytes(theUnsafe.getLong(bytes, offset + BYTE_ARRAY_BASE_OFFSET));
+      return Long
+        .reverseBytes(HBasePlatformDependent.getLong(bytes, offset + BYTE_ARRAY_BASE_OFFSET));
     } else {
-      return theUnsafe.getLong(bytes, offset + BYTE_ARRAY_BASE_OFFSET);
+      return HBasePlatformDependent.getLong(bytes, offset + BYTE_ARRAY_BASE_OFFSET);
     }
   }
 
@@ -127,7 +108,7 @@ public static int putShort(byte[] bytes, int offset, short val) {
     if (LITTLE_ENDIAN) {
       val = Short.reverseBytes(val);
     }
-    theUnsafe.putShort(bytes, offset + BYTE_ARRAY_BASE_OFFSET, val);
+    HBasePlatformDependent.putShort(bytes, offset + BYTE_ARRAY_BASE_OFFSET, val);
     return offset + Bytes.SIZEOF_SHORT;
   }
 
@@ -142,7 +123,7 @@ public static int putInt(byte[] bytes, int offset, int val) {
     if (LITTLE_ENDIAN) {
       val = Integer.reverseBytes(val);
     }
-    theUnsafe.putInt(bytes, offset + BYTE_ARRAY_BASE_OFFSET, val);
+    HBasePlatformDependent.putInt(bytes, offset + BYTE_ARRAY_BASE_OFFSET, val);
     return offset + Bytes.SIZEOF_INT;
   }
 
@@ -157,14 +138,15 @@ public static int putLong(byte[] bytes, int offset, long val) {
     if (LITTLE_ENDIAN) {
       val = Long.reverseBytes(val);
     }
-    theUnsafe.putLong(bytes, offset + BYTE_ARRAY_BASE_OFFSET, val);
+    HBasePlatformDependent.putLong(bytes, offset + BYTE_ARRAY_BASE_OFFSET, val);
     return offset + Bytes.SIZEOF_LONG;
   }
 
   // APIs to read primitive data from a ByteBuffer using Unsafe way
   /**
    * Reads a short value at the given buffer's offset considering it was written in big-endian
-   * format. nn * @return short value at offset
+   * format.
+   * @return short value at offset
    */
   public static short toShort(ByteBuffer buf, int offset) {
     if (LITTLE_ENDIAN) {
@@ -175,28 +157,32 @@ public static short toShort(ByteBuffer buf, int offset) {
 
   /**
    * Reads a short value at the given Object's offset considering it was written in big-endian
-   * format. nn * @return short value at offset
+   * format.
+   * @return short value at offset
    */
   public static short toShort(Object ref, long offset) {
     if (LITTLE_ENDIAN) {
-      return Short.reverseBytes(theUnsafe.getShort(ref, offset));
+      return Short.reverseBytes(HBasePlatformDependent.getShort(ref, offset));
     }
-    return theUnsafe.getShort(ref, offset);
+    return HBasePlatformDependent.getShort(ref, offset);
   }
 
   /**
-   * Reads bytes at the given offset as a short value. nn * @return short value at offset
+   * Reads bytes at the given offset as a short value.
+   * @return short value at offset
    */
-  static short getAsShort(ByteBuffer buf, int offset) {
+  private static short getAsShort(ByteBuffer buf, int offset) {
     if (buf.isDirect()) {
-      return theUnsafe.getShort(((DirectBuffer) buf).address() + offset);
+      return HBasePlatformDependent.getShort(directBufferAddress(buf) + offset);
     }
-    return theUnsafe.getShort(buf.array(), BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
+    return HBasePlatformDependent.getShort(buf.array(),
+      BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
   }
 
   /**
    * Reads an int value at the given buffer's offset considering it was written in big-endian
-   * format. nn * @return int value at offset
+   * format.
+   * @return int value at offset
    */
   public static int toInt(ByteBuffer buf, int offset) {
     if (LITTLE_ENDIAN) {
@@ -211,24 +197,27 @@ public static int toInt(ByteBuffer buf, int offset) {
    */
   public static int toInt(Object ref, long offset) {
     if (LITTLE_ENDIAN) {
-      return Integer.reverseBytes(theUnsafe.getInt(ref, offset));
+      return Integer.reverseBytes(HBasePlatformDependent.getInt(ref, offset));
     }
-    return theUnsafe.getInt(ref, offset);
+    return HBasePlatformDependent.getInt(ref, offset);
   }
 
   /**
-   * Reads bytes at the given offset as an int value. nn * @return int value at offset
+   * Reads bytes at the given offset as an int value.
+   * @return int value at offset
    */
-  static int getAsInt(ByteBuffer buf, int offset) {
+  private static int getAsInt(ByteBuffer buf, int offset) {
     if (buf.isDirect()) {
-      return theUnsafe.getInt(((DirectBuffer) buf).address() + offset);
+      return HBasePlatformDependent.getInt(directBufferAddress(buf) + offset);
     }
-    return theUnsafe.getInt(buf.array(), BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
+    return HBasePlatformDependent.getInt(buf.array(),
+      BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
   }
 
   /**
    * Reads a long value at the given buffer's offset considering it was written in big-endian
-   * format. nn * @return long value at offset
+   * format.
+   * @return long value at offset
    */
   public static long toLong(ByteBuffer buf, int offset) {
     if (LITTLE_ENDIAN) {
@@ -239,23 +228,49 @@ public static long toLong(ByteBuffer buf, int offset) {
 
   /**
    * Reads a long value at the given Object's offset considering it was written in big-endian
-   * format. nn * @return long value at offset
+   * format.
+   * @return long value at offset
    */
   public static long toLong(Object ref, long offset) {
     if (LITTLE_ENDIAN) {
-      return Long.reverseBytes(theUnsafe.getLong(ref, offset));
+      return Long.reverseBytes(HBasePlatformDependent.getLong(ref, offset));
     }
-    return theUnsafe.getLong(ref, offset);
+    return HBasePlatformDependent.getLong(ref, offset);
   }
 
   /**
-   * Reads bytes at the given offset as a long value. nn * @return long value at offset
+   * Reads bytes at the given offset as a long value.
+   * @return long value at offset
    */
-  static long getAsLong(ByteBuffer buf, int offset) {
+  private static long getAsLong(ByteBuffer buf, int offset) {
     if (buf.isDirect()) {
-      return theUnsafe.getLong(((DirectBuffer) buf).address() + offset);
+      return HBasePlatformDependent.getLong(directBufferAddress(buf) + offset);
     }
-    return theUnsafe.getLong(buf.array(), BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
+    return HBasePlatformDependent.getLong(buf.array(),
+      BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
+  }
+
+  /**
+   * Returns the byte at the given offset
+   * @param buf    the buffer to read
+   * @param offset the offset at which the byte has to be read
+   * @return the byte at the given offset
+   */
+  public static byte toByte(ByteBuffer buf, int offset) {
+    if (buf.isDirect()) {
+      return HBasePlatformDependent.getByte(directBufferAddress(buf) + offset);
+    } else {
+      return HBasePlatformDependent.getByte(buf.array(),
+        BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
+    }
+  }
+
+  /**
+   * Returns the byte at the given offset of the object
+   * @return the byte at the given offset
+   */
+  public static byte toByte(Object ref, long offset) {
+    return HBasePlatformDependent.getByte(ref, offset);
   }
 
   /**
@@ -270,9 +285,10 @@ public static int putInt(ByteBuffer buf, int offset, int val) {
       val = Integer.reverseBytes(val);
     }
     if (buf.isDirect()) {
-      theUnsafe.putInt(((DirectBuffer) buf).address() + offset, val);
+      HBasePlatformDependent.putInt(directBufferAddress(buf) + offset, val);
     } else {
-      theUnsafe.putInt(buf.array(), offset + buf.arrayOffset() + BYTE_ARRAY_BASE_OFFSET, val);
+      HBasePlatformDependent.putInt(buf.array(),
+        offset + buf.arrayOffset() + BYTE_ARRAY_BASE_OFFSET, val);
     }
     return offset + Bytes.SIZEOF_INT;
   }
@@ -285,7 +301,7 @@ public static void copy(byte[] src, int srcOffset, ByteBuffer dest, int destOffs
     long destAddress = destOffset;
     Object destBase = null;
     if (dest.isDirect()) {
-      destAddress = destAddress + ((DirectBuffer) dest).address();
+      destAddress = destAddress + directBufferAddress(dest);
     } else {
       destAddress = destAddress + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
       destBase = dest.array();
@@ -297,7 +313,7 @@ public static void copy(byte[] src, int srcOffset, ByteBuffer dest, int destOffs
   private static void unsafeCopy(Object src, long srcAddr, Object dst, long destAddr, long len) {
     while (len > 0) {
       long size = (len > UNSAFE_COPY_THRESHOLD) ? UNSAFE_COPY_THRESHOLD : len;
-      theUnsafe.copyMemory(src, srcAddr, dst, destAddr, size);
+      HBasePlatformDependent.copyMemory(src, srcAddr, dst, destAddr, size);
       len -= size;
       srcAddr += size;
       destAddr += size;
@@ -306,13 +322,18 @@ private static void unsafeCopy(Object src, long srcAddr, Object dst, long destAd
 
   /**
    * Copies specified number of bytes from given offset of {@code src} ByteBuffer to the
-   * {@code dest} array. nnnnn
+   * {@code dest} array.
+   * @param src        source buffer
+   * @param srcOffset  offset into source buffer
+   * @param dest       destination array
+   * @param destOffset offset into destination buffer
+   * @param length     length of data to copy
    */
   public static void copy(ByteBuffer src, int srcOffset, byte[] dest, int destOffset, int length) {
     long srcAddress = srcOffset;
     Object srcBase = null;
     if (src.isDirect()) {
-      srcAddress = srcAddress + ((DirectBuffer) src).address();
+      srcAddress = srcAddress + directBufferAddress(src);
     } else {
       srcAddress = srcAddress + BYTE_ARRAY_BASE_OFFSET + src.arrayOffset();
       srcBase = src.array();
@@ -323,20 +344,25 @@ public static void copy(ByteBuffer src, int srcOffset, byte[] dest, int destOffs
 
   /**
    * Copies specified number of bytes from given offset of {@code src} buffer into the {@code dest}
-   * buffer. nnnnn
+   * buffer.
+   * @param src        source buffer
+   * @param srcOffset  offset into source buffer
+   * @param dest       destination buffer
+   * @param destOffset offset into destination buffer
+   * @param length     length of data to copy
    */
   public static void copy(ByteBuffer src, int srcOffset, ByteBuffer dest, int destOffset,
     int length) {
     long srcAddress, destAddress;
     Object srcBase = null, destBase = null;
     if (src.isDirect()) {
-      srcAddress = srcOffset + ((DirectBuffer) src).address();
+      srcAddress = srcOffset + directBufferAddress(src);
     } else {
       srcAddress = (long) srcOffset + src.arrayOffset() + BYTE_ARRAY_BASE_OFFSET;
       srcBase = src.array();
     }
     if (dest.isDirect()) {
-      destAddress = destOffset + ((DirectBuffer) dest).address();
+      destAddress = destOffset + directBufferAddress(dest);
     } else {
       destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
       destBase = dest.array();
@@ -357,9 +383,10 @@ public static int putShort(ByteBuffer buf, int offset, short val) {
       val = Short.reverseBytes(val);
     }
     if (buf.isDirect()) {
-      theUnsafe.putShort(((DirectBuffer) buf).address() + offset, val);
+      HBasePlatformDependent.putShort(directBufferAddress(buf) + offset, val);
     } else {
-      theUnsafe.putShort(buf.array(), BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset, val);
+      HBasePlatformDependent.putShort(buf.array(),
+        BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset, val);
     }
     return offset + Bytes.SIZEOF_SHORT;
   }
@@ -376,9 +403,10 @@ public static int putLong(ByteBuffer buf, int offset, long val) {
       val = Long.reverseBytes(val);
     }
     if (buf.isDirect()) {
-      theUnsafe.putLong(((DirectBuffer) buf).address() + offset, val);
+      HBasePlatformDependent.putLong(directBufferAddress(buf) + offset, val);
     } else {
-      theUnsafe.putLong(buf.array(), BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset, val);
+      HBasePlatformDependent.putLong(buf.array(),
+        BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset, val);
     }
     return offset + Bytes.SIZEOF_LONG;
   }
@@ -392,31 +420,20 @@ public static int putLong(ByteBuffer buf, int offset, long val) {
    */
   public static int putByte(ByteBuffer buf, int offset, byte b) {
     if (buf.isDirect()) {
-      theUnsafe.putByte(((DirectBuffer) buf).address() + offset, b);
+      HBasePlatformDependent.putByte(directBufferAddress(buf) + offset, b);
     } else {
-      theUnsafe.putByte(buf.array(), BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset, b);
+      HBasePlatformDependent.putByte(buf.array(),
+        BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset, b);
     }
     return offset + 1;
   }
 
-  /**
-   * Returns the byte at the given offset
-   * @param buf    the buffer to read
-   * @param offset the offset at which the byte has to be read
-   * @return the byte at the given offset
-   */
-  public static byte toByte(ByteBuffer buf, int offset) {
-    if (buf.isDirect()) {
-      return theUnsafe.getByte(((DirectBuffer) buf).address() + offset);
-    } else {
-      return theUnsafe.getByte(buf.array(), BYTE_ARRAY_BASE_OFFSET + buf.arrayOffset() + offset);
-    }
+  public static long directBufferAddress(ByteBuffer buf) {
+    return PlatformDependent.directBufferAddress(buf);
   }
 
-  /**
-   * Returns the byte at the given offset of the object nn * @return the byte at the given offset
-   */
-  public static byte toByte(Object ref, long offset) {
-    return theUnsafe.getByte(ref, offset);
+  public static void freeDirectBuffer(ByteBuffer buffer) {
+    // Delegate to Netty's PlatformDependent to free the direct buffer
+    PlatformDependent.freeDirectBuffer(buffer);
   }
 }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java
deleted file mode 100644
index 00f2c0d8cf36..000000000000
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.util;
-
-import java.lang.reflect.Field;
-import java.lang.reflect.Method;
-import java.security.AccessController;
-import java.security.PrivilegedAction;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-@InterfaceAudience.Private
-public class UnsafeAvailChecker {
-
-  private static final String CLASS_NAME = "sun.misc.Unsafe";
-  private static final Logger LOG = LoggerFactory.getLogger(UnsafeAvailChecker.class);
-  private static boolean avail = false;
-  private static boolean unaligned = false;
-
-  static {
-    avail = AccessController.doPrivileged(new PrivilegedAction<Boolean>() {
-      @Override
-      public Boolean run() {
-        try {
-          Class<?> clazz = Class.forName(CLASS_NAME);
-          Field f = clazz.getDeclaredField("theUnsafe");
-          f.setAccessible(true);
-          Object theUnsafe = f.get(null);
-          if (theUnsafe == null) {
-            LOG.warn("Could not get static instance from sun.misc.Unsafe");
-            return false;
-          }
-          // Check for availability of all methods used by UnsafeAccess
-          Method m;
-          try {
-            m = clazz.getDeclaredMethod("arrayBaseOffset", Class.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing arrayBaseOffset(Class)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("copyMemory", Object.class, long.class, Object.class,
-              long.class, long.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing copyMemory(Object,long,Object,long,long)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("getByte", Object.class, long.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing getByte(Object,long)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("getShort", long.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing getShort(long)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("getShort", Object.class, long.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing getShort(Object,long)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("getInt", long.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing getInt(long)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("getInt", Object.class, long.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing getInt(Object,long)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("getLong", long.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing getLong(long)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("getLong", Object.class, long.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing getLong(Object,long)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("putByte", long.class, byte.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing putByte(long,byte)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("putByte", Object.class, long.class, byte.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing putByte(Object,long,byte)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("putShort", long.class, short.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing putShort(long,short)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("putShort", Object.class, long.class, short.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing putShort(Object,long,short)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("putInt", long.class, int.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing putInt(long,int)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("putInt", Object.class, long.class, int.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing putInt(Object,long,int)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("putLong", long.class, long.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing putLong(long,long)");
-              return false;
-            }
-            m = clazz.getDeclaredMethod("putLong", Object.class, long.class, long.class);
-            if (m == null) {
-              LOG.warn("sun.misc.Unsafe is missing putLong(Object,long,long)");
-              return false;
-            }
-            // theUnsafe is accessible and all methods are available
-            return true;
-          } catch (Throwable e) {
-            LOG.warn("sun.misc.Unsafe is missing one or more required methods", e);
-          }
-        } catch (Throwable e) {
-          LOG.warn("sun.misc.Unsafe is not available/accessible", e);
-        }
-        return false;
-      }
-    });
-    // When Unsafe itself is not available/accessible consider unaligned as false.
-    if (avail) {
-      String arch = System.getProperty("os.arch");
-      if ("ppc64".equals(arch) || "ppc64le".equals(arch) || "aarch64".equals(arch)) {
-        // java.nio.Bits.unaligned() wrongly returns false on ppc (JDK-8165231),
-        unaligned = true;
-      } else {
-        try {
-          // Using java.nio.Bits#unaligned() to check for unaligned-access capability
-          Class<?> clazz = Class.forName("java.nio.Bits");
-          Method m = clazz.getDeclaredMethod("unaligned");
-          m.setAccessible(true);
-          unaligned = (Boolean) m.invoke(null);
-        } catch (Exception e) {
-          LOG.warn("java.nio.Bits#unaligned() check failed."
-            + "Unsafe based read/write of primitive types won't be used", e);
-        }
-      }
-    }
-  }
-
-  /**
-   * @return true when running JVM is having sun's Unsafe package available in it and it is
-   *         accessible.
-   */
-  public static boolean isAvailable() {
-    return avail;
-  }
-
-  /**
-   * @return true when running JVM is having sun's Unsafe package available in it and underlying
-   *         system having unaligned-access capability.
-   */
-  public static boolean unaligned() {
-    return unaligned;
-  }
-
-  private UnsafeAvailChecker() {
-    // private constructor to avoid instantiation
-  }
-}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
index 6bf633ed62d5..815cb9d6b63e 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
@@ -49,6 +49,7 @@
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
 import org.apache.hadoop.io.WritableUtils;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -106,14 +107,14 @@ static void disableUnsafe() throws Exception {
   }
 
   static void detectAvailabilityOfUnsafe() throws Exception {
-    if (ByteBufferUtils.UNSAFE_AVAIL != UnsafeAvailChecker.isAvailable()) {
-      setUnsafe(UNSAFE_AVAIL_NAME, UnsafeAvailChecker.isAvailable());
+    if (ByteBufferUtils.UNSAFE_AVAIL != HBasePlatformDependent.isUnsafeAvailable()) {
+      setUnsafe(UNSAFE_AVAIL_NAME, HBasePlatformDependent.isUnsafeAvailable());
     }
-    if (ByteBufferUtils.UNSAFE_UNALIGNED != UnsafeAvailChecker.unaligned()) {
-      setUnsafe(UNSAFE_UNALIGNED_NAME, UnsafeAvailChecker.unaligned());
+    if (ByteBufferUtils.UNSAFE_UNALIGNED != HBasePlatformDependent.unaligned()) {
+      setUnsafe(UNSAFE_UNALIGNED_NAME, HBasePlatformDependent.unaligned());
     }
-    assertEquals(ByteBufferUtils.UNSAFE_AVAIL, UnsafeAvailChecker.isAvailable());
-    assertEquals(ByteBufferUtils.UNSAFE_UNALIGNED, UnsafeAvailChecker.unaligned());
+    assertEquals(ByteBufferUtils.UNSAFE_AVAIL, HBasePlatformDependent.isUnsafeAvailable());
+    assertEquals(ByteBufferUtils.UNSAFE_UNALIGNED, HBasePlatformDependent.unaligned());
   }
 
   public TestByteBufferUtils(boolean useUnsafeIfPossible) throws Exception {
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
index 84637bb7ad10..8560863dae9a 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
@@ -37,6 +37,7 @@
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
 import org.apache.hadoop.io.WritableUtils;
 import org.junit.Assert;
 import org.junit.ClassRule;
@@ -79,7 +80,7 @@ private static void testShort(boolean unsafe) throws Exception {
         assertEquals(Bytes.toShort(bytes, 0, bytes.length), n);
       }
     } finally {
-      setUnsafe(UnsafeAvailChecker.unaligned());
+      setUnsafe(HBasePlatformDependent.unaligned());
     }
   }
 
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
index aaba50fa187d..89d71b403af7 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
@@ -20,17 +20,17 @@
 import java.io.IOException;
 import java.util.Map;
 import java.util.TreeMap;
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.DefaultValue;
+import org.apache.hbase.thirdparty.javax.ws.rs.GET;
+import org.apache.hbase.thirdparty.javax.ws.rs.Path;
+import org.apache.hbase.thirdparty.javax.ws.rs.PathParam;
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.QueryParam;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
 import org.apache.hbase.thirdparty.org.eclipse.jetty.util.ajax.JSON;
 
 /**
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/MockHttpApiRule.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/MockHttpApiRule.java
index 2e48a7c9a6a7..5817d071f02c 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/MockHttpApiRule.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/MockHttpApiRule.java
@@ -28,11 +28,11 @@
 import java.util.regex.Pattern;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.core.MediaType;
 import org.junit.rules.ExternalResource;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
 import org.apache.hbase.thirdparty.org.eclipse.jetty.server.Request;
 import org.apache.hbase.thirdparty.org.eclipse.jetty.server.RequestLog;
 import org.apache.hbase.thirdparty.org.eclipse.jetty.server.Server;
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
index cd410d332f34..8c5339605e29 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
@@ -31,14 +31,6 @@
 import java.util.Objects;
 import java.util.Optional;
 import java.util.concurrent.Callable;
-import javax.ws.rs.client.Client;
-import javax.ws.rs.client.ClientBuilder;
-import javax.ws.rs.client.Entity;
-import javax.ws.rs.client.Invocation;
-import javax.ws.rs.client.WebTarget;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriBuilder;
 import javax.xml.ws.http.HTTPException;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -53,6 +45,14 @@
 import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
 import org.apache.hbase.thirdparty.com.google.gson.JsonObject;
 import org.apache.hbase.thirdparty.com.google.gson.JsonParser;
+import org.apache.hbase.thirdparty.javax.ws.rs.client.Client;
+import org.apache.hbase.thirdparty.javax.ws.rs.client.ClientBuilder;
+import org.apache.hbase.thirdparty.javax.ws.rs.client.Entity;
+import org.apache.hbase.thirdparty.javax.ws.rs.client.Invocation;
+import org.apache.hbase.thirdparty.javax.ws.rs.client.WebTarget;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriBuilder;
 import org.apache.hbase.thirdparty.org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;
 
 /**
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index de7159794f18..13dd9529ee16 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -165,10 +165,6 @@
       <groupId>com.sun.activation</groupId>
       <artifactId>javax.activation</artifactId>
     </dependency>
-    <dependency>
-      <groupId>com.fasterxml.jackson.jaxrs</groupId>
-      <artifactId>jackson-jaxrs-json-provider</artifactId>
-    </dependency>
     <dependency>
       <groupId>com.fasterxml.jackson.core</groupId>
       <artifactId>jackson-annotations</artifactId>
@@ -181,6 +177,10 @@
       <groupId>com.fasterxml.jackson.core</groupId>
       <artifactId>jackson-databind</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase.thirdparty</groupId>
+      <artifactId>hbase-shaded-jackson-jaxrs-json-provider</artifactId>
+    </dependency>
     <dependency>
       <!-- We *might* need this for XMLStreamReader use in RemoteAdmin
            TODO figure out if we can remove it.
@@ -515,15 +515,6 @@
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-auth</artifactId>
         </dependency>
-        <!--Needed when jdk11/hadoop3 else complaint about
-          NoSuchMethodError: 'java.util.Map javax.ws.rs.core.Application.getProperties()'
-          when REST server is started.
-          -->
-        <dependency>
-          <groupId>org.glassfish.jaxb</groupId>
-          <artifactId>jaxb-runtime</artifactId>
-          <version>2.3.2</version>
-        </dependency>
       </dependencies>
     </profile>
     <profile>
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ExistsResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ExistsResource.java
index 75794b84cc8e..61dede2ae835 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ExistsResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ExistsResource.java
@@ -18,15 +18,16 @@
 package org.apache.hadoop.hbase.rest;
 
 import java.io.IOException;
-import javax.ws.rs.GET;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.CacheControl;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.UriInfo;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.GET;
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
+
 @InterfaceAudience.Private
 public class ExistsResource extends ResourceBase {
 
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
index 86cc2173e342..68d774e420ca 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
@@ -18,12 +18,6 @@
 package org.apache.hadoop.hbase.rest;
 
 import java.io.IOException;
-import javax.ws.rs.GET;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriInfo;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.rest.model.CellModel;
@@ -34,6 +28,13 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.GET;
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MultivaluedMap;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
+
 @InterfaceAudience.Private
 public class MultiRowResource extends ResourceBase implements Constants {
   private static final Logger LOG = LoggerFactory.getLogger(MultiRowResource.class);
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
index 13ae57a2f3a8..b661e46f928b 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
@@ -19,18 +19,6 @@
 
 import java.io.IOException;
 import javax.servlet.ServletContext;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.client.Admin;
@@ -41,6 +29,19 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.Consumes;
+import org.apache.hbase.thirdparty.javax.ws.rs.DELETE;
+import org.apache.hbase.thirdparty.javax.ws.rs.GET;
+import org.apache.hbase.thirdparty.javax.ws.rs.POST;
+import org.apache.hbase.thirdparty.javax.ws.rs.PUT;
+import org.apache.hbase.thirdparty.javax.ws.rs.Path;
+import org.apache.hbase.thirdparty.javax.ws.rs.PathParam;
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.HttpHeaders;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
+
 /**
  * Implements the following REST end points:
  * <p>
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
index 87802c4f9192..a3c0e2d2f1a5 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
@@ -19,18 +19,19 @@
 
 import java.io.IOException;
 import javax.servlet.ServletContext;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriInfo;
 import org.apache.hadoop.hbase.rest.model.NamespacesModel;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.GET;
+import org.apache.hbase.thirdparty.javax.ws.rs.Path;
+import org.apache.hbase.thirdparty.javax.ws.rs.PathParam;
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
+
 /**
  * Implements REST GET list of all namespaces.
  * <p>
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java
index 7bf9538b2860..eadd6a9334bc 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java
@@ -20,8 +20,6 @@
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.List;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.StreamingOutput;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.client.Result;
@@ -34,6 +32,9 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.WebApplicationException;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.StreamingOutput;
+
 @InterfaceAudience.Private
 public class ProtobufStreamingOutput implements StreamingOutput {
   private static final Logger LOG = LoggerFactory.getLogger(ProtobufStreamingOutput.class);
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index 0439e9b96a1e..83da3a191d64 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hbase.rest;
 
-import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 import java.lang.management.ManagementFactory;
 import java.util.ArrayList;
 import java.util.EnumSet;
@@ -48,6 +47,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
index f9f59e571dbe..21c973026030 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
@@ -19,13 +19,6 @@
 
 import java.io.IOException;
 import java.util.List;
-import javax.ws.rs.GET;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.CacheControl;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.UriInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
@@ -40,6 +33,14 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.GET;
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
+
 @InterfaceAudience.Private
 public class RegionsResource extends ResourceBase {
   private static final Logger LOG = LoggerFactory.getLogger(RegionsResource.class);
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java
index 8337f91c7825..0f00479ff152 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java
@@ -18,14 +18,15 @@
 package org.apache.hadoop.hbase.rest;
 
 import java.io.IOException;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.Response;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
 import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.WebApplicationException;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+
 @InterfaceAudience.Private
 public class ResourceBase implements Constants {
 
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
index 2dc873d5bb41..9baf7aa7c045 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
@@ -18,15 +18,6 @@
 package org.apache.hadoop.hbase.rest;
 
 import java.io.IOException;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.CacheControl;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.UriInfo;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.rest.model.TableListModel;
 import org.apache.hadoop.hbase.rest.model.TableModel;
@@ -34,6 +25,16 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.GET;
+import org.apache.hbase.thirdparty.javax.ws.rs.Path;
+import org.apache.hbase.thirdparty.javax.ws.rs.PathParam;
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
+
 @Path("/")
 @InterfaceAudience.Private
 public class RootResource extends ResourceBase {
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index e61a842acc40..16259c341674 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -20,18 +20,6 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.UriInfo;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.Cell.Type;
@@ -53,6 +41,19 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.Consumes;
+import org.apache.hbase.thirdparty.javax.ws.rs.DELETE;
+import org.apache.hbase.thirdparty.javax.ws.rs.GET;
+import org.apache.hbase.thirdparty.javax.ws.rs.POST;
+import org.apache.hbase.thirdparty.javax.ws.rs.PUT;
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.HttpHeaders;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MultivaluedMap;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
+
 @InterfaceAudience.Private
 public class RowResource extends ResourceBase {
   private static final Logger LOG = LoggerFactory.getLogger(RowResource.class);
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
index 45bf6a722977..81ab8e24692f 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
@@ -19,15 +19,6 @@
 
 import java.io.IOException;
 import java.util.Base64;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.CacheControl;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.UriInfo;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.TableNotFoundException;
@@ -39,6 +30,16 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.DELETE;
+import org.apache.hbase.thirdparty.javax.ws.rs.GET;
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.QueryParam;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
+
 @InterfaceAudience.Private
 public class ScannerInstanceResource extends ResourceBase {
   private static final Logger LOG = LoggerFactory.getLogger(ScannerInstanceResource.class);
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
index 9866ec83fe66..1c2929aab7af 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
@@ -24,15 +24,6 @@
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriBuilder;
-import javax.ws.rs.core.UriInfo;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.rest.model.ScannerModel;
@@ -40,6 +31,16 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.Consumes;
+import org.apache.hbase.thirdparty.javax.ws.rs.POST;
+import org.apache.hbase.thirdparty.javax.ws.rs.PUT;
+import org.apache.hbase.thirdparty.javax.ws.rs.Path;
+import org.apache.hbase.thirdparty.javax.ws.rs.PathParam;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriBuilder;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
+
 @InterfaceAudience.Private
 public class ScannerResource extends ResourceBase {
 
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
index db7df4bf5f41..ee15cecccd39 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
@@ -19,18 +19,6 @@
 
 import java.io.IOException;
 import java.util.Map;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Produces;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.CacheControl;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.UriInfo;
 import javax.xml.namespace.QName;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -46,6 +34,19 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.Consumes;
+import org.apache.hbase.thirdparty.javax.ws.rs.DELETE;
+import org.apache.hbase.thirdparty.javax.ws.rs.GET;
+import org.apache.hbase.thirdparty.javax.ws.rs.POST;
+import org.apache.hbase.thirdparty.javax.ws.rs.PUT;
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.WebApplicationException;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
+
 @InterfaceAudience.Private
 public class SchemaResource extends ResourceBase {
   private static final Logger LOG = LoggerFactory.getLogger(SchemaResource.class);
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
index c91864e8e7d8..85b3b3f6556f 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
@@ -20,13 +20,6 @@
 import java.io.IOException;
 import java.util.EnumSet;
 import java.util.Map;
-import javax.ws.rs.GET;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.CacheControl;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.UriInfo;
 import org.apache.hadoop.hbase.ClusterMetrics;
 import org.apache.hadoop.hbase.ClusterMetrics.Option;
 import org.apache.hadoop.hbase.RegionMetrics;
@@ -38,6 +31,14 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.GET;
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
+
 @InterfaceAudience.Private
 public class StorageClusterStatusResource extends ResourceBase {
   private static final Logger LOG = LoggerFactory.getLogger(StorageClusterStatusResource.class);
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
index f658eeac0ab8..ea7641e54cdb 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
@@ -19,19 +19,20 @@
 
 import java.io.IOException;
 import java.util.EnumSet;
-import javax.ws.rs.GET;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.CacheControl;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.UriInfo;
 import org.apache.hadoop.hbase.ClusterMetrics.Option;
 import org.apache.hadoop.hbase.rest.model.StorageClusterVersionModel;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.GET;
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
+
 @InterfaceAudience.Private
 public class StorageClusterVersionResource extends ResourceBase {
   private static final Logger LOG = LoggerFactory.getLogger(StorageClusterVersionResource.class);
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
index f68e6a413f9e..c10ad88b4b71 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
@@ -19,11 +19,6 @@
 
 import java.io.IOException;
 import java.util.List;
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.Encoded;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.TableName;
@@ -38,6 +33,12 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.DefaultValue;
+import org.apache.hbase.thirdparty.javax.ws.rs.Encoded;
+import org.apache.hbase.thirdparty.javax.ws.rs.Path;
+import org.apache.hbase.thirdparty.javax.ws.rs.PathParam;
+import org.apache.hbase.thirdparty.javax.ws.rs.QueryParam;
+
 @InterfaceAudience.Private
 public class TableResource extends ResourceBase {
 
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java
index 32ddf47aed12..e30beaa37df7 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java
@@ -23,14 +23,6 @@
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import javax.ws.rs.GET;
-import javax.ws.rs.HeaderParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.StreamingOutput;
-import javax.ws.rs.core.UriInfo;
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlElement;
@@ -45,6 +37,15 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.GET;
+import org.apache.hbase.thirdparty.javax.ws.rs.HeaderParam;
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.StreamingOutput;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
+
 @InterfaceAudience.Private
 public class TableScanResource extends ResourceBase {
   private static final Logger LOG = LoggerFactory.getLogger(TableScanResource.class);
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
index 0a790161b758..8b71f7086452 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
@@ -19,19 +19,20 @@
 
 import java.io.IOException;
 import javax.servlet.ServletContext;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.CacheControl;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.UriInfo;
 import org.apache.hadoop.hbase.rest.model.VersionModel;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.GET;
+import org.apache.hbase.thirdparty.javax.ws.rs.Path;
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.CacheControl;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Context;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.UriInfo;
+
 /**
  * Implements REST software version reporting
  * <p>
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
index c2011d7bafd5..fc59d14d56c7 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hbase.rest.model;
 
 import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 import com.google.protobuf.ByteString;
 import java.io.IOException;
 import java.io.Serializable;
@@ -27,7 +26,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.NavigableSet;
-import javax.ws.rs.core.MediaType;
 import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
@@ -75,6 +73,9 @@
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
+
 /**
  * A representation of Scanner parameters.
  *
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/JAXBContextResolver.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/JAXBContextResolver.java
index d564d4832b77..e87e516c5cfe 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/JAXBContextResolver.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/JAXBContextResolver.java
@@ -20,8 +20,6 @@
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Set;
-import javax.ws.rs.ext.ContextResolver;
-import javax.ws.rs.ext.Provider;
 import javax.xml.bind.JAXBContext;
 import org.apache.hadoop.hbase.rest.model.CellModel;
 import org.apache.hadoop.hbase.rest.model.CellSetModel;
@@ -40,6 +38,9 @@
 import org.apache.hadoop.hbase.rest.model.VersionModel;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.ext.ContextResolver;
+import org.apache.hbase.thirdparty.javax.ws.rs.ext.Provider;
+
 /**
  * Plumbing for hooking up Jersey's JSON entity body encoding and decoding support to JAXB. Modify
  * how the context is created (by using e.g. a different configuration builder) to control how JSON
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
index 73f7eece40d6..7c3f6f8ea401 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
@@ -23,18 +23,19 @@
 import java.lang.annotation.Annotation;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Type;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.ext.MessageBodyReader;
-import javax.ws.rs.ext.Provider;
 import org.apache.hadoop.hbase.rest.Constants;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.Consumes;
+import org.apache.hbase.thirdparty.javax.ws.rs.WebApplicationException;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MultivaluedMap;
+import org.apache.hbase.thirdparty.javax.ws.rs.ext.MessageBodyReader;
+import org.apache.hbase.thirdparty.javax.ws.rs.ext.Provider;
+
 /**
  * Adapter for hooking up Jersey content processing dispatch to ProtobufMessageHandler interface
  * capable handlers for decoding protobuf input.
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/PlainTextMessageBodyProducer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/PlainTextMessageBodyProducer.java
index e31e03417a8e..973665f00fa9 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/PlainTextMessageBodyProducer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/PlainTextMessageBodyProducer.java
@@ -21,16 +21,17 @@
 import java.io.OutputStream;
 import java.lang.annotation.Annotation;
 import java.lang.reflect.Type;
-import javax.ws.rs.Produces;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.ext.MessageBodyWriter;
-import javax.ws.rs.ext.Provider;
 import org.apache.hadoop.hbase.rest.Constants;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.WebApplicationException;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MultivaluedMap;
+import org.apache.hbase.thirdparty.javax.ws.rs.ext.MessageBodyWriter;
+import org.apache.hbase.thirdparty.javax.ws.rs.ext.Provider;
+
 /**
  * An adapter between Jersey and Object.toString(). Hooks up plain text output to the Jersey content
  * handling framework. Jersey will first call getSize() to learn the number of bytes that will be
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/ProtobufMessageBodyProducer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/ProtobufMessageBodyProducer.java
index 9723d1f89458..1d95e6f343e7 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/ProtobufMessageBodyProducer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/producer/ProtobufMessageBodyProducer.java
@@ -21,16 +21,17 @@
 import java.io.OutputStream;
 import java.lang.annotation.Annotation;
 import java.lang.reflect.Type;
-import javax.ws.rs.Produces;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.ext.MessageBodyWriter;
-import javax.ws.rs.ext.Provider;
 import org.apache.hadoop.hbase.rest.Constants;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.apache.hbase.thirdparty.javax.ws.rs.Produces;
+import org.apache.hbase.thirdparty.javax.ws.rs.WebApplicationException;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MultivaluedMap;
+import org.apache.hbase.thirdparty.javax.ws.rs.ext.MessageBodyWriter;
+import org.apache.hbase.thirdparty.javax.ws.rs.ext.Provider;
+
 /**
  * An adapter between Jersey and ProtobufMessageHandler implementors. Hooks up protobuf output
  * producing methods to the Jersey content handling framework. Jersey will first call getSize() to
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
index fb49c53ea89b..774d7eaba291 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
@@ -20,13 +20,11 @@
 import static org.junit.Assert.assertEquals;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.StringWriter;
 import java.util.HashMap;
 import java.util.Map;
-import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 import javax.xml.bind.Marshaller;
@@ -49,6 +47,9 @@
 import org.junit.Before;
 import org.junit.BeforeClass;
 
+import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
+
 public class RowResourceBase {
   protected static final String TABLE = "TestRowResource";
 
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
index fc9f7e98a11f..61734734871d 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
@@ -20,10 +20,8 @@
 import static org.junit.Assert.assertEquals;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 import java.io.IOException;
 import java.util.Collection;
-import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.Marshaller;
 import javax.xml.bind.Unmarshaller;
@@ -54,6 +52,9 @@
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
+import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
+
 @Category({ RestTests.class, MediumTests.class })
 @RunWith(Parameterized.class)
 public class TestMultiRowResource {
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
index 71c66271d827..2d09ae0da4a2 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
@@ -23,7 +23,6 @@
 import static org.junit.Assert.assertTrue;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.StringWriter;
@@ -32,7 +31,6 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 import org.apache.hadoop.conf.Configuration;
@@ -61,6 +59,9 @@
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
+
 @Category({ RestTests.class, MediumTests.class })
 public class TestNamespacesInstanceResource {
   @ClassRule
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestRESTServerSSL.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestRESTServerSSL.java
index e5d94dd5ab78..5731dd94fc66 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestRESTServerSSL.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestRESTServerSSL.java
@@ -20,6 +20,7 @@
 import static org.junit.Assert.assertEquals;
 
 import java.io.File;
+import java.lang.reflect.Method;
 import java.security.KeyPair;
 import java.security.cert.X509Certificate;
 import java.util.Optional;
@@ -39,7 +40,8 @@
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import sun.security.x509.AlgorithmId;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @Category({ RestTests.class, MediumTests.class })
 public class TestRESTServerSSL {
@@ -48,6 +50,8 @@ public class TestRESTServerSSL {
   public static final HBaseClassTestRule CLASS_RULE =
     HBaseClassTestRule.forClass(TestRESTServerSSL.class);
 
+  private static final Logger LOG = LoggerFactory.getLogger(TestRESTServerSSL.class);
+
   private static final String KEY_STORE_PASSWORD = "myKSPassword";
   private static final String TRUST_STORE_PASSWORD = "myTSPassword";
 
@@ -57,12 +61,23 @@ public class TestRESTServerSSL {
   private static File keyDir;
   private Configuration conf;
 
+  // Workaround for jdk8 292 bug. See https://github.com/bcgit/bc-java/issues/941
+  // Below is a workaround described in above URL. Issue first identified in comments in
+  // HBASE-25920 Support Hadoop 3.3.1
+  private static void initializeAlgorithmId() {
+    try {
+      Class<?> algoId = Class.forName("sun.security.x509.AlgorithmId");
+      Method method = algoId.getMethod("get", String.class);
+      method.setAccessible(true);
+      method.invoke(null, "PBEWithSHA1AndDESede");
+    } catch (Exception e) {
+      LOG.warn("failed to initialize AlgorithmId", e);
+    }
+  }
+
   @BeforeClass
   public static void beforeClass() throws Exception {
-    // Workaround for jdk8 252 bug. See https://github.com/bcgit/bc-java/issues/941
-    // Below is a workaround described in above URL. Issue fingered first in comments in
-    // HBASE-25920 Support Hadoop 3.3.1
-    AlgorithmId.get("PBEWithSHA1AndDESede");
+    initializeAlgorithmId();
     keyDir = initKeystoreDir();
     KeyPair keyPair = KeyStoreTestUtil.generateKeyPair("RSA");
     X509Certificate serverCertificate =
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSecureRESTServer.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSecureRESTServer.java
index 6c5fcd3d6f2a..48ef22355e93 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSecureRESTServer.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSecureRESTServer.java
@@ -22,14 +22,12 @@
 import static org.junit.Assert.assertTrue;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 import java.io.File;
 import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.security.Principal;
 import java.security.PrivilegedExceptionAction;
-import javax.ws.rs.core.MediaType;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -96,6 +94,9 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
+
 /**
  * Test class for SPNEGO authentication on the HttpServer. Uses Kerby's MiniKDC and Apache
  * HttpComponents to verify that a simple Servlet is reachable via SPNEGO and unreachable w/o.
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
index b65a58466d84..2e7a4af13b7e 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
@@ -26,7 +26,6 @@
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonToken;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 import java.io.DataInputStream;
 import java.io.EOFException;
 import java.io.IOException;
@@ -37,7 +36,6 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 import javax.xml.bind.Unmarshaller;
@@ -73,6 +71,9 @@
 import org.xml.sax.InputSource;
 import org.xml.sax.XMLReader;
 
+import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
+
 @Category({ RestTests.class, MediumTests.class })
 public class TestTableScan {
   @ClassRule
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
index eec1ba4b0b19..542d39ada9b9 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
@@ -22,10 +22,8 @@
 import static org.junit.Assert.assertTrue;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
-import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -46,6 +44,9 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
+
 @Category({ RestTests.class, MediumTests.class })
 public class TestVersionResource {
 
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java
index 4856ac8965ce..4cfe70e06399 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java
@@ -21,18 +21,19 @@
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 import java.io.IOException;
 import java.io.StringReader;
 import java.io.StringWriter;
 import java.util.Base64;
-import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.provider.JAXBContextResolver;
 import org.junit.Test;
 
+import org.apache.hbase.thirdparty.com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
+import org.apache.hbase.thirdparty.javax.ws.rs.core.MediaType;
+
 public abstract class TestModelBase<T> {
 
   protected String AS_XML;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MetaTableMetrics.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MetaTableMetrics.java
index 2818c8c51900..499f8e4e31fe 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MetaTableMetrics.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MetaTableMetrics.java
@@ -203,41 +203,37 @@ private String opWithClientMeterName(Object op) {
         return "";
       }
       MetaTableOps ops = opsNameMap.get(op.getClass());
-      String opWithClientMeterName = "";
+      if (ops == null) {
+        return "";
+      }
       switch (ops) {
         case GET:
-          opWithClientMeterName = String.format("MetaTable_client_%s_get_request", clientIP);
-          break;
+          return String.format("MetaTable_client_%s_get_request", clientIP);
         case PUT:
-          opWithClientMeterName = String.format("MetaTable_client_%s_put_request", clientIP);
-          break;
+          return String.format("MetaTable_client_%s_put_request", clientIP);
         case DELETE:
-          opWithClientMeterName = String.format("MetaTable_client_%s_delete_request", clientIP);
-          break;
+          return String.format("MetaTable_client_%s_delete_request", clientIP);
         default:
-          break;
+          return "";
       }
-      return opWithClientMeterName;
     }
 
     private String opMeterName(Object op) {
       // Extract meter name containing the access type
       MetaTableOps ops = opsNameMap.get(op.getClass());
-      String opMeterName = "";
+      if (ops == null) {
+        return "";
+      }
       switch (ops) {
         case GET:
-          opMeterName = "MetaTable_get_request";
-          break;
+          return "MetaTable_get_request";
         case PUT:
-          opMeterName = "MetaTable_put_request";
-          break;
+          return "MetaTable_put_request";
         case DELETE:
-          opMeterName = "MetaTable_delete_request";
-          break;
+          return "MetaTable_delete_request";
         default:
-          break;
+          return "";
       }
-      return opMeterName;
     }
 
     private String tableMeterName(String tableName) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java
index f032a1a3b6c5..8e45cb772d22 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java
@@ -20,6 +20,7 @@
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hbase.thirdparty.com.google.common.collect.MinMaxPriorityQueue;
 
 /**
@@ -46,8 +47,12 @@ public class LruCachedBlockQueue implements HeapSize {
    * @param blockSize expected average size of blocks
    */
   public LruCachedBlockQueue(long maxSize, long blockSize) {
+    Preconditions.checkArgument(blockSize > 0, "blockSize must be positive, but was %s", blockSize);
+    Preconditions.checkArgument(maxSize > 0, "maxSize must be positive, but was %s", maxSize);
     int initialSize = (int) (maxSize / blockSize);
-    if (initialSize == 0) initialSize++;
+    if (initialSize == 0) {
+      initialSize++;
+    }
     queue = MinMaxPriorityQueue.expectedSize(initialSize).create();
     heapSize = 0;
     this.maxSize = maxSize;
@@ -61,6 +66,10 @@ public LruCachedBlockQueue(long maxSize, long blockSize) {
    * side effect of this call.
    * @param cb block to try to add to the queue
    */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(
+      value = "NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE",
+      justification = "head can not be null as heapSize is at least maxSize,"
+        + " which means we have something in the queue")
   public void add(LruCachedBlock cb) {
     if (heapSize < maxSize) {
       queue.add(cb);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java
index c5956c28fc5d..daa25cee1de2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java
@@ -22,6 +22,7 @@
 import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hbase.thirdparty.com.google.common.collect.MinMaxPriorityQueue;
 
 /**
@@ -49,6 +50,8 @@ public class CachedEntryQueue {
    * @param blockSize expected average size of blocks
    */
   public CachedEntryQueue(long maxSize, long blockSize) {
+    Preconditions.checkArgument(blockSize > 0, "blockSize must be positive, but was %s", blockSize);
+    Preconditions.checkArgument(maxSize > 0, "maxSize must be positive, but was %s", maxSize);
     int initialSize = (int) (maxSize / blockSize);
     if (initialSize == 0) {
       initialSize++;
@@ -66,6 +69,10 @@ public CachedEntryQueue(long maxSize, long blockSize) {
    * side effect of this call.
    * @param entry a bucket entry with key to try to add to the queue
    */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(
+      value = "NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE",
+      justification = "head can not be null as cacheSize is at least maxSize,"
+        + " which means we have something in the queue")
   public void add(Map.Entry<BlockCacheKey, BucketEntry> entry) {
     if (cacheSize < maxSize) {
       queue.add(entry);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index dcf7270dabb5..c9cc4d257501 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -156,6 +156,7 @@
 import org.apache.hadoop.hbase.security.access.ZKPermissionWatcher;
 import org.apache.hadoop.hbase.trace.SpanReceiverHost;
 import org.apache.hadoop.hbase.trace.TraceUtil;
+import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
 import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
@@ -192,7 +193,6 @@
 import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import sun.misc.Signal;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
@@ -735,7 +735,7 @@ protected String getUseThisHostnameInstead(Configuration conf) throws IOExceptio
    */
   private static void setupWindows(final Configuration conf, ConfigurationManager cm) {
     if (!SystemUtils.IS_OS_WINDOWS) {
-      Signal.handle(new Signal("HUP"), signal -> {
+      HBasePlatformDependent.handle("HUP", (number, name) -> {
         conf.reloadConfiguration();
         cm.notifyAllObservers(conf);
       });
diff --git a/pom.xml b/pom.xml
index b33fd5573510..e477bfb0d299 100755
--- a/pom.xml
+++ b/pom.xml
@@ -535,6 +535,7 @@
     <maven.build.timestamp.format>yyyy-MM-dd'T'HH:mm</maven.build.timestamp.format>
     <buildDate>${maven.build.timestamp}</buildDate>
     <compileSource>1.8</compileSource>
+    <releaseTarget>8</releaseTarget>
     <!-- Build dependencies -->
     <maven.min.version>3.0.4</maven.min.version>
     <java.min.version>${compileSource}</java.min.version>
@@ -563,8 +564,8 @@
     <httpclient.version>4.5.13</httpclient.version>
     <httpcore.version>4.4.13</httpcore.version>
     <metrics-core.version>3.2.6</metrics-core.version>
-    <jackson.version>2.10.1</jackson.version>
-    <jackson.databind.version>2.10.1</jackson.databind.version>
+    <jackson.version>2.13.1</jackson.version>
+    <jackson.databind.version>2.13.1</jackson.databind.version>
     <jaxb-api.version>2.3.1</jaxb-api.version>
     <servlet.api.version>3.1.0</servlet.api.version>
     <wx.rs.api.version>2.1.1</wx.rs.api.version>
@@ -624,8 +625,8 @@
     <wagon.ssh.version>2.12</wagon.ssh.version>
     <xml.maven.version>1.0.1</xml.maven.version>
     <spotless.version>2.22.2</spotless.version>
+    <hbase-thirdparty.version>4.1.0</hbase-thirdparty.version>
     <maven-site.version>3.12.0</maven-site.version>
-    <hbase-thirdparty.version>3.5.1</hbase-thirdparty.version>
     <!-- Intraproject jar naming properties -->
     <!-- TODO this is pretty ugly, but works for the moment.
       Modules are pretty heavy-weight things, so doing this work isn't too bad. -->
@@ -1166,11 +1167,6 @@
         <artifactId>joni</artifactId>
         <version>${joni.version}</version>
       </dependency>
-      <dependency>
-        <groupId>com.fasterxml.jackson.jaxrs</groupId>
-        <artifactId>jackson-jaxrs-json-provider</artifactId>
-        <version>${jackson.version}</version>
-      </dependency>
       <dependency>
         <groupId>com.fasterxml.jackson.core</groupId>
         <artifactId>jackson-annotations</artifactId>
@@ -1390,6 +1386,16 @@
         <artifactId>hbase-shaded-jersey</artifactId>
         <version>${hbase-thirdparty.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hbase.thirdparty</groupId>
+        <artifactId>hbase-shaded-jackson-jaxrs-json-provider</artifactId>
+        <version>${hbase-thirdparty.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hbase.thirdparty</groupId>
+        <artifactId>hbase-unsafe</artifactId>
+        <version>${hbase-thirdparty.version}</version>
+      </dependency>
       <dependency>
         <groupId>com.sun.xml.ws</groupId>
         <artifactId>jaxws-ri</artifactId>
@@ -2071,6 +2077,22 @@
                     <bannedImport>org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting</bannedImport>
                   </bannedImports>
                 </restrictImports>
+                <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+                  <includeTestCode>true</includeTestCode>
+                  <commentLineBufferSize>512</commentLineBufferSize>
+                  <reason>Use shaded javax.ws.rs in hbase-thirdparty</reason>
+                  <bannedImports>
+                    <bannedImport>javax.ws.rs.**</bannedImport>
+                  </bannedImports>
+                </restrictImports>
+                <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+                  <includeTestCode>true</includeTestCode>
+                  <commentLineBufferSize>512</commentLineBufferSize>
+                  <reason>Use shaded jackson-jaxrs-json-provider in hbase-thirdparty</reason>
+                  <bannedImports>
+                    <bannedImport>com.fasterxml.jackson.jaxrs.**</bannedImport>
+                  </bannedImports>
+                </restrictImports>
               </rules>
             </configuration>
           </execution>
@@ -2597,6 +2619,7 @@
         <jdk>[1.11,)</jdk>
       </activation>
       <properties>
+        <maven.compiler.release>${releaseTarget}</maven.compiler.release>
         <!-- TODO: replicate logic for windows support -->
         <argLine>--add-opens=java.base/jdk.internal.ref=ALL-UNNAMED ${hbase-surefire.argLine}</argLine>
         <!-- We need a minimum HDFS version of 3.2.0 for HADOOP-12760 -->