HBASE-26582 Prune use of Random and SecureRandom objects (#4118)
Avoid the pattern where a Random object is allocated, used once or twice, and
then left for GC. This triggers warnings from some static analysis tools
because the pattern yields poor effective randomness. In a few cases we were
legitimately suffering from this issue; in others the change is still
worthwhile because it reduces noise in analysis results.

Use ThreadLocalRandom, which gives good generator reuse, wherever there is no
requirement to set the seed.
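
As a minimal sketch of the before/after pattern (hypothetical helper, not code
taken from this diff):

    import java.util.Random;
    import java.util.concurrent.ThreadLocalRandom;

    public class JitterExample {
      // Anti-pattern this commit removes: a new Random is allocated, used
      // once, and left for GC, giving poor effective randomness over time.
      static int jitterBefore(int boundMs) {
        return new Random().nextInt(boundMs);
      }

      // Replacement: ThreadLocalRandom.current() reuses one generator per
      // thread. Its seed cannot be set, so it only fits call sites with no
      // fixed-seed requirement.
      static int jitterAfter(int boundMs) {
        return ThreadLocalRandom.current().nextInt(boundMs);
      }
    }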

Where useful, relax use of SecureRandom to plain Random or ThreadLocalRandom,
which are unlikely to block if the system entropy pool is low, in cases where
we do not need cryptographically strong randomness. The exception is the
normalization on Bytes#random for filling byte arrays with randomness: because
Bytes#random may be used to generate key material, it must be backed by
SecureRandom.
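
For reference, a sketch of what a SecureRandom-backed Bytes#random looks like;
this approximates the intent described above and is not a quote of the actual
org.apache.hadoop.hbase.util.Bytes source:

    import java.security.SecureRandom;

    public final class BytesRandomSketch {
      // Shared SecureRandom pool; SecureRandom is required here because the
      // filled arrays may become cryptographic key material.
      private static final SecureRandom RNG = new SecureRandom();

      /** Fill b with cryptographically strong random bytes. */
      public static void random(byte[] b) {
        RNG.nextBytes(b);
      }

      /** Variant used by call sites in this diff that generate keys and IVs. */
      public static void secureRandom(byte[] b) {
        RNG.nextBytes(b);
      }
    }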

Signed-off-by: Duo Zhang <[email protected]>
apurtell authored Mar 8, 2022
1 parent 39ecaa1 commit 1047194
Showing 186 changed files with 689 additions and 817 deletions.
========================================================================
@@ -34,7 +34,6 @@
 import java.util.Random;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ThreadLocalRandom;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -44,6 +43,7 @@
 import org.apache.hadoop.hbase.io.asyncfs.monitor.StreamSlowMonitor;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster.DataNodeProperties;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
@@ -57,7 +57,6 @@
 import org.junit.rules.TestName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
 import org.apache.hbase.thirdparty.io.netty.channel.Channel;
 import org.apache.hbase.thirdparty.io.netty.channel.EventLoop;
 import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;
@@ -72,13 +71,9 @@ public class TestFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
     HBaseClassTestRule.forClass(TestFanOutOneBlockAsyncDFSOutput.class);

   private static final Logger LOG = LoggerFactory.getLogger(TestFanOutOneBlockAsyncDFSOutput.class);
-
   private static DistributedFileSystem FS;
-
   private static EventLoopGroup EVENT_LOOP_GROUP;
-
   private static Class<? extends Channel> CHANNEL_CLASS;
-
   private static int READ_TIMEOUT_MS = 2000;

   private static StreamSlowMonitor MONITOR;
@@ -104,14 +99,16 @@ public static void tearDown() throws IOException, InterruptedException {
     shutdownMiniDFSCluster();
   }

+  private static final Random RNG = new Random(); // This test depends on Random#setSeed
+
   static void writeAndVerify(FileSystem fs, Path f, AsyncFSOutput out)
       throws IOException, InterruptedException, ExecutionException {
     List<CompletableFuture<Long>> futures = new ArrayList<>();
     byte[] b = new byte[10];
-    Random rand = new Random(12345);
     // test pipelined flush
+    RNG.setSeed(12345);
     for (int i = 0; i < 10; i++) {
-      rand.nextBytes(b);
+      RNG.nextBytes(b);
       out.write(b);
       futures.add(out.flush(false));
       futures.add(out.flush(false));
@@ -123,11 +120,11 @@ static void writeAndVerify(FileSystem fs, Path f, AsyncFSOutput out)
     out.close();
     assertEquals(b.length * 10, fs.getFileStatus(f).getLen());
     byte[] actual = new byte[b.length];
-    rand.setSeed(12345);
+    RNG.setSeed(12345);
     try (FSDataInputStream in = fs.open(f)) {
       for (int i = 0; i < 10; i++) {
         in.readFully(actual);
-        rand.nextBytes(b);
+        RNG.nextBytes(b);
         assertArrayEquals(b, actual);
       }
       assertEquals(-1, in.read());
@@ -150,7 +147,7 @@ public void testRecover() throws IOException, InterruptedException, ExecutionExc
     FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true,
       false, (short) 3, FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR);
     byte[] b = new byte[10];
-    ThreadLocalRandom.current().nextBytes(b);
+    Bytes.random(b);
     out.write(b, 0, b.length);
     out.flush(false).get();
     // restart one datanode which causes one connection broken
@@ -260,7 +257,7 @@ public void testWriteLargeChunk() throws IOException, InterruptedException, Exec
     FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true,
       false, (short) 3, 1024 * 1024 * 1024, eventLoop, CHANNEL_CLASS, MONITOR);
     byte[] b = new byte[50 * 1024 * 1024];
-    ThreadLocalRandom.current().nextBytes(b);
+    Bytes.random(b);
     out.write(b);
     out.flush(false);
     assertEquals(b.length, out.flush(false).get().longValue());
========================================================================
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hbase.io.asyncfs;

-
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;

@@ -29,12 +28,12 @@
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.CyclicBarrier;
 import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ThreadLocalRandom;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.io.asyncfs.monitor.StreamSlowMonitor;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster.DataNodeProperties;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -57,7 +56,6 @@
 import org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup;
 import org.apache.hbase.thirdparty.io.netty.channel.socket.nio.NioSocketChannel;

-
 /**
  * Testcase for HBASE-26679, here we introduce a separate test class and not put the testcase in
  * {@link TestFanOutOneBlockAsyncDFSOutput} because we will send heartbeat to DN when there is no
@@ -191,7 +189,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception
     });

     byte[] b = new byte[10];
-    ThreadLocalRandom.current().nextBytes(b);
+    Bytes.random(b);
     OUT.write(b, 0, b.length);
     CompletableFuture<Long> future = OUT.flush(false);
     /**
========================================================================
@@ -420,10 +420,9 @@ protected List<RegionInfo> randomRegions(int numRegions) {
   protected List<RegionInfo> createRegions(int numRegions, TableName tableName) {
     List<RegionInfo> regions = new ArrayList<>(numRegions);
     byte[] start = new byte[16];
+    Bytes.random(start);
     byte[] end = new byte[16];
-    Random rand = ThreadLocalRandom.current();
-    rand.nextBytes(start);
-    rand.nextBytes(end);
+    Bytes.random(end);
     for (int i = 0; i < numRegions; i++) {
       Bytes.putInt(start, 0, numRegions << 1);
       Bytes.putInt(end, 0, (numRegions << 1) + 1);
@@ -440,19 +439,18 @@ protected List<RegionInfo> createRegions(int numRegions, TableName tableName) {
   protected List<RegionInfo> randomRegions(int numRegions, int numTables) {
     List<RegionInfo> regions = new ArrayList<>(numRegions);
     byte[] start = new byte[16];
+    Bytes.random(start);
     byte[] end = new byte[16];
-    Random rand = ThreadLocalRandom.current();
-    rand.nextBytes(start);
-    rand.nextBytes(end);
+    Bytes.random(end);
     for (int i = 0; i < numRegions; i++) {
       if (!regionQueue.isEmpty()) {
         regions.add(regionQueue.poll());
         continue;
       }
       Bytes.putInt(start, 0, numRegions << 1);
       Bytes.putInt(end, 0, (numRegions << 1) + 1);
-      TableName tableName =
-          TableName.valueOf("table" + (numTables > 0 ? rand.nextInt(numTables) : i));
+      TableName tableName = TableName.valueOf("table" +
+        (numTables > 0 ? ThreadLocalRandom.current().nextInt(numTables) : i));
       RegionInfo hri = RegionInfoBuilder.newBuilder(tableName)
         .setStartKey(start)
         .setEndKey(end)
@@ -467,15 +465,13 @@ protected List<RegionInfo> randomRegions(int numRegions, int numTables) {
   protected List<RegionInfo> uniformRegions(int numRegions) {
     List<RegionInfo> regions = new ArrayList<>(numRegions);
     byte[] start = new byte[16];
+    Bytes.random(start);
     byte[] end = new byte[16];
-    Random rand = ThreadLocalRandom.current();
-    rand.nextBytes(start);
-    rand.nextBytes(end);
+    Bytes.random(end);
     for (int i = 0; i < numRegions; i++) {
       Bytes.putInt(start, 0, numRegions << 1);
       Bytes.putInt(end, 0, (numRegions << 1) + 1);
-      TableName tableName =
-          TableName.valueOf("table" + i);
+      TableName tableName = TableName.valueOf("table" + i);
       RegionInfo hri = RegionInfoBuilder.newBuilder(tableName)
         .setStartKey(start)
         .setEndKey(end)
========================================================================
@@ -63,19 +63,19 @@ public class TestRegionHDFSBlockLocationFinder {
   public static final HBaseClassTestRule CLASS_RULE =
     HBaseClassTestRule.forClass(TestRegionHDFSBlockLocationFinder.class);

+  private static final Random RNG = new Random(); // This test depends on Random#setSeed
   private static TableDescriptor TD;

   private static List<RegionInfo> REGIONS;

   private RegionHDFSBlockLocationFinder finder;

   private static HDFSBlocksDistribution generate(RegionInfo region) {
     HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();
     int seed = region.hashCode();
-    Random rand = new Random(seed);
-    int size = 1 + rand.nextInt(10);
+    RNG.setSeed(seed);
+    int size = 1 + RNG.nextInt(10);
     for (int i = 0; i < size; i++) {
-      distribution.addHostsAndBlockWeight(new String[] { "host-" + i }, 1 + rand.nextInt(100));
+      distribution.addHostsAndBlockWeight(new String[] { "host-" + i }, 1 + RNG.nextInt(100));
     }
     return distribution;
   }
========================================================================
@@ -19,8 +19,7 @@
 package org.apache.hadoop.hbase.client;

 import java.util.Arrays;
-import java.util.Random;
-
+import java.util.concurrent.ThreadLocalRandom;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.yetus.audience.InterfaceAudience;

@@ -33,12 +32,12 @@ public final class PerClientRandomNonceGenerator implements NonceGenerator {

   private static final PerClientRandomNonceGenerator INST = new PerClientRandomNonceGenerator();

-  private final Random rdm = new Random();
   private final long clientId;

   private PerClientRandomNonceGenerator() {
     byte[] clientIdBase = ClientIdGenerator.generateClientId();
-    this.clientId = (((long) Arrays.hashCode(clientIdBase)) << 32) + rdm.nextInt();
+    this.clientId = (((long) Arrays.hashCode(clientIdBase)) << 32) +
+      ThreadLocalRandom.current().nextInt();
   }

   @Override
@@ -50,7 +49,7 @@ public long getNonceGroup() {
   public long newNonce() {
     long result = HConstants.NO_NONCE;
     do {
-      result = rdm.nextLong();
+      result = ThreadLocalRandom.current().nextLong();
     } while (result == HConstants.NO_NONCE);
     return result;
   }
========================================================================
@@ -20,7 +20,7 @@
 package org.apache.hadoop.hbase.filter;

 import java.util.Objects;
-import java.util.Random;
+import java.util.concurrent.ThreadLocalRandom;

 import org.apache.hadoop.hbase.Cell;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -35,7 +35,6 @@
  */
 @InterfaceAudience.Public
 public class RandomRowFilter extends FilterBase {
-  protected static final Random random = new Random();

   protected float chance;
   protected boolean filterOutRow;
@@ -98,7 +97,7 @@ public boolean filterRowKey(Cell firstRowCell) {
       filterOutRow = false;
     } else {
       // roll the dice
-      filterOutRow = !(random.nextFloat() < chance);
+      filterOutRow = !(ThreadLocalRandom.current().nextFloat() < chance);
     }
     return filterOutRow;
   }
========================================================================
@@ -22,7 +22,6 @@
 import java.io.IOException;
 import java.security.Key;
 import java.security.KeyException;
-import java.security.SecureRandom;
 import java.util.Properties;
 import javax.crypto.spec.SecretKeySpec;
 import org.apache.commons.crypto.cipher.CryptoCipherFactory;
@@ -37,7 +36,6 @@
 import org.apache.yetus.audience.InterfaceStability;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
 import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
@@ -50,8 +48,6 @@
 public final class EncryptionUtil {
   static private final Logger LOG = LoggerFactory.getLogger(EncryptionUtil.class);

-  static private final SecureRandom RNG = new SecureRandom();
-
   /**
    * Private constructor to keep this class from being instantiated.
    */
@@ -96,7 +92,7 @@ public static byte[] wrapKey(Configuration conf, String subject, Key key)
     byte[] iv = null;
     if (cipher.getIvLength() > 0) {
       iv = new byte[cipher.getIvLength()];
-      RNG.nextBytes(iv);
+      Bytes.secureRandom(iv);
       builder.setIv(UnsafeByteOperations.unsafeWrap(iv));
     }
     byte[] keyBytes = key.getEncoded();
@@ -286,7 +282,7 @@ public static Key unwrapKey(Configuration conf, byte[] keyBytes) throws IOExcept
    * @throws IOException if create CryptoAES failed
    */
   public static CryptoAES createCryptoAES(RPCProtos.CryptoCipherMeta cryptoCipherMeta,
-      Configuration conf) throws IOException {
+    Configuration conf) throws IOException {
     Properties properties = new Properties();
     // the property for cipher class
     properties.setProperty(CryptoCipherFactory.CLASSES_KEY,
========================================================================
@@ -22,7 +22,8 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Random;
+import java.util.concurrent.ThreadLocalRandom;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -49,8 +50,6 @@ public class SlowLogTableAccessor {

   private static final Logger LOG = LoggerFactory.getLogger(SlowLogTableAccessor.class);

-  private static final Random RANDOM = new Random();
-
   private static Connection connection;

   /**
@@ -139,7 +138,7 @@ private static byte[] getRowKey(final TooSlowLog.SlowLogPayload slowLogPayload)
     String lastFiveDig =
       hashcode.substring((hashcode.length() > 5) ? (hashcode.length() - 5) : 0);
     if (lastFiveDig.startsWith("-")) {
-      lastFiveDig = String.valueOf(RANDOM.nextInt(99999));
+      lastFiveDig = String.valueOf(ThreadLocalRandom.current().nextInt(99999));
     }
     final long currentTime = EnvironmentEdgeManager.currentTime();
     final String timeAndHashcode = currentTime + lastFiveDig;
========================================================================
@@ -23,7 +23,7 @@

 import java.security.Key;
 import java.security.KeyException;
-import java.security.SecureRandom;
+
 import javax.crypto.spec.SecretKeySpec;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -110,7 +110,7 @@ public void testWALKeyWrappingWithIncorrectKey() throws Exception {

     // generate a test key
     byte[] keyBytes = new byte[AES.KEY_LENGTH];
-    new SecureRandom().nextBytes(keyBytes);
+    Bytes.secureRandom(keyBytes);
     String algorithm = conf.get(HConstants.CRYPTO_WAL_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
     Key key = new SecretKeySpec(keyBytes, algorithm);

@@ -152,7 +152,7 @@ private void testKeyWrapping(String hashAlgorithm) throws Exception {

     // generate a test key
     byte[] keyBytes = new byte[AES.KEY_LENGTH];
-    new SecureRandom().nextBytes(keyBytes);
+    Bytes.secureRandom(keyBytes);
     String algorithm =
       conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
     Key key = new SecretKeySpec(keyBytes, algorithm);
@@ -189,7 +189,7 @@ private void testWALKeyWrapping(String hashAlgorithm) throws Exception {

     // generate a test key
     byte[] keyBytes = new byte[AES.KEY_LENGTH];
-    new SecureRandom().nextBytes(keyBytes);
+    Bytes.secureRandom(keyBytes);
     String algorithm = conf.get(HConstants.CRYPTO_WAL_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
     Key key = new SecretKeySpec(keyBytes, algorithm);

@@ -214,7 +214,7 @@ private void testKeyWrappingWithMismatchingAlgorithms(Configuration conf) throws

     // generate a test key
     byte[] keyBytes = new byte[AES.KEY_LENGTH];
-    new SecureRandom().nextBytes(keyBytes);
+    Bytes.secureRandom(keyBytes);
     String algorithm =
       conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
     Key key = new SecretKeySpec(keyBytes, algorithm);
========================================================================
@@ -25,11 +25,9 @@
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Random;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.CompletionException;
 import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.MiscTests;