From 56b81b42d8219309d6fe4472a336cde71ac53357 Mon Sep 17 00:00:00 2001
From: Xiaolin Ha <haxiaolin@apache.org>
Date: Tue, 24 May 2022 15:51:52 +0800
Subject: [PATCH] HBASE-26342 Support custom paths of independent configuration
 and pool for hfile cleaner (#4403) (#4461)

Signed-off-by: Andrew Purtell <apurtell@apache.org>
---
 .../apache/hadoop/hbase/master/HMaster.java   |  99 ++++++++++++---
 .../hbase/master/MasterRpcServices.java       |   5 +-
 .../hbase/master/cleaner/CleanerChore.java    |  53 +++++++-
 .../hbase/master/cleaner/DirScanPool.java     |  11 +-
 .../hbase/master/cleaner/HFileCleaner.java    |  23 +++-
 .../hbase/master/cleaner/LogCleaner.java      |   2 +-
 .../master/TestMasterChoreScheduled.java      |   7 +-
 .../cleaner/TestCleanerClearHFiles.java       | 120 ++++++++++++++++++
 8 files changed, 287 insertions(+), 33 deletions(-)
 create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerClearHFiles.java

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 021625a29f3c..ee6cb28b0485 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -20,6 +20,7 @@
 import static org.apache.hadoop.hbase.HConstants.DEFAULT_HBASE_SPLIT_COORDINATED_BY_ZK;
 import static org.apache.hadoop.hbase.HConstants.HBASE_MASTER_LOGCLEANER_PLUGINS;
 import static org.apache.hadoop.hbase.HConstants.HBASE_SPLIT_WAL_COORDINATED_BY_ZK;
+import static org.apache.hadoop.hbase.master.cleaner.HFileCleaner.CUSTOM_POOL_SIZE;
 import static org.apache.hadoop.hbase.util.DNS.MASTER_HOSTNAME_KEY;
 
 import com.google.errorprone.annotations.RestrictedApi;
@@ -39,6 +40,7 @@
 import java.util.Comparator;
 import java.util.EnumSet;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
@@ -77,6 +79,7 @@
 import org.apache.hadoop.hbase.PleaseRestartMasterException;
 import org.apache.hadoop.hbase.RegionMetrics;
 import org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
+import org.apache.hadoop.hbase.ScheduledChore;
 import org.apache.hadoop.hbase.ServerMetrics;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.ServerTask;
@@ -359,12 +362,18 @@ public class HMaster extends HRegionServer implements MasterServices {
   private HbckChore hbckChore;
   CatalogJanitor catalogJanitorChore;
 
-  // Threadpool for scanning the archive directory, used by the HFileCleaner
-  private DirScanPool hfileCleanerPool;
   // Threadpool for scanning the Old logs directory, used by the LogCleaner
   private DirScanPool logCleanerPool;
   private LogCleaner logCleaner;
-  private HFileCleaner hfileCleaner;
+  // HFile cleaners for the custom hfile archive paths and the default archive path
+  // The archive path cleaner is the first element
+  private List<HFileCleaner> hfileCleaners = new ArrayList<>();
+  // The hfile cleaner paths, including custom paths and the default archive path
+  private List<Path> hfileCleanerPaths = new ArrayList<>();
+  // The shared hfile cleaner pool for the custom archive paths
+  private DirScanPool sharedHFileCleanerPool;
+  // The exclusive hfile cleaner pool for scanning the archive directory
+  private DirScanPool exclusiveHFileCleanerPool;
   private ReplicationBarrierCleaner replicationBarrierCleaner;
   private ExpiredMobFileCleanerChore expiredMobFileCleanerChore;
   private MobCompactionChore mobCompactChore;
@@ -1157,11 +1166,18 @@ private void finishActiveMasterInitialization(MonitoredTask status)
       (EnvironmentEdgeManager.currentTime() - masterActiveTime) / 1000.0f));
     this.masterFinishedInitializationTime = EnvironmentEdgeManager.currentTime();
     configurationManager.registerObserver(this.balancer);
-    configurationManager.registerObserver(this.hfileCleanerPool);
     configurationManager.registerObserver(this.logCleanerPool);
-    configurationManager.registerObserver(this.hfileCleaner);
     configurationManager.registerObserver(this.logCleaner);
     configurationManager.registerObserver(this.regionsRecoveryConfigManager);
+    configurationManager.registerObserver(this.exclusiveHFileCleanerPool);
+    if (this.sharedHFileCleanerPool != null) {
+      configurationManager.registerObserver(this.sharedHFileCleanerPool);
+    }
+    if (this.hfileCleaners != null) {
+      for (HFileCleaner cleaner : hfileCleaners) {
+        configurationManager.registerObserver(cleaner);
+      }
+    }
 
     // Set master as 'initialized'.
     setInitialized(true);
@@ -1434,8 +1450,8 @@ public boolean isCatalogJanitorEnabled() {
   boolean isCleanerChoreEnabled() {
     boolean hfileCleanerFlag = true, logCleanerFlag = true;
 
-    if (hfileCleaner != null) {
-      hfileCleanerFlag = hfileCleaner.getEnabled();
+    if (getHFileCleaner() != null) {
+      hfileCleanerFlag = getHFileCleaner().getEnabled();
     }
 
     if (logCleaner != null) {
@@ -1534,13 +1550,47 @@ executorService.new ExecutorConfig().setExecutorType(ExecutorType.MASTER_MERGE_O
       getMasterWalManager().getOldLogDir(), logCleanerPool, params);
     getChoreService().scheduleChore(logCleaner);
 
-    // start the hfile archive cleaner thread
     Path archiveDir = HFileArchiveUtil.getArchivePath(conf);
-    // Create archive cleaner thread pool
-    hfileCleanerPool = DirScanPool.getHFileCleanerScanPool(conf);
-    this.hfileCleaner = new HFileCleaner(cleanerInterval, this, conf,
-      getMasterFileSystem().getFileSystem(), archiveDir, hfileCleanerPool, params);
-    getChoreService().scheduleChore(hfileCleaner);
+
+    // Create custom archive hfile cleaners
+    String[] paths = conf.getStrings(HFileCleaner.HFILE_CLEANER_CUSTOM_PATHS);
+    // todo: handle the overlap issues for the custom paths
+
+    if (paths != null && paths.length > 0) {
+      if (conf.getStrings(HFileCleaner.HFILE_CLEANER_CUSTOM_PATHS_PLUGINS) == null) {
+        Set<String> cleanerClasses = new HashSet<>();
+        String[] cleaners = conf.getStrings(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS);
+        if (cleaners != null) {
+          Collections.addAll(cleanerClasses, cleaners);
+        }
+        conf.setStrings(HFileCleaner.HFILE_CLEANER_CUSTOM_PATHS_PLUGINS,
+          cleanerClasses.toArray(new String[cleanerClasses.size()]));
+        LOG.info("Archive custom cleaner paths: {}, plugins: {}", Arrays.asList(paths),
+          cleanerClasses);
+      }
+      // share the hfile cleaner pool in custom paths
+      sharedHFileCleanerPool = DirScanPool.getHFileCleanerScanPool(conf.get(CUSTOM_POOL_SIZE, "6"));
+      for (int i = 0; i < paths.length; i++) {
+        Path path = new Path(paths[i].trim());
+        HFileCleaner cleaner =
+          new HFileCleaner("ArchiveCustomHFileCleaner-" + path.getName(), cleanerInterval, this,
+            conf, getMasterFileSystem().getFileSystem(), new Path(archiveDir, path),
+            HFileCleaner.HFILE_CLEANER_CUSTOM_PATHS_PLUGINS, sharedHFileCleanerPool, params, null);
+        hfileCleaners.add(cleaner);
+        hfileCleanerPaths.add(path);
+      }
+    }
+
+    // Create the whole archive dir cleaner thread pool
+    exclusiveHFileCleanerPool = DirScanPool.getHFileCleanerScanPool(conf);
+    hfileCleaners.add(0,
+      new HFileCleaner(cleanerInterval, this, conf, getMasterFileSystem().getFileSystem(),
+        archiveDir, exclusiveHFileCleanerPool, params, hfileCleanerPaths));
+    hfileCleanerPaths.add(0, archiveDir);
+    // Schedule all the hfile cleaners
+    for (HFileCleaner hFileCleaner : hfileCleaners) {
+      getChoreService().scheduleChore(hFileCleaner);
+    }
 
     // Regions Reopen based on very high storeFileRefCount is considered enabled
     // only if hbase.regions.recovery.store.file.ref.count has value > 0
@@ -1592,14 +1642,18 @@ protected void stopServiceThreads() {
       this.mobCompactThread.close();
     }
     super.stopServiceThreads();
-    if (hfileCleanerPool != null) {
-      hfileCleanerPool.shutdownNow();
-      hfileCleanerPool = null;
+    if (exclusiveHFileCleanerPool != null) {
+      exclusiveHFileCleanerPool.shutdownNow();
+      exclusiveHFileCleanerPool = null;
     }
     if (logCleanerPool != null) {
       logCleanerPool.shutdownNow();
       logCleanerPool = null;
     }
+    if (sharedHFileCleanerPool != null) {
+      sharedHFileCleanerPool.shutdownNow();
+      sharedHFileCleanerPool = null;
+    }
 
     LOG.debug("Stopping service threads");
@@ -1733,7 +1787,12 @@ private void stopChores() {
       shutdownChore(clusterStatusPublisherChore);
       shutdownChore(snapshotQuotaChore);
       shutdownChore(logCleaner);
-      shutdownChore(hfileCleaner);
+      if (hfileCleaners != null) {
+        for (ScheduledChore chore : hfileCleaners) {
+          chore.shutdown();
+        }
+        hfileCleaners = null;
+      }
       shutdownChore(replicationBarrierCleaner);
       shutdownChore(snapshotCleanerChore);
       shutdownChore(hbckChore);
@@ -3223,7 +3282,11 @@ public static void main(String[] args) {
   }
 
   public HFileCleaner getHFileCleaner() {
-    return this.hfileCleaner;
+    return this.hfileCleaners.get(0);
+  }
+
+  public List<HFileCleaner> getHFileCleaners() {
+    return this.hfileCleaners;
   }
 
   public LogCleaner getLogCleaner() {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 82a8ae0917b1..7eccac6416a3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -75,6 +75,7 @@
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
 import org.apache.hadoop.hbase.master.assignment.RegionStateNode;
 import org.apache.hadoop.hbase.master.assignment.RegionStates;
+import org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
 import org.apache.hadoop.hbase.master.janitor.MetaFixer;
 import org.apache.hadoop.hbase.master.locking.LockProcedure;
 import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
@@ -801,7 +802,9 @@ public SetCleanerChoreRunningResponse setCleanerChoreRunning(RpcController c,
       boolean prevValue =
         master.getLogCleaner().getEnabled() && master.getHFileCleaner().getEnabled();
       master.getLogCleaner().setEnabled(req.getOn());
-      master.getHFileCleaner().setEnabled(req.getOn());
+      for (HFileCleaner hFileCleaner : master.getHFileCleaners()) {
+        hFileCleaner.setEnabled(req.getOn());
+      }
       return SetCleanerChoreRunningResponse.newBuilder().setPrevValue(prevValue).build();
     }
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index 80908e1e0509..801d7939a789 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.master.cleaner;
 
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -79,10 +80,11 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu
   protected final Map<String, Object> params;
   private final AtomicBoolean enabled = new AtomicBoolean(true);
   protected List<T> cleanersChain;
+  protected List<String> excludeDirs;
 
   public CleanerChore(String name, final int sleepPeriod, final Stoppable s, Configuration conf,
     FileSystem fs, Path oldFileDir, String confKey, DirScanPool pool) {
-    this(name, sleepPeriod, s, conf, fs, oldFileDir, confKey, pool, null);
+    this(name, sleepPeriod, s, conf, fs, oldFileDir, confKey, pool, null, null);
   }
 
   /**
@@ -97,7 +99,8 @@ public CleanerChore(String name, final int sleepPeriod, final Stoppable s, Confi
    * @param params members could be used in cleaner
    */
   public CleanerChore(String name, final int sleepPeriod, final Stoppable s, Configuration conf,
-    FileSystem fs, Path oldFileDir, String confKey, DirScanPool pool, Map<String, Object> params) {
+    FileSystem fs, Path oldFileDir, String confKey, DirScanPool pool, Map<String, Object> params,
+    List<Path> excludePaths) {
     super(name, s, sleepPeriod);
 
     Preconditions.checkNotNull(pool, "Chore's pool can not be null");
@@ -106,6 +109,19 @@ public CleanerChore(String name, final int sleepPeriod, final Stoppable s, Confi
     this.oldFileDir = oldFileDir;
     this.conf = conf;
     this.params = params;
+    if (excludePaths != null && !excludePaths.isEmpty()) {
+      excludeDirs = new ArrayList<>(excludePaths.size());
+      for (Path path : excludePaths) {
+        StringBuilder dirPart = new StringBuilder(path.toString());
+        if (!path.toString().endsWith("/")) {
+          dirPart.append("/");
+        }
+        excludeDirs.add(dirPart.toString());
+      }
+    }
+    if (excludeDirs != null) {
+      LOG.info("Cleaner {} excludes sub dirs: {}", name, excludeDirs);
+    }
     initCleanerChain(confKey);
   }
 
@@ -419,9 +435,11 @@ private void traverseAndDelete(Path dir, boolean root, CompletableFuture<Boolea
       sortByConsumedSpace(subDirs);
       // Submit the request of sub-directory deletion.
       subDirs.forEach(subDir -> {
-        CompletableFuture<Boolean> subFuture = new CompletableFuture<>();
-        pool.execute(() -> traverseAndDelete(subDir.getPath(), false, subFuture));
-        futures.add(subFuture);
+        if (!shouldExclude(subDir)) {
+          CompletableFuture<Boolean> subFuture = new CompletableFuture<>();
+          pool.execute(() -> traverseAndDelete(subDir.getPath(), false, subFuture));
+          futures.add(subFuture);
+        }
       });
     }
@@ -451,11 +469,34 @@ private void traverseAndDelete(Path dir, boolean root, CompletableFuture<Boolea
     }
   }
 
+  /**
+   * Check if a path should not perform clear
+   */
+  private boolean shouldExclude(FileStatus f) {
+    if (!f.isDirectory()) {
+      return false;
+    }
+    if (excludeDirs != null && !excludeDirs.isEmpty()) {
+      for (String dirPart : excludeDirs) {
+        // since we make excludeDirs end with '/',
+        // if a path contains() the dirPart, the path should be excluded
+        if (f.getPath().toString().contains(dirPart)) {
+          return true;
+        }
+      }
+    }
+    return false;
+  }
+
   /**
    * Perform a delete on a specified type.
    * @param deletion a delete
    * @param type possible values are 'files', 'subdirs', 'dirs'
    * @return true if it deleted successfully, false otherwise
    */
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/DirScanPool.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/DirScanPool.java
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/DirScanPool.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/DirScanPool.java
@@ -50,9 +50,12 @@ public class DirScanPool implements ConfigurationObserver {
 
   private DirScanPool(Configuration conf, Type dirScanPoolType) {
+    this(dirScanPoolType, conf.get(dirScanPoolType.cleanerPoolSizeConfigName,
+      dirScanPoolType.cleanerPoolSizeConfigDefault));
+  }
+
+  private DirScanPool(Type dirScanPoolType, String poolSize) {
     this.dirScanPoolType = dirScanPoolType;
     this.name = dirScanPoolType.name().toLowerCase();
-    String poolSize = conf.get(dirScanPoolType.cleanerPoolSizeConfigName,
-      dirScanPoolType.cleanerPoolSizeConfigDefault);
     size = CleanerChore.calculatePoolSize(poolSize);
     // poolSize may be 0 or 0.0 from a careless configuration,
     // double check to make sure.
@@ -128,6 +131,10 @@ public class DirScanPool implements ConfigurationObserver {
     return new DirScanPool(conf, Type.HFILE_CLEANER);
   }
 
+  public static DirScanPool getHFileCleanerScanPool(String poolSize) {
+    return new DirScanPool(Type.HFILE_CLEANER, poolSize);
+  }
+
   public static DirScanPool getLogCleanerScanPool(Configuration conf) {
     return new DirScanPool(conf, Type.LOG_CLEANER);
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
@@ -106,6 +106,17 @@ public class HFileCleaner extends CleanerChore<BaseHFileCleanerDelegate>
 
   private static final Logger LOG = LoggerFactory.getLogger(HFileCleaner.class);
 
+  // Custom archive paths that get their own cleaner chores, backed by a shared scan pool
+  public static final String HFILE_CLEANER_CUSTOM_PATHS =
+    "hbase.master.hfile.cleaner.custom.paths";
+
+  // Plugins run by the cleaners of the custom archive paths
+  public static final String HFILE_CLEANER_CUSTOM_PATHS_PLUGINS =
+    "hbase.master.hfilecleaner.custom.paths.plugins";
+
+  // Size of the dir scan pool shared by the custom-path hfile cleaners
+  public static final String CUSTOM_POOL_SIZE = "hbase.cleaner.custom.hfiles.pool.size";
+
   StealJobQueue<HFileDeleteTask> largeFileQueue;
   BlockingQueue<HFileDeleteTask> smallFileQueue;
 
@@ -117,8 +128,13 @@ public HFileCleaner(final int period, final Stoppable stopper, Configuration con
   public HFileCleaner(final int period, final Stoppable stopper, Configuration conf, FileSystem fs,
     Path directory, DirScanPool pool, Map<String, Object> params) {
     this("HFileCleaner", period, stopper, conf, fs, directory, MASTER_HFILE_CLEANER_PLUGINS, pool,
-      params);
+      params, null);
+  }
+
+  public HFileCleaner(final int period, final Stoppable stopper, Configuration conf, FileSystem fs,
+    Path directory, DirScanPool pool, Map<String, Object> params, List<Path> excludePaths) {
+    this("HFileCleaner", period, stopper, conf, fs, directory, MASTER_HFILE_CLEANER_PLUGINS, pool,
+      params, excludePaths);
   }
 
   /**
@@ -134,8 +150,9 @@ public HFileCleaner(final int period, final Stoppable stopper, Configuration con
    * @param params params could be used in subclass of BaseHFileCleanerDelegate
    */
   public HFileCleaner(String name, int period, Stoppable stopper, Configuration conf, FileSystem fs,
-    Path directory, String confKey, DirScanPool pool, Map<String, Object> params) {
-    super(name, period, stopper, conf, fs, directory, confKey, pool, params);
+    Path directory, String confKey, DirScanPool pool, Map<String, Object> params,
+    List<Path> excludePaths) {
+    super(name, period, stopper, conf, fs, directory, confKey, pool, params, excludePaths);
     throttlePoint = conf.getInt(HFILE_DELETE_THROTTLE_THRESHOLD,
       DEFAULT_HFILE_DELETE_THROTTLE_THRESHOLD);
     largeQueueInitSize =
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
index 90e498583c0c..ac0a98801c15 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
@@ -75,7 +75,7 @@ public class LogCleaner extends CleanerChore<BaseLogCleanerDelegate>
   public LogCleaner(final int period, final Stoppable stopper, Configuration conf, FileSystem fs,
     Path oldLogDir, DirScanPool pool, Map<String, Object> params) {
     super("LogsCleaner", period, stopper, conf, fs, oldLogDir, HBASE_MASTER_LOGCLEANER_PLUGINS,
-      pool, params);
+      pool, params, null);
     this.pendingDelete = new LinkedBlockingQueue<>();
     int size = conf.getInt(OLD_WALS_CLEANER_THREAD_SIZE, DEFAULT_OLD_WALS_CLEANER_THREAD_SIZE);
     this.oldWALsCleaner = createOldWalsCleaner(size);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterChoreScheduled.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterChoreScheduled.java
index 9b84bb729aec..f2567bbba2fb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterChoreScheduled.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterChoreScheduled.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.master;
 
 import java.lang.reflect.Field;
+import java.util.ArrayList;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.ScheduledChore;
@@ -63,7 +64,7 @@ public static void tearDown() throws Exception {
   }
 
   @Test
-  public void testDefaultScheduledChores() {
+  public void testDefaultScheduledChores() throws Exception {
     // test if logCleaner chore is scheduled by default in HMaster init
     TestChoreField<LogCleaner> logCleanerTestChoreField = new TestChoreField<>();
     LogCleaner logCleaner = logCleanerTestChoreField.getChoreObj("logCleaner");
@@ -71,7 +72,9 @@ public void testDefaultScheduledChores() {
 
     // test if hfileCleaner chore is scheduled by default in HMaster init
     TestChoreField<HFileCleaner> hFileCleanerTestChoreField = new TestChoreField<>();
-    HFileCleaner hFileCleaner = hFileCleanerTestChoreField.getChoreObj("hfileCleaner");
+    Field masterField = HMaster.class.getDeclaredField("hfileCleaners");
+    masterField.setAccessible(true);
+    HFileCleaner hFileCleaner = ((ArrayList<HFileCleaner>) masterField.get(hMaster)).get(0);
     hFileCleanerTestChoreField.testIfChoreScheduled(hFileCleaner);
 
     // test if replicationBarrierCleaner chore is scheduled by default in HMaster init
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerClearHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerClearHFiles.java
new file mode 100644
index 000000000000..58117516258c
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerClearHFiles.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.master.cleaner;
+
+import static org.apache.hadoop.hbase.master.HMaster.HBASE_MASTER_CLEANER_INTERVAL;
+import static org.apache.hadoop.hbase.master.cleaner.HFileCleaner.HFILE_CLEANER_CUSTOM_PATHS_PLUGINS;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.HFileArchiveUtil;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
+@Category(LargeTests.class)
+public class TestCleanerClearHFiles {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+    HBaseClassTestRule.forClass(TestCleanerClearHFiles.class);
+
+  @Rule
+  public TestName name = new TestName();
+
+  private static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private static Configuration conf = TEST_UTIL.getConfiguration();
+  private static Admin admin = null;
+
+  private static final byte[] COLUMN_FAMILY = Bytes.toBytes("CF");
+
+  private static final String TABLE1 = "table1";
+  private static final String TABLE2 = "table2";
+  private static final String DEFAULT_ARCHIVE_SUBDIRS_PREFIX = "data/default/";
+
+  @BeforeClass
+  public static void setupBeforeClass() throws Exception {
+    conf.setStrings(HFileCleaner.HFILE_CLEANER_CUSTOM_PATHS,
+      DEFAULT_ARCHIVE_SUBDIRS_PREFIX + TABLE1);
+    conf.setStrings(HFILE_CLEANER_CUSTOM_PATHS_PLUGINS, HFileLinkCleaner.class.getName());
+
+    conf.setInt(TimeToLiveHFileCleaner.TTL_CONF_KEY, 10);
+    conf.setInt(HBASE_MASTER_CLEANER_INTERVAL, 20000);
+
+    TEST_UTIL.startMiniCluster();
+    admin = TEST_UTIL.getAdmin();
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testClearArchive() throws Exception {
+    DistributedFileSystem fs = TEST_UTIL.getDFSCluster().getFileSystem();
+    Table table1 = createTable(TEST_UTIL, TableName.valueOf(TABLE1));
+    Table table2 = createTable(TEST_UTIL, TableName.valueOf(TABLE2));
+
+    admin.disableTable(table1.getName());
+    admin.deleteTable(table1.getName());
+    admin.disableTable(table2.getName());
+    admin.deleteTable(table2.getName());
+
+    Path archiveDir = HFileArchiveUtil.getArchivePath(conf);
+    Path archiveTable1Path = new Path(archiveDir, DEFAULT_ARCHIVE_SUBDIRS_PREFIX + TABLE1);
+    Path archiveTable2Path = new Path(archiveDir, DEFAULT_ARCHIVE_SUBDIRS_PREFIX + TABLE2);
+
+    TEST_UTIL.waitFor(10000, () -> !notExistOrEmptyDir(archiveTable1Path, fs)
+      && !notExistOrEmptyDir(archiveTable2Path, fs));
+
+    TEST_UTIL.waitFor(30000,
+      () -> notExistOrEmptyDir(archiveTable1Path, fs) && notExistOrEmptyDir(archiveTable2Path, fs));
+  }
+
+  private boolean notExistOrEmptyDir(Path dir, DistributedFileSystem fs) {
+    try {
+      return fs.listStatus(dir).length == 0;
+    } catch (Exception e) {
+      return e instanceof FileNotFoundException;
+    }
+  }
+
+  private Table createTable(HBaseTestingUtility util, TableName tableName) throws IOException {
+    TableDescriptor td = TableDescriptorBuilder.newBuilder(tableName)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(COLUMN_FAMILY).build()).build();
+    return util.createTable(td, null);
+  }
+}
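
Editor's note (appended, not part of the patch): a minimal sketch of how this feature
might be enabled from operator code, using only the constants the patch introduces.
The sub-path "data/default/mytable" and the pool size "4" are illustrative
assumptions; TestCleanerClearHFiles above wires the same keys for its mini cluster.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
  import org.apache.hadoop.hbase.master.cleaner.HFileLinkCleaner;

  public class CustomHFileCleanerConfig {
    public static Configuration create() {
      Configuration conf = HBaseConfiguration.create();
      // Archive sub-paths (relative to the archive dir) that get their own cleaner
      // chore; "data/default/mytable" is a hypothetical table path.
      conf.setStrings(HFileCleaner.HFILE_CLEANER_CUSTOM_PATHS, "data/default/mytable");
      // Delegates run by the custom-path cleaners; when unset, HMaster falls back to
      // the plugins configured for the default cleaner (see the HMaster hunk above).
      conf.setStrings(HFileCleaner.HFILE_CLEANER_CUSTOM_PATHS_PLUGINS,
        HFileLinkCleaner.class.getName());
      // Size of the DirScanPool shared by all custom-path cleaners (defaults to "6").
      conf.set(HFileCleaner.CUSTOM_POOL_SIZE, "4");
      return conf;
    }
  }

With this configuration the master schedules one "ArchiveCustomHFileCleaner-mytable"
chore on the shared pool, while the default HFileCleaner keeps scanning the rest of
the archive and skips the custom path via CleanerChore.shouldExclude.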