Skip to content

Commit

Permalink
TableAdapter & TableAdapter2x Veneer adaptation (googleapis#2006)
Browse files Browse the repository at this point in the history
* adding TableAdapter and related changes

* fixed typo error on InstanceId

* removed unnecessary TODOs & reverted toColumnFamily

* removed unused TableAdapter#adapt and added @InternalApi

* added unit tests, refactored variable name & removed unused methods
  • Loading branch information
rahulKQL committed Nov 29, 2018
1 parent af11981 commit a15fdc0
Show file tree
Hide file tree
Showing 11 changed files with 350 additions and 127 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -126,6 +126,11 @@ public InstanceName toGcbInstanceName() {
return InstanceName.of(projectId, instanceId);
}

//TODO(rahulkql): Refactor once google-cloud-java/issues/4091 is resolved.
/**
 * Returns this instance's name as an admin-API {@code com.google.bigtable.admin.v2.InstanceName},
 * built from the same {@code projectId} and {@code instanceId} this object holds.
 *
 * @return a {@link com.google.bigtable.admin.v2.InstanceName} for this project/instance pair.
 */
public com.google.bigtable.admin.v2.InstanceName toAdminInstanceName() {
return com.google.bigtable.admin.v2.InstanceName.of(projectId, instanceId);
}

/**
 * Builds the fully qualified cluster name for the given cluster id, by appending
 * {@code /clusters/<clusterId>} to this instance's name.
 *
 * @param clusterId the short id of the cluster within this instance.
 * @return a {@link BigtableClusterName} wrapping the fully qualified name.
 */
public BigtableClusterName toClusterName(String clusterId) {
  final String fullClusterName = instanceName + "/clusters/" + clusterId;
  return new BigtableClusterName(fullClusterName);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
*/
package com.google.cloud.bigtable.grpc;

import com.google.cloud.bigtable.data.v2.models.InstanceName;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
Expand Down Expand Up @@ -97,4 +98,15 @@ public void testNoInstanceName() {
expectedException.expect(IllegalArgumentException.class);
new BigtableInstanceName("project", "");
}

/** Verifies that toGcbInstanceName() produces a data-API {@link InstanceName}. */
@Test
public void testGcbInstanceName() {
  Object gcbName = bigtableInstanceName.toGcbInstanceName();
  Assert.assertTrue(gcbName instanceof InstanceName);
}

/** Verifies that toAdminInstanceName() produces an admin-API InstanceName. */
@Test
public void testAdminInstanceName() {
  Object adminName = bigtableInstanceName.toAdminInstanceName();
  Assert.assertTrue(adminName instanceof com.google.bigtable.admin.v2.InstanceName);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -15,23 +15,22 @@
*/
package com.google.cloud.bigtable.hbase.adapters.admin;

import static com.google.cloud.bigtable.admin.v2.models.GCRules.GCRULES;

import com.google.bigtable.admin.v2.ColumnFamily;
import com.google.bigtable.admin.v2.GcRule;
import com.google.bigtable.admin.v2.GcRule.Intersection;
import com.google.bigtable.admin.v2.GcRule.RuleCase;
import com.google.bigtable.admin.v2.GcRule.Union;
import com.google.common.annotations.VisibleForTesting;
import com.google.cloud.bigtable.admin.v2.models.GCRules.GCRule;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.protobuf.Duration;
import org.threeten.bp.Duration;

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -164,12 +163,12 @@ public static void throwIfRequestingUnsupportedFeatures(HColumnDescriptor column
}

/**
* Construct an Bigtable {@link com.google.bigtable.admin.v2.GcRule} from the given column descriptor.
* Construct an Bigtable {@link GCRule} from the given column descriptor.
*
* @param columnDescriptor a {@link org.apache.hadoop.hbase.HColumnDescriptor} object.
* @return a {@link com.google.bigtable.admin.v2.GcRule} object.
* @return a {@link GCRule} object.
*/
public static GcRule buildGarbageCollectionRule(HColumnDescriptor columnDescriptor) {
public static GCRule buildGarbageCollectionRule(HColumnDescriptor columnDescriptor) {
int maxVersions = columnDescriptor.getMaxVersions();
int minVersions = columnDescriptor.getMinVersions();
int ttlSeconds = columnDescriptor.getTimeToLive();
Expand All @@ -181,12 +180,12 @@ public static GcRule buildGarbageCollectionRule(HColumnDescriptor columnDescript
if (maxVersions == Integer.MAX_VALUE) {
return null;
} else {
return maxVersions(maxVersions);
return GCRULES.maxVersions(maxVersions);
}
}

// minVersions only comes into play with a TTL:
GcRule ageRule = maxAge(ttlSeconds);
GCRule ageRule = GCRULES.maxAge(Duration.ofSeconds(ttlSeconds));
if (minVersions != HColumnDescriptor.DEFAULT_MIN_VERSIONS) {
// The logic here is: only delete a cell if:
// 1) the age is older than :ttlSeconds AND
Expand All @@ -196,43 +195,15 @@ public static GcRule buildGarbageCollectionRule(HColumnDescriptor columnDescript
// Intersection (AND)
// - maxAge = :HBase_ttlSeconds
// - maxVersions = :HBase_minVersion
ageRule = intersection(ageRule, maxVersions(minVersions));
ageRule = GCRULES.intersection().rule(ageRule).rule(GCRULES.maxVersions(minVersions));
}
if (maxVersions == Integer.MAX_VALUE) {
return ageRule;
} else {
return union(ageRule, maxVersions(maxVersions));
return GCRULES.union().rule(ageRule).rule(GCRULES.maxVersions(maxVersions));
}
}

/**
 * Combines the given rules into a single {@link GcRule} whose {@code intersection}
 * field holds all of them (a cell is collected only when every rule matches).
 *
 * @param rules the rules to intersect.
 * @return a {@link GcRule} with an Intersection of {@code rules}.
 */
@VisibleForTesting
static GcRule intersection(GcRule... rules) {
  Intersection.Builder allOf = Intersection.newBuilder();
  allOf.addAllRules(Arrays.asList(rules));
  return GcRule.newBuilder().setIntersection(allOf.build()).build();
}

/**
 * Combines the given rules into a single {@link GcRule} whose {@code union}
 * field holds all of them (a cell is collected when any rule matches).
 *
 * @param rules the rules to union.
 * @return a {@link GcRule} with a Union of {@code rules}.
 */
@VisibleForTesting
static GcRule union(GcRule... rules) {
  Union.Builder anyOf = Union.newBuilder();
  anyOf.addAllRules(Arrays.asList(rules));
  return GcRule.newBuilder().setUnion(anyOf.build()).build();
}

/**
 * Converts a TTL expressed in whole seconds into a protobuf {@link Duration}.
 *
 * @param ttlSeconds the time-to-live in seconds.
 * @return a {@link Duration} with {@code seconds} set to {@code ttlSeconds}.
 */
private static Duration duration(int ttlSeconds) {
  Duration.Builder ttl = Duration.newBuilder();
  ttl.setSeconds(ttlSeconds);
  return ttl.build();
}

/**
 * Creates a {@link GcRule} that collects cells older than the given TTL.
 *
 * @param ttlSeconds the maximum cell age, in seconds.
 * @return a {@link GcRule} with {@code maxAge} set.
 */
@VisibleForTesting
static GcRule maxAge(int ttlSeconds) {
  Duration ttl = duration(ttlSeconds);
  return GcRule.newBuilder().setMaxAge(ttl).build();
}

/**
 * Creates a {@link GcRule} that keeps at most the given number of cell versions.
 *
 * @param maxVersions the maximum number of versions to retain.
 * @return a {@link GcRule} with {@code maxNumVersions} set.
 */
@VisibleForTesting
static GcRule maxVersions(int maxVersions) {
  GcRule.Builder rule = GcRule.newBuilder();
  rule.setMaxNumVersions(maxVersions);
  return rule.build();
}

/**
* <p>
* Parse a Bigtable {@link GcRule} that is in line with
Expand Down Expand Up @@ -338,9 +309,9 @@ public ColumnFamily adapt(HColumnDescriptor columnDescriptor) {
throwIfRequestingUnsupportedFeatures(columnDescriptor);

ColumnFamily.Builder resultBuilder = ColumnFamily.newBuilder();
GcRule gcRule = buildGarbageCollectionRule(columnDescriptor);
GCRule gcRule = buildGarbageCollectionRule(columnDescriptor);
if (gcRule != null) {
resultBuilder.setGcRule(gcRule);
resultBuilder.setGcRule(gcRule.toProto());
}
return resultBuilder.build();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,11 @@
*/
package com.google.cloud.bigtable.hbase.adapters.admin;

import static com.google.cloud.bigtable.hbase.adapters.admin.ColumnDescriptorAdapter.buildGarbageCollectionRule;

import com.google.api.core.InternalApi;
import com.google.bigtable.admin.v2.ColumnFamily;
import com.google.bigtable.admin.v2.CreateTableRequest;
import com.google.cloud.bigtable.admin.v2.models.CreateTableRequest;
import com.google.bigtable.admin.v2.Table;
import com.google.cloud.bigtable.grpc.BigtableInstanceName;

Expand All @@ -26,8 +29,6 @@
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;

import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

/**
Expand All @@ -36,42 +37,40 @@
* @author sduskis
* @version $Id: $Id
*/
@InternalApi
public class TableAdapter {
private static final ColumnDescriptorAdapter columnDescriptorAdapter =
ColumnDescriptorAdapter.INSTANCE;
protected final BigtableInstanceName bigtableInstanceName;



/**
* <p>adapt.</p>
*
* @param desc a {@link org.apache.hadoop.hbase.HTableDescriptor} object.
* @return a {@link com.google.bigtable.admin.v2.Table} object.
* This method adapts ColumnFamily to CreateTableRequest.
*
* @param desc a {@link HTableDescriptor} object.
* @param request a {@link CreateTableRequest}
*/
protected static Table adapt(HTableDescriptor desc) {
Map<String, ColumnFamily> columnFamilies = new HashMap<>();
for (HColumnDescriptor column : desc.getColumnFamilies()) {
String columnName = column.getNameAsString();
ColumnFamily columnFamily = columnDescriptorAdapter.adapt(column);
columnFamilies.put(columnName, columnFamily);
protected static void adapt(HTableDescriptor desc, CreateTableRequest request) {
if(request != null) {
for (HColumnDescriptor column : desc.getColumnFamilies()) {
String columnName = column.getNameAsString();
request.addFamily(columnName, buildGarbageCollectionRule(column));
}
}
return Table.newBuilder().putAllColumnFamilies(columnFamilies).build();
}

public static CreateTableRequest.Builder adapt(HTableDescriptor desc, byte[][] splitKeys) {
CreateTableRequest.Builder builder = CreateTableRequest.newBuilder();
builder.setTableId(desc.getTableName().getQualifierAsString());
builder.setTable(adapt(desc));
addSplitKeys(builder, splitKeys);
return builder;
public static CreateTableRequest adapt(HTableDescriptor desc, byte[][] splitKeys) {
CreateTableRequest request = CreateTableRequest.of(desc.getTableName().getNameAsString());
adapt(desc, request);
addSplitKeys(splitKeys, request);
return request;
}

public static void addSplitKeys(CreateTableRequest.Builder builder, byte[][] splitKeys) {
public static void addSplitKeys(byte[][] splitKeys, CreateTableRequest request) {
if (splitKeys != null) {
for (byte[] splitKey : splitKeys) {
builder.addInitialSplits(
CreateTableRequest.Split.newBuilder().setKey(ByteString.copyFrom(splitKey)).build());
request.addSplit(ByteString.copyFrom(splitKey));
}
}
}
Expand All @@ -89,8 +88,8 @@ public TableAdapter(BigtableInstanceName bigtableInstanceName) {
/**
* <p>adapt.</p>
*
* @param table a {@link com.google.bigtable.admin.v2.Table} object.
* @return a {@link org.apache.hadoop.hbase.HTableDescriptor} object.
* @param table a {@link Table} object.
* @return a {@link HTableDescriptor} object.
*/
public HTableDescriptor adapt(Table table) {
String tableId = bigtableInstanceName.toTableId(table.getName());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -354,13 +354,14 @@ public static byte[][] createSplitKeys(byte[] startKey, byte[] endKey, int numRe
/** {@inheritDoc} */
@Override
public void createTable(HTableDescriptor desc, byte[][] splitKeys) throws IOException {
createTable(desc.getTableName(), TableAdapter.adapt(desc, splitKeys));
createTable(desc.getTableName(), TableAdapter.adapt(desc, splitKeys)
.toProto(bigtableInstanceName.toAdminInstanceName()));
}

protected void createTable(TableName tableName, CreateTableRequest.Builder builder) throws IOException {
builder.setParent(bigtableInstanceName.toString());
//TODO(rahulkql):update methods to adapt to v2.models.CreateTableRequest
protected void createTable(TableName tableName, CreateTableRequest request) throws IOException {
try {
bigtableTableAdminClient.createTable(builder.build());
bigtableTableAdminClient.createTable(request);
} catch (Throwable throwable) {
throw convertToTableExistsException(tableName, throwable);
}
Expand All @@ -370,14 +371,15 @@ protected void createTable(TableName tableName, CreateTableRequest.Builder build
@Override
public void createTableAsync(final HTableDescriptor desc, byte[][] splitKeys) throws IOException {
LOG.warn("Creating the table synchronously");
CreateTableRequest.Builder builder = TableAdapter.adapt(desc, splitKeys);
createTableAsync(builder, desc.getTableName());
CreateTableRequest request = TableAdapter.adapt(desc, splitKeys)
.toProto(bigtableInstanceName.toAdminInstanceName());
createTableAsync(request, desc.getTableName());
}

protected ListenableFuture<Table> createTableAsync(CreateTableRequest.Builder builder,
//TODO(rahulkql):update methods to adapt to v2.models.CreateTableRequest
protected ListenableFuture<Table> createTableAsync(CreateTableRequest request,
final TableName tableName) throws IOException {
builder.setParent(bigtableInstanceName.toString());
ListenableFuture<Table> future = bigtableTableAdminClient.createTableAsync(builder.build());
ListenableFuture<Table> future = bigtableTableAdminClient.createTableAsync(request);
final SettableFuture<Table> settableFuture = SettableFuture.create();
Futures.addCallback(future, new FutureCallback<Table>() {
@Override public void onSuccess(@Nullable Table result) {
Expand Down
Loading

0 comments on commit a15fdc0

Please sign in to comment.