Commit 67368f6: Run code formatter

electrum committed Jun 3, 2020
1 parent 8704e5a
Showing 110 changed files with 484 additions and 458 deletions.
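
All of the hunks below are formatting-only changes. Judging from the diffs, the formatter enforces what looks like the Airlift/Airbase Java style: the opening brace of a type declaration on its own line, the throws clause on its own continuation line, the while of a do-while on the line after the closing brace, and eight-space continuation indents. A minimal before/after sketch of those conventions (a hypothetical class, not taken from this commit):

    import java.io.IOException;

    public class FormatterStyleExample
    {
        public enum Color
        {
            RED,
            GREEN
        }

        private boolean dirty = true;

        // Before the formatter run, this would have read:
        //   void render() throws IOException {
        //       do { dirty = false; } while (dirty);
        //   }
        void render()
                throws IOException
        {
            do {
                dirty = false;
            }
            while (dirty);
        }
    }
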
@@ -68,9 +68,9 @@ public static synchronized DistributedQueryRunner createAccumuloQueryRunner(Map<
             throws Exception
     {
         DistributedQueryRunner queryRunner = DistributedQueryRunner.builder(createSession())
-            .setNodeCount(4)
-            .setExtraProperties(extraProperties)
-            .build();
+                .setNodeCount(4)
+                .setExtraProperties(extraProperties)
+                .build();
 
         queryRunner.installPlugin(new TpchPlugin());
         queryRunner.createCatalog("tpch", "tpch");

@@ -31,7 +31,8 @@ public class DecimalConfig
     private int decimalDefaultScale;
     private RoundingMode decimalRoundingMode = UNNECESSARY;
 
-    public enum DecimalMapping {
+    public enum DecimalMapping
+    {
         STRICT,
         ALLOW_OVERFLOW,
         /**/;

@@ -352,9 +352,9 @@ private Optional<CassandraColumnHandle> buildColumnHandle(AbstractTableMetadata
     /**
      * Get the list of partitions matching the given filters on partition keys.
      *
-     * @param table          the table to get partitions from
+     * @param table the table to get partitions from
      * @param filterPrefixes the list of possible values for each partition key.
-     *                       Order of values should match {@link CassandraTable#getPartitionKeyColumns()}
+     * Order of values should match {@link CassandraTable#getPartitionKeyColumns()}
      * @return list of {@link CassandraPartition}
      */
     public List<CassandraPartition> getPartitions(CassandraTable table, List<Set<Object>> filterPrefixes)

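The Javadoc in the hunk above documents the filterPrefixes contract: one set of candidate values per partition key, in the order given by CassandraTable#getPartitionKeyColumns(). A hypothetical call site with invented key values; the schema provider and table handle would come from surrounding connector code not shown in this diff:

    import com.google.common.collect.ImmutableList;
    import com.google.common.collect.ImmutableSet;
    import java.util.List;
    import java.util.Set;

    public class PartitionFilterExample
    {
        public static void main(String[] args)
        {
            List<Set<Object>> filterPrefixes = ImmutableList.of(
                    ImmutableSet.<Object>of("2020-06-01", "2020-06-02"), // candidates for the first partition key
                    ImmutableSet.<Object>of(7)); // candidates for the second partition key

            // With a schema provider and table handle in scope, the call would be:
            //   List<CassandraPartition> partitions = schemaProvider.getPartitions(table, filterPrefixes);
            System.out.println(filterPrefixes);
        }
    }
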
@@ -33,7 +33,7 @@ protected String indexEndpoint(String index, String docId)
     protected String indexMapping(String properties)
     {
         return "{\"mappings\": " +
-            "  {\"doc\": " + properties + "}" +
-            "}";
+                "  {\"doc\": " + properties + "}" +
+                "}";
     }
 }

@@ -44,7 +44,7 @@
  * A set of functions to convert between geometries and encoded polylines.
  *
  * @see <a href="https://developers.google.com/maps/documentation/utilities/polylinealgorithm">
- *     https://developers.google.com/maps/documentation/utilities/polylinealgorithm</a> for a description of encoded polylines.
+ * https://developers.google.com/maps/documentation/utilities/polylinealgorithm</a> for a description of encoded polylines.
  */
 public final class EncodedPolylineFunctions
 {
@@ -75,7 +75,8 @@ private static OGCLineString decodePolyline(String polyline)
             bytes = polyline.charAt(index++) - 63 - 1;
             result += bytes << shift;
             shift += 5;
-        } while (bytes >= 0x1f);
+        }
+        while (bytes >= 0x1f);
         latitude += (result & 1) != 0 ? ~(result >> 1) : (result >> 1);
 
         result = 1;
@@ -84,7 +85,8 @@ private static OGCLineString decodePolyline(String polyline)
             bytes = polyline.charAt(index++) - 63 - 1;
             result += bytes << shift;
             shift += 5;
-        } while (bytes >= 0x1f);
+        }
+        while (bytes >= 0x1f);
         longitude += (result & 1) != 0 ? ~(result >> 1) : (result >> 1);
 
         if (isFirstPoint) {

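For context, the loops reformatted above implement the chunked varint decoding from the Google polyline algorithm linked in the Javadoc (the production code uses an equivalent offset-by-one variant, subtracting an extra 1 and seeding result with 1). A self-contained sketch of the canonical form; the class and method names are invented:

    public final class PolylineChunkExample
    {
        private PolylineChunkExample() {}

        // Each character carries 5 bits (ASCII value minus 63); a set 0x20 bit
        // means another chunk follows; the low bit of the assembled value is
        // the sign flag.
        static int decodeSignedValue(String polyline, int[] index)
        {
            int result = 0;
            int shift = 0;
            int chunk;
            do {
                chunk = polyline.charAt(index[0]++) - 63;
                result |= (chunk & 0x1f) << shift;
                shift += 5;
            }
            while (chunk >= 0x20);
            return (result & 1) != 0 ? ~(result >> 1) : (result >> 1);
        }

        public static void main(String[] args)
        {
            // "_p~iF" is the first value of the worked example in Google's
            // documentation; it decodes to 3850000, i.e. latitude 38.5 scaled by 1e5.
            System.out.println(decodeSignedValue("_p~iF", new int[] {0}));
        }
    }
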
@@ -107,7 +107,8 @@ public static void main(String[] args)
     }
 
     @Test
-    public static void verify() throws IOException
+    public static void verify()
+            throws IOException
     {
         BenchmarkData data = new BenchmarkData();
         data.setup();

@@ -91,7 +91,8 @@ public SheetsClient(SheetsConfig config, JsonCodec<Map<String, List<SheetsTable>
         long maxCacheSize = config.getSheetsDataMaxCacheSize();
 
         this.tableSheetMappingCache = newCacheBuilder(expiresAfterWriteMillis, maxCacheSize)
-                .build(new CacheLoader<String, Optional<String>>() {
+                .build(new CacheLoader<String, Optional<String>>()
+                {
                     @Override
                     public Optional<String> load(String tableName)
                     {

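The hunk above reformats a Guava loading cache built from an anonymous CacheLoader. A minimal standalone sketch of the same pattern; the expiry, size, and loader body here are invented, and SheetsClient's newCacheBuilder helper is not part of this diff:

    import com.google.common.cache.CacheBuilder;
    import com.google.common.cache.CacheLoader;
    import com.google.common.cache.LoadingCache;
    import java.util.Optional;
    import java.util.concurrent.TimeUnit;

    public class SheetsCacheExample
    {
        public static void main(String[] args)
                throws Exception
        {
            LoadingCache<String, Optional<String>> tableSheetMappingCache = CacheBuilder.newBuilder()
                    .expireAfterWrite(5, TimeUnit.MINUTES)
                    .maximumSize(1000)
                    .build(new CacheLoader<String, Optional<String>>()
                    {
                        @Override
                        public Optional<String> load(String tableName)
                        {
                            // A real loader would look up the sheet mapping here
                            return Optional.of(tableName + "!A1:Z1000");
                        }
                    });

            System.out.println(tableSheetMappingCache.get("orders")); // loads once, then caches
        }
    }
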
@@ -94,8 +94,8 @@ public Optional<ReaderRecordCursorWithProjections> createRecordCursor(
                     length,
                     schema,
                     projectedReaderColumns
-                        .map(ReaderProjections::getReaderColumns)
-                        .orElse(columns));
+                            .map(ReaderProjections::getReaderColumns)
+                            .orElse(columns));
 
             return new GenericHiveRecordCursor<>(
                     configuration,

@@ -56,7 +56,7 @@ private static void fillSupportedProjectedColumns(ConnectorExpression expression
     static boolean isPushDownSupported(ConnectorExpression expression)
     {
         return expression instanceof Variable ||
-            (expression instanceof FieldDereference && isPushDownSupported(((FieldDereference) expression).getTarget()));
+                (expression instanceof FieldDereference && isPushDownSupported(((FieldDereference) expression).getTarget()));
     }
 
     public static ProjectedColumnRepresentation createProjectedColumnRepresentation(ConnectorExpression expression)
@@ -130,9 +130,9 @@ public static Optional<String> find(Map<String, ColumnHandle> assignments, Proje
             HiveColumnHandle column = (HiveColumnHandle) entry.getValue();
             if (column.getBaseColumnName().equals(baseColumnName) &&
                     column.getHiveColumnProjectionInfo()
-                        .map(HiveColumnProjectionInfo::getDereferenceIndices)
-                        .orElse(ImmutableList.of())
-                        .equals(projectionIndices)) {
+                            .map(HiveColumnProjectionInfo::getDereferenceIndices)
+                            .orElse(ImmutableList.of())
+                            .equals(projectionIndices)) {
                 return Optional.of(entry.getKey());
             }
         }

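isPushDownSupported in the first hunk above accepts exactly a chain of field dereferences ending in a variable, e.g. col0.a.b. A self-contained illustration with stand-in types; the real Trino SPI classes also carry type information, so this is a simplified sketch:

    public class DereferenceChainExample
    {
        interface ConnectorExpression {}

        static final class Variable
                implements ConnectorExpression
        {
            final String name;

            Variable(String name)
            {
                this.name = name;
            }
        }

        static final class FieldDereference
                implements ConnectorExpression
        {
            final ConnectorExpression target;
            final int field;

            FieldDereference(ConnectorExpression target, int field)
            {
                this.target = target;
                this.field = field;
            }
        }

        // Same recursion as the hunk above: a Variable terminates the chain,
        // and a FieldDereference is accepted only if its target is accepted.
        static boolean isPushDownSupported(ConnectorExpression expression)
        {
            return expression instanceof Variable ||
                    (expression instanceof FieldDereference && isPushDownSupported(((FieldDereference) expression).target));
        }

        public static void main(String[] args)
        {
            // col0.a.b: nested dereferences ending in a variable -> supported
            ConnectorExpression chain = new FieldDereference(new FieldDereference(new Variable("col0"), 0), 1);
            System.out.println(isPushDownSupported(chain)); // prints true
        }
    }
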
@@ -45,7 +45,7 @@ Optional<ReaderPageSourceWithProjections> createPageSource(
      * A wrapper class for
      * - delegate reader page source and
      * - projection information for columns to be returned by the delegate
-     *
+     * <p>
      * Empty {@param projectedReaderColumns} indicates that the delegate page source reads the exact same columns provided to
      * it in {@link HivePageSourceFactory#createPageSource}
      */

@@ -47,7 +47,7 @@ Optional<ReaderRecordCursorWithProjections> createRecordCursor(
      * A wrapper class for
      * - delegate reader record cursor and
      * - projection information for columns to be returned by the delegate
-     *
+     * <p>
      * Empty {@param projectedReaderColumns} indicates that the delegate cursor reads the exact same columns provided to
      * it in {@link HiveRecordCursorProvider#createRecordCursor}
      */

@@ -339,8 +339,8 @@ public Optional<List<String>> getPartitionNames(HiveIdentity identity, String da
      * contained which the {@code parts} argument
      *
      * @param databaseName the name of the database
-     * @param tableName    the name of the table
-     * @param parts        list of values which returned partitions should contain
+     * @param tableName the name of the table
+     * @param parts list of values which returned partitions should contain
      * @return optionally, a list of strings where each entry is in the form of {key}={value}
      */
     @Override

@@ -33,7 +33,7 @@
  * Module for an Alluxio metastore implementation of the {@link HiveMetastore} interface.
  */
 public class AlluxioMetastoreModule
-    extends AbstractConfigurationAwareModule
+        extends AbstractConfigurationAwareModule
 {
     @Override
     protected void setup(Binder binder)

@@ -152,7 +152,7 @@ static Optional<HiveBucketProperty> fromProto(alluxio.grpc.table.layout.hive.Hiv
                 .map(ProtoUtils::fromProto)
                 .collect(toImmutableList());
         return Optional.of(new HiveBucketProperty(property.getBucketedByList(), HiveBucketing.BucketingVersion.BUCKETING_V1,
-            (int) property.getBucketCount(), sortedBy));
+                (int) property.getBucketCount(), sortedBy));
     }
 
     static StorageFormat fromProto(alluxio.grpc.table.layout.hive.StorageFormat format)
@@ -273,7 +273,7 @@ public static Partition fromProto(alluxio.grpc.table.layout.hive.PartitionInfo i
                 .setStorageFormat(fromProto(info.getStorage().getStorageFormat()))
                 .setLocation(info.getStorage().getLocation())
                 .setBucketProperty(info.getStorage().hasBucketProperty()
-                    ? fromProto(info.getStorage().getBucketProperty()) : Optional.empty())
+                        ? fromProto(info.getStorage().getBucketProperty()) : Optional.empty())
                 .setSerdeParameters(info.getStorage().getStorageFormat().getSerdelibParametersMap());
 
         return builder.build();

@@ -56,6 +56,7 @@ public CounterStat getTotalFailures()
 
     public interface ThrowingCallable<V, E extends Exception>
     {
-        V call() throws E;
+        V call()
+                throws E;
     }
 }

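The split above follows the throws-on-its-own-line convention. For context, a hypothetical usage of the ThrowingCallable shape, showing why carrying the exception type parameter is useful; the callAndRecord helper is invented (the real class presumably wraps call() with failure counting):

    import java.io.IOException;

    public class ThrowingCallableExample
    {
        public interface ThrowingCallable<V, E extends Exception>
        {
            V call()
                    throws E;
        }

        // The checked exception type is part of the signature, so callers
        // catch the specific type rather than a broad Exception.
        static <V, E extends Exception> V callAndRecord(ThrowingCallable<V, E> callable)
                throws E
        {
            return callable.call();
        }

        public static void main(String[] args)
        {
            try {
                callAndRecord(() -> {
                    throw new IOException("boom");
                });
            }
            catch (IOException e) { // E is inferred as IOException
                System.out.println("caught: " + e.getMessage());
            }
        }
    }
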
@@ -17,7 +17,8 @@
 
 import static java.util.Objects.requireNonNull;
 
-public enum PrestoS3StorageClass {
+public enum PrestoS3StorageClass
+{
     STANDARD(StorageClass.Standard),
     INTELLIGENT_TIERING(StorageClass.IntelligentTiering);
 

@@ -106,7 +106,7 @@ private Set<RoleGrant> getRoleGrantsByGrantees(Set<String> grantees, OptionalLon
         ImmutableSet.Builder<RoleGrant> roleGrants = ImmutableSet.builder();
         int count = 0;
         for (String grantee : grantees) {
-            for (PrincipalType type : new PrincipalType[]{USER, ROLE}) {
+            for (PrincipalType type : new PrincipalType[] {USER, ROLE}) {
                 if (limit.isPresent() && count >= limit.getAsLong()) {
                     return roleGrants.build();
                 }

@@ -678,7 +678,7 @@ public void testRCTextProjectedColumns(int rowCount)
                     // TODO: This is a bug in the RC text reader
                     // RC file does not support complex type as key of a map
                     return !testColumn.getName().equals("t_struct_null")
-                        && !testColumn.getName().equals("t_map_null_key_complex_key_value");
+                            && !testColumn.getName().equals("t_map_null_key_complex_key_value");
                 })
                 .collect(toImmutableList());
 
@@ -747,10 +747,10 @@ public void testRCBinaryProjectedColumns(int rowCount)
         List<TestColumn> readColumns = readeColumnsBuilder.addAll(partitionColumns).build();
 
         assertThatFileFormat(RCBINARY)
-            .withWriteColumns(writeColumns)
-            .withReadColumns(readColumns)
-            .withRowsCount(rowCount)
-            .isReadableByRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT));
+                .withWriteColumns(writeColumns)
+                .withReadColumns(readColumns)
+                .withRowsCount(rowCount)
+                .isReadableByRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT));
     }
 
     @Test(dataProvider = "rowCount")
@@ -827,7 +827,7 @@ private void testRecordPageSource(
             List<TestColumn> testReadColumns,
             ConnectorSession session,
            int rowCount)
-        throws Exception
+            throws Exception
     {
         Properties splitProperties = new Properties();
         splitProperties.setProperty(FILE_INPUT_FORMAT, storageFormat.getInputFormat());
@@ -887,12 +887,12 @@ private ConnectorPageSource createPageSourceFromCursorProvider(
         splitProperties.setProperty(
                 "columns",
                 splitPropertiesColumnNames.build().stream()
-                    .collect(Collectors.joining(",")));
+                        .collect(Collectors.joining(",")));
 
         splitProperties.setProperty(
                 "columns.types",
                 splitPropertiesColumnTypes.build().stream()
-                    .collect(Collectors.joining(",")));
+                        .collect(Collectors.joining(",")));
 
         List<HivePartitionKey> partitionKeys = testReadColumns.stream()
                 .filter(TestColumn::isPartitionKey)

@@ -753,11 +753,11 @@ public void testShowCreateSchema()
         assertUpdate(admin, "CREATE SCHEMA test_show_create_schema");
 
         String createSchemaSql = format("" +
-            "CREATE SCHEMA %s.test_show_create_schema\n" +
-            "AUTHORIZATION USER hive\n" +
-            "WITH \\(\n" +
-            "   location = '.*test_show_create_schema'\n" +
-            "\\)",
+                "CREATE SCHEMA %s.test_show_create_schema\n" +
+                "AUTHORIZATION USER hive\n" +
+                "WITH \\(\n" +
+                "   location = '.*test_show_create_schema'\n" +
+                "\\)",
                 getSession().getCatalog().get());
 
         String actualResult = getOnlyElement(computeActual(admin, "SHOW CREATE SCHEMA test_show_create_schema").getOnlyColumnAsSet()).toString();
@@ -768,11 +768,11 @@ public void testShowCreateSchema()
         assertUpdate(admin, "ALTER SCHEMA test_show_create_schema SET AUTHORIZATION ROLE test_show_create_schema_role");
 
         createSchemaSql = format("" +
-            "CREATE SCHEMA %s.test_show_create_schema\n" +
-            "AUTHORIZATION ROLE test_show_create_schema_role\n" +
-            "WITH \\(\n" +
-            "   location = '.*test_show_create_schema'\n" +
-            "\\)",
+                "CREATE SCHEMA %s.test_show_create_schema\n" +
+                "AUTHORIZATION ROLE test_show_create_schema_role\n" +
+                "WITH \\(\n" +
+                "   location = '.*test_show_create_schema'\n" +
+                "\\)",
                 getSession().getCatalog().get());
 
         actualResult = getOnlyElement(computeActual(admin, "SHOW CREATE SCHEMA test_show_create_schema").getOnlyColumnAsSet()).toString();
@@ -3046,11 +3046,11 @@ private void testRowsWithNulls(Session session, HiveStorageFormat format)
                 session,
                 format("SELECT col0, col1.f0, col2.f1.f1 FROM %s", tableName),
                 "SELECT * FROM \n" +
-                    " (SELECT 1, 2, 6) UNION\n" +
-                    " (SELECT 7, 8, NULL) UNION\n" +
-                    " (SELECT NULL, NULL, NULL) UNION\n" +
-                    " (SELECT 13, NULL, NULL) UNION\n" +
-                    " (SELECT 15, 16, 18)");
+                        " (SELECT 1, 2, 6) UNION\n" +
+                        " (SELECT 7, 8, NULL) UNION\n" +
+                        " (SELECT NULL, NULL, NULL) UNION\n" +
+                        " (SELECT 13, NULL, NULL) UNION\n" +
+                        " (SELECT 15, 16, 18)");
 
         assertQuery(session, format("SELECT col0 FROM %s WHERE col2.f1.f1 IS NOT NULL", tableName), "SELECT * FROM UNNEST(array[1, 15])");
 