Filter out Hive information_schema and sys #3008

Merged (2 commits) on Mar 9, 2020

presto-hive/src/main/java/io/prestosql/plugin/hive/HiveMetadata.java
@@ -324,13 +324,18 @@ public SemiTransactionalHiveMetastore getMetastore()
     @Override
     public List<String> listSchemaNames(ConnectorSession session)
     {
-        return metastore.getAllDatabases();
+        return metastore.getAllDatabases().stream()
+                .filter(HiveMetadata::filterSchema)
+                .collect(toImmutableList());
     }
 
     @Override
     public HiveTableHandle getTableHandle(ConnectorSession session, SchemaTableName tableName)
     {
         requireNonNull(tableName, "tableName is null");
+        if (!filterSchema(tableName.getSchemaName())) {
+            return null;
+        }
         Optional<Table> table = metastore.getTable(new HiveIdentity(session), tableName.getSchemaName(), tableName.getTableName());
         if (!table.isPresent()) {
             return null;
@@ -646,6 +651,9 @@ public List<SchemaTableName> listTables(ConnectorSession session, Optional<String> schemaName)
     private List<String> listSchemas(ConnectorSession session, Optional<String> schemaName)
     {
         if (schemaName.isPresent()) {
+            if (!filterSchema(schemaName.get())) {
+                return ImmutableList.of();
+            }
             return ImmutableList.of(schemaName.get());
         }
         return listSchemaNames(session);
@@ -704,6 +712,9 @@ private List<SchemaTableName> listTables(ConnectorSession session, SchemaTablePrefix prefix)
             return listTables(session, prefix.getSchema());
         }
         SchemaTableName tableName = prefix.toSchemaTableName();
+        if (!filterSchema(tableName.getSchemaName())) {
+            return ImmutableList.of();
+        }
         try {
             if (!metastore.getTable(new HiveIdentity(session), tableName.getSchemaName(), tableName.getTableName()).isPresent()) {
                 return ImmutableList.of();
@@ -1718,6 +1729,9 @@ public List<SchemaTableName> listViews(ConnectorSession session, Optional<String> schemaName)
     @Override
     public Optional<ConnectorViewDefinition> getView(ConnectorSession session, SchemaTableName viewName)
     {
+        if (!filterSchema(viewName.getSchemaName())) {
+            return Optional.empty();
+        }
         return metastore.getTable(new HiveIdentity(session), viewName.getSchemaName(), viewName.getTableName())
                 .flatMap(view -> {
                     if (isPrestoView(view)) {
@@ -1748,6 +1762,21 @@ private boolean isHiveOrPrestoView(Table table)
         return table.getTableType().equals(TableType.VIRTUAL_VIEW.name());
     }
 
+    private static boolean filterSchema(String schemaName)
+    {
+        if ("information_schema".equals(schemaName)) {
+            // For things like listing columns of the information_schema.columns table, we need to explicitly filter out Hive's own information_schema.
+            // TODO https://github.com/prestosql/presto/issues/1559 this should be filtered out in the engine.
+            return false;
+        }
+        if ("sys".equals(schemaName)) {
+            // Hive 3's `sys` schema contains no objects we can handle, so there is no point in exposing it.
+            // Also, exposing it may require proper handling in access control.
+            return false;
+        }
+        return true;
+    }
+
     @Override
     public ConnectorTableHandle beginDelete(ConnectorSession session, ConnectorTableHandle tableHandle)
     {
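
Taken together, these changes make Hive's own information_schema and sys databases invisible to Presto: they no longer appear in schema listings, and tables or views addressed through them resolve to nothing. Below is a small standalone sketch (not code from this PR) that mirrors the filterSchema check and the stream-based filtering used in listSchemaNames. The class name and the example database list are hypothetical, and it uses the JDK's Collectors.toList() instead of Guava's toImmutableList() so it runs without extra dependencies.

import java.util.List;
import java.util.stream.Collectors;

public class FilterSchemaSketch
{
    // Same check as the new HiveMetadata.filterSchema: hide Hive's internal schemas.
    private static boolean filterSchema(String schemaName)
    {
        return !"information_schema".equals(schemaName) && !"sys".equals(schemaName);
    }

    public static void main(String[] args)
    {
        // Hypothetical result of metastore.getAllDatabases()
        List<String> allDatabases = List.of("default", "information_schema", "sys", "web_sales");

        // Same pattern as listSchemaNames: filter out internal schemas, then collect
        List<String> visible = allDatabases.stream()
                .filter(FilterSchemaSketch::filterSchema)
                .collect(Collectors.toList());

        System.out.println(visible); // prints [default, web_sales]
    }
}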

presto-product-tests/bin/product-tests-suite-2.sh (2 changes: 1 addition & 1 deletion)
@@ -11,7 +11,7 @@ presto-product-tests-launcher/bin/run-launcher test run \
 
 presto-product-tests-launcher/bin/run-launcher test run \
     --environment singlenode-kerberos-hdfs-no-impersonation \
-    -- -g hdfs_no_impersonation \
+    -- -g storage_formats,hdfs_no_impersonation \
     || suite_exit_code=1
 
 presto-product-tests-launcher/bin/run-launcher test run \