Skip to content

Commit

Permalink
[Bug][Seatunnel-web][Hive] Filter system databases (#222)
Browse files Browse the repository at this point in the history
  • Loading branch information
arshadmohammad authored Oct 6, 2024
1 parent 2c09444 commit 1545c3b
Show file tree
Hide file tree
Showing 5 changed files with 13 additions and 63 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,27 +17,16 @@

package org.apache.seatunnel.datasource.plugin.hive;

import org.apache.seatunnel.api.configuration.util.OptionRule;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo;
import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum;

import com.google.common.collect.Sets;

import java.util.Set;

/**
 * Static configuration for the Hive datasource plugin: the plugin descriptor
 * registered with the datasource framework and the option rule describing the
 * parameters a Hive connection requires.
 *
 * <p>NOTE(review): this span was a diff rendering with interleaved old/new lines
 * (duplicate {@code .name}/{@code .icon} calls and a stale {@code PLUGIN_NAME}
 * constant); reconstructed here as the post-commit state, which sources the
 * plugin name from {@link HiveConstants}.
 */
public class HiveConfig {

    /** Descriptor (name, icon, version, type) advertised to the plugin registry. */
    public static final DataSourcePluginInfo HIVE_DATASOURCE_PLUGIN_INFO =
            DataSourcePluginInfo.builder()
                    .name(HiveConstants.PLUGIN_NAME)
                    .icon(HiveConstants.PLUGIN_NAME)
                    .version("1.0.0")
                    .type(DatasourcePluginTypeEnum.DATABASE.getCode())
                    .build();

    /** Only the metastore URI is mandatory to reach a Hive instance. */
    public static final OptionRule OPTION_RULE =
            OptionRule.builder().required(HiveOptionRule.METASTORE_URI).build();
}
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,7 @@
import java.util.Set;

public class HiveConstants {

public static final String PLUGIN_NAME = "Hive";
public static final Set<String> HIVE_SYSTEM_DATABASES =
Sets.newHashSet(
"information_schema", "mysql", "performance_schema", "sys", "test", "hivedb");
Sets.newHashSet("sys", "information_schema", "performance_schema", "mysql");
}
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,6 @@
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -64,7 +61,7 @@ public List<String> getTables(
@Override
public List<String> getDatabases(
@NonNull String pluginName, @NonNull Map<String, String> requestParams) {
try (HiveClient hiveClient = HiveClient.createInstance(requestParams); ) {
try (HiveClient hiveClient = HiveClient.createInstance(requestParams)) {
return hiveClient.getAllDatabases();
}
}
Expand Down Expand Up @@ -113,28 +110,4 @@ protected List<TableField> getTableFields(
return hiveClient.getFields(dbName, tableName);
}
}

private static boolean checkHostConnectable(String host, int port) {
try (Socket socket = new Socket()) {
socket.connect(new InetSocketAddress(host, port), 1000);
return true;
} catch (IOException e) {
return false;
}
}

private boolean isNotSystemDatabase(String pluginName, String dbName) {
// FIXME,filters system databases
return true;
}

private boolean convertToBoolean(Object value) {
if (value instanceof Boolean) {
return (Boolean) value;
}
if (value instanceof String) {
return value.equals("TRUE");
}
return false;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,4 @@ public static OptionRule metadataRule() {
// todo
return OptionRule.builder().build();
}

public static final Option<String> TABLE_NAME =
Options.key("table").stringType().noDefaultValue().withDescription("hive table");
}
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
import org.apache.seatunnel.common.utils.ExceptionUtils;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
import org.apache.seatunnel.datasource.plugin.api.model.TableField;
import org.apache.seatunnel.datasource.plugin.hive.HiveConstants;
import org.apache.seatunnel.datasource.plugin.hive.HiveOptionRule;

import org.apache.commons.collections.CollectionUtils;
Expand All @@ -40,6 +41,8 @@
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import static com.google.common.base.Preconditions.checkNotNull;

Expand Down Expand Up @@ -94,17 +97,6 @@ public static HiveClient createInstance(Map<String, String> reqParam) {
}
}

/*private static void authKerberos(
String kerberosKrb5ConfPath, String kerberosKeytabPath, String kerberosPrincipal)
throws IOException {
System.setProperty("java.security.krb5.conf", kerberosKrb5ConfPath);
Configuration configuration = new Configuration();
configuration.set("hadoop.security.authentication", "Kerberos");
configuration.setBoolean("hadoop.security.authorization", true);
UserGroupInformation.setConfiguration(configuration);
UserGroupInformation.loginUserFromKeytab(kerberosPrincipal, kerberosKeytabPath);
}*/

public static void doKerberosAuthentication(
Configuration configuration, String principal, String keytabPath) {
if (StringUtils.isBlank(principal) || StringUtils.isBlank(keytabPath)) {
Expand Down Expand Up @@ -142,17 +134,17 @@ public void close() {

/**
 * Lists all Hive databases from the metastore, excluding system databases
 * ({@code HiveConstants.HIVE_SYSTEM_DATABASES}) so users only see their own
 * schemas — the fix this commit introduces (#222).
 *
 * <p>NOTE(review): this span interleaved the old one-line return with the new
 * filtering logic; reconstructed as the post-commit state.
 *
 * @return database names with system schemas removed
 * @throws DataSourcePluginException if the metastore call fails for any reason
 */
public List<String> getAllDatabases() {
    try {
        List<String> allDatabases = hiveMetaStoreClient.getAllDatabases();
        Set<String> systemDatabases = HiveConstants.HIVE_SYSTEM_DATABASES;
        return allDatabases.stream()
                .filter(db -> !systemDatabases.contains(db))
                .collect(Collectors.toList());
    } catch (Exception e) {
        log.error(ExceptionUtils.getMessage(e));
        // Preserve the original cause so callers can diagnose metastore failures.
        throw new DataSourcePluginException("get database names failed", e);
    }
}

/** Lists every table in {@code dbName}: delegates with no name filter and no size limit. */
public List<String> getAllTables(String dbName) {
return getAllTables(dbName, null, null);
}

public List<String> getAllTables(String dbName, String filterName, Integer size) {
try {

Expand Down

0 comments on commit 1545c3b

Please sign in to comment.