Upgrading HDFS Repository Plugin to use HDFS 2.8.1 Client (elastic#25497)

Hadoop 2.7.x libraries fail when running on JDK 9 because the Java version string changed to a
single character ("9" instead of "1.8.0_x"). Hadoop 2.8 no longer has this problem, and it is
unclear whether the fix will be backported to the 2.7 branch. This commit upgrades the HDFS
repository plugin's Hadoop dependency to 2.8.1.
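
For background, the failure mode looks like the following minimal sketch. This is illustrative of the kind of java.version check that breaks, not the actual Hadoop 2.7.x source:

    public class JavaVersionCheck {
        public static void main(String[] args) {
            // JDK 8 reports java.version as e.g. "1.8.0_131"; JDK 9 reports "9".
            String version = System.getProperty("java.version");

            // Legacy pattern: take the first three characters ("1.8") and compare.
            // On JDK 9 the property can be the single character "9", so
            // substring(0, 3) throws StringIndexOutOfBoundsException.
            boolean atLeastJava7 = version.substring(0, 3).compareTo("1.7") >= 0;
            System.out.println("at least Java 7: " + atLeastJava7);
        }
    }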
jbaiera authored Jun 30, 2017
1 parent c70c440 commit 74f4a14
Showing 22 changed files with 136 additions and 57 deletions.
91 changes: 56 additions & 35 deletions plugins/repository-hdfs/build.gradle
@@ -32,7 +32,7 @@ esplugin {
apply plugin: 'elasticsearch.vagrantsupport'

versions << [
- 'hadoop2': '2.7.1'
+ 'hadoop2': '2.8.1'
]

configurations {
@@ -45,7 +45,8 @@ dependencies {
compile "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}"
compile "org.apache.hadoop:hadoop-auth:${versions.hadoop2}"
compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}"
- compile 'org.apache.htrace:htrace-core:3.1.0-incubating'
+ compile "org.apache.hadoop:hadoop-hdfs-client:${versions.hadoop2}"
+ compile 'org.apache.htrace:htrace-core4:4.0.1-incubating'
compile 'com.google.guava:guava:11.0.2'
compile 'com.google.protobuf:protobuf-java:2.5.0'
compile 'commons-logging:commons-logging:1.1.3'
@@ -210,12 +211,15 @@ if (secureFixtureSupported) {
thirdPartyAudit.excludes = [
// classes are missing, because we added hadoop jars one by one until tests pass.
'com.google.gson.stream.JsonReader',
'com.google.gson.stream.JsonWriter',
'com.jcraft.jsch.ChannelExec',
+ 'com.jcraft.jsch.ChannelSftp',
+ 'com.jcraft.jsch.ChannelSftp$LsEntry',
'com.jcraft.jsch.JSch',
'com.jcraft.jsch.Logger',
'com.jcraft.jsch.Session',
+ 'com.jcraft.jsch.SftpATTRS',
'com.sun.jersey.api.ParamException',
'com.sun.jersey.api.core.HttpContext',
'com.sun.jersey.core.spi.component.ComponentContext',
'com.sun.jersey.core.spi.component.ComponentScope',
@@ -239,6 +243,7 @@ thirdPartyAudit.excludes = [
'io.netty.channel.ChannelHandlerContext',
'io.netty.channel.ChannelInboundHandlerAdapter',
'io.netty.channel.ChannelInitializer',
+ 'io.netty.channel.ChannelOption',
'io.netty.channel.ChannelPipeline',
'io.netty.channel.EventLoopGroup',
'io.netty.channel.SimpleChannelInboundHandler',
Expand Down Expand Up @@ -267,7 +272,8 @@ thirdPartyAudit.excludes = [
'io.netty.handler.stream.ChunkedStream',
'io.netty.handler.stream.ChunkedWriteHandler',
'io.netty.util.concurrent.GlobalEventExecutor',
+ 'io.netty.util.ReferenceCountUtil',
'javax.ws.rs.core.Context',
'javax.ws.rs.core.MediaType',
'javax.ws.rs.core.MultivaluedMap',
'javax.ws.rs.core.Response$ResponseBuilder',
@@ -317,8 +323,7 @@ thirdPartyAudit.excludes = [
'org.apache.commons.digester.substitution.MultiVariableExpander',
'org.apache.commons.digester.substitution.VariableSubstitutor',
'org.apache.commons.digester.xmlrules.DigesterLoader',
- 'org.apache.commons.httpclient.util.URIUtil',
'org.apache.commons.jxpath.JXPathContext',
'org.apache.commons.jxpath.ri.JXPathContextReferenceImpl',
'org.apache.commons.jxpath.ri.QName',
'org.apache.commons.jxpath.ri.compiler.NodeNameTest',
@@ -368,7 +373,8 @@ thirdPartyAudit.excludes = [
'org.apache.curator.utils.EnsurePath',
'org.apache.curator.utils.PathUtils',
'org.apache.curator.utils.ThreadUtils',
'org.apache.curator.utils.ZKPaths',
+ 'org.apache.directory.shared.kerberos.components.EncryptionKey',
'org.apache.directory.server.kerberos.shared.keytab.Keytab',
'org.apache.directory.server.kerberos.shared.keytab.KeytabEntry',
'org.apache.http.NameValuePair',
@@ -402,33 +408,32 @@ thirdPartyAudit.excludes = [
'org.codehaus.jackson.JsonFactory',
'org.codehaus.jackson.JsonGenerator',
'org.codehaus.jackson.JsonGenerator$Feature',
- 'org.codehaus.jackson.JsonNode',
'org.codehaus.jackson.map.MappingJsonFactory',
'org.codehaus.jackson.map.ObjectMapper',
'org.codehaus.jackson.map.ObjectReader',
'org.codehaus.jackson.map.ObjectWriter',
'org.codehaus.jackson.node.ContainerNode',
'org.codehaus.jackson.type.TypeReference',
'org.codehaus.jackson.util.MinimalPrettyPrinter',
'org.fusesource.leveldbjni.JniDBFactory',
'org.iq80.leveldb.DB',
'org.iq80.leveldb.Options',
'org.iq80.leveldb.WriteBatch',
'org.mortbay.jetty.Connector',
'org.mortbay.jetty.Handler',
'org.mortbay.jetty.InclusiveByteRange',
'org.mortbay.jetty.MimeTypes',
'org.mortbay.jetty.NCSARequestLog',
'org.mortbay.jetty.RequestLog',
'org.mortbay.jetty.Server',
'org.mortbay.jetty.handler.ContextHandler$SContext',
'org.mortbay.jetty.handler.ContextHandler',
'org.mortbay.jetty.handler.ContextHandlerCollection',
'org.mortbay.jetty.handler.HandlerCollection',
'org.mortbay.jetty.handler.RequestLogHandler',
'org.mortbay.jetty.nio.SelectChannelConnector',
+ 'org.mortbay.jetty.security.SslSelectChannelConnector',
'org.mortbay.jetty.security.SslSocketConnector',
'org.mortbay.jetty.servlet.AbstractSessionManager',
'org.mortbay.jetty.servlet.Context',
'org.mortbay.jetty.servlet.DefaultServlet',
'org.mortbay.jetty.servlet.FilterHolder',
@@ -437,8 +442,7 @@ thirdPartyAudit.excludes = [
'org.mortbay.jetty.servlet.ServletHolder',
'org.mortbay.jetty.servlet.SessionHandler',
'org.mortbay.jetty.webapp.WebAppContext',
- 'org.mortbay.log.Log',
'org.mortbay.thread.QueuedThreadPool',
'org.mortbay.util.MultiException',
'org.mortbay.util.ajax.JSON$Convertible',
'org.mortbay.util.ajax.JSON$Output',
@@ -473,9 +477,26 @@ thirdPartyAudit.excludes = [
'org.apache.log4j.AppenderSkeleton',
'org.apache.log4j.AsyncAppender',
'org.apache.log4j.helpers.ISO8601DateFormat',
- 'org.apache.log4j.spi.ThrowableInformation'
+ 'org.apache.log4j.spi.ThrowableInformation',
+
+ // New optional dependencies in 2.8
+ 'com.nimbusds.jose.JWSObject$State',
+ 'com.nimbusds.jose.crypto.RSASSAVerifier',
+ 'com.nimbusds.jwt.ReadOnlyJWTClaimsSet',
+ 'com.nimbusds.jwt.SignedJWT',
+ 'com.squareup.okhttp.Call',
+ 'com.squareup.okhttp.MediaType',
+ 'com.squareup.okhttp.OkHttpClient',
+ 'com.squareup.okhttp.Request$Builder',
+ 'com.squareup.okhttp.RequestBody',
+ 'com.squareup.okhttp.Response',
+ 'com.squareup.okhttp.ResponseBody'
]

+ if (project.rootProject.ext.javaVersion == JavaVersion.VERSION_1_9) {
+   thirdPartyAudit.excludes.add('javax.xml.bind.annotation.adapters.HexBinaryAdapter')
+ }

// Gradle 2.13 bundles org.slf4j.impl.StaticLoggerBinder in its core.jar which leaks into the forbidden APIs ant task
// Gradle 2.14+ does not bundle this class anymore so we need to properly exclude it here.
if (GradleVersion.current() > GradleVersion.version("2.13")) {

This file was deleted.

@@ -0,0 +1 @@
+ 335a867cf42bf789919bfc3229ff26747124e8f1

This file was deleted.

@@ -0,0 +1 @@
+ 688ccccc0e0739d8737a93b0039a4a661e52084b

This file was deleted.

@@ -0,0 +1 @@
+ 4812f251f8100fd4722c3cec5d7353f71f69cda9

This file was deleted.

@@ -0,0 +1 @@
+ a4df18b79e4d0349ce4b58a52d314e7ae1d6be99

This file was deleted.

@@ -0,0 +1 @@
+ a378f4bc8e6cd779d779c9f512e0e31edd771633

@@ -0,0 +1 @@
+ 6b0100e4f58ecf7ce75817fce1ffdfbec947337a

This file was deleted.

@@ -0,0 +1 @@
+ f4ef727cb4675788ac66f48e217020acc1690960
@@ -203,8 +203,8 @@ of dependencies that are NOT Apache Licensed.
See the License for the specific language governing permissions and
limitations under the License.

- The HTrace Owl logo is from http://www.clker.com/clipart-13653.html. It is
- public domain.
+ Units, a string formatting go library, is Copyright (c) 2014 Alec Thomas
+ and MIT licensed: https://github.com/alecthomas/units/blob/master/COPYING

D3, a javascript library for manipulating data, used by htrace-hbase
is Copyright 2010-2014, Michael Bostock and BSD licensed:
@@ -239,4 +239,7 @@ https://github.com/moment/moment/blob/develop/LICENSE
CMP is an implementation of the MessagePack serialization format in
C. It is licensed under the MIT license:
https://github.com/camgunz/cmp/blob/master/LICENSE
- See ./htrace-c/src/util/cmp.c and ./htrace-c/src/util/cmp.h.
+
+ go-codec is an implementation of several serialization and deserialization
+ codecs in Go. It is licensed under the MIT license:
+ https://github.com/ugorji/go/blob/master/LICENSE
@@ -11,3 +11,6 @@ that are NOT Apache licensed (with pointers to their licensing)
Apache HTrace includes an Apache Thrift connector to Zipkin. Zipkin
is a distributed tracing system that is Apache 2.0 Licensed.
Copyright 2012 Twitter, Inc.

+ Our Owl logo we took from http://www.clker.com/clipart-13653.html.
+ It is public domain/free.
@@ -62,8 +62,6 @@ private static Void evilHadoopInit() {
Class.forName("org.apache.hadoop.util.StringUtils");
Class.forName("org.apache.hadoop.util.ShutdownHookManager");
Class.forName("org.apache.hadoop.conf.Configuration");
Class.forName("org.apache.hadoop.hdfs.protocol.HdfsConstants");
Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck");
} catch (ClassNotFoundException | IOException e) {
throw new RuntimeException(e);
} finally {
@@ -19,15 +19,6 @@

package org.elasticsearch.repositories.hdfs;

- import org.apache.hadoop.conf.Configuration;
- import org.apache.hadoop.fs.AbstractFileSystem;
- import org.apache.hadoop.fs.FileContext;
- import org.apache.hadoop.fs.UnsupportedFileSystemException;
- import org.elasticsearch.common.SuppressForbidden;
- import org.elasticsearch.common.blobstore.BlobStore;
- import org.elasticsearch.repositories.ESBlobStoreContainerTestCase;
-
- import javax.security.auth.Subject;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
@@ -38,7 +29,18 @@
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.Collections;
+ import javax.security.auth.Subject;
+
+ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
+ import org.apache.hadoop.conf.Configuration;
+ import org.apache.hadoop.fs.AbstractFileSystem;
+ import org.apache.hadoop.fs.FileContext;
+ import org.apache.hadoop.fs.UnsupportedFileSystemException;
+ import org.elasticsearch.common.SuppressForbidden;
+ import org.elasticsearch.common.blobstore.BlobStore;
+ import org.elasticsearch.repositories.ESBlobStoreContainerTestCase;
+
+ @ThreadLeakFilters(filters = {HdfsClientThreadLeakFilter.class})
public class HdfsBlobStoreContainerTests extends ESBlobStoreContainerTestCase {

@Override
@@ -0,0 +1,46 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.repositories.hdfs;

import com.carrotsearch.randomizedtesting.ThreadFilter;

/**
* In Hadoop 2.8.0, there is a thread that is started by the filesystem to clean up old execution stats.
* This thread ignores all interrupts, catching InterruptedException, logging it, and continuing on
* with its work. The thread is a daemon, so it thankfully does not stop the JVM from closing, and it
* is started only once in a class's static initialization. This currently breaks our testing as this
* thread leaks out of the client and is picked up by the test framework. This thread filter is meant
* to ignore the offending thread until a version of Hadoop is released that addresses the incorrect
* interrupt handling.
*
* @see <a href="https://issues.apache.org/jira/browse/HADOOP-12829">https://issues.apache.org/jira/browse/HADOOP-12829</a>
* @see "org.apache.hadoop.fs.FileSystem.Statistics.StatisticsDataReferenceCleaner"
* @see "org.apache.hadoop.fs.FileSystem.Statistics"
*/
public final class HdfsClientThreadLeakFilter implements ThreadFilter {

private static final String OFFENDING_THREAD_NAME =
"org.apache.hadoop.fs.FileSystem$Statistics$StatisticsDataReferenceCleaner";

@Override
public boolean reject(Thread t) {
return t.getName().equals(OFFENDING_THREAD_NAME);
}
}
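
The filter only takes effect when a test suite opts in, which the test diffs below do via randomizedtesting's @ThreadLeakFilters annotation. A minimal standalone sketch, with a hypothetical suite class:

    import com.carrotsearch.randomizedtesting.RandomizedRunner;
    import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
    import org.junit.Test;
    import org.junit.runner.RunWith;

    // Hypothetical suite: any thread for which the filter's reject() returns
    // true is ignored by the thread-leak detector when the suite finishes.
    @RunWith(RandomizedRunner.class)
    @ThreadLeakFilters(filters = {HdfsClientThreadLeakFilter.class})
    public class ExampleHdfsSuite {
        @Test
        public void smokeTest() {
            // exercise the HDFS client here; the stats-cleaner daemon thread
            // it spawns will no longer fail the suite
        }
    }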
@@ -23,6 +23,7 @@

import java.util.Collection;

+ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
@@ -34,6 +35,7 @@
import org.elasticsearch.snapshots.SnapshotState;
import org.elasticsearch.test.ESSingleNodeTestCase;

+ @ThreadLeakFilters(filters = {HdfsClientThreadLeakFilter.class})
public class HdfsTests extends ESSingleNodeTestCase {

@Override
2 changes: 1 addition & 1 deletion test/fixtures/hdfs-fixture/build.gradle
@@ -20,7 +20,7 @@
apply plugin: 'elasticsearch.build'

versions << [
- 'hadoop2': '2.7.1'
+ 'hadoop2': '2.8.1'
]

// we create MiniHdfsCluster with the hadoop artifact
5 changes: 4 additions & 1 deletion test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java
@@ -26,6 +26,7 @@
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
+ import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
@@ -49,7 +50,8 @@ public class MiniHDFS {

public static void main(String[] args) throws Exception {
if (args.length != 1 && args.length != 3) {
throw new IllegalArgumentException("MiniHDFS <baseDirectory> [<kerberosPrincipal> <kerberosKeytab>]");
throw new IllegalArgumentException("Expected: MiniHDFS <baseDirectory> [<kerberosPrincipal> <kerberosKeytab>], " +
"got: " + Arrays.toString(args));
}
boolean secure = args.length == 3;

@@ -83,6 +85,7 @@ public static void main(String[] args) throws Exception {
cfg.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, keytabFile);
cfg.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, keytabFile);
cfg.set(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, "true");
+ cfg.set(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, "true");
cfg.set(DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY, "true");
}

