diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle
index 509b8a0371b30..80c7011f9b70b 100644
--- a/plugins/repository-hdfs/build.gradle
+++ b/plugins/repository-hdfs/build.gradle
@@ -6,6 +6,7 @@
  * Side Public License, v 1.
  */
 
+import org.apache.tools.ant.filters.ReplaceTokens
 import org.apache.tools.ant.taskdefs.condition.Os
 import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
@@ -26,45 +27,43 @@ esplugin {
 }
 
 versions << [
-  'hadoop2': '2.8.5'
+  'hadoop': '3.3.1'
 ]
 
+final int minTestedHadoopVersion = 2;
+final int maxTestedHadoopVersion = 3;
+
 testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "hdfs"
 
-configurations {
-  hdfsFixture
+for (int hadoopVersion = minTestedHadoopVersion; hadoopVersion <= maxTestedHadoopVersion; hadoopVersion++) {
+  configurations.create("hdfs" + hadoopVersion + "Fixture")
 }
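For readers tracing the loop above: with minTestedHadoopVersion = 2 and maxTestedHadoopVersion = 3, the configurations.create(...) call is equivalent to declaring one fixture classpath per tested Hadoop major version. A minimal sketch of the expansion (illustrative only, not part of the patch):

  // what the loop boils down to for the current version range
  configurations {
    hdfs2Fixture   // classpath for the Hadoop 2 MiniHDFS fixture tasks
    hdfs3Fixture   // classpath for the Hadoop 3 MiniHDFS fixture tasks
  }

The fixture projects themselves are attached to these configurations by the dependencies.add(...) loop further down in this file.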
 
 dependencies {
-  api "org.apache.hadoop:hadoop-client:${versions.hadoop2}"
-  api project(path: 'hadoop-common', configuration: 'shadow')
+  api project(path: 'hadoop-client-api', configuration: 'shadow')
   if (isEclipse) {
     /*
      * Eclipse can't pick up the shadow dependency so we point it at *something*
      * so it can compile things.
      */
-    api project(path: 'hadoop-common')
+    api project(path: 'hadoop-client-api')
   }
-  api "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}"
-  api "org.apache.hadoop:hadoop-auth:${versions.hadoop2}"
-  api "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}"
-  api "org.apache.hadoop:hadoop-hdfs-client:${versions.hadoop2}"
+  runtimeOnly "org.apache.hadoop:hadoop-client-runtime:${versions.hadoop}"
+  implementation "org.apache.hadoop:hadoop-hdfs:${versions.hadoop}"
   api 'org.apache.htrace:htrace-core4:4.0.1-incubating'
-  runtimeOnly 'com.google.guava:guava:11.0.2'
+  runtimeOnly 'com.google.guava:guava:27.1-jre'
   api 'com.google.protobuf:protobuf-java:2.5.0'
   api 'commons-logging:commons-logging:1.1.3'
   api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}"
   api 'commons-cli:commons-cli:1.2'
   api "commons-codec:commons-codec:${versions.commonscodec}"
   api 'commons-collections:commons-collections:3.2.2'
-  api 'commons-configuration:commons-configuration:1.6'
+  api 'org.apache.commons:commons-configuration2:2.7'
   api 'commons-io:commons-io:2.4'
   api 'commons-lang:commons-lang:2.6'
   api 'javax.servlet:servlet-api:2.5'
   api "org.slf4j:slf4j-api:${versions.slf4j}"
   api "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}"
-
-  hdfsFixture project(':test:fixtures:hdfs-fixture')
 
   // Set the keytab files in the classpath so that we can access them from test code without the security manager
   // freaking out.
   if (isEclipse == false) {
@@ -73,6 +72,9 @@ dependencies {
     }
   }
 }
+for (int hadoopVersion = minTestedHadoopVersion; hadoopVersion <= maxTestedHadoopVersion; hadoopVersion++) {
+  dependencies.add("hdfs" + hadoopVersion + "Fixture", project(':test:fixtures:hdfs' + hadoopVersion + '-fixture'))
+}
 
 restResources {
   restApi {
@@ -95,6 +97,7 @@ tasks.named("dependencyLicenses").configure {
 
 tasks.named("integTest").configure {
   dependsOn(project.tasks.named("bundlePlugin"))
+  enabled = false
 }
 
 testClusters.matching { it.name == "integTest" }.configureEach {
@@ -105,82 +108,99 @@ String realm = "BUILD.ELASTIC.CO"
 String krb5conf = project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")
 
 // Create HDFS File System Testing Fixtures for HA/Secure combinations
-for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', 'secureHaHdfsFixture']) {
-  project.tasks.register(fixtureName, org.elasticsearch.gradle.internal.test.AntFixture) {
-    dependsOn project.configurations.hdfsFixture, project(':test:fixtures:krb5kdc-fixture').tasks.postProcessFixture
-    executable = "${BuildParams.runtimeJavaHome}/bin/java"
-    env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}"
-    maxWaitInSeconds 60
-    onlyIf { BuildParams.inFipsJvm == false }
-    waitCondition = { fixture, ant ->
-      // the hdfs.MiniHDFS fixture writes the ports file when
-      // it's ready, so we can just wait for the file to exist
-      return fixture.portsFile.exists()
-    }
-    final List miniHDFSArgs = []
+for (int hadoopVersion = minTestedHadoopVersion; hadoopVersion <= maxTestedHadoopVersion; hadoopVersion++) {
+  for (String fixtureName : ['hdfs' + hadoopVersion + 'Fixture', 'haHdfs' + hadoopVersion + 'Fixture',
+                             'secureHdfs' + hadoopVersion + 'Fixture', 'secureHaHdfs' + hadoopVersion + 'Fixture']) {
+    final int hadoopVer = hadoopVersion
+    project.tasks.register(fixtureName, org.elasticsearch.gradle.internal.test.AntFixture) {
+      executable = "${BuildParams.runtimeJavaHome}/bin/java"
+      dependsOn project.configurations.getByName("hdfs" + hadoopVer + "Fixture"), project(':test:fixtures:krb5kdc-fixture').tasks.postProcessFixture
+      env 'CLASSPATH', "${-> project.configurations.getByName("hdfs" + hadoopVer + "Fixture").asPath}"
+
+      maxWaitInSeconds 60
+      onlyIf { BuildParams.inFipsJvm == false }
+      waitCondition = { fixture, ant ->
+        // the hdfs.MiniHDFS fixture writes the ports file when
+        // it's ready, so we can just wait for the file to exist
+        return fixture.portsFile.exists()
+      }
+      final List miniHDFSArgs = []
 
-    // If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options
-    if (name.equals('secureHdfsFixture') || name.equals('secureHaHdfsFixture')) {
-      miniHDFSArgs.add("-Djava.security.krb5.conf=${project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")}");
-      if (BuildParams.runtimeJavaVersion == JavaVersion.VERSION_1_9) {
-        miniHDFSArgs.add('--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED')
+      // If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options
+      if (name.startsWith('secure')) {
+        if (BuildParams.runtimeJavaVersion.isJava9Compatible()) {
+          miniHDFSArgs.addAll(["--add-exports", "java.security.jgss/sun.security.krb5=ALL-UNNAMED"])
+          miniHDFSArgs.add('--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED')
+        }
+        miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5conf}")
+        miniHDFSArgs.add("-Dhdfs.config.port=" + getSecureNamenodePortForVersion(hadoopVer))
+      } else {
miniHDFSArgs.add("-Dhdfs.config.port=" + getNonSecureNamenodePortForVersion(hadoopVer)) + } + // If it's an HA fixture, set a nameservice to use in the JVM options + if (name.startsWith('haHdfs') || name.startsWith('secureHaHdfs')) { + miniHDFSArgs.add("-Dha-nameservice=ha-hdfs") } - onlyIf { BuildParams.runtimeJavaVersion < JavaVersion.VERSION_16 } - } - // If it's an HA fixture, set a nameservice to use in the JVM options - if (name.equals('haHdfsFixture') || name.equals('secureHaHdfsFixture')) { - miniHDFSArgs.add("-Dha-nameservice=ha-hdfs") - } - // Common options - miniHDFSArgs.add('hdfs.MiniHDFS') - miniHDFSArgs.add(baseDir) + // Common options + miniHDFSArgs.add('hdfs.MiniHDFS') + miniHDFSArgs.add(baseDir) - // If it's a secure fixture, then set the principal name and keytab locations to use for auth. - if (name.equals('secureHdfsFixture') || name.equals('secureHaHdfsFixture')) { - miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}") - miniHDFSArgs.add( - project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab") - ) - } + // If it's a secure fixture, then set the principal name and keytab locations to use for auth. + if (name.startsWith('secure')) { + miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}") + miniHDFSArgs.add( + project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab") + ) + } - args miniHDFSArgs.toArray() + args miniHDFSArgs.toArray() + } } } +def getSecureNamenodePortForVersion(hadoopVersion) { + return 10002 - (2 * hadoopVersion) +} + +def getNonSecureNamenodePortForVersion(hadoopVersion) { + return 10003 - (2 * hadoopVersion) +} + Set disabledIntegTestTaskNames = [] -for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) { - def testTask = tasks.register(integTestTaskName, RestIntegTestTask) { - description = "Runs rest tests against an elasticsearch cluster with HDFS." 
- dependsOn("bundlePlugin") +for (int hadoopVersion = minTestedHadoopVersion; hadoopVersion <= maxTestedHadoopVersion; hadoopVersion++) { + final int hadoopVer = hadoopVersion + for (String integTestTaskName : ['integTest' + hadoopVersion, 'integTestHa' + hadoopVersion, 'integTestSecure' + hadoopVersion, + 'integTestSecureHa' + hadoopVersion]) { + def testTask = tasks.register(integTestTaskName, RestIntegTestTask) { + description = "Runs rest tests against an elasticsearch cluster with HDFS" + hadoopVer + dependsOn("bundlePlugin") - if (disabledIntegTestTaskNames.contains(name)) { - enabled = false; - } + if (disabledIntegTestTaskNames.contains(name)) { + enabled = false; + } - if (name.contains("Secure")) { - onlyIf { BuildParams.runtimeJavaVersion < JavaVersion.VERSION_16 } - if (name.contains("Ha")) { - dependsOn "secureHaHdfsFixture" - } else { - dependsOn "secureHdfsFixture" + if (name.contains("Secure")) { + if (name.contains("Ha")) { + dependsOn "secureHaHdfs" + hadoopVer + "Fixture" + } else { + dependsOn "secureHdfs" + hadoopVer + "Fixture" + } } - } onlyIf { BuildParams.inFipsJvm == false } if (name.contains("Ha")) { Path portsFile - File portsFileDir = file("${workingDir}/hdfsFixture") + File portsFileDir = file("${workingDir}/hdfs" + hadoopVer + "Fixture") if (name.contains("Secure")) { portsFile = buildDir.toPath() .resolve("fixtures") - .resolve("secureHaHdfsFixture") + .resolve("secureHaHdfs" + hadoopVer + "Fixture") .resolve("ports") } else { portsFile = buildDir.toPath() .resolve("fixtures") - .resolve("haHdfsFixture") + .resolve("haHdfs" + hadoopVer + "Fixture") .resolve("ports") } nonInputProperties.systemProperty "test.hdfs-fixture.ports", file("$portsFileDir/ports") @@ -200,27 +220,46 @@ for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSec nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}" nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}" jvmArgs "-Djava.security.krb5.conf=${krb5conf}" - if (BuildParams.getRuntimeJavaVersion() >= JavaVersion.VERSION_1_9) { - jvmArgs "--add-exports=java.security.jgss/sun.security.krb5=ALL-UNNAMED" - } nonInputProperties.systemProperty( "test.krb5.keytab.hdfs", project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab") ) } } + } + + testClusters.matching { it.name == testTask.name }.configureEach { + plugin(bundlePlugin.archiveFile) + if (integTestTaskName.contains("Secure")) { + systemProperty "java.security.krb5.conf", krb5conf + extraConfigFile( + "repository-hdfs/krb5.keytab", + file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE + ) + } + } } - testClusters.matching { it.name == testTask.name}.configureEach { - plugin(bundlePlugin.archiveFile) - if (integTestTaskName.contains("Secure")) { - systemProperty "java.security.krb5.conf", krb5conf - extraConfigFile( - "repository-hdfs/krb5.keytab", - file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE - ) + def processHadoopTestResources = tasks.register("processHadoop" + hadoopVer + "TestResources", Copy) + processHadoopTestResources.configure { + Map expansions = [ + 'hdfs_port': getNonSecureNamenodePortForVersion(hadoopVer), + 'secure_hdfs_port': getSecureNamenodePortForVersion(hadoopVer), + ] + inputs.properties(expansions) + filter("tokens" : expansions.collectEntries {k, v -> [k, v + .toString()]} /* must 
+  def processHadoopTestResources = tasks.register("processHadoop" + hadoopVer + "TestResources", Copy)
+  processHadoopTestResources.configure {
+    Map expansions = [
+      'hdfs_port': getNonSecureNamenodePortForVersion(hadoopVer),
+      'secure_hdfs_port': getSecureNamenodePortForVersion(hadoopVer),
+    ]
+    inputs.properties(expansions)
+    filter("tokens" : expansions.collectEntries { k, v -> [k, v.toString()] } /* must be a map of strings */, ReplaceTokens.class)
+    it.into("build/resources/test/rest-api-spec/test")
+    it.into("hdfs_repository_" + hadoopVer) {
+      from "src/test/resources/rest-api-spec/test/hdfs_repository"
+    }
+    it.into("secure_hdfs_repository_" + hadoopVer) {
+      from "src/test/resources/rest-api-spec/test/secure_hdfs_repository"
+    }
+  }
+  tasks.named("processTestResources").configure {
+    dependsOn (processHadoopTestResources)
+  }
 }
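The Copy task above uses Ant's ReplaceTokens filter: every @token@ occurrence in the copied YAML is replaced with the matching entry from the tokens map, which is why the REST suites below switch from hard-coded ports to @hdfs_port@ and @secure_hdfs_port@. A standalone sketch of the same mechanism, with hypothetical paths and the Hadoop 3 ports plugged in:

  import org.apache.tools.ant.filters.ReplaceTokens

  tasks.register("expandPortTokens", Copy) {
    // source files contain e.g. uri: "hdfs://localhost:@hdfs_port@"
    from "src/test/resources/rest-api-spec/test/hdfs_repository"
    into "$buildDir/expanded-example"
    // token values must be strings; after filtering, the Hadoop 3 suite reads uri: "hdfs://localhost:9997"
    filter(ReplaceTokens, tokens: [hdfs_port: '9997', secure_hdfs_port: '9996'])
  }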
- tasks.named("integTestHa").configure { - setEnabled(false) + // The normal integration test runner will just test that the plugin loads + tasks.named("integTest" + hadoopVer).configure { + systemProperty 'tests.rest.suite', 'hdfs_repository_' + hadoopVer + '/10_basic' + } + // HA fixture is unsupported. Don't run them. + tasks.named("integTestHa" + hadoopVer).configure { + setEnabled(false) + } } -} -tasks.named("check").configure { - dependsOn("integTestSecure", "integTestSecureHa") -} + tasks.named("check").configure { + dependsOn("integTest" + hadoopVer, "integTestSecure" + hadoopVer, "integTestSecureHa" + hadoopVer) + } // Run just the secure hdfs rest test suite. -tasks.named("integTestSecure").configure { - systemProperty 'tests.rest.suite', 'secure_hdfs_repository' - // Ignore HA integration Tests. They are included below as part of integTestSecureHa test runner. - exclude('**/Ha*TestSuiteIT.class') -} + tasks.named("integTestSecure" + hadoopVer).configure { + systemProperty 'tests.rest.suite', 'secure_hdfs_repository_' + hadoopVer + // Ignore HA integration Tests. They are included below as part of integTestSecureHa test runner. + exclude('**/Ha*TestSuiteIT.class') + } // Only include the HA integration tests for the HA test task -tasks.named("integTestSecureHa").configure { - setIncludes(['**/Ha*TestSuiteIT.class']) + tasks.named("integTestSecureHa" + hadoopVer).configure { + setIncludes(['**/Ha*TestSuiteIT.class']) + } } tasks.named("thirdPartyAudit").configure { @@ -313,7 +355,82 @@ tasks.named("thirdPartyAudit").configure { // internal java api: sun.misc.Unsafe 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', - 'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm', - 'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot', + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'com.google.common.hash.Striped64', + 'com.google.common.hash.Striped64$1', + 'com.google.common.hash.Striped64$Cell', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'org.apache.hadoop.hdfs.server.datanode.checker.AbstractFuture$UnsafeAtomicHelper', + 'org.apache.hadoop.hdfs.server.datanode.checker.AbstractFuture$UnsafeAtomicHelper$1', + 'org.apache.hadoop.shaded.com.google.common.cache.Striped64', + 'org.apache.hadoop.shaded.com.google.common.cache.Striped64$1', + 'org.apache.hadoop.shaded.com.google.common.cache.Striped64$Cell', + 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'org.apache.hadoop.shaded.com.google.common.hash.Striped64', + 'org.apache.hadoop.shaded.com.google.common.hash.Striped64$1', + 'org.apache.hadoop.shaded.com.google.common.hash.Striped64$Cell', + 
+    'org.apache.hadoop.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
+    'org.apache.hadoop.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
+    'org.apache.hadoop.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper',
+    'org.apache.hadoop.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1',
+    'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe',
+    'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeBooleanField',
+    'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeByteField',
+    'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCachedField',
+    'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCharField',
+    'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCustomEncodedField',
+    'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeDoubleField',
+    'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeFloatField',
+    'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeIntField',
+    'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeLongField',
+    'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeObjectField',
+    'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeShortField',
+    'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64',
+    'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64$1',
+    'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64$Cell',
+    'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
+    'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1',
+    'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2',
+    'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3',
+    'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64',
+    'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64$1',
+    'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64$Cell',
+    'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
+    'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
+    'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper',
+    'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1',
+    'org.apache.hadoop.shaded.org.xbill.DNS.spi.DNSJavaNameServiceDescriptor',
+    'org.apache.hadoop.shaded.org.xerial.snappy.pure.PureJavaSnappy',
+    'org.apache.hadoop.shaded.org.xerial.snappy.pure.SnappyRawCompressor',
+    'org.apache.hadoop.shaded.org.xerial.snappy.pure.SnappyRawDecompressor',
+    'org.apache.hadoop.shaded.org.xerial.snappy.pure.UnsafeUtil',
+    'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64',
+    'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64$1',
+    'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64$Cell',
+    'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
+    'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1',
+    'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2',
+    'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3',
+    'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64',
+    'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$1',
+    'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$Cell',
+    'org.apache.hadoop.thirdparty.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
+    'org.apache.hadoop.thirdparty.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
+    'org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper',
+    'org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1',
+    'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil',
+    'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$1',
+    'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$JvmMemoryAccessor',
+    'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$MemoryAccessor'
   )
 }
diff --git a/plugins/repository-hdfs/hadoop-common/build.gradle b/plugins/repository-hdfs/hadoop-client-api/build.gradle
similarity index 69%
rename from plugins/repository-hdfs/hadoop-common/build.gradle
rename to plugins/repository-hdfs/hadoop-client-api/build.gradle
index ddff6ae503def..345f8605eac90 100644
--- a/plugins/repository-hdfs/hadoop-common/build.gradle
+++ b/plugins/repository-hdfs/hadoop-client-api/build.gradle
@@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.java'
 apply plugin: 'com.github.johnrengelman.shadow'
 
 dependencies {
-  implementation "org.apache.hadoop:hadoop-common:${project.parent.versions.hadoop2}"
+  implementation "org.apache.hadoop:hadoop-client-api:${project.parent.versions.hadoop}"
 }
 
 tasks.named('shadowJar').configure {
diff --git a/plugins/repository-hdfs/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java b/plugins/repository-hdfs/hadoop-client-api/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
similarity index 100%
rename from plugins/repository-hdfs/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
rename to plugins/repository-hdfs/hadoop-client-api/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
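The renamed hadoop-client-api subproject keeps the same shape as the old hadoop-common one: it shades a single Hadoop artifact, and the plugin consumes the result through the shadow configuration (the api project(path: 'hadoop-client-api', configuration: 'shadow') dependency earlier in this patch). A minimal sketch of that producer/consumer pairing (illustrative; the real shadowJar configuration is truncated in the hunk above):

  // producer subproject: shade the raw Hadoop jar
  apply plugin: 'elasticsearch.java'
  apply plugin: 'com.github.johnrengelman.shadow'
  dependencies {
    implementation "org.apache.hadoop:hadoop-client-api:3.3.1"
  }

  // consumer (the parent plugin's build script): compile against the shaded output
  dependencies {
    api project(path: 'hadoop-client-api', configuration: 'shadow')
  }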
diff --git a/plugins/repository-hdfs/licenses/commons-configuration-1.6.jar.sha1 b/plugins/repository-hdfs/licenses/commons-configuration-1.6.jar.sha1
deleted file mode 100644
index 44ad1f6d8da42..0000000000000
--- a/plugins/repository-hdfs/licenses/commons-configuration-1.6.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-32cadde23955d7681b0d94a2715846d20b425235
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/commons-configuration2-2.7.jar.sha1 b/plugins/repository-hdfs/licenses/commons-configuration2-2.7.jar.sha1
new file mode 100644
index 0000000000000..31e16840e2c4f
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/commons-configuration2-2.7.jar.sha1
@@ -0,0 +1 @@
+593326399e5fb5e1f986607f06f63c1250ab36b4
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/commons-configuration-LICENSE.txt b/plugins/repository-hdfs/licenses/commons-configuration2-LICENSE.txt
similarity index 100%
rename from plugins/repository-hdfs/licenses/commons-configuration-LICENSE.txt
rename to plugins/repository-hdfs/licenses/commons-configuration2-LICENSE.txt
diff --git a/plugins/repository-hdfs/licenses/commons-configuration-NOTICE.txt b/plugins/repository-hdfs/licenses/commons-configuration2-NOTICE.txt
similarity index 83%
rename from plugins/repository-hdfs/licenses/commons-configuration-NOTICE.txt
rename to plugins/repository-hdfs/licenses/commons-configuration2-NOTICE.txt
index 3d6dfaec547c6..25c286381506f 100644
--- a/plugins/repository-hdfs/licenses/commons-configuration-NOTICE.txt
+++ b/plugins/repository-hdfs/licenses/commons-configuration2-NOTICE.txt
@@ -1,4 +1,4 @@
-Apache Commons Configuration
+Apache Commons Configuration 2
 Copyright 2001-2015 The Apache Software Foundation
 
 This product includes software developed at
diff --git a/plugins/repository-hdfs/licenses/guava-11.0.2.jar.sha1 b/plugins/repository-hdfs/licenses/guava-11.0.2.jar.sha1
deleted file mode 100644
index 0588c7ee5eaf7..0000000000000
--- a/plugins/repository-hdfs/licenses/guava-11.0.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-35a3c69e19d72743cac83778aecbee68680f63eb
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/guava-27.1-jre.jar.sha1 b/plugins/repository-hdfs/licenses/guava-27.1-jre.jar.sha1
new file mode 100644
index 0000000000000..07bf9c19e42b2
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/guava-27.1-jre.jar.sha1
@@ -0,0 +1 @@
+e47b59c893079b87743cdcfb6f17ca95c08c592c
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/hadoop-annotations-2.8.5.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-annotations-2.8.5.jar.sha1
deleted file mode 100644
index a808179106498..0000000000000
--- a/plugins/repository-hdfs/licenses/hadoop-annotations-2.8.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3a301159daf9368b05364577c985498857b5c48f
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/hadoop-auth-2.8.5.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-auth-2.8.5.jar.sha1
deleted file mode 100644
index 2d9064f4965eb..0000000000000
--- a/plugins/repository-hdfs/licenses/hadoop-auth-2.8.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-63fe1f9d9ef6bdf2cb52dfeb28ed8faf78e4b85c
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/hadoop-client-2.8.5.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-client-2.8.5.jar.sha1
deleted file mode 100644
index 83d232c5a295d..0000000000000
--- a/plugins/repository-hdfs/licenses/hadoop-client-2.8.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-45e11f6004581e53959bc3d38c4d01dbeb5f4b22
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/hadoop-client-runtime-3.3.1.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-client-runtime-3.3.1.jar.sha1
new file mode 100644
index 0000000000000..feb37ecc90255
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/hadoop-client-runtime-3.3.1.jar.sha1
@@ -0,0 +1 @@
+f3a55d882328ee87a1054f99d62ba987fa9029a4
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/hadoop-hdfs-2.8.5.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-hdfs-2.8.5.jar.sha1
deleted file mode 100644
index 241a3ea92ac2b..0000000000000
--- a/plugins/repository-hdfs/licenses/hadoop-hdfs-2.8.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-36914392fd3e77d46e54c3bb092dfc32d3f4a32b
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/hadoop-hdfs-3.3.1.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-hdfs-3.3.1.jar.sha1
new file mode 100644
index 0000000000000..66c98cf7ec291
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/hadoop-hdfs-3.3.1.jar.sha1
@@ -0,0 +1 @@
+5da7f270cb6564e099e0d2d424285a24fca62bd2
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/hadoop-hdfs-client-2.8.5.jar.sha1 b/plugins/repository-hdfs/licenses/hadoop-hdfs-client-2.8.5.jar.sha1
deleted file mode 100644
index 2a05eca7320a8..0000000000000
--- a/plugins/repository-hdfs/licenses/hadoop-hdfs-client-2.8.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9d72fa62b01c32f1c0587d53c5005fc49f2bd11c
\ No newline at end of file
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_create.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_create.yml
index 0f942dfdc03d6..c992e42afaeb0 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_create.yml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_create.yml
@@ -10,7 +10,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:9999"
+            uri: "hdfs://localhost:@hdfs_port@"
             path: "test/repository_create"
 
   # Get repository
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_delete.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_delete.yml
index 34c770a8074c7..0abf71bf7df45 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_delete.yml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_delete.yml
@@ -10,7 +10,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:9999"
+            uri: "hdfs://localhost:@hdfs_port@"
             path: "foo/bar"
 
   # Get repository
@@ -39,7 +39,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:9999"
+            uri: "hdfs://localhost:@hdfs_port@"
             path: "foo/bar"
 
   # Get repository again
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_verify.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_verify.yml
index d1695b00d9dbc..ff7357e084577 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_verify.yml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository_verify.yml
@@ -9,7 +9,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:9999"
+            uri: "hdfs://localhost:@hdfs_port@"
             path: "test/repository_verify"
 
   # Verify repository
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yml
index 20019686d3db1..69cea05c525a5 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yml
@@ -11,7 +11,7 @@
         body:
           type: hdfs
          settings:
-            uri: "hdfs://localhost:9999"
+            uri: "hdfs://localhost:@hdfs_port@"
             path: "test/snapshot"
 
   # Create index
@@ -36,7 +36,7 @@
   - match: { snapshot.shards.failed : 0 }
 
   # Remove our snapshot
-  - do:
+  - do:
       snapshot.delete:
         repository: test_snapshot_repository
         snapshot: test_snapshot
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_get.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_get.yml
index f38f4783b195b..3a282c2c9942c 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_get.yml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_get.yml
@@ -11,7 +11,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:9999"
+            uri: "hdfs://localhost:@hdfs_port@"
             path: "test/snapshot_get"
 
   # Create index
@@ -59,7 +59,7 @@
   - match: { snapshots.0.snapshot : test_snapshot_get }
 
   # Remove our snapshot
-  - do:
+  - do:
      snapshot.delete:
         repository: test_snapshot_get_repository
         snapshot: test_snapshot_get
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_readonly.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_readonly.yml
index c2a37964e70a7..1c718d63183dd 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_readonly.yml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot_readonly.yml
@@ -11,7 +11,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:9999"
+            uri: "hdfs://localhost:@hdfs_port@"
             path: "/user/elasticsearch/existing/readonly-repository"
             readonly: true
 
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yml
index 1e9250ff88069..716857781b758 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yml
@@ -14,7 +14,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:9999"
+            uri: "hdfs://localhost:@hdfs_port@"
             path: "test/restore"
 
   # Create index
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/20_repository_create.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/20_repository_create.yml
index 46cc9e92479c5..117d9a74d87d9 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/20_repository_create.yml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/20_repository_create.yml
@@ -10,7 +10,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:9998"
+            uri: "hdfs://localhost:@secure_hdfs_port@"
             path: "/user/elasticsearch/test/repository_create"
             security:
               principal: "elasticsearch@BUILD.ELASTIC.CO"
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/20_repository_delete.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/20_repository_delete.yml
index 7bff67d3d01b0..26b73174b2205 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/20_repository_delete.yml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/20_repository_delete.yml
@@ -10,7 +10,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:9998"
+            uri: "hdfs://localhost:@secure_hdfs_port@"
             path: "/user/elasticsearch/foo/bar"
             security:
               principal: "elasticsearch@BUILD.ELASTIC.CO"
@@ -41,7 +41,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:9998"
+            uri: "hdfs://localhost:@secure_hdfs_port@"
             path: "/user/elasticsearch/foo/bar"
             security:
               principal: "elasticsearch@BUILD.ELASTIC.CO"
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/20_repository_verify.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/20_repository_verify.yml
index 4593c13caee23..187d43b7e88d6 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/20_repository_verify.yml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/20_repository_verify.yml
@@ -9,7 +9,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:9998"
+            uri: "hdfs://localhost:@secure_hdfs_port@"
             path: "/user/elasticsearch/test/repository_verify"
             security:
               principal: "elasticsearch@BUILD.ELASTIC.CO"
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot.yml
index e232782084e16..01b137279714a 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot.yml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot.yml
@@ -11,7 +11,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:9998"
+            uri: "hdfs://localhost:@secure_hdfs_port@"
             path: "/user/elasticsearch/test/snapshot"
             security:
               principal: "elasticsearch@BUILD.ELASTIC.CO"
@@ -38,7 +38,7 @@
   - match: { snapshot.shards.failed : 0 }
 
   # Remove our snapshot
-  - do:
+  - do:
       snapshot.delete:
         repository: test_snapshot_repository
         snapshot: test_snapshot
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot_get.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot_get.yml
index 20d988884113f..67f3faf14ed5e 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot_get.yml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot_get.yml
@@ -11,7 +11,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:9998"
+            uri: "hdfs://localhost:@secure_hdfs_port@"
             path: "/user/elasticsearch/test/snapshot_get"
             security:
               principal: "elasticsearch@BUILD.ELASTIC.CO"
@@ -61,7 +61,7 @@
   - match: { snapshots.0.snapshot : test_snapshot_get }
 
   # Remove our snapshot
-  - do:
+  - do:
       snapshot.delete:
         repository: test_snapshot_get_repository
         snapshot: test_snapshot_get
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot_readonly.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot_readonly.yml
index 8c4c0347a156a..e093424a6548c 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot_readonly.yml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot_readonly.yml
@@ -11,7 +11,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:9998"
+            uri: "hdfs://localhost:@secure_hdfs_port@"
             path: "/user/elasticsearch/existing/readonly-repository"
             security:
               principal: "elasticsearch@BUILD.ELASTIC.CO"
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/40_restore.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/40_restore.yml
index ce757236ced68..c7a298d72d6c9 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/40_restore.yml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/40_restore.yml
@@ -14,7 +14,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:9998"
+            uri: "hdfs://localhost:@secure_hdfs_port@"
             path: "/user/elasticsearch/test/restore"
             security:
               principal: "elasticsearch@BUILD.ELASTIC.CO"
diff --git a/settings.gradle b/settings.gradle
index e63d463b99b50..08e78ad75cb86 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -67,7 +67,8 @@ List projects = [
   'test:framework',
   'test:fixtures:azure-fixture',
   'test:fixtures:gcs-fixture',
-  'test:fixtures:hdfs-fixture',
+  'test:fixtures:hdfs2-fixture',
+  'test:fixtures:hdfs3-fixture',
   'test:fixtures:krb5kdc-fixture',
   'test:fixtures:minio-fixture',
   'test:fixtures:old-elasticsearch',
diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs2-fixture/build.gradle
similarity index 100%
rename from test/fixtures/hdfs-fixture/build.gradle
rename to test/fixtures/hdfs2-fixture/build.gradle
diff --git a/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java b/test/fixtures/hdfs2-fixture/src/main/java/hdfs/MiniHDFS.java
similarity index 100%
rename from test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java
rename to test/fixtures/hdfs2-fixture/src/main/java/hdfs/MiniHDFS.java
diff --git a/test/fixtures/hdfs-fixture/src/main/resources/readonly-repository.tar.gz b/test/fixtures/hdfs2-fixture/src/main/resources/readonly-repository.tar.gz
similarity index 100%
rename from test/fixtures/hdfs-fixture/src/main/resources/readonly-repository.tar.gz
rename to test/fixtures/hdfs2-fixture/src/main/resources/readonly-repository.tar.gz
diff --git a/test/fixtures/hdfs3-fixture/build.gradle b/test/fixtures/hdfs3-fixture/build.gradle
new file mode 100644
index 0000000000000..872ab2efd42ab
--- /dev/null
+++ b/test/fixtures/hdfs3-fixture/build.gradle
@@ -0,0 +1,13 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+apply plugin: 'elasticsearch.java'
+
+dependencies {
+  api "org.apache.hadoop:hadoop-minicluster:3.3.1"
+}
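The AntFixture tasks in the plugin build start this fixture by putting the hdfs3Fixture configuration on the classpath and running hdfs.MiniHDFS. A rough JavaExec equivalent of that contract, useful for reasoning about the arguments (task name and paths are hypothetical; the authoritative wiring is the AntFixture registration earlier in this patch):

  tasks.register("runHdfs3Fixture", JavaExec) {
    classpath = configurations.hdfs3Fixture           // hadoop-minicluster 3.3.1 and its transitive deps
    mainClass = 'hdfs.MiniHDFS'
    args "$buildDir/fixtures/hdfs3Fixture"            // base directory; the fixture writes 'pid' and 'ports' there
    systemProperty 'hdfs.config.port', '9997'         // getNonSecureNamenodePortForVersion(3)
    // a secure run would append kerberos principal and keytab args and set java.security.krb5.conf
  }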
diff --git a/test/fixtures/hdfs3-fixture/src/main/java/hdfs/MiniHDFS.java b/test/fixtures/hdfs3-fixture/src/main/java/hdfs/MiniHDFS.java
new file mode 100644
index 0000000000000..0a26f5d82ac17
--- /dev/null
+++ b/test/fixtures/hdfs3-fixture/src/main/java/hdfs/MiniHDFS.java
@@ -0,0 +1,176 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package hdfs;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclEntryType;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.MiniDFSNNTopology;
+import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import java.io.File;
+import java.lang.management.ManagementFactory;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * MiniHDFS test fixture. There is a CLI tool, but here we can
+ * easily properly setup logging, avoid parsing JSON, etc.
+ */
+public class MiniHDFS {
+
+    private static String PORT_FILE_NAME = "ports";
+    private static String PID_FILE_NAME = "pid";
+
+    public static void main(String[] args) throws Exception {
+        if (args.length != 1 && args.length != 3) {
+            throw new IllegalArgumentException(
+                "Expected: MiniHDFS <baseDirectory> [<kerberosPrincipal> <kerberosKeytab>], got: " + Arrays.toString(args)
+            );
+        }
+        boolean secure = args.length == 3;
+
+        // configure Paths
+        Path baseDir = Paths.get(args[0]);
+        // hadoop-home/, so logs will not complain
+        if (System.getenv("HADOOP_HOME") == null) {
+            Path hadoopHome = baseDir.resolve("hadoop-home");
+            Files.createDirectories(hadoopHome);
+            System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString());
+        }
+        // hdfs-data/, where any data is going
+        Path hdfsHome = baseDir.resolve("hdfs-data");
+
+        // configure cluster
+        Configuration cfg = new Configuration();
+        cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsHome.toAbsolutePath().toString());
+        // lower default permission: TODO: needed?
+        cfg.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_PERMISSION_KEY, "766");
+
+        // optionally configure security
+        if (secure) {
+            String kerberosPrincipal = args[1];
+            String keytabFile = args[2];
+
+            cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+            cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true");
+            cfg.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal);
+            cfg.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal);
+            cfg.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal);
+            cfg.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, keytabFile);
+            cfg.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, keytabFile);
+            cfg.set(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, "true");
+            cfg.set(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, "true");
+            cfg.set(DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY, "true");
+            cfg.set(DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_KEY, "true");
+            cfg.set(DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY, "AES/CTR/NoPadding");
+        }
+
+        UserGroupInformation.setConfiguration(cfg);
+
+        MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(cfg);
+        String explicitPort = System.getProperty("hdfs.config.port");
+        if (explicitPort != null) {
+            builder.nameNodePort(Integer.parseInt(explicitPort));
+        } else {
+            if (secure) {
+                builder.nameNodePort(9998);
+            } else {
+                builder.nameNodePort(9999);
+            }
+        }
+
+        // Configure HA mode
+        String haNameService = System.getProperty("ha-nameservice");
+        boolean haEnabled = haNameService != null;
+        if (haEnabled) {
+            MiniDFSNNTopology.NNConf nn1 = new MiniDFSNNTopology.NNConf("nn1").setIpcPort(0);
+            MiniDFSNNTopology.NNConf nn2 = new MiniDFSNNTopology.NNConf("nn2").setIpcPort(0);
+            MiniDFSNNTopology.NSConf nameservice = new MiniDFSNNTopology.NSConf(haNameService).addNN(nn1).addNN(nn2);
+            MiniDFSNNTopology namenodeTopology = new MiniDFSNNTopology().addNameservice(nameservice);
+            builder.nnTopology(namenodeTopology);
+        }
+
+        MiniDFSCluster dfs = builder.build();
+
+        // Configure contents of the filesystem
+        org.apache.hadoop.fs.Path esUserPath = new org.apache.hadoop.fs.Path("/user/elasticsearch");
+
+        FileSystem fs;
+        if (haEnabled) {
+            dfs.transitionToActive(0);
+            fs = HATestUtil.configureFailoverFs(dfs, cfg);
+        } else {
+            fs = dfs.getFileSystem();
+        }
+
+        try {
+            // Set the elasticsearch user directory up
+            fs.mkdirs(esUserPath);
+            if (UserGroupInformation.isSecurityEnabled()) {
+                List<AclEntry> acls = new ArrayList<>();
+                acls.add(new AclEntry.Builder().setType(AclEntryType.USER).setName("elasticsearch").setPermission(FsAction.ALL).build());
+                fs.modifyAclEntries(esUserPath, acls);
+            }
+
+            // Install a pre-existing repository into HDFS
+            String directoryName = "readonly-repository";
+            String archiveName = directoryName + ".tar.gz";
+            URL readOnlyRepositoryArchiveURL = MiniHDFS.class.getClassLoader().getResource(archiveName);
+            if (readOnlyRepositoryArchiveURL != null) {
+                Path tempDirectory = Files.createTempDirectory(MiniHDFS.class.getName());
+                File readOnlyRepositoryArchive = tempDirectory.resolve(archiveName).toFile();
+                FileUtils.copyURLToFile(readOnlyRepositoryArchiveURL, readOnlyRepositoryArchive);
+                FileUtil.unTar(readOnlyRepositoryArchive, tempDirectory.toFile());
+
+                fs.copyFromLocalFile(
+                    true,
+                    true,
+                    new org.apache.hadoop.fs.Path(tempDirectory.resolve(directoryName).toAbsolutePath().toUri()),
+                    esUserPath.suffix("/existing/" + directoryName)
+                );
+
+                FileUtils.deleteDirectory(tempDirectory.toFile());
+            }
+        } finally {
+            fs.close();
+        }
+
+        // write our PID file
+        Path tmp = Files.createTempFile(baseDir, null, null);
+        String pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0];
+        Files.write(tmp, pid.getBytes(StandardCharsets.UTF_8));
+        Files.move(tmp, baseDir.resolve(PID_FILE_NAME), StandardCopyOption.ATOMIC_MOVE);
+
+        // write our port file
+        String portFileContent = Integer.toString(dfs.getNameNodePort(0));
+        if (haEnabled) {
+            portFileContent = portFileContent + "\n" + Integer.toString(dfs.getNameNodePort(1));
+        }
+        tmp = Files.createTempFile(baseDir, null, null);
+        Files.write(tmp, portFileContent.getBytes(StandardCharsets.UTF_8));
+        Files.move(tmp, baseDir.resolve(PORT_FILE_NAME), StandardCopyOption.ATOMIC_MOVE);
+    }
+
+}
diff --git a/test/fixtures/hdfs3-fixture/src/main/resources/readonly-repository.tar.gz b/test/fixtures/hdfs3-fixture/src/main/resources/readonly-repository.tar.gz
new file mode 100644
index 0000000000000..2cdb6d77c07d0
Binary files /dev/null and b/test/fixtures/hdfs3-fixture/src/main/resources/readonly-repository.tar.gz differ
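The ports file written at the end of main(...) doubles as the fixture's readiness signal (the AntFixture waitCondition polls for it) and as service discovery: one NameNode port per line, with a second line only in HA mode. A sketch of how a consumer could parse it (hypothetical helper, assuming the layout above):

  // first line: active NameNode port; optional second line: the other HA NameNode's port
  List<Integer> namenodePorts(File portsFile) {
    return portsFile.readLines().findAll { !it.trim().isEmpty() }.collect { it.trim().toInteger() }
  }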
diff --git a/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle b/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle
index 26d74135c376e..760b1c249effb 100644
--- a/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle
+++ b/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle
@@ -21,7 +21,7 @@ apply plugin: 'elasticsearch.rest-test'
 apply plugin: 'elasticsearch.rest-resources'
 apply plugin: 'elasticsearch.internal-available-ports'
 
-final Project hdfsFixtureProject = project(':test:fixtures:hdfs-fixture')
+final Project hdfsFixtureProject = project(':test:fixtures:hdfs2-fixture')
 final Project krbFixtureProject = project(':test:fixtures:krb5kdc-fixture')
 final Project hdfsRepoPluginProject = project(':plugins:repository-hdfs')
 
@@ -81,7 +81,6 @@ for (String fixtureName : ['hdfsFixture', 'secureHdfsFixture']) {
 
     // If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options
     if (name.equals('secureHdfsFixture')) {
-      onlyIf { BuildParams.runtimeJavaVersion < JavaVersion.VERSION_16 }
       if (BuildParams.runtimeJavaVersion.isJava9Compatible()) {
         miniHDFSArgs.addAll(["--add-exports", "java.security.jgss/sun.security.krb5=ALL-UNNAMED"])
       }
@@ -125,7 +124,7 @@ tasks.register("integTestSecure", RestIntegTestTask) {
     "test.krb5.keytab.hdfs",
     project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
   )
-  onlyIf { BuildParams.inFipsJvm == false && BuildParams.runtimeJavaVersion < JavaVersion.VERSION_16}
+  onlyIf { BuildParams.inFipsJvm == false }
 }
 
 tasks.named("check").configure { dependsOn("integTestSecure") }
@@ -142,6 +141,9 @@ testClusters.configureEach {
 
 testClusters.matching { it.name == "integTestSecure" }.configureEach {
   systemProperty "java.security.krb5.conf", krb5conf
+  if (BuildParams.runtimeJavaVersion.isJava9Compatible()) {
+    jvmArgs "--add-exports", "java.security.jgss/sun.security.krb5=ALL-UNNAMED"
+  }
   extraConfigFile(
     "repository-hdfs/krb5.keytab",
     file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE
diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle
index f9670fb08c456..d2afdbbeaa032 100644
--- a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle
+++ b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle
@@ -22,7 +22,7 @@ apply plugin: 'elasticsearch.rest-test'
 apply plugin: 'elasticsearch.rest-resources'
 apply plugin: 'elasticsearch.internal-available-ports'
 
-final Project hdfsFixtureProject = project(':test:fixtures:hdfs-fixture')
+final Project hdfsFixtureProject = project(':test:fixtures:hdfs2-fixture')
 final Project krbFixtureProject = project(':test:fixtures:krb5kdc-fixture')
 final Project hdfsRepoPluginProject = project(':plugins:repository-hdfs')