Move repository-s3 fixture tests to QA test project (#29372)
This commit moves the repository-s3 fixture test added in #29296 to a
new `repository-s3/qa/amazon-s3` project. This new project allows the
REST integration tests to be executed using the real S3 service when
all the required environment variables are provided. When no env vars
are provided, the tests are executed against the fixture added in #29296.

The REST tests located in the `repository-s3` plugin project now only
verify that the plugin is correctly loaded.
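
As a rough illustration only (the test name and assertions below are a sketch of the usual ESClientYamlSuiteTestCase pattern, not the committed YAML file), such a plugin-loaded check typically queries the nodes info API and matches the plugin name:

    ---
    "Plugin repository-s3 is loaded":
      - do:
          cluster.state: {}
      # Get the id of the master node so its plugin list can be inspected
      - set: { master_node: master }
      - do:
          nodes.info: {}
      - match: { nodes.$master.plugins.0.name: repository-s3 }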

The REST tests have been adapted to allow a bucket name and a base
path to be specified as env vars. This makes it possible to run the tests
with different base paths (which could be anything, like a CI job name or a
branch name) without multiplying buckets.
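
In the new qa/amazon-s3 project these values flow into the YAML test resources through the `bucket` and `base_path` expansions filtered by `MavenFilteringHack` (see the qa/amazon-s3 `build.gradle` below). As a hedged sketch only — the repository name, the exact test body, and the assumption that the usual Maven-style `${...}` tokens are what gets substituted are illustrative, not the committed test files — registering a repository in such a test could look like:

      - do:
          snapshot.create_repository:
            repository: repository
            body:
              type: s3
              settings:
                bucket: ${bucket}
                client: integration_test
                base_path: ${base_path}

When the fixture is used, the bucket falls back to `bucket_test` and the base path to `integration_test`, as set in the build script below.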

Related to #29349
tlrx committed Apr 27, 2018
1 parent 0b10f75 commit c35b49c
Showing 13 changed files with 1,001 additions and 34 deletions.
9 changes: 7 additions & 2 deletions plugins/repository-s3/build.gradle
@@ -64,9 +64,14 @@ test {
   exclude '**/*CredentialsTests.class'
 }
 
+check {
+  // also execute the QA tests when testing the plugin
+  dependsOn 'qa:amazon-s3:check'
+}
+
 integTestCluster {
-  keystoreSetting 's3.client.default.access_key', 'myaccesskey'
-  keystoreSetting 's3.client.default.secret_key', 'mysecretkey'
+  keystoreSetting 's3.client.integration_test.access_key', "s3_integration_test_access_key"
+  keystoreSetting 's3.client.integration_test.secret_key', "s3_integration_test_secret_key"
 }
 
 thirdPartyAudit.excludes = [
83 changes: 83 additions & 0 deletions plugins/repository-s3/qa/amazon-s3/build.gradle
@@ -0,0 +1,83 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.test.AntFixture

apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'

dependencies {
  testCompile project(path: ':plugins:repository-s3', configuration: 'runtime')
}

integTestCluster {
  plugin ':plugins:repository-s3'
}

forbiddenApisTest {
  // we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage
  bundledSignatures -= 'jdk-non-portable'
  bundledSignatures += 'jdk-internal'
}

boolean useFixture = false

String s3AccessKey = System.getenv("amazon_s3_access_key")
String s3SecretKey = System.getenv("amazon_s3_secret_key")
String s3Bucket = System.getenv("amazon_s3_bucket")
String s3BasePath = System.getenv("amazon_s3_base_path")

if (!s3AccessKey && !s3SecretKey && !s3Bucket && !s3BasePath) {
  s3AccessKey = 's3_integration_test_access_key'
  s3SecretKey = 's3_integration_test_secret_key'
  s3Bucket = 'bucket_test'
  s3BasePath = 'integration_test'
  useFixture = true
}

/** A task to start the AmazonS3Fixture which emulates a S3 service **/
task s3Fixture(type: AntFixture) {
  dependsOn compileTestJava
  env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }"
  executable = new File(project.runtimeJavaHome, 'bin/java')
  args 'org.elasticsearch.repositories.s3.AmazonS3Fixture', baseDir, s3Bucket
}

Map<String, Object> expansions = [
  'bucket': s3Bucket,
  'base_path': s3BasePath
]
processTestResources {
  inputs.properties(expansions)
  MavenFilteringHack.filter(it, expansions)
}

integTestCluster {
  keystoreSetting 's3.client.integration_test.access_key', s3AccessKey
  keystoreSetting 's3.client.integration_test.secret_key', s3SecretKey

  if (useFixture) {
    dependsOn s3Fixture
    /* Use a closure on the string to delay evaluation until tests are executed */
    setting 's3.client.integration_test.endpoint', "http://${-> s3Fixture.addressAndPort}"
  } else {
    println "Using an external service to test the repository-s3 plugin"
  }
}
@@ -0,0 +1,137 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.repositories.s3;

import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpHandler;
import com.sun.net.httpserver.HttpServer;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.mocksocket.MockHttpServer;
import org.elasticsearch.repositories.s3.AmazonS3TestServer.Response;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.List;
import java.util.Map;

import static java.util.Collections.singleton;
import static java.util.Collections.singletonList;

/**
* {@link AmazonS3Fixture} is a fixture that emulates a S3 service.
* <p>
* It starts an asynchronous socket server that binds to a random local port. The server parses
* HTTP requests and uses a {@link AmazonS3TestServer} to handle them before returning
* them to the client as HTTP responses.
*/
public class AmazonS3Fixture {

    public static void main(String[] args) throws Exception {
        if (args == null || args.length != 2) {
            throw new IllegalArgumentException("AmazonS3Fixture <working directory> <bucket>");
        }

        final InetSocketAddress socketAddress = new InetSocketAddress(InetAddress.getLoopbackAddress(), 0);
        final HttpServer httpServer = MockHttpServer.createHttp(socketAddress, 0);

        try {
            final Path workingDirectory = workingDir(args[0]);
            // Writes the PID of the current Java process in a `pid` file located in the working directory
            writeFile(workingDirectory, "pid", ManagementFactory.getRuntimeMXBean().getName().split("@")[0]);

            final String addressAndPort = addressToString(httpServer.getAddress());
            // Writes the address and port of the http server in a `ports` file located in the working directory
            writeFile(workingDirectory, "ports", addressAndPort);

            // Emulates S3
            final String storageUrl = "http://" + addressAndPort;
            final AmazonS3TestServer storageTestServer = new AmazonS3TestServer(storageUrl);
            storageTestServer.createBucket(args[1]);

            httpServer.createContext("/", new ResponseHandler(storageTestServer));
            httpServer.start();

            // Wait to be killed
            Thread.sleep(Long.MAX_VALUE);

        } finally {
            httpServer.stop(0);
        }
    }

    @SuppressForbidden(reason = "Paths#get is fine - we don't have environment here")
    private static Path workingDir(final String dir) {
        return Paths.get(dir);
    }

    private static void writeFile(final Path dir, final String fileName, final String content) throws IOException {
        final Path tempPidFile = Files.createTempFile(dir, null, null);
        Files.write(tempPidFile, singleton(content));
        Files.move(tempPidFile, dir.resolve(fileName), StandardCopyOption.ATOMIC_MOVE);
    }

    private static String addressToString(final SocketAddress address) {
        final InetSocketAddress inetSocketAddress = (InetSocketAddress) address;
        if (inetSocketAddress.getAddress() instanceof Inet6Address) {
            return "[" + inetSocketAddress.getHostString() + "]:" + inetSocketAddress.getPort();
        } else {
            return inetSocketAddress.getHostString() + ":" + inetSocketAddress.getPort();
        }
    }

    static class ResponseHandler implements HttpHandler {

        private final AmazonS3TestServer storageServer;

        private ResponseHandler(final AmazonS3TestServer storageServer) {
            this.storageServer = storageServer;
        }

        @Override
        public void handle(HttpExchange exchange) throws IOException {
            String method = exchange.getRequestMethod();
            String path = storageServer.getEndpoint() + exchange.getRequestURI().getRawPath();
            String query = exchange.getRequestURI().getRawQuery();
            Map<String, List<String>> headers = exchange.getRequestHeaders();
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            Streams.copy(exchange.getRequestBody(), out);

            final Response storageResponse = storageServer.handle(method, path, query, headers, out.toByteArray());

            Map<String, List<String>> responseHeaders = exchange.getResponseHeaders();
            responseHeaders.put("Content-Type", singletonList(storageResponse.contentType));
            storageResponse.headers.forEach((k, v) -> responseHeaders.put(k, singletonList(v)));
            exchange.sendResponseHeaders(storageResponse.status.getStatus(), storageResponse.body.length);
            if (storageResponse.body.length > 0) {
                exchange.getResponseBody().write(storageResponse.body);
            }
            exchange.close();
        }
    }
}
@@ -0,0 +1,37 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.repositories.s3;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;

public class AmazonS3RepositoryClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public AmazonS3RepositoryClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws Exception {
        return ESClientYamlSuiteTestCase.createParameters();
    }
}