
Commit dad6582
issue #8: fix getDirectPathBlobIdStream implementation
The previous implementation was incorrect. This change also includes new integration-test infrastructure to help resolve issues like this and others in the future.
nblair committed Aug 9, 2018
1 parent 28f41f7 commit dad6582
Showing 10 changed files with 273 additions and 37 deletions.
5 changes: 5 additions & 0 deletions README.md
@@ -42,6 +42,11 @@ Building
To build the project and generate the bundle use Maven:

mvn clean install

Integration Tests
-----------------

Integration tests can be found within `src/test/java` and have the class suffix `IT`. [Additional documentation is provided on how to configure and run them](src/test/resources/README.md).
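
These tests are bound to the Maven `verify` phase by the Failsafe plugin added in `pom.xml`, so once credentials are in place they run with:

    mvn verify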

Installing
----------
13 changes: 13 additions & 0 deletions pom.xml
@@ -91,6 +91,19 @@

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-failsafe-plugin</artifactId>
        <version>2.22.0</version>
        <executions>
          <execution>
            <goals>
              <goal>integration-test</goal>
              <goal>verify</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>org.codehaus.gmavenplus</groupId>
        <artifactId>gmavenplus-plugin</artifactId>
New file: src/main/java/org/sonatype/nexus/blobstore/gcloud/internal/AbstractGoogleClientFactory.java
@@ -0,0 +1,49 @@
package org.sonatype.nexus.blobstore.gcloud.internal;

import com.google.api.client.http.apache.ApacheHttpTransport;
import com.google.cloud.TransportOptions;
import com.google.cloud.http.HttpTransportOptions;
import com.google.cloud.storage.Storage;
import org.apache.http.client.HttpClient;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.CoreConnectionPNames;

/**
 * Abstract supertype for Factory classes that generate Google Clients (for Storage, Datastore, etc).
 */
public abstract class AbstractGoogleClientFactory
{
  /**
   * This method overrides the default {@link com.google.auth.http.HttpTransportFactory} with the Apache HTTP Client
   * backed implementation. In addition, it modifies the {@link HttpClient} used internally to use a
   * {@link PoolingClientConnectionManager}.
   *
   * Note: at time of writing, this method uses deprecated classes that have been replaced in HttpClient with
   * {@link HttpClientBuilder}. We cannot use {@link HttpClientBuilder} currently because of a problem with the
   * Google Cloud Storage library's {@link ApacheHttpTransport} constructor; the {@link HttpClient} instance
   * returned by {@link HttpClientBuilder#build()} throws an {@link UnsupportedOperationException} for
   * {@link HttpClient#getParams()}.
   *
   * @see PoolingHttpClientConnectionManager
   * @see HttpClientBuilder
   * @return customized {@link TransportOptions} to use for our {@link Storage} client instance
   */
  TransportOptions transportOptions() {
    // replicate default connection and protocol parameters used within {@link ApacheHttpTransport}
    PoolingClientConnectionManager connManager = new PoolingClientConnectionManager();
    connManager.setDefaultMaxPerRoute(20);
    connManager.setMaxTotal(200);
    BasicHttpParams params = new BasicHttpParams();
    params.setParameter(CoreConnectionPNames.STALE_CONNECTION_CHECK, false);
    params.setParameter(CoreConnectionPNames.SOCKET_BUFFER_SIZE, 8192);
    DefaultHttpClient client = new DefaultHttpClient(connManager, params);

    return HttpTransportOptions.newBuilder()
        .setHttpTransportFactory(() -> new ApacheHttpTransport(client))
        .build();
  }
}
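
For context, a minimal usage sketch (not part of this commit; the class name is hypothetical) of how a concrete factory can apply these transport options when building its Google client, mirroring the factory changes below:

    package org.sonatype.nexus.blobstore.gcloud.internal;

    import javax.inject.Named;

    import com.google.cloud.storage.Storage;
    import com.google.cloud.storage.StorageOptions;

    // Hypothetical subclass: reuses transportOptions() for a pooled Apache HTTP transport.
    @Named
    public class ExampleStorageFactory extends AbstractGoogleClientFactory
    {
      Storage create() {
        StorageOptions.Builder builder = StorageOptions.newBuilder()
            .setTransportOptions(transportOptions());  // customized TransportOptions from the supertype
        return builder.build().getService();           // Storage client backed by that transport
      }
    }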
src/main/java/org/sonatype/nexus/blobstore/gcloud/internal/DeletedBlobIndex.java
@@ -39,7 +39,8 @@ class DeletedBlobIndex

  private String deletedBlobsKeyKind;

-  DeletedBlobIndex(final GoogleCloudDatastoreFactory factory, final BlobStoreConfiguration blobStoreConfiguration) {
+  DeletedBlobIndex(final GoogleCloudDatastoreFactory factory, final BlobStoreConfiguration blobStoreConfiguration)
+      throws Exception {
    this.gcsDatastore = factory.create(blobStoreConfiguration);
    this.deletedBlobsKeyKind = "NXRM-DeletedBlobs-" + blobStoreConfiguration.getName();
    this.deletedBlobsKeyFactory = gcsDatastore.newKeyFactory().setKind(deletedBlobsKeyKind);
src/main/java/org/sonatype/nexus/blobstore/gcloud/internal/GoogleCloudBlobStore.java
@@ -52,6 +52,7 @@
import com.google.cloud.storage.Storage.BlobListOption;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Streams;
import com.google.common.hash.HashCode;
import org.joda.time.DateTime;
@@ -387,9 +388,27 @@ public Stream<BlobId> getDirectPathBlobIdStream(final String prefix) {
    return blobStream(subpath)
        .filter(blob -> blob.getName().endsWith(BLOB_ATTRIBUTE_SUFFIX) &&
            !basename(blob).startsWith(TEMPORARY_BLOB_ID_PREFIX))
-       .map(com.google.cloud.storage.Blob::getBlobId)
-       .map(blobId -> new BlobId(blobId.toString()));
+       .map(blob -> cloudBlobIdToDirectPathBlobId(blob.getBlobId()));
  }

  /**
   * Used by {@link #getDirectPathBlobIdStream(String)} to convert a Google Cloud BlobId to a Nexus {@link BlobId}.
   *
   * @see BlobIdLocationResolver
   */
  private BlobId cloudBlobIdToDirectPathBlobId(final com.google.cloud.storage.BlobId blobId) {
    final String blobName = blobId.getName();
    checkArgument(blobName.startsWith(CONTENT_PREFIX + "/" + DIRECT_PATH_ROOT + "/"),
        "Not direct path blob path: %s", blobName);
    checkArgument(blobName.endsWith(BLOB_ATTRIBUTE_SUFFIX), "Not blob attribute path: %s", blobName);
    String subpath = blobName.replace(format("%s/%s/", CONTENT_PREFIX, DIRECT_PATH_ROOT), "");
    String name = subpath.substring(0, subpath.length() - BLOB_ATTRIBUTE_SUFFIX.length());

    Map<String, String> headers = ImmutableMap.of(
        BLOB_NAME_HEADER, name,
        DIRECT_PATH_BLOB_HEADER, "true"
    );
    return blobIdLocationResolver.fromHeaders(headers);
  }

  Stream<com.google.cloud.storage.Blob> blobStream(final String path) {
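
To make the new mapping concrete, here is a small self-contained sketch (not part of this commit) of the name manipulation performed by `cloudBlobIdToDirectPathBlobId`; the constant values are inlined assumptions matching the integration test fixtures below (`CONTENT_PREFIX` = "content", `DIRECT_PATH_ROOT` = "directpath", `BLOB_ATTRIBUTE_SUFFIX` = ".properties"):

    // Sketch: derive the direct-path blob name from a GCS object name.
    public class DirectPathNameSketch {
      static String directPathName(String gcsObjectName) {
        String subpath = gcsObjectName.replace("content/directpath/", "");
        return subpath.substring(0, subpath.length() - ".properties".length());
      }

      public static void main(String[] args) {
        // Prints "health-check/repo1/report.properties"; that name, plus the
        // DIRECT_PATH_BLOB_HEADER = "true" header, is what blobIdLocationResolver.fromHeaders(...)
        // receives to build the Nexus direct-path BlobId.
        System.out.println(directPathName("content/directpath/health-check/repo1/report.properties.properties"));
      }
    }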
src/main/java/org/sonatype/nexus/blobstore/gcloud/internal/GoogleCloudDatastoreFactory.java
@@ -12,18 +12,33 @@
 */
package org.sonatype.nexus.blobstore.gcloud.internal;

import java.io.FileInputStream;
import java.io.FileNotFoundException;

import javax.inject.Named;

import org.sonatype.nexus.blobstore.api.BlobStoreConfiguration;

import com.google.auth.oauth2.ServiceAccountCredentials;
import com.google.cloud.datastore.Datastore;
import com.google.cloud.datastore.DatastoreOptions;
import org.apache.shiro.util.StringUtils;

import static org.sonatype.nexus.blobstore.gcloud.internal.GoogleCloudBlobStore.CONFIG_KEY;
import static org.sonatype.nexus.blobstore.gcloud.internal.GoogleCloudBlobStore.CREDENTIAL_FILE_KEY;

@Named
-public class GoogleCloudDatastoreFactory
+public class GoogleCloudDatastoreFactory extends AbstractGoogleClientFactory
{

-  Datastore create(final BlobStoreConfiguration configuration) {
-    return DatastoreOptions.getDefaultInstance().getService();
+  Datastore create(final BlobStoreConfiguration configuration) throws Exception {
    DatastoreOptions.Builder builder = DatastoreOptions.newBuilder().setTransportOptions(transportOptions());

    String credentialFile = configuration.attributes(CONFIG_KEY).get(CREDENTIAL_FILE_KEY, String.class);
    if (StringUtils.hasText(credentialFile)) {
      builder.setCredentials(ServiceAccountCredentials.fromStream(new FileInputStream(credentialFile)));
    }

    return builder.build().getService();
  }
}
src/main/java/org/sonatype/nexus/blobstore/gcloud/internal/GoogleCloudStorageFactory.java
@@ -37,7 +37,7 @@
import static org.sonatype.nexus.blobstore.gcloud.internal.GoogleCloudBlobStore.CREDENTIAL_FILE_KEY;

@Named
-public class GoogleCloudStorageFactory
+public class GoogleCloudStorageFactory extends AbstractGoogleClientFactory
{

  Storage create(final BlobStoreConfiguration configuration) throws Exception {
@@ -50,34 +50,4 @@ Storage create(final BlobStoreConfiguration configuration) throws Exception {

    return builder.build().getService();
  }

-  /**
-   * This method overrides the default {@link com.google.auth.http.HttpTransportFactory} with the Apache HTTP Client
-   * backed implementation. In addition, it modifies the {@link HttpClient} used internally to use a
-   * {@link PoolingClientConnectionManager}.
-   *
-   * Note: at time of writing, this method uses deprecated classes that have been replaced in HttpClient with
-   * {@link HttpClientBuilder}. We cannot use {@link HttpClientBuilder} currently because of a problem with the
-   * Google Cloud Storage library's {@link ApacheHttpTransport} constructor; the {@link HttpClient} instance
-   * returned by {@link HttpClientBuilder#build()} throws an {@link UnsupportedOperationException} for
-   * {@link HttpClient#getParams()}.
-   *
-   * @see PoolingHttpClientConnectionManager
-   * @see HttpClientBuilder
-   * @return customized {@link TransportOptions} to use for our {@link Storage} client instance
-   */
-  TransportOptions transportOptions() {
-    // replicate default connection and protocol parameters used within {@link ApacheHttpTransport}
-    PoolingClientConnectionManager connManager = new PoolingClientConnectionManager();
-    connManager.setDefaultMaxPerRoute(20);
-    connManager.setMaxTotal(200);
-    BasicHttpParams params = new BasicHttpParams();
-    params.setParameter(CoreConnectionPNames.STALE_CONNECTION_CHECK, false);
-    params.setParameter(CoreConnectionPNames.SOCKET_BUFFER_SIZE, 8192);
-    DefaultHttpClient client = new DefaultHttpClient(connManager, params);
-
-    return HttpTransportOptions.newBuilder()
-        .setHttpTransportFactory(() -> new ApacheHttpTransport(client))
-        .build();
-  }
}
New file: src/test/java/org/sonatype/nexus/blobstore/gcloud/internal/GoogleCloudBlobStoreIT.groovy
@@ -0,0 +1,123 @@
package org.sonatype.nexus.blobstore.gcloud.internal

import java.util.stream.Collectors
import java.util.stream.Stream

import org.sonatype.nexus.blobstore.BlobIdLocationResolver
import org.sonatype.nexus.blobstore.DefaultBlobIdLocationResolver
import org.sonatype.nexus.blobstore.PeriodicJobService
import org.sonatype.nexus.blobstore.PeriodicJobService.PeriodicJob
import org.sonatype.nexus.blobstore.api.BlobId
import org.sonatype.nexus.blobstore.api.BlobStoreConfiguration
import org.sonatype.nexus.common.node.NodeAccess

import com.google.cloud.storage.Blob.BlobSourceOption
import com.google.cloud.storage.BlobInfo
import com.google.cloud.storage.Storage
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import spock.lang.Specification

import static org.sonatype.nexus.blobstore.DirectPathLocationStrategy.DIRECT_PATH_PREFIX

class GoogleCloudBlobStoreIT
    extends Specification
{

  static final Logger log = LoggerFactory.getLogger(GoogleCloudBlobStoreIT.class)

  static final BlobStoreConfiguration config = new BlobStoreConfiguration()

  static String bucketName = "integration-test-${UUID.randomUUID().toString()}"

  PeriodicJobService periodicJobService = Mock({
    schedule(_, _) >> new PeriodicJob() {
      @Override
      void cancel() {
      }
    }
  })

  NodeAccess nodeAccess = Mock({
    getId() >> 'integration-test'
  })

  GoogleCloudStorageFactory storageFactory = new GoogleCloudStorageFactory()

  BlobIdLocationResolver blobIdLocationResolver = new DefaultBlobIdLocationResolver()

  GoogleCloudBlobStoreMetricsStore metricsStore

  GoogleCloudDatastoreFactory datastoreFactory = new GoogleCloudDatastoreFactory()

  GoogleCloudBlobStore blobStore

  def setup() {
    config.attributes = [
        'google cloud storage': [
            bucket: bucketName,
            credential_file: this.getClass().getResource('/gce-credentials.json').getFile()
        ]
    ]

    log.info("Integration test using bucket ${bucketName}")

    metricsStore = new GoogleCloudBlobStoreMetricsStore(periodicJobService, nodeAccess)
    // can't start metrics store until blobstore init is done (which creates the bucket)
    blobStore = new GoogleCloudBlobStore(storageFactory, blobIdLocationResolver, metricsStore, datastoreFactory)
    blobStore.init(config)

    blobStore.start()
    metricsStore.start()
  }

  def cleanup() {
    blobStore.stop()
  }

  def cleanupSpec() {
    Storage storage = new GoogleCloudStorageFactory().create(config)
    log.debug("Tests complete, deleting files from ${bucketName}")
    // must delete all the files within the bucket before we can delete the bucket
    storage.list(bucketName,
        Storage.BlobListOption.prefix("")).iterateAll()
        .forEach({ b -> b.delete(BlobSourceOption.generationMatch()) })
    storage.delete(bucketName)
    log.info("Integration test complete, bucket ${bucketName} deleted")
  }

  def "getDirectPathBlobIdStream returns empty stream for missing prefix"() {
    given:

    when:
    Stream<?> s = blobStore.getDirectPathBlobIdStream('notpresent')

    then:
    !s.iterator().hasNext()
  }

  def "getDirectPathBlobIdStream returns expected content"() {
    given:
    Storage storage = storageFactory.create(config)
    // mimic some RHC content, which is stored as directpath blobs
    // 4 files, but only 2 blobIds (a .bytes and a .properties blob for each blobId)
    createFile(storage, "content/directpath/health-check/repo1/report.properties.bytes")
    createFile(storage, "content/directpath/health-check/repo1/report.properties.properties")
    createFile(storage, "content/directpath/health-check/repo1/details/bootstrap.min.css.properties")
    createFile(storage, "content/directpath/health-check/repo1/details/bootstrap.min.css.bytes")

    when:
    Stream<BlobId> stream = blobStore.getDirectPathBlobIdStream('health-check/repo1')

    then:
    List<BlobId> results = stream.collect(Collectors.toList())
    results.size() == 2
    results.contains(new BlobId("${DIRECT_PATH_PREFIX}health-check/repo1/report.properties"))
    results.contains(new BlobId("${DIRECT_PATH_PREFIX}health-check/repo1/details/bootstrap.min.css"))
  }

  def createFile(Storage storage, String path) {
    storage.create(BlobInfo.newBuilder(bucketName, path).build(),
        "content".bytes)
  }
}
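
As a usage note (assuming the standard Failsafe `it.test` property; not part of this commit), a single integration test class can typically be selected with:

    mvn verify -Dit.test=GoogleCloudBlobStoreIT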
12 changes: 12 additions & 0 deletions src/test/resources/.gce-credentials-example.json
@@ -0,0 +1,12 @@
{
  "type": "service_account",
  "project_id": "some-project-id",
  "private_key_id": "012345...",
  "private_key": "-----BEGIN PRIVATE KEY-----...",
  "client_email": "user@project.iam.gserviceaccount.com",
  "client_id": "1234....",
  "auth_uri": "https://accounts.google.com/o/oauth2/auth",
  "token_uri": "https://accounts.google.com/o/oauth2/token",
  "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
  "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/user%40project.iam.gserviceaccount.com"
}
29 changes: 29 additions & 0 deletions src/test/resources/README.md
@@ -0,0 +1,29 @@
<!--
Sonatype Nexus (TM) Open Source Version
Copyright (c) 2017-present Sonatype, Inc.
All rights reserved. Includes the third-party code listed at http://links.sonatype.com/products/nexus/oss/attributions.
This program and the accompanying materials are made available under the terms of the Eclipse Public License Version 1.0,
which accompanies this distribution and is available at http://www.eclipse.org/legal/epl-v10.html.
Sonatype Nexus (TM) Professional Version is available from Sonatype, Inc. "Sonatype" and "Sonatype Nexus" are trademarks
of Sonatype, Inc. Apache Maven is a trademark of the Apache Software Foundation. M2eclipse is a trademark of the
Eclipse Foundation. All other trademarks are the property of their respective owners.
-->
Integration Tests for Nexus Repository Manager Google Cloud Storage Blobstore
====

The integration tests for Nexus Repository Manager Google Cloud Storage Blobstore interact with
Google Cloud Storage and require credentials to be provided.

The integration tests look in this folder (`src/test/resources`) for a file named `gce-credentials.json`,
which is ignored by git. The [root README](../../../README.md) for this project describes how to
create this file and what roles/permissions are required.

See the [example file](.gce-credentials-example.json) in this directory.

Once configured, run the tests by executing the following at the root of this project:

mvn verify
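
If credentials are not available, the integration tests can typically be skipped (via the standard Failsafe `skipITs` property) while still building the bundle:

    mvn clean install -DskipITs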
