feat(SqlEdrStore): add configurable vault path #801

Merged · 3 commits · Oct 2, 2023
1 change: 1 addition & 0 deletions DEPENDENCIES
@@ -1,4 +1,4 @@
maven/mavencentral/com.apicatalog/carbon-did/0.0.2, Apache-2.0, approved, #9239

Check warning on line 1 in DEPENDENCIES (GitHub Actions / verify / verify-dependencies / Dash-Verify-Licenses): Restricted Dependencies found. Some dependencies are marked 'restricted', please review them.
maven/mavencentral/com.apicatalog/iron-verifiable-credentials/0.8.1, Apache-2.0, approved, #9234
maven/mavencentral/com.apicatalog/titanium-json-ld/1.0.0, Apache-2.0, approved, clearlydefined
maven/mavencentral/com.apicatalog/titanium-json-ld/1.3.1, Apache-2.0, approved, #8912
@@ -406,6 +406,7 @@
maven/mavencentral/org.testcontainers/junit-jupiter/1.19.0, MIT, approved, #10344
maven/mavencentral/org.testcontainers/postgresql/1.19.0, MIT, approved, #10350
maven/mavencentral/org.testcontainers/testcontainers/1.19.0, Apache-2.0 AND MIT, approved, #10347
maven/mavencentral/org.testcontainers/vault/1.19.0, MIT, approved, clearlydefined
maven/mavencentral/org.yaml/snakeyaml/2.0, Apache-2.0 AND (Apache-2.0 OR BSD-3-Clause OR EPL-1.0 OR GPL-2.0-or-later OR LGPL-2.1-or-later), approved, #7275
maven/mavencentral/org.yaml/snakeyaml/2.2, Apache-2.0 AND (Apache-2.0 OR BSD-3-Clause OR EPL-1.0 OR GPL-2.0-or-later OR LGPL-2.1-or-later), approved, #10232
maven/mavencentral/software.amazon.awssdk/annotations/2.20.146, Apache-2.0, approved, #8598
9 changes: 5 additions & 4 deletions edc-extensions/edr/edr-cache-sql/README.md
@@ -11,7 +11,7 @@ It will store these fields in the database:

It represents a single EDR negotiation done with the new Control Plane EDR APIs.

The EDR itself is stored in the participant vault with a prefixed key `edr__<edrId>`.
The EDR itself is stored in the participant vault with a prefixed key `edr--<edrId>`.

**_Note that the SQL statements (DDL) are specific to and only tested with PostgreSQL. Using it with other RDBMS may
work but might have unexpected side effects!_**
@@ -22,6 +22,7 @@ see [schema.sql](docs/schema.sql).

## 2. Configuration

| Key | Description | Mandatory | Default |
|:---------------------------------------|:----------------------------------|-----------|---------|
| edc.datasource.edr.name | Datasource used by this extension | | edr |
| Key | Description | Mandatory | Default |
|:------------------------|:--------------------------------------------------------------------------------------------------|-----------|---------|
| edc.datasource.edr.name | Datasource used by this extension | | edr |
| edc.edr.vault.path | Directory/path where to store EDRs in the vault, for vaults that support hierarchical structuring | | |
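
For illustration, here is a minimal sketch of how the resulting vault secret key is composed from `edc.edr.vault.path`, the fixed `edr--` prefix and the EDR id. The class name and the values below are assumptions used only for the example, not part of this extension:

```java
// Sketch under assumptions: mirrors vaultPath + VAULT_PREFIX + edr.getId()
// as used by SqlEndpointDataReferenceCache; names and values are illustrative.
public class VaultKeyExample {
    public static void main(String[] args) {
        String vaultPath = "tenant-a/edrs/";  // value of edc.edr.vault.path ("" by default)
        String vaultPrefix = "edr" + "--";    // fixed prefix used by the store
        String edrId = "7f1b3a7e-0001";       // illustrative EndpointDataReference id
        // prints: tenant-a/edrs/edr--7f1b3a7e-0001
        System.out.println(vaultPath + vaultPrefix + edrId);
    }
}
```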
@@ -53,23 +53,22 @@ public class SqlEndpointDataReferenceCache extends AbstractSqlStore implements E
public static final String SEPARATOR = "--";
public static final String VAULT_PREFIX = "edr" + SEPARATOR;
private final EdrStatements statements;
private final String vaultPath;
private final Clock clock;
private final Vault vault;

private final SqlLeaseContextBuilder leaseContext;

private final String leaseHolder;


public SqlEndpointDataReferenceCache(DataSourceRegistry dataSourceRegistry, String dataSourceName,
TransactionContext transactionContext, EdrStatements statements,
ObjectMapper objectMapper, Vault vault, Clock clock,
ObjectMapper objectMapper, Vault vault, String vaultPath, Clock clock,
QueryExecutor queryExecutor, String connectorId) {
super(dataSourceRegistry, dataSourceName, transactionContext, objectMapper, queryExecutor);
this.statements = statements;
this.vaultPath = vaultPath;
this.clock = clock;
this.vault = vault;
this.leaseHolder = connectorId;
leaseContext = SqlLeaseContextBuilder.with(transactionContext, connectorId, statements, clock, queryExecutor);
}

@@ -105,11 +104,6 @@ public SqlEndpointDataReferenceCache(DataSourceRegistry dataSourceRegistry, Stri
});
}

@Override
public void save(EndpointDataReferenceEntry entity) {
throw new UnsupportedOperationException("Please use save(EndpointDataReferenceEntry, EndpointDataReference) instead!");
}

@Override
public @NotNull List<EndpointDataReference> referencesForAsset(String assetId, String providerId) {
var querySpec = QuerySpec.Builder.newInstance();
@@ -148,7 +142,7 @@ public void save(EndpointDataReferenceEntry entry, EndpointDataReference edr) {
entry.getErrorDetail(),
entry.getCreatedAt(),
entry.getUpdatedAt());
vault.storeSecret(VAULT_PREFIX + edr.getId(), toJson(edr)).orElseThrow((failure) -> new EdcPersistenceException(failure.getFailureDetail()));
vault.storeSecret(vaultPath + VAULT_PREFIX + edr.getId(), toJson(edr)).orElseThrow((failure) -> new EdcPersistenceException(failure.getFailureDetail()));
} catch (Exception exception) {
throw new EdcPersistenceException(exception);
}
@@ -183,7 +177,7 @@ public StoreResult<EndpointDataReferenceEntry> deleteByTransferProcessId(String
leaseContext.withConnection(connection).acquireLease(id);
queryExecutor.execute(connection, statements.getDeleteByIdTemplate(), id);
leaseContext.withConnection(connection).breakLease(id);
vault.deleteSecret(VAULT_PREFIX + entryWrapper.getEdrId()).orElseThrow((failure) -> new EdcPersistenceException(failure.getFailureDetail()));
vault.deleteSecret(vaultPath + VAULT_PREFIX + entryWrapper.getEdrId()).orElseThrow((failure) -> new EdcPersistenceException(failure.getFailureDetail()));
return StoreResult.success(entryWrapper.getEntry());
} else {
return StoreResult.notFound(format("EDR with id %s not found", id));
@@ -227,6 +221,11 @@ public StoreResult<EndpointDataReferenceEntry> deleteByTransferProcessId(String
});
}

@Override
public void save(EndpointDataReferenceEntry entity) {
throw new UnsupportedOperationException("Please use save(EndpointDataReferenceEntry, EndpointDataReference) instead!");
}

private <T> T findById(Connection connection, String id, ResultSetMapper<T> resultSetMapper) {
var sql = statements.getFindByTransferProcessIdTemplate();
return queryExecutor.single(connection, false, resultSetMapper, sql, id);
@@ -274,7 +273,7 @@ private EndpointDataReferenceEntryWrapper mapToWrapper(ResultSet resultSet) thro
}

private EndpointDataReference referenceFromEntry(String edrId) {
var edr = vault.resolveSecret(VAULT_PREFIX + edrId);
var edr = vault.resolveSecret(vaultPath + VAULT_PREFIX + edrId);
if (edr != null) {
return fromJson(edr, EndpointDataReference.class);
}
@@ -38,6 +38,9 @@ public class SqlEndpointDataReferenceCacheExtension implements ServiceExtension

@Setting(required = true, defaultValue = SqlEndpointDataReferenceCacheExtension.DEFAULT_DATASOURCE_NAME)
public static final String DATASOURCE_SETTING_NAME = "edc.datasource.edr.name";

@Setting(value = "Directory/path where to store EDRs in the vault, for vaults that support hierarchical structuring.", required = false, defaultValue = "")
public static final String EDC_EDR_VAULT_PATH = "edc.edr.vault.path";
public static final String DEFAULT_DATASOURCE_NAME = "edr";
@Inject
private DataSourceRegistry dataSourceRegistry;
@@ -63,7 +66,8 @@ public String name() {
@Provider
public EndpointDataReferenceCache edrCache(ServiceExtensionContext context) {
var dataSourceName = context.getConfig().getString(DATASOURCE_SETTING_NAME, DEFAULT_DATASOURCE_NAME);
return new SqlEndpointDataReferenceCache(dataSourceRegistry, dataSourceName, transactionContext, getStatementImpl(), typeManager.getMapper(), vault, clock, queryExecutor, context.getConnectorId());
var vaultDirectory = context.getConfig().getString(EDC_EDR_VAULT_PATH, "");
return new SqlEndpointDataReferenceCache(dataSourceRegistry, dataSourceName, transactionContext, getStatementImpl(), typeManager.getMapper(), vault, vaultDirectory, clock, queryExecutor, context.getConnectorId());
}

private EdrStatements getStatementImpl() {
@@ -70,7 +70,7 @@ void setUp(PostgresqlStoreSetupExtension extension, QueryExecutor queryExecutor)
when(vault.storeSecret(any(), any())).thenReturn(Result.success());
when(vault.resolveSecret(any())).then(a -> edrJson(a.getArgument(0)));

cache = new SqlEndpointDataReferenceCache(extension.getDataSourceRegistry(), extension.getDatasourceName(), extension.getTransactionContext(), statements, typeManager.getMapper(), vault, clock, queryExecutor, CONNECTOR_NAME);
cache = new SqlEndpointDataReferenceCache(extension.getDataSourceRegistry(), extension.getDatasourceName(), extension.getTransactionContext(), statements, typeManager.getMapper(), vault, "", clock, queryExecutor, CONNECTOR_NAME);
var schema = Files.readString(Paths.get("./docs/schema.sql"));
extension.runQuery(schema);
leaseUtil = new LeaseUtil(extension.getTransactionContext(), extension::getConnection, statements, clock);
@@ -96,6 +96,24 @@ void verify_unoffensive_secretKey() {
verify(vault).storeSecret(argThat(s -> s.startsWith("edr--")), anyString());
}

@Test
void verify_custom_vaultPath(PostgresqlStoreSetupExtension extension, QueryExecutor queryExecutor) {

var path = "testPath/";
cache = new SqlEndpointDataReferenceCache(extension.getDataSourceRegistry(), extension.getDatasourceName(), extension.getTransactionContext(), statements, typeManager.getMapper(), vault, path, clock, queryExecutor, CONNECTOR_NAME);

var tpId = "tp1";
var assetId = "asset1";
var edrId = "edr1";

var edr = edr(edrId);
var entry = edrEntry(assetId, randomUUID().toString(), tpId);

cache.save(entry, edr);

verify(vault).storeSecret(argThat(s -> s.startsWith(path + "edr--")), anyString());
}

@Override
protected EndpointDataReferenceCache getStore() {
return cache;
@@ -70,7 +70,7 @@ void setUp(PostgresqlStoreSetupExtension extension, QueryExecutor queryExecutor)
when(vault.deleteSecret(any())).thenReturn(Result.success());
when(vault.storeSecret(any(), any())).thenReturn(Result.success());

cache = new SqlEndpointDataReferenceCache(extension.getDataSourceRegistry(), extension.getDatasourceName(), extension.getTransactionContext(), statements, typeManager.getMapper(), vault, clock, queryExecutor, CONNECTOR_NAME);
cache = new SqlEndpointDataReferenceCache(extension.getDataSourceRegistry(), extension.getDatasourceName(), extension.getTransactionContext(), statements, typeManager.getMapper(), vault, "", clock, queryExecutor, CONNECTOR_NAME);
var schema = Files.readString(Paths.get("./docs/schema.sql"));
extension.runQuery(schema);

1 change: 1 addition & 0 deletions edc-tests/e2e-tests/build.gradle.kts
@@ -48,6 +48,7 @@ dependencies {
testImplementation(libs.edc.auth.oauth2.client)
testImplementation(libs.testcontainers.junit)
testImplementation(libs.testcontainers.postgres)
testImplementation(libs.testcontainers.vault)

}

@@ -0,0 +1,69 @@
/*
* Copyright (c) 2023 Bayerische Motoren Werke Aktiengesellschaft (BMW AG)
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0
*
* SPDX-License-Identifier: Apache-2.0
*
* Contributors:
* Bayerische Motoren Werke Aktiengesellschaft (BMW AG) - initial API and implementation
*
*/

package org.eclipse.tractusx.edc.lifecycle;

import org.junit.jupiter.api.extension.ExtensionContext;
import org.testcontainers.vault.VaultContainer;

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import static java.lang.String.format;

public class PgHashicorpParticipantRuntime extends PgParticipantRuntime {

static final String DOCKER_IMAGE_NAME = "vault:1.9.6";
static final String TOKEN = UUID.randomUUID().toString();


public final VaultContainer<?> vaultContainer = new VaultContainer<>(DOCKER_IMAGE_NAME)
.withVaultToken(TOKEN);
private final String vaultDirectory;

public PgHashicorpParticipantRuntime(String moduleName, String runtimeName, String bpn, String vaultDirectory, Map<String, String> properties) {
super(moduleName, runtimeName, bpn, properties);
this.vaultDirectory = vaultDirectory;
}

@Override
public void beforeAll(ExtensionContext context) throws Exception {
vaultContainer.start();
config().forEach(System::setProperty);
super.beforeAll(context);
}

@Override
public void afterAll(ExtensionContext context) throws Exception {
super.afterAll(context);
vaultContainer.stop();
vaultContainer.close();
}

@Override
protected void mockVault() {

}

private Map<String, String> config() {
return new HashMap<>() {
{
put("edc.vault.hashicorp.url", format("http://%s:%s", vaultContainer.getHost(), vaultContainer.getFirstMappedPort()));
put("edc.vault.hashicorp.token", TOKEN);
put("edc.edr.vault.path", vaultDirectory);
}
};
}
}
@@ -47,7 +47,7 @@ public PgParticipantRuntime(String moduleName, String runtimeName, String bpn, M
super(moduleName, runtimeName, bpn, properties);
this.dbName = runtimeName.toLowerCase();
this.registerServiceMock(IdentityService.class, new MockDapsService(bpn));
this.registerServiceMock(Vault.class, new InMemoryVaultOverride(mock(Monitor.class)));
mockVault();

postgreSqlContainer = new PostgreSQLContainer<>(POSTGRES_IMAGE_NAME)
.withLabel("runtime", dbName)
@@ -125,6 +125,10 @@ public String baseJdbcUrl() {
return format("jdbc:postgresql://%s:%s/", postgreSqlContainer.getHost(), postgreSqlContainer.getFirstMappedPort());
}

protected void mockVault() {
this.registerServiceMock(Vault.class, new InMemoryVaultOverride(mock(Monitor.class)));
}

private static class InMemoryVaultOverride extends InMemoryVault {

InMemoryVaultOverride(Monitor monitor) {
@@ -0,0 +1,50 @@
/*
* Copyright (c) 2023 Bayerische Motoren Werke Aktiengesellschaft (BMW AG)
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0
*
* SPDX-License-Identifier: Apache-2.0
*
* Contributors:
* Bayerische Motoren Werke Aktiengesellschaft (BMW AG) - initial API and implementation
*
*/

package org.eclipse.tractusx.edc.tests.edr;

import org.eclipse.edc.junit.annotations.PostgresqlDbIntegrationTest;
import org.eclipse.tractusx.edc.lifecycle.PgHashicorpParticipantRuntime;
import org.eclipse.tractusx.edc.lifecycle.PgParticipantRuntime;
import org.junit.jupiter.api.extension.RegisterExtension;

import static org.eclipse.tractusx.edc.lifecycle.TestRuntimeConfiguration.PLATO_BPN;
import static org.eclipse.tractusx.edc.lifecycle.TestRuntimeConfiguration.PLATO_NAME;
import static org.eclipse.tractusx.edc.lifecycle.TestRuntimeConfiguration.SOKRATES_BPN;
import static org.eclipse.tractusx.edc.lifecycle.TestRuntimeConfiguration.SOKRATES_NAME;
import static org.eclipse.tractusx.edc.lifecycle.TestRuntimeConfiguration.platoConfiguration;
import static org.eclipse.tractusx.edc.lifecycle.TestRuntimeConfiguration.sokratesConfiguration;

@PostgresqlDbIntegrationTest
public class NegotiateEdrPostgresqlHashicorpVaultTest extends AbstractNegotiateEdrTest {

@RegisterExtension
protected static final PgParticipantRuntime PLATO_RUNTIME = new PgParticipantRuntime(
":edc-tests:runtime:runtime-postgresql",
PLATO_NAME,
PLATO_BPN,
platoConfiguration()
);
private static final String VAULT_DIRECTORY = "testDir/";

@RegisterExtension
protected static final PgHashicorpParticipantRuntime SOKRATES_RUNTIME = new PgHashicorpParticipantRuntime(
":edc-tests:runtime:runtime-postgresql-hashicorp",
SOKRATES_NAME,
SOKRATES_BPN,
VAULT_DIRECTORY,
sokratesConfiguration()
);

}
3 changes: 3 additions & 0 deletions edc-tests/runtime/runtime-postgresql-hashicorp/README.md
@@ -0,0 +1,3 @@
# PostgreSQL + Hashicorp Vault Runtime for Testing Purposes

This module provides a very small runtime that uses PostgreSQL as persistence backend and Hashicorp Vault as secret store to execute tests against. Not intended for anything other than testing!
53 changes: 53 additions & 0 deletions edc-tests/runtime/runtime-postgresql-hashicorp/build.gradle.kts
@@ -0,0 +1,53 @@
/*
* Copyright (c) 2023 Bayerische Motoren Werke Aktiengesellschaft (BMW AG)
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0
*
* SPDX-License-Identifier: Apache-2.0
*
* Contributors:
* Bayerische Motoren Werke Aktiengesellschaft (BMW AG) - initial API and implementation
*
*/

plugins {
`java-library`
id("application")
}


dependencies {

    // use the PostgreSQL + Hashicorp Vault control plane
implementation(project(":edc-controlplane:edc-controlplane-postgresql-hashicorp-vault")) {
exclude(module = "data-encryption")
exclude(module = "json-ld-core")
exclude(module = "ssi-identity-core")
exclude(module = "ssi-miw-credential-client")
exclude(module = "ssi-identity-extractor")
exclude(module = "cx-policy")
}

implementation(project(":edc-tests:runtime:extensions"))

// use basic (all in-mem) data plane
runtimeOnly(project(":edc-dataplane:edc-dataplane-base")) {
exclude("org.eclipse.edc", "api-observability")
}


implementation(libs.edc.core.controlplane)
// for the controller
implementation(libs.jakarta.rsApi)
}

application {
mainClass.set("org.eclipse.edc.boot.system.runtime.BaseRuntime")
}

// do not publish
edcBuild {
publish.set(false)
}
1 change: 1 addition & 0 deletions gradle/libs.versions.toml
Expand Up @@ -149,6 +149,7 @@ apache-sshd-core = { module = "org.apache.sshd:sshd-core", version.ref = "apache
apache-sshd-sftp = { module = "org.apache.sshd:sshd-sftp", version.ref = "apache-sshd" }
testcontainers-junit = { module = "org.testcontainers:junit-jupiter", version.ref = "testcontainers" }
testcontainers-postgres = { module = "org.testcontainers:postgresql", version.ref = "testcontainers" }
testcontainers-vault = { module = "org.testcontainers:vault", version.ref = "testcontainers" }
aws-s3 = { module = "software.amazon.awssdk:s3", version.ref = "aws" }
aws-s3transfer = { module = "software.amazon.awssdk:s3-transfer-manager", version.ref = "aws" }
jakarta-rsApi = { module = "jakarta.ws.rs:jakarta.ws.rs-api", version.ref = "rsApi" }
1 change: 1 addition & 0 deletions settings.gradle.kts
@@ -62,6 +62,7 @@ include(":edc-tests:runtime:runtime-memory")
include(":edc-tests:runtime:dataplane-cloud")
include(":edc-tests:runtime:runtime-memory-ssi")
include(":edc-tests:runtime:runtime-postgresql")
include(":edc-tests:runtime:runtime-postgresql-hashicorp")
include(":edc-tests:edc-dataplane:edc-dataplane-proxy-e2e")
include(":edc-tests:edc-dataplane:cloud-transfer-tests")
