build(deps): bump edc to 0.10.0-20240918-SNAPSHOT #1564

Merged · 2 commits · Sep 18, 2024
513 changes: 255 additions & 258 deletions DEPENDENCIES

Large diffs are not rendered by default.
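For context, DEPENDENCIES is the Eclipse Dash license listing for the project, so the bulk of those 513 changed lines should simply be org.eclipse.edc artifact versions moving to 0.10.0-20240918-SNAPSHOT. An entry typically looks like this (illustrative only, not copied from the actual file):

maven/mavencentral/org.eclipse.edc/connector-core/0.10.0-20240918-SNAPSHOT, Apache-2.0, approved, #<review-id>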

1 change: 0 additions & 1 deletion edc-dataplane/edc-dataplane-base/build.gradle.kts
@@ -47,7 +47,6 @@ dependencies {

runtimeOnly(libs.edc.data.plane.selector.client)
runtimeOnly(libs.edc.data.plane.self.registration)
runtimeOnly(libs.edc.dpf.api.control)
runtimeOnly(libs.edc.dpf.api.signaling)

runtimeOnly(libs.edc.api.control.config)
@@ -54,10 +54,6 @@ public class ParticipantRuntime extends EmbeddedRuntime {
private DataWiper wiper;

public ParticipantRuntime(String moduleName, String runtimeName, String bpn, Map<String, String> properties) {
this(moduleName, runtimeName, bpn, properties, null);
}

public ParticipantRuntime(String moduleName, String runtimeName, String bpn, Map<String, String> properties, BeforeInitCallback beforeInitCallback) {
super(runtimeName, properties, moduleName);
this.properties = properties;
this.registerServiceMock(IdentityService.class, new MockBpnIdentityService(bpn));
@@ -79,9 +75,6 @@ public ParticipantRuntime(String moduleName, String runtimeName, String bpn, Map
throw new RuntimeException(e);
}

if (beforeInitCallback != null) {
beforeInitCallback.beforeInit(this);
}
}

public DataWiper getWiper() {
@@ -31,12 +31,8 @@
*/
public class ParticipantRuntimeExtension extends RuntimePerClassExtension implements AfterEachCallback {

public ParticipantRuntimeExtension(String moduleName, String runtimeName, String bpn, Map<String, String> properties, BeforeInitCallback beforeInitCallback) {
public ParticipantRuntimeExtension(String moduleName, String runtimeName, String bpn, Map<String, String> properties) {
super(new ParticipantRuntime(moduleName, runtimeName, bpn, properties));

if (beforeInitCallback != null) {
beforeInitCallback.beforeInit(runtime);
}
}

@Override
@@ -41,7 +41,7 @@ public class PgRuntimeExtension extends ParticipantRuntimeExtension {
private final String dbName;

public PgRuntimeExtension(String moduleName, String runtimeName, String bpn, Map<String, String> properties) {
super(moduleName, runtimeName, bpn, properties, null);
super(moduleName, runtimeName, bpn, properties);
this.dbName = runtimeName.toLowerCase();
postgreSqlContainer = new PostgreSQLContainer<>(POSTGRES_IMAGE_NAME)
.withLabel("runtime", dbName)
@@ -26,11 +26,7 @@
public interface Runtimes {

static RuntimeExtension memoryRuntime(String runtimeName, String bpn, Map<String, String> properties) {
return memoryRuntime(runtimeName, bpn, properties, null);
}

static RuntimeExtension memoryRuntime(String runtimeName, String bpn, Map<String, String> properties, BeforeInitCallback callback) {
return new ParticipantRuntimeExtension(":edc-tests:runtime:runtime-memory", runtimeName, bpn, properties, callback);
return new ParticipantRuntimeExtension(":edc-tests:runtime:runtime-memory", runtimeName, bpn, properties);
}

static RuntimeExtension pgRuntime(String runtimeName, String bpn, Map<String, String> properties) {
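With the BeforeInitCallback overloads removed, tests that previously passed an initializer now register their service mocks directly on the extension returned by memoryRuntime, as the updated TransferWithTokenRefreshTest below does. A minimal sketch of the new pattern, assuming the PROVIDER/CONSUMER fixtures and the forConfig helper used in these e2e tests:

    // sketch only: PROVIDER, CONSUMER and forConfig(...) are fixtures assumed from the surrounding e2e tests
    @RegisterExtension
    static final RuntimeExtension PROVIDER_RUNTIME =
            memoryRuntime(PROVIDER.getName(), PROVIDER.getBpn(), forConfig(PROVIDER.getConfiguration()))
                    .registerServiceMock(BdrsClient.class, (c) -> CONSUMER.getDid());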
@@ -22,7 +22,6 @@
import jakarta.json.JsonObject;
import org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcessStates;
import org.eclipse.edc.junit.annotations.EndToEndTest;
import org.eclipse.edc.junit.extensions.EmbeddedRuntime;
import org.eclipse.edc.junit.extensions.RuntimeExtension;
import org.eclipse.tractusx.edc.spi.identity.mapper.BdrsClient;
import org.eclipse.tractusx.edc.tests.participant.TransferParticipant;
@@ -76,7 +75,8 @@ public class TransferWithTokenRefreshTest {
private static final Long VERY_SHORT_TOKEN_EXPIRY = 3L;

@RegisterExtension
protected static final RuntimeExtension PROVIDER_RUNTIME = memoryRuntime(PROVIDER.getName(), PROVIDER.getBpn(), forConfig(PROVIDER.getConfiguration()), TransferWithTokenRefreshTest::providerInitiator);
protected static final RuntimeExtension PROVIDER_RUNTIME = memoryRuntime(PROVIDER.getName(), PROVIDER.getBpn(), forConfig(PROVIDER.getConfiguration()))
.registerServiceMock(BdrsClient.class, (c) -> CONSUMER.getDid());
protected ClientAndServer server;
private String privateBackendUrl;

@@ -88,10 +88,6 @@ private static Map<String, String> forConfig(Map<String, String> originalConfig)
return newConfig;
}

private static void providerInitiator(EmbeddedRuntime runtime) {
runtime.registerServiceMock(BdrsClient.class, (c) -> CONSUMER.getDid());
}

@BeforeEach
void setup() {
server = ClientAndServer.startClientAndServer(MOCK_BACKEND_REMOTE_HOST, getFreePort());
@@ -34,6 +34,7 @@ dependencies {
testImplementation(libs.edc.auth.tokenbased)
testImplementation(libs.edc.spi.dataplane.selector)
testImplementation(libs.testcontainers.junit)
testImplementation(libs.edc.ext.jsonld)
testImplementation(libs.edc.aws.s3.core)
testImplementation(libs.aws.s3)
testImplementation(libs.aws.s3transfer)
@@ -21,16 +21,15 @@

import com.azure.core.util.BinaryData;
import io.restassured.http.ContentType;
import jakarta.json.Json;
import jakarta.json.JsonObjectBuilder;
import org.eclipse.edc.junit.extensions.EmbeddedRuntime;
import org.eclipse.edc.junit.extensions.RuntimeExtension;
import org.eclipse.edc.junit.extensions.RuntimePerClassExtension;
import org.eclipse.edc.junit.testfixtures.TestUtils;
import org.eclipse.edc.spi.monitor.ConsoleMonitor;
import org.eclipse.edc.spi.monitor.Monitor;
import org.eclipse.edc.spi.security.Vault;
import org.eclipse.edc.spi.types.domain.DataAddress;
import org.eclipse.edc.spi.types.domain.transfer.DataFlowStartMessage;
import org.eclipse.edc.spi.types.domain.transfer.FlowType;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
@@ -44,10 +43,14 @@
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayDeque;
import java.util.List;
import java.util.UUID;
import java.util.stream.IntStream;

import static io.restassured.RestAssured.given;
import static org.assertj.core.api.Assertions.assertThat;
import static org.eclipse.edc.spi.constants.CoreConstants.EDC_NAMESPACE;
import static org.eclipse.edc.spi.types.domain.transfer.DataFlowStartMessage.EDC_DATA_FLOW_START_MESSAGE_TYPE;
import static org.eclipse.edc.util.io.Ports.getFreePort;
import static org.eclipse.tractusx.edc.dataplane.transfer.test.TestConstants.AZBLOB_CONSUMER_ACCOUNT_KEY;
import static org.eclipse.tractusx.edc.dataplane.transfer.test.TestConstants.AZBLOB_CONSUMER_ACCOUNT_NAME;
@@ -78,10 +81,11 @@
@Testcontainers
@CloudTransferTest
public class AzureToAzureTest {

private static final int PROVIDER_CONTROL_PORT = getFreePort();
private static final String START_DATAFLOW_URL = "http://localhost:%s/control/v1/dataflows".formatted(PROVIDER_CONTROL_PORT);
private static final int AZURITE_HOST_PORT = getFreePort();
// launches the data plane
// launches the data plane

@RegisterExtension
protected static final RuntimeExtension DATAPLANE_RUNTIME = new RuntimePerClassExtension(new EmbeddedRuntime(
"AzureBlob-Dataplane",
@@ -121,17 +125,17 @@ void transferMultipleFile_success(Vault vault) {
{"sas": "%s","edctype":"dataspaceconnector:azuretoken"}
""".formatted(consumerBlobHelper.generateAccountSas(AZBLOB_CONSUMER_CONTAINER_NAME)));

var request = createMultipleFileFlowRequest(PREFIX_FOR_MUTIPLE_FILES);
var url = "http://localhost:%s/control/transfer".formatted(PROVIDER_CONTROL_PORT);

var request = createFlowRequestBuilder("any")
.add("sourceDataAddress", blobSourceAddress(List.of(dspaceProperty(EDC_NAMESPACE + "blobPrefix", PREFIX_FOR_MUTIPLE_FILES))))
.build();

given().when()
.baseUri(url)
.baseUri(START_DATAFLOW_URL)
.contentType(ContentType.JSON)
.body(request)
.post()
.then()
.log().ifError()
.log().ifValidationFails()
.statusCode(200);

await().pollInterval(Duration.ofSeconds(2))
@@ -158,17 +162,13 @@ void transferFile_success(Vault vault) {
{"sas": "%s","edctype":"dataspaceconnector:azuretoken"}
""".formatted(consumerBlobHelper.generateAccountSas(AZBLOB_CONSUMER_CONTAINER_NAME)));

var request = createFlowRequest(TESTFILE_NAME);

var url = "http://localhost:%s/control/transfer".formatted(PROVIDER_CONTROL_PORT);

given().when()
.baseUri(url)
.baseUri(START_DATAFLOW_URL)
.contentType(ContentType.JSON)
.body(request)
.body(createFlowRequestBuilder(TESTFILE_NAME).build())
.post()
.then()
.log().ifError()
.log().ifValidationFails()
.statusCode(200);

await().pollInterval(Duration.ofSeconds(2))
@@ -210,18 +210,14 @@ void transferFile_largeFile(long sizeBytes, Vault vault) throws IOException {
{"sas": "%s","edctype":"dataspaceconnector:azuretoken"}
""".formatted(consumerBlobHelper.generateAccountSas(AZBLOB_CONSUMER_CONTAINER_NAME)));

var request = createFlowRequest(blobName);

var url = "http://localhost:%s/control/transfer".formatted(PROVIDER_CONTROL_PORT);

given().when()
.baseUri(url)
.baseUri(START_DATAFLOW_URL)
.contentType(ContentType.JSON)
.body(request)
.body(createFlowRequestBuilder(blobName).build())
.post()
.then()
.log().ifValidationFails()
.log().ifError()
.log().ifValidationFails()
.statusCode(200);

await().pollInterval(Duration.ofSeconds(10))
@@ -252,31 +248,17 @@ void transferFolder_targetFolderNotExists_shouldCreate(Vault vault) {
providerBlobHelper.uploadBlob(sourceContainer, fileData, "folder/blob3.bin");

var request = createFlowRequestBuilder(TESTFILE_NAME)
.sourceDataAddress(DataAddress.Builder.newInstance()
.type("AzureStorage")
.property("container", AZBLOB_PROVIDER_CONTAINER_NAME)
.property("account", AZBLOB_PROVIDER_ACCOUNT_NAME)
.property("keyName", AZBLOB_PROVIDER_KEY_ALIAS)
.property("blobPrefix", "folder/")
.build())
.destinationDataAddress(DataAddress.Builder.newInstance()
.type("AzureStorage")
.property("container", AZBLOB_CONSUMER_CONTAINER_NAME)
.property("account", AZBLOB_CONSUMER_ACCOUNT_NAME)
.property("keyName", AZBLOB_CONSUMER_KEY_ALIAS)
.property("folderName", "destfolder")
.build())
.add("sourceDataAddress", blobSourceAddress(List.of(dspaceProperty(EDC_NAMESPACE + "blobPrefix", "folder/"))))
.add("destinationDataAddress", blobDestinationAddress(List.of(dspaceProperty(EDC_NAMESPACE + "folderName", "destfolder"))))
.build();

var url = "http://localhost:%s/control/transfer".formatted(PROVIDER_CONTROL_PORT);

given().when()
.baseUri(url)
.baseUri(START_DATAFLOW_URL)
.contentType(ContentType.JSON)
.body(request)
.post()
.then()
.log().ifError()
.log().ifValidationFails()
.statusCode(200);

await().pollInterval(Duration.ofSeconds(2))
@@ -286,7 +268,6 @@ void transferFolder_targetFolderNotExists_shouldCreate(Vault vault) {
.contains("destfolder/folder/blob.bin", "destfolder/folder/blob2.bin", "destfolder/folder/blob3.bin"));
}


@Test
void transferFile_targetContainerNotExist_shouldFail(Vault vault) {
var sourceContainer = providerBlobHelper.createContainer(AZBLOB_PROVIDER_CONTAINER_NAME);
@@ -299,17 +280,13 @@ void transferFile_targetContainerNotExist_shouldFail(Vault vault) {
{"sas": "%s","edctype":"dataspaceconnector:azuretoken"}
""".formatted(consumerBlobHelper.generateAccountSas(AZBLOB_CONSUMER_CONTAINER_NAME)));

var request = createFlowRequest(TESTFILE_NAME);

var url = "http://localhost:%s/control/transfer".formatted(PROVIDER_CONTROL_PORT);

given().when()
.baseUri(url)
.baseUri(START_DATAFLOW_URL)
.contentType(ContentType.JSON)
.body(request)
.body(createFlowRequestBuilder(TESTFILE_NAME).build())
.post()
.then()
.log().ifError()
.log().ifValidationFails()
.statusCode(200);

await().pollInterval(Duration.ofSeconds(2))
@@ -318,34 +295,21 @@ void transferFile_targetContainerNotExist_shouldFail(Vault vault) {
.severe(contains("Error creating blob %s on account %s".formatted(TESTFILE_NAME, AZBLOB_CONSUMER_ACCOUNT_NAME)), isA(IOException.class)));
}

private DataFlowStartMessage createFlowRequest(String blobName) {
return createFlowRequestBuilder(blobName)
.build();
}

private DataFlowStartMessage createMultipleFileFlowRequest(String blobPrefix) {
return DataFlowStartMessage.Builder.newInstance()
.id("test-process-multiple-file-id")
.sourceDataAddress(DataAddress.Builder.newInstance()
.type("AzureStorage").property("container", AZBLOB_PROVIDER_CONTAINER_NAME)
.property("account", AZBLOB_PROVIDER_ACCOUNT_NAME).property("keyName", AZBLOB_PROVIDER_KEY_ALIAS)
.property("blobPrefix", blobPrefix)
.build())
.destinationDataAddress(DataAddress.Builder.newInstance()
.type("AzureStorage").property("container", AZBLOB_CONSUMER_CONTAINER_NAME)
.property("account", AZBLOB_CONSUMER_ACCOUNT_NAME).property("keyName", AZBLOB_CONSUMER_KEY_ALIAS)
.build())
.processId("test-process-multiple-file-id")
.flowType(FlowType.PUSH)
.build();
private JsonObjectBuilder createFlowRequestBuilder(String blobName) {
return Json.createObjectBuilder()
.add("@context", Json.createObjectBuilder().add("@vocab", EDC_NAMESPACE).add("dspace", "https://w3id.org/dspace/v0.8/"))
.add("@type", EDC_DATA_FLOW_START_MESSAGE_TYPE)
.add("@id", UUID.randomUUID().toString())
.add("processId", UUID.randomUUID().toString())
.add("sourceDataAddress", blobSourceAddress(List.of(dspaceProperty(EDC_NAMESPACE + "blobName", blobName))))
.add("destinationDataAddress", blobDestinationAddress(List.of(dspaceProperty(EDC_NAMESPACE + "blobName", blobName))))
.add("flowType", "PUSH")
.add("transferTypeDestination", "AzureStorage-PUSH");
}

private DataFlowStartMessage.Builder createFlowRequestBuilder(String blobName) {
return DataFlowStartMessage.Builder.newInstance()
.id("test-request")
.sourceDataAddress(blobSourceAddress(blobName))
.destinationDataAddress(blobDestinationAddress(blobName))
.processId("test-process-id")
.flowType(FlowType.PUSH);
private JsonObjectBuilder dspaceProperty(String name, String value) {
return Json.createObjectBuilder()
.add("dspace:name", name)
.add("dspace:value", value);
}
}
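The blobSourceAddress and blobDestinationAddress helpers referenced above sit outside the rendered hunks. A rough sketch of what the source-side helper might look like, assuming it emits the dspace-style endpointType/endpointProperties layout that the new JSON flow-start request builds on (the helper name is real, its body is assumed):

    // hypothetical sketch - the real implementation is not shown in this diff
    private JsonObjectBuilder blobSourceAddress(List<JsonObjectBuilder> additionalProperties) {
        var endpointProperties = Json.createArrayBuilder()
                .add(dspaceProperty(EDC_NAMESPACE + "container", AZBLOB_PROVIDER_CONTAINER_NAME))
                .add(dspaceProperty(EDC_NAMESPACE + "account", AZBLOB_PROVIDER_ACCOUNT_NAME))
                .add(dspaceProperty(EDC_NAMESPACE + "keyName", AZBLOB_PROVIDER_KEY_ALIAS));
        additionalProperties.forEach(endpointProperties::add);
        return Json.createObjectBuilder()
                .add("dspace:endpointType", "AzureStorage")
                .add("dspace:endpointProperties", endpointProperties);
    }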