Feature/1457 pruefung gesamtgroesse dateien (#1810)
* 1457 new rest endpoint to receive file size from s3 storage

* 1457 add rest api to retrieve single file size

* 1457 integrate file size check into cosys-integration

* 1457 integrate file size check into dms-integration

* 1457 integrate file size check into email integration

* 1457 integrate file size check into ticket integration

* 1457 fix bug in S3 client, refactoring

* 1457 configurations and docs

* 1457 fix property names

* 1457 fix unit tests

* 1457 update doc

* 1457 fix unit tests

* 1457 JavaDoc

* 1457 fix merge errors

* 1457 unit tests

* 1457 add missing max-file-size property

* 1457 fix merge failure

* 1457 fix merge failure

* 1457 resolve review annotations
markostreich authored Jul 4, 2024
1 parent 652acf3 commit 32e2d96
Showing 50 changed files with 1,281 additions and 409 deletions.
@@ -8,33 +8,45 @@
import de.muenchen.oss.digiwf.s3.integration.client.exception.DocumentStorageException;
import de.muenchen.oss.digiwf.s3.integration.client.exception.DocumentStorageServerErrorException;
import de.muenchen.oss.digiwf.s3.integration.client.repository.transfer.S3FileTransferRepository;
import de.muenchen.oss.digiwf.s3.integration.client.service.FileService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.util.unit.DataSize;

@Slf4j
@RequiredArgsConstructor
public class S3Adapter implements SaveFileToStorageOutPort {

private static final String S3_FILE_SAVE_ERROR = "S3_FILE_SAVE_ERROR";
private static final String S3_FILE_SIZE_ERROR = "S3_FILE_SIZE_ERROR";
private final S3FileTransferRepository s3FileTransferRepository;
private final FileService fileService;

@Override
public void saveDocumentInStorage(final GenerateDocument generateDocument, final byte[] data) {
try {
validateFileSize(data);
for (final DocumentStorageUrl presignedUrl : generateDocument.getDocumentStorageUrls()) {
if (presignedUrl.getAction().equalsIgnoreCase("POST")) {
this.s3FileTransferRepository.saveFile(presignedUrl.getUrl(), data);
} else if (presignedUrl.getAction().equalsIgnoreCase("PUT")) {
this.s3FileTransferRepository.updateFile(presignedUrl.getUrl(), data);
} else {
throw new BpmnError("S3_FILE_SAVE_ERROR", String.format("Document storage action %s is not supported.", presignedUrl.getAction()));
throw new BpmnError(S3_FILE_SAVE_ERROR, String.format("Document storage action %s is not supported.", presignedUrl.getAction()));
}
}
} catch (final DocumentStorageClientErrorException | DocumentStorageServerErrorException |
DocumentStorageException ex) {
DocumentStorageException ex) {
log.debug("Document could not be saved.", ex);
throw new BpmnError("S3_FILE_SAVE_ERROR", ex.getMessage());
throw new BpmnError(S3_FILE_SAVE_ERROR, ex.getMessage());
}
}

private void validateFileSize(final byte[] data) {
if (!fileService.isValidFileSize(data))
throw new BpmnError(S3_FILE_SIZE_ERROR,
String.format("Invalid file size %d MB. Allowed are %d MB.", DataSize.ofBytes(data.length).toMegabytes(),
fileService.getMaxFileSize().toMegabytes()));
}

}
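
The adapter now delegates the size check to a FileService from the s3 integration client, whose implementation is not part of this diff. Below is a minimal sketch of the contract the adapter and the test below appear to rely on; the constructor order (repository, max file size, max batch size) is inferred from the test setup, and the remaining names are assumptions.

```java
// Hypothetical sketch only - the real FileService lives in
// de.muenchen.oss.digiwf.s3.integration.client.service and is not shown in this diff.
import org.springframework.util.unit.DataSize;

public class FileService {

    // Passed as null in the test below; its exact type is assumed here.
    private final Object documentStorageFileRepository;
    private final DataSize maxFileSize;
    private final DataSize maxBatchSize;

    public FileService(final Object documentStorageFileRepository, final DataSize maxFileSize, final DataSize maxBatchSize) {
        this.documentStorageFileRepository = documentStorageFileRepository;
        this.maxFileSize = maxFileSize;
        this.maxBatchSize = maxBatchSize;
    }

    /** Returns true if the payload does not exceed the configured maximum file size. */
    public boolean isValidFileSize(final byte[] data) {
        return data.length <= maxFileSize.toBytes();
    }

    public DataSize getMaxFileSize() {
        return maxFileSize;
    }

    public DataSize getMaxBatchSize() {
        return maxBatchSize;
    }
}
```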
@@ -7,26 +7,31 @@
import de.muenchen.oss.digiwf.s3.integration.client.exception.DocumentStorageException;
import de.muenchen.oss.digiwf.s3.integration.client.exception.DocumentStorageServerErrorException;
import de.muenchen.oss.digiwf.s3.integration.client.repository.transfer.S3FileTransferRepository;
import de.muenchen.oss.digiwf.s3.integration.client.service.FileService;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.util.unit.DataSize;

import java.util.List;

import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.Mockito.*;


class S3AdapterTest {
private static final String DATA = "In CoSys generiertes Dokument";
private static final byte[] DATA_AS_BYTE_ARRAY = DATA.getBytes();
private static final DataSize ALLOWED_FILE_SIZE = DataSize.ofBytes(DATA_AS_BYTE_ARRAY.length);
private static final DataSize ALLOWED_BATCH_SIZE = DataSize.ofMegabytes(110);
private static final byte[] TOO_LARGE_FILE = (DATA + "!").getBytes(); // one byte over the allowed size

private final S3FileTransferRepository s3FileTransferRepository = mock(S3FileTransferRepository.class);
private final String data = "In Cosys generiertes Dokument";
private final byte[] dataAsByteArray = data.getBytes();
private final FileService fileService = new FileService(null, ALLOWED_FILE_SIZE, ALLOWED_BATCH_SIZE);
private S3Adapter s3Adapter;

@BeforeEach
void setup() {
s3Adapter = new S3Adapter(s3FileTransferRepository);
s3Adapter = new S3Adapter(s3FileTransferRepository, fileService);
}

@Test
@@ -37,9 +42,9 @@ void saveDocumentInStorage() throws DocumentStorageException, DocumentStorageCli

final GenerateDocument generateDocument = new GenerateDocument("Client", "Role", "guid", null, listOfURls);

s3Adapter.saveDocumentInStorage(generateDocument, dataAsByteArray);
s3Adapter.saveDocumentInStorage(generateDocument, DATA_AS_BYTE_ARRAY);

verify(s3FileTransferRepository).saveFile("URL", dataAsByteArray);
verify(s3FileTransferRepository).saveFile("URL", DATA_AS_BYTE_ARRAY);
verifyNoMoreInteractions(s3FileTransferRepository);

}
@@ -52,9 +57,9 @@ void updateDocumentInStorage() throws DocumentStorageException, DocumentStorageC

final GenerateDocument generateDocument = new GenerateDocument("Client", "Role", "guid", null, listOfURls);

s3Adapter.saveDocumentInStorage(generateDocument, dataAsByteArray);
s3Adapter.saveDocumentInStorage(generateDocument, DATA_AS_BYTE_ARRAY);

verify(s3FileTransferRepository).updateFile("URL", dataAsByteArray);
verify(s3FileTransferRepository).updateFile("URL", DATA_AS_BYTE_ARRAY);
verifyNoMoreInteractions(s3FileTransferRepository);

}
@@ -67,31 +72,28 @@ void saveDocumentInStorageWithGetRequest() {

final GenerateDocument generateDocument = new GenerateDocument("Client", "Role", "guid", null, listOfURls);

BpmnError bpmnError = assertThrows(BpmnError.class, () -> {
s3Adapter.saveDocumentInStorage(generateDocument, dataAsByteArray);
});
BpmnError bpmnError = assertThrows(BpmnError.class, () -> s3Adapter.saveDocumentInStorage(generateDocument, DATA_AS_BYTE_ARRAY));

String expectedMessage = "Document storage action GET is not supported.";
String actualMessage = bpmnError.getErrorMessage();

assertEquals(expectedMessage, actualMessage);
assertEquals("S3_FILE_SAVE_ERROR", bpmnError.getErrorCode());


}

@Test
void saveDocumentInStorageWithThrowsDocumentStorageException() throws DocumentStorageException, DocumentStorageClientErrorException, DocumentStorageServerErrorException {
doThrow(new DocumentStorageException("DocumentStorageClientErrorException", new Exception())).when(s3FileTransferRepository).saveFile(anyString(), any());
void saveDocumentInStorageWithThrowsDocumentStorageException()
throws DocumentStorageException, DocumentStorageClientErrorException, DocumentStorageServerErrorException {
doThrow(new DocumentStorageException("DocumentStorageClientErrorException", new Exception())).when(s3FileTransferRepository)
.saveFile(anyString(), any());

final DocumentStorageUrl documentStorageUrl = new DocumentStorageUrl("URL", "Path", "POST");
List<DocumentStorageUrl> listOfURls = List.of(documentStorageUrl);

final GenerateDocument generateDocument = new GenerateDocument("Client", "Role", "guid", null, listOfURls);

BpmnError bpmnError = assertThrows(BpmnError.class, () -> {
s3Adapter.saveDocumentInStorage(generateDocument, dataAsByteArray);
});
BpmnError bpmnError = assertThrows(BpmnError.class, () -> s3Adapter.saveDocumentInStorage(generateDocument, DATA_AS_BYTE_ARRAY));

String expectedMessage = "DocumentStorageClientErrorException";
String actualMessage = bpmnError.getErrorMessage();
@@ -102,4 +104,19 @@ void saveDocumentInStorageWithThrowsDocumentStorageException() throws DocumentSt

}

@Test
void testSaveDocumentInStorageThrowsBpmnErrorForInvalidFileSize() {
final DocumentStorageUrl documentStorageUrl = new DocumentStorageUrl("URL", "Path", "POST");
final List<DocumentStorageUrl> listOfURls = List.of(documentStorageUrl);

final GenerateDocument generateDocument = new GenerateDocument("Client", "Role", "guid", null, listOfURls);
String expectedMessage = String.format("Invalid file size %d MB. Allowed are %d MB.", DataSize.ofBytes(TOO_LARGE_FILE.length).toMegabytes(),
ALLOWED_FILE_SIZE.toMegabytes());

assertThatThrownBy(() -> s3Adapter.saveDocumentInStorage(generateDocument, TOO_LARGE_FILE))
.isInstanceOf(BpmnError.class)
.extracting("errorCode", "errorMessage")
.containsExactly("S3_FILE_SIZE_ERROR", expectedMessage);
}

}
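
The expected message in the new size-check test depends on DataSize's integer conversions, which truncate toward zero, so the byte-sized payloads used here report 0 MB on both sides of the message. A small self-contained illustration (the values are for demonstration only):

```java
import org.springframework.util.unit.DataSize;

public class DataSizeDemo {
    public static void main(final String[] args) {
        // toMegabytes() truncates, so a 30-byte payload reports 0 MB in the error message.
        System.out.println(DataSize.ofBytes(30).toMegabytes());   // 0
        // Spring parses values such as "100MB" (see the max-file-size property below) into a DataSize.
        System.out.println(DataSize.parse("100MB").toBytes());    // 104857600
        System.out.println(DataSize.ofMegabytes(110).toBytes());  // 115343360
    }
}
```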
@@ -19,6 +19,7 @@ digiwf:
s3:
client:
document-storage-url: '${DIGIWF_S3_HTTPAPI:http://localhost:8086}'
max-file-size: ${S3_MAX_FILE_SIZE:100MB}

management:
endpoint:
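Spring Boot binds values such as 100MB to org.springframework.util.unit.DataSize automatically. The properties class of the s3 integration client is not shown in this diff; the following is a hypothetical sketch of how the digiwf.s3.client block above could be bound, with class name, field names, and defaults assumed:

```java
// Hypothetical binding sketch - not the actual configuration class of the s3 integration client.
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.util.unit.DataSize;

@ConfigurationProperties(prefix = "digiwf.s3.client")
public class S3ClientProperties {

    private String documentStorageUrl;
    private DataSize maxFileSize = DataSize.ofMegabytes(100); // mirrors the ${S3_MAX_FILE_SIZE:100MB} default

    public String getDocumentStorageUrl() { return documentStorageUrl; }
    public void setDocumentStorageUrl(final String documentStorageUrl) { this.documentStorageUrl = documentStorageUrl; }

    public DataSize getMaxFileSize() { return maxFileSize; }
    public void setMaxFileSize(final DataSize maxFileSize) { this.maxFileSize = maxFileSize; }
}
```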
@@ -16,6 +16,7 @@
import de.muenchen.oss.digiwf.message.process.api.ProcessApi;
import de.muenchen.oss.digiwf.s3.integration.client.configuration.S3IntegrationClientAutoConfiguration;
import de.muenchen.oss.digiwf.s3.integration.client.repository.transfer.S3FileTransferRepository;
import de.muenchen.oss.digiwf.s3.integration.client.service.FileService;
import lombok.RequiredArgsConstructor;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
@@ -105,8 +106,8 @@ public CreateDocumentInPort getCreateDocumentInPort(final SaveFileToStorageOutPo

@Bean
@ConditionalOnMissingBean
public SaveFileToStorageOutPort getSaveFileToStorageOutPort(final S3FileTransferRepository s3FileTransferRepository) {
return new S3Adapter(s3FileTransferRepository);
public SaveFileToStorageOutPort getSaveFileToStorageOutPort(final S3FileTransferRepository s3FileTransferRepository, final FileService fileService) {
return new S3Adapter(s3FileTransferRepository, fileService);
}

@Bean
28 changes: 15 additions & 13 deletions digiwf-integrations/digiwf-dms-integration/README.md
@@ -10,19 +10,21 @@ The following steps are needed to run the integration locally.

## Configuration

| Environment Variable | Description |
|-----------------------------|---------------------------------------------------------------|
| MUCS_DMS_INTEGRATION_SERVER_PORT | Port of the MUCS DMS Application |
| ALW_DMS_INTEGRATION_SERVER_PORT | Port of the ALW DMS Application |
| DIGIWF_ENV | Environment in which the services runs |
| KAFKA_SECURITY_PROTOCOL | Security protocol of kafka (default is PLAINTEXT) |
| KAFKA_BOOTSTRAP_SERVER | kafka server address (default is localhost) |
| KAFKA_BOOTSTRAP_SERVER_PORT | kafka server port (default is 29092) |
| FABASOFT_DMS_USERNAME | technical fabasoft dms user |
| FABASOFT_DMS_PASSWORD | technical fabasoft dms password |
| FABASOFT_DMS_HOST | fabasoft url |
| FABASOFT_DMS_PORT | fabasoft port |
| FABASOFT_ENABLE_MTOM | Enables MTOM default is true. Should be disabled with mocking |
| Environment Variable | Description |
|----------------------------------|-----------------------------------------------------------------------|
| MUCS_DMS_INTEGRATION_SERVER_PORT | Port of the MUCS DMS Application |
| ALW_DMS_INTEGRATION_SERVER_PORT | Port of the ALW DMS Application |
| DIGIWF_ENV                       | Environment in which the service runs                                  |
| KAFKA_SECURITY_PROTOCOL | Security protocol of kafka (default is PLAINTEXT) |
| KAFKA_BOOTSTRAP_SERVER | kafka server address (default is localhost) |
| KAFKA_BOOTSTRAP_SERVER_PORT | kafka server port (default is 29092) |
| FABASOFT_DMS_USERNAME | technical fabasoft dms user |
| FABASOFT_DMS_PASSWORD | technical fabasoft dms password |
| FABASOFT_DMS_HOST | fabasoft url |
| FABASOFT_DMS_PORT | fabasoft port |
| FABASOFT_ENABLE_MTOM             | Enables MTOM (default is true). Should be disabled when mocking.      |
| S3_MAX_FILE_SIZE | Maximum allowed file size that can be downloaded from S3 |
| S3_MAX_BATCH_SIZE | Maximum allowed total size of files that can be processed at one time |

## Testing with DigiWF
