Skip to content

Commit

Permalink
Montée en version 2.3.0
Browse files Browse the repository at this point in the history
  • Loading branch information
ebugat committed Sep 19, 2024
1 parent 1dff12d commit 377a860
Show file tree
Hide file tree
Showing 17 changed files with 136 additions and 61 deletions.
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
</parent>
<groupId>fr.progilone.numahop</groupId>
<artifactId>numahop</artifactId>
<version>2.2.1</version>
<version>2.3.0</version>
<packaging>war</packaging>
<name>NumaHOP</name>
<description>NumaHOP</description>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,9 @@ public SecurityFilterChain filterChain(final HttpSecurity http) throws Exception
.passwordParameter("j_password")
.permitAll())
.logout(c -> c.logoutUrl("/api/logout").logoutSuccessHandler(ajaxLogoutSuccessHandler).deleteCookies("JSESSIONID", "hazelcast.sessionId").permitAll())
.headers(c -> c.frameOptions().disable())
.headers(c -> c.frameOptions()
.disable()
.contentSecurityPolicy(contentSecurityPolicyConfig -> contentSecurityPolicyConfig.policyDirectives("default-src 'self'; script-src 'self' 'unsafe-eval' 'unsafe-inline'; style-src 'self' 'unsafe-eval' 'unsafe-inline'; img-src 'self' data:;")))
.authorizeHttpRequests(authorize -> authorize.requestMatchers(new AntPathRequestMatcher("/api/authenticate"), new AntPathRequestMatcher("/api/rest/reset"))
.permitAll()
.requestMatchers(new AntPathRequestMatcher("/api/**"), new AntPathRequestMatcher("/protected/**"))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,9 @@ public class DigitalLibraryConfiguration extends AbstractDomainObject {
@Column(name = "export_alto")
private boolean exportAlto;

@Column(name = "export_master")
private boolean exportMaster;

/**
* Valeur par défaut des champs vides
*/
Expand Down Expand Up @@ -237,6 +240,14 @@ public void setExportAlto(final boolean exportAlto) {
this.exportAlto = exportAlto;
}

/**
 * @return {@code true} when master files must be included in the digital-library export
 */
public boolean isExportMaster() {
    return this.exportMaster;
}

/**
 * Enables or disables the export of master files.
 *
 * @param exportMaster {@code true} to include master files in the export
 */
public void setExportMaster(final boolean exportMaster) {
    this.exportMaster = exportMaster;
}

/**
 * @return the default value applied to empty fields
 */
public String getDefaultValue() {
    return this.defaultValue;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -152,6 +152,8 @@ public interface DocUnitRepository extends JpaRepository<DocUnit, String>, DocUn

DocUnit getOneByPgcnId(String pgcnId);

List<DocUnit> findAllByPgcnId(String pgcnId);

DocUnit getOneByPgcnIdAndState(String pgcnId, DocUnit.State state);

Long countByPgcnIdAndState(String pgcnId, DocUnit.State state);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -646,7 +646,10 @@ public List<AutomaticCheckResult> checkMetadataOfFiles(final Map<AutoCheckType,
private boolean isTagValuesValid(final String tag, final List<ImageMetadataValue> values, final Metadatas metas) {
return StringUtils.isNotBlank(tag) && metas.getTags().containsKey(tag)
&& metas.getTags().get(tag).size() == values.size()
&& metas.getTags().get(tag).stream().allMatch(t -> values.stream().map(ImageMetadataValue::getValue).anyMatch(value -> value.equalsIgnoreCase(t)));
&& metas.getTags()
.get(tag)
.stream()
.allMatch(t -> values.stream().map(ImageMetadataValue::getValue).anyMatch(value -> value.replaceAll(" ", "").equalsIgnoreCase(t.replaceAll(" ", ""))));
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -769,8 +769,8 @@ public Map<String, Long> getDiskInfos(final String libId) {
final File tmpdir = bm.getTmpDir(libId);
final Map<String, Long> infos = new HashMap<>();
if (tmpdir != null) {
infos.put("occupe", tmpdir.getTotalSpace() - tmpdir.getUsableSpace());
infos.put("disponible", tmpdir.getUsableSpace());
infos.put("occupe", tmpdir.getTotalSpace() - tmpdir.getFreeSpace());
infos.put("disponible", tmpdir.getFreeSpace());
LOG.debug("Espace disque - Occupé {} - Disponible {}", infos.get("occupe"), infos.get("disponible"));
}
return infos;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,8 @@ public BibliographicRecord getOne(final String identifier) {
/**
 * Deletes a bibliographic record and, when a doc unit is attached to it, removes that
 * doc unit from the search index.
 *
 * NOTE(review): the diff retained both the old unguarded index delete and the new
 * null-guarded one; keeping both would hit the index twice and still throw a NPE for
 * orphan records. Only the guarded call is kept.
 *
 * @param identifier identifier of the bibliographic record to delete
 */
@Transactional
public void delete(final String identifier) {
    bibliographicRecordRepository.findById(identifier).ifPresent(r -> {
        // The record may be orphaned: only touch the search index when a doc unit exists
        if (r.getDocUnit() != null) {
            esDocUnitService.delete(r.getDocUnit().getIdentifier());
        }
        bibliographicRecordRepository.deleteById(identifier);
    });
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,11 +38,15 @@ public Collection<DocUnit> lookupDuplicates(final DocUnit docUnit) {
final Set<DocUnit> duplicates = new HashSet<>();
// UD doublons sur le PGCN Id
if (docUnit.getPgcnId() != null) {
final DocUnit duplPgcnId = docUnitRepository.getOneByPgcnIdAndState(docUnit.getPgcnId(), DocUnit.State.AVAILABLE);

if (duplPgcnId != null) {
duplicates.add(duplPgcnId);
}
// Les doublons peuvent être clôturés ! Ne pas restreindre aux unités documentaires disponibles.
final List<DocUnit> duplPgcnId = docUnitRepository.findAllByPgcnId(docUnit.getPgcnId());
duplPgcnId.stream().forEach(dupl -> {
// L'unité documentaire actuellement checkée se trouve aussi dans la liste retournée par la bdd
if (!dupl.getIdentifier().equals(docUnit.getIdentifier()))
duplicates.add(dupl);
});

}
// UD doublon sur l'identifiant de la notice
docUnit.getRecords().stream().map(bib -> lookupDuplicates(docUnit, bib)).flatMap(Collection::stream).forEach(duplicates::add);
Expand All @@ -62,7 +66,7 @@ private List<DocUnit> lookupDuplicates(final DocUnit docUnit, final Bibliographi
.filter(prop -> StringUtils.equals(prop.getType().getIdentifier(), "identifier"))
.map(DocProperty::getValue)
.collect(Collectors.toList());
// Recherche des doublons déjà importés
return docUnitRepository.searchDuplicates(docUnit, identifiers, DocUnit.State.AVAILABLE);
// Les doublons peuvent être clôturés ! Ne pas restreindre aux unités documentaires disponibles.
return docUnitRepository.searchDuplicates(docUnit, identifiers);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import fr.progilone.pgcn.repository.exchange.ImportedDocUnitRepository;
import fr.progilone.pgcn.repository.imagemetadata.ImageMetadataValuesRepository;
import fr.progilone.pgcn.security.SecurityUtils;
import fr.progilone.pgcn.service.document.DocUnitService;
import fr.progilone.pgcn.service.storage.FileStorageManager;
import fr.progilone.pgcn.service.util.transaction.TransactionService;
import fr.progilone.pgcn.service.util.transaction.TransactionalJobRunner;
Expand Down Expand Up @@ -63,6 +64,7 @@ public class ImportReportService {
private final ImageMetadataValuesRepository imageMetadataValuesRepository;
private final TransactionService transactionService;
private final WebsocketService websocketService;
private final DocUnitService docUnitService;

// Stockage des fichiers importés
@Value("${uploadPath.import}")
Expand All @@ -76,7 +78,8 @@ public ImportReportService(final DocUnitRepository docUnitRepository,
final TransactionService transactionService,
final WebsocketService websocketService,
final ConditionReportRepository conditionReportRepository,
final ImageMetadataValuesRepository imageMetadataValuesRepository) {
final ImageMetadataValuesRepository imageMetadataValuesRepository,
final DocUnitService docUnitService) {
this.docUnitRepository = docUnitRepository;
this.conditionReportRepository = conditionReportRepository;
this.fm = fm;
Expand All @@ -85,6 +88,7 @@ public ImportReportService(final DocUnitRepository docUnitRepository,
this.imageMetadataValuesRepository = imageMetadataValuesRepository;
this.transactionService = transactionService;
this.websocketService = websocketService;
this.docUnitService = docUnitService;
}

@PostConstruct
Expand Down Expand Up @@ -494,14 +498,10 @@ public void delete(final String identifier) {
importedDocUnitRepository.deleteByIds(ids);
return true;
}).process();
LOG.debug("Suppression des unités documentaires liées au rapport d'import {}", identifier);
new TransactionalJobRunner<>(docUnitIds, transactionService).setCommit(BATCH_SIZE).forEachGroup(BATCH_SIZE, ids -> {
docUnitRepository.setParentNullByParentIdIn(ids);
return true;
}).process();
new TransactionalJobRunner<>(docUnitIds, transactionService).setCommit(BATCH_SIZE).forEachGroup(BATCH_SIZE, ids -> {
final List<DocUnit> docUnits = docUnitRepository.findByIdentifierIn(ids);
docUnitRepository.deleteAll(docUnits);

// Supression du docunit préchargé
new TransactionalJobRunner<>(docUnitIds, transactionService).setCommit(BATCH_SIZE).forEachGroup(BATCH_SIZE, id -> {
docUnitService.delete(id);
return true;
}).process();

Expand All @@ -510,15 +510,6 @@ public void delete(final String identifier) {
importReportRepository.deleteById(identifier);
}

/**
 * Deletes every import report belonging to the given library.
 *
 * @param identifier identifier of the library whose import reports are removed
 */
public void deleteByLibrary(final String identifier) {
    importReportRepository.findByLibraryIdentifier(identifier)
                          .forEach(report -> this.delete(report.getIdentifier()));
}

@Transactional
public void setLotNull(final List<String> lotIds) {
importReportRepository.setLotNullByLotIdIn(lotIds);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,10 +73,11 @@ public class DigitalLibraryDiffusionService {
private static final String ALTO_DIR = "-alto";
private static final String JPG_DIR = "-jpg";
private static final String PDF_DIR = "-pdf";
private static final String MASTER_DIR = "-master";
private static final String MEDIA_HEADER = "media";
private static final String ALTO_HEADER = "alto";
private static final String PDF_HEADER = "pdf";

private static final String MASTER_HEADER = "master";
private final DocUnitService docUnitService;
private final DigitalLibraryConfigurationService digitalLibraryConfigurationService;
private final WorkflowService workflowService;
Expand Down Expand Up @@ -350,6 +351,9 @@ private File createDocUnitsDigitalLibraryDiffusionCsv(final DocUnit docUnit,
if (conf.isExportPdf()) {
writer.append(buildExportRelativePath(pgcnId, PDF_DIR, conf));
}
if (conf.isExportMaster()) {
writer.append(buildExportRelativePath(pgcnId, MASTER_DIR, conf));
}

final List<String> entetesDC = docPropertyTypeService.findAllBySuperType(DocPropertyType.DocPropertySuperType.DC)
.stream()
Expand Down Expand Up @@ -452,6 +456,9 @@ private void initializeCsvFile(final File csvFile, final DigitalLibraryConfigura
if (conf.isExportPdf()) {
writer.append(MEDIA_HEADER).append(CSV_COL_SEP);
}
if (conf.isExportMaster()) {
writer.append(MASTER_HEADER).append(CSV_COL_SEP);
}

// Entête Dublin Core
final List<String> entetesDC = docPropertyTypeService.findAllBySuperType(DocPropertyType.DocPropertySuperType.DC)
Expand Down Expand Up @@ -501,6 +508,9 @@ private Path createDirectories(final Path root, final String pgcnId, final Digit
if (conf.isExportAlto()) {
Files.createDirectory(depotPath.resolve(pgcnId.concat(ALTO_DIR)));
}
if (conf.isExportMaster()) {
Files.createDirectory(depotPath.resolve(pgcnId.concat(MASTER_DIR)));
}
}
return depotPath;
}
Expand All @@ -519,9 +529,12 @@ private boolean hasMultipleExports(final DigitalLibraryConfiguration conf) {
: 0;
int exportAlto = conf.isExportAlto() ? 1
: 0;
int exportMaster = conf.isExportMaster() ? 1
: 0;

return exportPrint + exportPdf
+ exportAlto > 1;
+ exportAlto
+ exportMaster > 1;
}

/**
Expand Down Expand Up @@ -579,28 +592,50 @@ private List<CheckSummedStoredFile> addDepotFiles(final DocUnit docUnit, final P

final Path depotPrint = resolveExportDepotPath(depotPath, pgcnId.concat(JPG_DIR), conf);
final Path depotPdf = resolveExportDepotPath(depotPath, pgcnId.concat(PDF_DIR), conf);
final Path depotMaster = resolveExportDepotPath(depotPath, pgcnId.concat(MASTER_DIR), conf);

docUnit.getDigitalDocuments().forEach(digitalDoc -> digitalDoc.getOrderedPages().forEach(page -> {
// Si page standard (non pdfs)
if (page.getNumber() != null && page.getNumber() != 0) {
// Par défaut, export du format PRINT
final Optional<StoredFile> print = page.getDerivedForFormat(ViewsFormatConfiguration.FileFormat.PRINT);

if (print.isPresent()) {
final StoredFile printStoredFile = print.get();
final File sourceFile = bm.getFileForStoredFile(printStoredFile, libraryId);
final Path sourcePath = Paths.get(sourceFile.getAbsolutePath());
final String fileName = printStoredFile.getFilename().substring(0, printStoredFile.getFilename().lastIndexOf(".") + 1) + ImageUtils.FORMAT_JPG;

if (conf.isExportPrint() && page.getNumber() != null) {
try {
final Path destPath = Files.createFile(depotPrint.resolve(fileName));
Files.copy(sourcePath, destPath, StandardCopyOption.REPLACE_EXISTING);
if (conf.isExportPrint()) {
// Par défaut, export du format PRINT
final Optional<StoredFile> print = page.getDerivedForFormat(ViewsFormatConfiguration.FileFormat.PRINT);

if (print.isPresent()) {
final StoredFile printStoredFile = print.get();
final File sourceFile = bm.getFileForStoredFile(printStoredFile, libraryId);
final Path sourcePath = Paths.get(sourceFile.getAbsolutePath());
final String fileName = printStoredFile.getFilename().substring(0, printStoredFile.getFilename().lastIndexOf(".") + 1) + ImageUtils.FORMAT_JPG;

if (conf.isExportPrint() && page.getNumber() != null) {
try {
final Path destPath = Files.createFile(depotPrint.resolve(fileName));
Files.copy(sourcePath, destPath, StandardCopyOption.REPLACE_EXISTING);
// On remplit la map pour optimiser le traitement ultérieur des métadonnées
checkSums.add(exportMetsService.getCheckSummedStoredFile(printStoredFile, sourceFile));
} catch (final IOException e) {
throw new UncheckedIOException(e);
}
}
}
}

if (conf.isExportMaster()) {
try {
// On récupère le master
final Optional<StoredFile> master = page.getMaster();

if (master.isPresent()) {
final StoredFile masterStoredFile = master.get();
final File masterSourceFile = bm.getFileForStoredFile(masterStoredFile, libraryId);
final Path masterSourcePath = Paths.get(masterSourceFile.getAbsolutePath());
final Path destPath = Files.createFile(depotMaster.resolve(masterStoredFile.getFilename()));
Files.copy(masterSourcePath, destPath, StandardCopyOption.REPLACE_EXISTING);
// On remplit la map pour optimiser le traitement ultérieur des métadonnées
checkSums.add(exportMetsService.getCheckSummedStoredFile(printStoredFile, sourceFile));
} catch (final IOException e) {
throw new UncheckedIOException(e);
checkSums.add(exportMetsService.getCheckSummedStoredFile(masterStoredFile, masterSourceFile));
}
} catch (final IOException e) {
throw new UncheckedIOException(e);
}
}
} else if (conf.isExportPdf()) {
Expand Down Expand Up @@ -638,6 +673,7 @@ private List<CheckSummedStoredFile> addDepotFiles(final DocUnit docUnit, final P
throw new UncheckedIOException(e);
}
}

return checkSums;

}
Expand Down
Loading

0 comments on commit 377a860

Please sign in to comment.