Merge pull request #35 from progilone/develop
Version bump to 1.1.11
ybret authored Jan 16, 2023
2 parents fa7cee6 + c8d37d8 commit 17ea255
Showing 25 changed files with 216 additions and 126 deletions.
@@ -139,7 +139,7 @@ public enum Type {
/**
* Propriétés du constat d'état
*/
@OneToMany(mappedBy = "detail", orphanRemoval = true, fetch = FetchType.LAZY, cascade = CascadeType.ALL)
@OneToMany(mappedBy = "detail", orphanRemoval = true, fetch = FetchType.EAGER, cascade = CascadeType.ALL)
@Field(type = FieldType.Nested)
private final Set<Description> descriptions = new LinkedHashSet<>();
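
Note on the hunk above: the descriptions collection moves from FetchType.LAZY to FetchType.EAGER, so the condition-report properties are loaded together with the owning entity and stay available once the persistence session is closed, which the nested Elasticsearch mapping (@Field(type = FieldType.Nested)) presumably relies on. A minimal sketch of the difference, using hypothetical Detail/Description entities rather than the project's classes:

import java.util.LinkedHashSet;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;

@Entity
public class Detail {
    @Id
    private String identifier;

    // LAZY: the set is only populated on first access and throws a
    // LazyInitializationException if that access happens after the session is closed.
    // EAGER: the set is fetched together with the Detail itself.
    @OneToMany(mappedBy = "detail", orphanRemoval = true, fetch = FetchType.EAGER, cascade = CascadeType.ALL)
    private final Set<Description> descriptions = new LinkedHashSet<>();
}

@Entity
class Description {
    @Id
    private String identifier;

    @ManyToOne
    private Detail detail;
}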

@@ -419,7 +419,7 @@ public Double getInsurance() {
public void setInsurance(final Double insurance) {
this.insurance = insurance;
}

/**
* On force le chargement du createur en json..
*/
@@ -3,6 +3,7 @@
import com.google.common.base.MoreObjects;
import fr.progilone.pgcn.domain.AbstractDomainObject;
import fr.progilone.pgcn.domain.document.DocUnit;
import org.apache.commons.lang3.RegExUtils;

import javax.annotation.Nullable;
import javax.persistence.CollectionTable;
@@ -220,7 +221,7 @@ public String getDocUnitLabel() {
}

public void setDocUnitLabel(final String docUnitLabel) {
this.docUnitLabel = docUnitLabel;
this.docUnitLabel = RegExUtils.replaceAll(docUnitLabel, "[\u0088\u0089]", "");
}

public String getParentDocUnit() {
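
The reworked setter above strips the invisible C1 control characters U+0088 and U+0089 from the document label before it is stored. A small self-contained illustration of the RegExUtils.replaceAll call from Apache Commons Lang 3 (the newly imported utility); the sample label is made up:

import org.apache.commons.lang3.RegExUtils;

public class LabelCleaner {
    public static void main(final String[] args) {
        // The two escapes below are C1 control characters that occasionally leak in
        // from badly encoded metadata exports.
        final String raw = "Voyage en Italie\u0088 (1786)\u0089";
        final String cleaned = RegExUtils.replaceAll(raw, "[\u0088\u0089]", "");
        System.out.println(cleaned); // prints "Voyage en Italie (1786)"
    }
}
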
@@ -488,7 +488,6 @@ public List<DocUnitWorkflow> findDocUnitWorkflowsForLocalExport(final String lib
return new JPAQuery(em).from(qDocUnitWorkflow)
.innerJoin(qDocUnitWorkflow.docUnit, qDocUnit)
.leftJoin(qDocUnitWorkflow.states, qDocUnitState)
.fetch()
.where(builder.getValue())
.distinct()
.list(qDocUnitWorkflow);
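
In the hunk above, dropping .fetch() turns the left join on the workflow states back into a plain join: in QueryDSL 3, leftJoin(...).fetch() marks the last join as a JPQL "left join fetch", which also pulls every DocUnitState into the result, whereas without it the join is only used for filtering. Roughly, the generated JPQL changes as sketched below (an approximation for illustration, not output captured from the project):

// Approximate JPQL before the change (fetch join on the states collection):
//   select distinct w from DocUnitWorkflow w
//     inner join w.docUnit d
//     left join fetch w.states s
//   where ...
// Approximate JPQL after the change (plain join, states loaded per their mapping):
//   select distinct w from DocUnitWorkflow w
//     inner join w.docUnit d
//     left join w.states s
//   where ...
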
@@ -235,7 +235,7 @@ public List<AutomaticCheckResult> check(final List<AutomaticCheckType> checkList
* @param results
* @param doc
*/
private void checkFacile(final AutomaticCheckType checkType, final List<AutomaticCheckResult> results,
private void checkFacile(final AutomaticCheckType checkType, final List<AutomaticCheckResult> results,
final DocUnit doc, final String libraryId) {
final Set<DigitalDocument> digitalDocs = doc.getDigitalDocuments();
final List<AutomaticCheckResult> batchResults = new ArrayList<>();
@@ -377,7 +377,7 @@ public AutomaticCheckResult checkFileCase(final AutomaticCheckResult result,
.forEach((name) -> {

if (splitNames.get(name).isPresent()) { // à priori toujours vrai....

final SplitFilename split = splitNames.get(name).get();
final String toTest = prefix.startsWith(bibPrefix) ?
split.getLibrary().concat(seqSeparator).concat(split.getPrefix())
@@ -389,9 +389,9 @@ public AutomaticCheckResult checkFileCase(final AutomaticCheckResult result,

LOG.debug("ERREUR DE CASSE DETECTEE : prefix={} - chaine testee:{}", bibPrefix, toTest);
result.addErrorFile(name);
}
}
}

});

}
@@ -481,6 +481,10 @@ public List<AutomaticCheckResult> checkMetadataOfFiles(final Map<AutoCheckType,
sbIntegrity.append(HEADER_LIST_FILES);
}
resultIntegrity.addErrorFile(file.getName());

DigitalDocument dd = resultIntegrity.getDigitalDocument();
dd.setStatus(DigitalDocument.DigitalDocumentStatus.REJECTED); //Trying to reject the document
digitalDocumentService.save(dd);
}
// Type de compression
final AutomaticCheckRule ruleTypeComp = checkingRules.get(AutoCheckType.FILE_TYPE_COMPR);
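
With the four added lines, a failed integrity check no longer only records the faulty file name in the report: the digital copy itself is flagged as REJECTED and saved immediately, so the rejection is persisted even if later checks abort the delivery. Condensed, the pattern is the following (the null guard is added here for illustration and is not part of the committed change):

final DigitalDocument dd = resultIntegrity.getDigitalDocument();
if (dd != null) { // defensive guard, not present in the commit
    dd.setStatus(DigitalDocument.DigitalDocumentStatus.REJECTED);
    digitalDocumentService.save(dd);
}
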
@@ -713,7 +717,7 @@ public List<AutomaticCheckResult> checkMetaDataFilesFormat(final Delivery delive
final Map<String, List<MdSecType>> extractedDmdSec) {

final List<AutomaticCheckResult> allResults = new ArrayList<>();

// Format de fichier metadonnee
AutomaticCheckResult tocResult = initializeAutomaticCheckResult(AutoCheckType.METADATA_FILE);
handleLinkResultMetaDatas(tocResult, delivery, digitalIdDoc);
@@ -736,13 +740,13 @@ public List<AutomaticCheckResult> checkMetaDataFilesFormat(final Delivery delive
MetaDatasCheckService.METS_MIME_TYPE,
FileRoleEnum.METS,
extractedDmdSec);

break;
case EXCEL:
// validation table des matieres excel.
final Optional<File> excelToCheck =
files.stream().filter(file -> StringUtils.equalsIgnoreCase(file.getName(), dto.getName())).findFirst();

if (excelToCheck.isPresent()) {
if (excelToCheck.get().getName().endsWith(".xlsx")) {
tocResult = metaCheckService.checkMetaDataFileFormat(tocResult,
@@ -760,7 +764,7 @@ public List<AutomaticCheckResult> checkMetaDataFilesFormat(final Delivery delive
extractedDmdSec);
}
}

break;
case PDF_MULTI:
// Validation pdf/A ocr.
@@ -772,7 +776,7 @@ public List<AutomaticCheckResult> checkMetaDataFilesFormat(final Delivery delive
MetaDatasCheckService.PDF_MIME_TYPE,
FileRoleEnum.PDF_MULTI,
extractedDmdSec);

break;
case OTHER:
// rien pour le moment....
@@ -781,14 +785,14 @@ public List<AutomaticCheckResult> checkMetaDataFilesFormat(final Delivery delive
// ?? COLOR, ?... Valider les elements de l'enum
break;
}

if (isTocBlocking && tocResult.getResult().compareTo(AutoCheckResult.OK) != 0) {
delivery.setTableOfContentsOK(false);
}
if (isPdfBlocking && pdfResult.getResult().compareTo(AutoCheckResult.OK) != 0) {
delivery.setPdfMultiOK(false);
}

allResults.add(save(tocResult));
allResults.add(save(pdfResult));
}
@@ -797,7 +801,7 @@ public List<AutomaticCheckResult> checkMetaDataFilesFormat(final Delivery delive
tocResult.setMessage("Table des matières introuvable");
pdfResult.setResult(AutoCheckResult.OTHER);
pdfResult.setMessage("PDF multicouches introuvable");

if (isTocBlocking && tocResult.getResult().compareTo(AutoCheckResult.OK) != 0) {
delivery.setTableOfContentsOK(false);
}
@@ -327,14 +327,17 @@ public void processDelivery(final String identifier, final DeliveryProcessResult
// Recuperation des fichiers de TOC et pdf multis / OCR.
final Map<String, Map<String, List<File>>> tocOcrFiles = prepareTocAndOcrTreatment(delivery,
processElement.getMetadatasDTOForPrefix());
tocFiles = tocOcrFiles.get("tocFiles");
// Si on ne doit pas generer les pdf ocr => on traite tout de suite
if (!isOcrPdfGeneration) {
multiPdfs = tocOcrFiles.get("multiPdfs");
// Extraction text ocr des pdfs multi.
extractedOcr = extractOcrText(multiPdfs, documentsForPrefix.size());
if(tocOcrFiles != null) {
tocFiles = tocOcrFiles.get("tocFiles");
// Si on ne doit pas generer les pdf ocr => on traite tout de suite
if (!isOcrPdfGeneration) {
multiPdfs = tocOcrFiles.get("multiPdfs");
// Extraction text ocr des pdfs multi.
extractedOcr = extractOcrText(multiPdfs, documentsForPrefix.size());
}
} else {
LOG.warn("Fichiers TOC et pdf multi/OCR absents");
}

}

/*
@@ -456,7 +459,8 @@ public void processDelivery(final String identifier, final DeliveryProcessResult
// Avec génération PDF/OCR.
final AutomaticCheckRule generateWithoutOcrRule = processElement.getCheckingRules().get(AutoCheckType.GENER_PDF_WITHOUT_OCR);
final boolean isPdfGenerationWithoutOcr = generateWithoutOcrRule != null && generateWithoutOcrRule.isActive();

LOG.debug("isOcrPdfGeneration {}", isOcrPdfGeneration);
LOG.debug("isPdfGenerationWithoutOcr {}", isPdfGenerationWithoutOcr);
if (isOcrPdfGeneration) {
multiPdfs = generateOcrPdf(documentsToTreat, libraryId);

@@ -543,6 +547,17 @@ public void processDelivery(final String identifier, final DeliveryProcessResult
// Moteur de recherche
esDeliveryService.indexAsync(identifier);

//delete temporaries files
/* try {
File tmpDir = bm.getTmpDir(libraryId);
Path tmpDirPath = tmpDir.toPath();
FileUtils.cleanDirectory(tmpDirPath.getParent().toFile());
} catch (IOException e) {
LOG.error("Impossible to delete temporaries files");
LOG.error(e.getMessage(), e);
}*/

}), SecurityContextHolder.getContext()));
}
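
The temporary-file cleanup added near the end of processDelivery is left commented out: as written it would call FileUtils.cleanDirectory on the parent of the library's tmp directory, which is broader than the files produced by this delivery. A narrower variant, sketched here with the bm.getTmpDir accessor and the commons-io FileUtils already referenced in that block, would empty only the library's own tmp directory:

try {
    org.apache.commons.io.FileUtils.cleanDirectory(bm.getTmpDir(libraryId));
} catch (final IOException e) {
    LOG.error("Unable to delete temporary files for library {}", libraryId, e);
}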

@@ -613,12 +628,22 @@ private Map<String, List<File>> generatePdfWithoutOcr(final Map<String, Prefixed
90,
"Démarrage de la génération du pdf sans OCR");

LOG.info("generatePdfWithoutOCR start");

documentsToTreat.forEach((prefix, prefixedDoc) -> {
final Path tmpDir = getTemporaryDirectory(prefix, libraryId);

if(tmpDir != null && !prefixedDoc.getFiles().isEmpty()) {
final DigitalDocument unitializedDigitalDoc = Iterables.getOnlyElement(prefixedDoc.getDigitalDocuments());
final DigitalDocument digitalDoc = digitalDocumentService.getOneWithDocUnitAndPages(unitializedDigitalDoc.getIdentifier());
final List<String> pagesIds = digitalDoc.getPages().stream().map(AbstractDomainObject::getIdentifier).collect(Collectors.toList());
final List<StoredFile> storedFiles = binaryRepository.getAllByPageIdentifiersAndFileFormat(pagesIds, ViewsFormatConfiguration.FileFormat.VIEW);


final List<File> listNamesFile = getAndCreateDerivedFiles(storedFiles, libraryId);

if(tmpDir != null) {
List<File> listToMap = new ArrayList<>();
listToMap.add(imService.convertImgFromDirectoryToPdf(prefixedDoc.getFiles(), tmpDir.toString()));
listToMap.add(imService.convertImgFromDirectoryToPdf(prefix, listNamesFile, tmpDir.toString()));
mapPdfs.put(prefix, listToMap);
} else {
//Log file and continue
@@ -633,6 +658,8 @@ private Map<String, List<File>> generatePdfWithoutOcr(final Map<String, Prefixed
95,
"Fin de la génération du pdf sans OCR");

LOG.info("generatePdfWithoutOCR end");

return mapPdfs;
}
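
Read linearly, the reworked generatePdfWithoutOcr above no longer converts the delivered files directly: for each prefix it resolves the digital document's pages, loads their VIEW-format binaries, materializes them on disk and converts that image set into a single PDF. The condensed restatement below reuses only names and calls that appear in the hunk; it is a reading aid, not additional project code:

// Inside the per-prefix loop, once tmpDir is known and the prefix has files:
final DigitalDocument digitalDoc =
        digitalDocumentService.getOneWithDocUnitAndPages(unitializedDigitalDoc.getIdentifier());
final List<String> pagesIds = digitalDoc.getPages().stream()
                                        .map(AbstractDomainObject::getIdentifier)
                                        .collect(Collectors.toList());
final List<StoredFile> storedFiles =
        binaryRepository.getAllByPageIdentifiersAndFileFormat(pagesIds, ViewsFormatConfiguration.FileFormat.VIEW);
final List<File> images = getAndCreateDerivedFiles(storedFiles, libraryId); // materializes the derived files
final List<File> listToMap = new ArrayList<>();
listToMap.add(imService.convertImgFromDirectoryToPdf(prefix, images, tmpDir.toString()));
mapPdfs.put(prefix, listToMap);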

@@ -650,7 +677,7 @@ private Map<String, List<File>> generateOcrPdf(final Map<String, PrefixedDocumen
final DigitalDocument digitalDoc = digitalDocumentService.getOneWithDocUnitAndPages(unitializedDigitalDoc.getIdentifier());
final List<String> pagesIds = digitalDoc.getPages().stream().map(AbstractDomainObject::getIdentifier).collect(Collectors.toList());
final List<StoredFile> storedFiles = binaryRepository.getAllByPageIdentifiersAndFileFormat(pagesIds,
ViewsFormatConfiguration.FileFormat.ZOOM);
ViewsFormatConfiguration.FileFormat.VIEW);

final List<File> pdfs = new ArrayList<>();
mapPdfs.put(digitalDoc.getDigitalId(), pdfs);
@@ -704,7 +731,7 @@ private File createTextPathsFile(final String prefix, final String tmpDir,
.map(File::getAbsolutePath)
.collect(Collectors.toList());
// liste les paths dans un simple fichier texte.
final File tmpFile = new File(tmpDir, prefix + "_input.txt");
final File tmpFile = new File(tmpDir, prefix + "_input.txt");
try (final FileWriter writer = new FileWriter(tmpFile)) {
for (final String p : filesPath) {
writer.write(p + System.lineSeparator());
@@ -713,6 +740,14 @@
return tmpFile;
}

private List<File> getAndCreateDerivedFiles(final List<StoredFile> storedFiles, final String libraryId) {

//get and create derived files
return storedFiles.stream()
.map(sf -> bm.getFileForStoredFile(sf, libraryId))
.collect(Collectors.toList());
}

/**
* Récupération des fichiers masters
*/
@@ -174,6 +174,9 @@ public PreDeliveryDTO predeliver(final Delivery delivery, final boolean createDo

return prefixForDirectory == null;
});
if(subDirectories.isEmpty()) {
LOG.info("Aucun repertoire correspondant au prefix: " + prefixes.toString());
}
}

final DeliverySlip deliverySlip = new DeliverySlip();
@@ -344,6 +347,7 @@ private List<File> getSubDirectories(final Delivery delivery, final PreDeliveryD
LOG.debug("Recherche de fichiers dans le dossier : {}", deliveryPath);

final File[] subDirectories = new File(deliveryPath).listFiles(File::isDirectory);
LOG.debug("sous dossier : {}", subDirectories);
if (subDirectories == null) {
final PgcnError error = buildError(DELIVERY_WRONG_FOLDER);
preDeliveryDTO.addError(error);
@@ -369,6 +373,7 @@ private Map<String, PrefixedDocuments> getPrefixedDocuments(final Delivery deliv
final PrefixedDocuments prefixedDocs = new PrefixedDocuments();
prefixedDocs.addPhysicalDocument(physicalDoc);
documentsForPrefix.put(physicalDoc.getDigitalId(), prefixedDocs);
LOG.info("documentsForPrefix if: " + documentsForPrefix.toString());
}
} else {
final PhysicalDocumentDTO physicalDocDTOWorkflow = PhysicalDocumentMapper.INSTANCE.physicalDocumentToPhysicalDocumentDTO(physicalDoc);
@@ -98,7 +98,8 @@ public void writeReportTemplate(final OutputStream out, final List<String> docUn
templateBuilder.initWorkbook();

// Unités documentaires
final List<DocUnit> docUnits = docUnitService.findAllById(docUnitIds).stream().sorted((f1, f2) -> f2.getLabel().compareTo(f1.getLabel())).collect(Collectors.toList());
final List<DocUnit> docUnits = docUnitService.findAllById(docUnitIds).stream()
.sorted((f1, f2) -> f1.getPgcnId().compareTo(f2.getPgcnId())).collect(Collectors.toList());

for (final DocUnit docUnit : docUnits) {
final List<PropertyConfiguration> configurations = propertyConfigurationService.findByLibrary(docUnit.getLibrary());
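
The template rows are now ordered by ascending PGCN identifier instead of by label in reverse order. The same sort can be expressed with a Comparator, shown as a suggestion only (requires java.util.Comparator):

final List<DocUnit> docUnits = docUnitService.findAllById(docUnitIds).stream()
                                             .sorted(Comparator.comparing(DocUnit::getPgcnId))
                                             .collect(Collectors.toList());
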
@@ -138,9 +138,9 @@ public void importCSVAsync(final File importFile,
* @param mappingId
* @param parentKeyExpr
*/
private ImportReport importCSVRecords(final File importFile,
final ImportReport report,
final String mappingId,
private ImportReport importCSVRecords(final File importFile,
final ImportReport report,
final String mappingId,
final String parentKeyExpr,
final boolean archivable,
final boolean distributable) throws PgcnTechnicalException {
@@ -161,9 +161,9 @@ private ImportReport importCSVRecords(final File importFile,
* @param mappingId
* @param parentKeyExpr
*/
private ImportReport importRecord(final Reader in,
final ImportReport importReport,
final String mappingId,
private ImportReport importRecord(final Reader in,
final ImportReport importReport,
final String mappingId,
final String parentKeyExpr,
final boolean archivable,
final boolean distributable) throws PgcnTechnicalException {
@@ -173,8 +173,8 @@ private ImportReport importRecord(final Reader in,
final CSVMapping mapping = mappingService.findOne(mappingId);
if (mapping == null) {
throw new PgcnTechnicalException("Il n'existe pas de mapping avec l'identifiant " + mappingId);
}
}

// Record iterator
final CSVParser parser;
try {
@@ -193,7 +193,7 @@ private ImportReport importRecord(final Reader in,
if (key.startsWith("dc:")) {
propertyNames.put(key.substring(3), key.substring(3));
entetes.put(i, key);

} else if (key.equals(parentKeyExpr)) {
entetes.put(i, key);
} else {
@@ -208,7 +208,7 @@ private ImportReport importRecord(final Reader in,
propertyNames.put(keyRule, key);
entetes.put(i, key);
}

}
}

@@ -219,8 +219,8 @@ private ImportReport importRecord(final Reader in,
propertyTypes.put(propertyNames.get(property), docPropertyTypeService.findOne(property));
}
}


// Résumé d'exécution
importReport.setCsvMapping(mapping); // lien avec le mapping qui vient d'être chargé
final ImportReport runningReport = importReportService.startReport(importReport);
@@ -230,7 +230,7 @@ private ImportReport importRecord(final Reader in,
// Création des unités documentaires pré-importées à partir des notices
new TransactionalJobRunner<CSVRecord>(transactionService)
// Configuration du job
.setCommit(BULK_SIZE).setMaxThreads(Runtime.getRuntime().availableProcessors() - 2)
.setCommit(BULK_SIZE).setMaxThreads(Runtime.getRuntime().availableProcessors())
// Traitement principal
.forEach((record) -> {
try {
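
The job runner previously requested availableProcessors() - 2 worker threads, a value that drops to zero on a two-core host and below zero on a single core; using availableProcessors() directly always yields at least one thread. A quick illustration of the arithmetic:

final int cores = Runtime.getRuntime().availableProcessors(); // e.g. 2 in a small container
final int oldThreadCount = cores - 2;   // 0 when cores == 2, -1 when cores == 1
final int newThreadCount = cores;       // always >= 1
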
@@ -80,7 +80,7 @@ public class OmekaService {

private static final String CSV_COL_SEP = "\t";
private static final String CSV_REPEATED_FIELD_SEP = "|";
private static final String EMPTY_FIELD_VALUE = "Non renseigné";
private static final String EMPTY_FIELD_VALUE = "";
private static final String NEW_LINE_SEPARATOR = "\n";
private static final String JPG_EXT = ".jpg";
