Skip to content

Commit

Permalink
Added logs per row and when creating siard
Browse files Browse the repository at this point in the history
  • Loading branch information
AntonioG70 authored and hmiguim committed Feb 26, 2024
1 parent a0b18eb commit 13f6a5b
Show file tree
Hide file tree
Showing 3 changed files with 37 additions and 1 deletion.
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,14 @@
import com.databasepreservation.common.server.storage.ContentPayload;
import com.databasepreservation.common.server.storage.fs.FSPathContentPayload;
import org.roda.core.data.exceptions.GenericException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* @author Gabriel Barros <[email protected]>
*/
public class Browser {
private static final Logger LOGGER = LoggerFactory.getLogger(Browser.class);
public static void createFile(InputStream uploadedInputStream, String fileName, Path path)
throws AlreadyExistsException, GenericException {
try{
Expand All @@ -30,6 +33,7 @@ public static void createFile(InputStream uploadedInputStream, String fileName,
ContentPayload payload = new FSPathContentPayload(file);

payload.writeToPath(path);
LOGGER.info("Created file {} in {}", fileName, path);
} catch (FileAlreadyExistsException e){
System.out.println("File exist on path: " + Paths.get(path.toString(), fileName));
throw new AlreadyExistsException();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -241,4 +241,4 @@ public void createRowsCollection() {
}

}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@
import java.io.IOException;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.solr.client.solrj.SolrServerException;
import org.roda.core.data.exceptions.GenericException;
Expand Down Expand Up @@ -37,10 +39,14 @@
public class DbvtkExportModule implements DatabaseFilterModule {
private final DatabaseRowsSolrManager solrManager;
private CollectionStatus collectionConfiguration;
private final int rowThreshold = ViewerFactory.getEnvInt("ROW_PROGRESS_THRESHOLD", 500);
private int rowsProcessedByTableCounter = 0;
private int rowCounter = 0;
private ViewerDatabase retrieved;
private ViewerTable currentTable;
private String databaseUUID;
private long rowIndex = 1;
private static final Logger LOGGER = LoggerFactory.getLogger(DbvtkExportModule.class);

public DbvtkExportModule(String databaseUUID) {
solrManager = ViewerFactory.getSolrManager();
Expand All @@ -61,6 +67,7 @@ public DbvtkExportModule(String databaseUUID) {
*/
@Override
public void initDatabase() throws ModuleException {
LOGGER.info("Starting to process database {}", databaseUUID);
// setup is done when DBVTK starts
}

Expand Down Expand Up @@ -101,6 +108,7 @@ public void handleStructure(DatabaseStructure structure) throws ModuleException
*/
@Override
public void handleDataOpenSchema(String schemaName) throws ModuleException {
  // No per-schema setup is required on the viewer side; only log the event.
  LOGGER.info("Starting to process schema {}", schemaName);
}

Expand All @@ -116,8 +124,10 @@ public void handleDataOpenSchema(String schemaName) throws ModuleException {
*/
@Override
public void handleDataOpenTable(String tableId) throws ModuleException {
  // Resolve the table's metadata and register it with Solr, resetting the
  // per-table progress counter for the rows that will follow.
  currentTable = retrieved.getMetadata().getTableById(tableId);
  rowsProcessedByTableCounter = 0;
  solrManager.addTable(retrieved.getUuid(), currentTable);
  LOGGER.info("Processing table {}", tableId);
  // NOTE(review): rowIndex intentionally keeps counting across tables and is
  // not reset here — confirm Solr row ids rely on a database-wide index.
}

Expand All @@ -133,6 +143,25 @@ public void handleDataOpenTable(String tableId) throws ModuleException {
public void handleDataRow(Row row) throws ModuleException {
  // Persist the row into the viewer's Solr collection; rowIndex advances once
  // per row across the whole database.
  solrManager.addRow(retrieved.getUuid(),
    ToolkitStructure2ViewerStructure.getRow(collectionConfiguration, currentTable, row, rowIndex++, retrieved.getPath()));

  ++rowsProcessedByTableCounter;
  ++rowCounter;

  if (!shouldLogRowProgress()) {
    return;
  }
  // Emit a progress entry and restart the threshold counter.
  LOGGER.info("Processed {} rows of {} total", rowsProcessedByTableCounter, currentTable.getCountRows());
  rowCounter = 0;
}


/**
 * Decides whether a row-progress log entry should be emitted.
 *
 * <p>Progress is logged every {@code rowThreshold} rows (tracked by
 * {@code rowCounter}, which the caller resets after logging) and once more
 * when the final row of the current table has been processed, so tables
 * smaller than the threshold still produce a completion entry.
 *
 * @return {@code true} if the caller should log row progress now
 */
private boolean shouldLogRowProgress() {
  // The original middle clause (countRows <= threshold && processed == countRows)
  // was strictly subsumed by the end-of-table check and has been removed.
  return rowCounter == rowThreshold
    || rowsProcessedByTableCounter == currentTable.getCountRows();
}

/**
Expand All @@ -145,6 +174,7 @@ public void handleDataRow(Row row) throws ModuleException {
*/
@Override
public void handleDataCloseTable(String tableId) throws ModuleException {
  // Solr commit/optimize is deferred until the whole database is finished,
  // so closing a table only needs to be logged.
  LOGGER.info("Finished processing table {}", tableId);
}

Expand All @@ -162,6 +192,7 @@ public void handleDataCloseSchema(String schemaName) throws ModuleException {

try {
ViewerFactory.getSolrClient().commit(SolrRowsCollectionRegistry.get(databaseUUID).getIndexName());
LOGGER.info("Finished processing schema {}", schemaName);
} catch (SolrServerException | IOException e) {
e.printStackTrace();
}
Expand All @@ -178,6 +209,7 @@ public void finishDatabase() throws ModuleException {
solrManager.markDatabaseAsReady(databaseUUID);
collectionConfiguration.setConsolidateProperty(LargeObjectConsolidateProperty.NOT_CONSOLIDATED);
ViewerFactory.getConfigurationManager().updateCollectionStatus(databaseUUID, collectionConfiguration);
LOGGER.info("Finished processing database {}", databaseUUID);
}

@Override
Expand Down

0 comments on commit 13f6a5b

Please sign in to comment.