diff --git a/Rdmp.Core/CohortCommitting/Pipeline/Sources/AggregateConfigurationTableSource.cs b/Rdmp.Core/CohortCommitting/Pipeline/Sources/AggregateConfigurationTableSource.cs
index a92aa106c4..7b3eeb9538 100644
--- a/Rdmp.Core/CohortCommitting/Pipeline/Sources/AggregateConfigurationTableSource.cs
+++ b/Rdmp.Core/CohortCommitting/Pipeline/Sources/AggregateConfigurationTableSource.cs
@@ -83,7 +83,7 @@ private DataTable GetDataTable(int timeout, IDataLoadEventListener listener)
$"Connection opened, ready to send the following SQL (with Timeout {Timeout}s):{Environment.NewLine}{sql}"));
var dt = new DataTable();
-
+ dt.BeginLoadData();
using (var cmd = server.GetCommand(sql, con))
{
cmd.CommandTimeout = timeout;
@@ -96,6 +96,7 @@ private DataTable GetDataTable(int timeout, IDataLoadEventListener listener)
dt.TableName = TableName;
+ dt.EndLoadData();
listener?.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information,
$"successfully read {dt.Rows.Count} rows from source"));
diff --git a/Rdmp.Core/CohortCommitting/Pipeline/Sources/CohortIdentificationConfigurationSource.cs b/Rdmp.Core/CohortCommitting/Pipeline/Sources/CohortIdentificationConfigurationSource.cs
index 42ab66b66b..7cc5ca351b 100644
--- a/Rdmp.Core/CohortCommitting/Pipeline/Sources/CohortIdentificationConfigurationSource.cs
+++ b/Rdmp.Core/CohortCommitting/Pipeline/Sources/CohortIdentificationConfigurationSource.cs
@@ -125,11 +125,12 @@ private DataTable GetDataTable(IDataLoadEventListener listener)
throw new Exception(
"CohortIdentificationCriteria execution resulted in an empty dataset (there were no cohorts matched by the query?)");
- var dt = execution.Identifiers;
-
+ DataTable dt = execution.Identifiers;
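+ // suspend notifications while the identifier columns are made writable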
+ dt.BeginLoadData();
foreach (DataColumn column in dt.Columns)
column.ReadOnly = false;
+ dt.EndLoadData();
return dt;
}
diff --git a/Rdmp.Core/CohortCommitting/Pipeline/Sources/PatientIdentifierColumnSource.cs b/Rdmp.Core/CohortCommitting/Pipeline/Sources/PatientIdentifierColumnSource.cs
index 3dfa4ad159..de7dd6fe40 100644
--- a/Rdmp.Core/CohortCommitting/Pipeline/Sources/PatientIdentifierColumnSource.cs
+++ b/Rdmp.Core/CohortCommitting/Pipeline/Sources/PatientIdentifierColumnSource.cs
@@ -17,7 +17,7 @@
namespace Rdmp.Core.CohortCommitting.Pipeline.Sources;
/// <summary>
-/// Pipeline source component that generates a DataTable containing all the unique patient identifiers in the column referenced by the
+/// Pipeline source component that generates a DataTable containing all the unique patient identifiers in the column referenced by the
/// <see cref="ExtractionInformation"/>.
/// </summary>
public class PatientIdentifierColumnSource : IPluginDataFlowSource<DataTable>,
@@ -54,7 +54,8 @@ private DataTable GetDataTable(int timeout, int? topX)
var colName = _extractionInformation.GetRuntimeName();
- var dt = new DataTable();
+ DataTable dt = new DataTable();
+ dt.BeginLoadData();
dt.Columns.Add(colName);
using (var con = server.GetConnection())
@@ -72,7 +73,7 @@ private DataTable GetDataTable(int timeout, int? topX)
}
}
}
-
+ dt.EndLoadData();
return dt;
}
diff --git a/Rdmp.Core/Curation/Data/Aggregation/AggregateConfiguration.cs b/Rdmp.Core/Curation/Data/Aggregation/AggregateConfiguration.cs
index c561efae3d..5bf37181c7 100644
--- a/Rdmp.Core/Curation/Data/Aggregation/AggregateConfiguration.cs
+++ b/Rdmp.Core/Curation/Data/Aggregation/AggregateConfiguration.cs
@@ -29,17 +29,17 @@ namespace Rdmp.Core.Curation.Data.Aggregation;
/// Entry point for the aggregation system. This class describes what a given aggregation is supposed to achieve (e.g. summarise the number of records in a
/// dataset by region over time since 2001 to present). An AggregateConfiguration belongs to a given Catalogue and is the hanging-off point for the rest of
/// the configuration (e.g. AggregateDimension / AggregateFilter)
-///
+///
/// AggregateConfigurations can be used with an AggregateBuilder to produce runnable SQL which will return a DataTable containing results appropriate to the
/// query being built.
-///
+///
/// There are three types of AggregateConfiguration (these are configurations - not separate classes):
/// 1. 'Aggregate Graph' - Produce summary information about a dataset designed to be displayed in a graph e.g. number of records each year by healthboard
/// 2. 'Cohort Aggregate' - Produce a list of unique patient identifiers from a dataset (e.g. 'all patients with HBA1c test code > 50 in biochemistry')
/// 3. 'Joinable PatientIndex Table' - Produce a patient identifier fact table for joining to other Cohort Aggregates during cohort building (See JoinableCohortAggregateConfiguration)
/// The above labels are informal terms. Use IsCohortIdentificationAggregate and IsJoinablePatientIndexTable to determine what type a given
/// AggregateConfiguration is.
-///
+///
/// If your Aggregate is part of cohort identification (Identifier List or Patient Index Table) then its name will start with cic_X_ where X is the ID of the cohort identification
/// configuration. Depending on the user interface though this might not appear (See ToString implementation).
///
@@ -113,7 +113,7 @@ public DateTime dtCreated
///
/// Indicates the AggregateDimension (if any) that will result in a pivot graph being generated. E.g. if your AggregateConfiguration is a graph of records by year between
/// 2001 and 2018 then specifying a pivot on healthboard would result in 1 line in the graph per healthboard instead of a single line for the count of all (the default).
- ///
+ ///
/// If an AggregateConfiguration is a Cohort or Patient index table then it cannot have a Pivot
///
public int? PivotOnDimensionID
@@ -227,9 +227,13 @@ public static void AdjustGraphDataTable(DataTable dt)
if (dt.Rows.Count == 0) return;
if (!UserSettings.IncludeZeroSeriesInGraphs)
+ {
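+ // suspend notifications while all-zero series columns are removed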
+ dt.BeginLoadData();
foreach (var col in dt.Columns.Cast<DataColumn>().ToArray())
if (dt.Rows.Cast<DataRow>().All(r => IsBasicallyZero(r[col.ColumnName])))
dt.Columns.Remove(col);
+ dt.EndLoadData();
+ }
}
private static bool IsBasicallyZero(object v) => v == null || v == DBNull.Value ||
@@ -257,7 +261,7 @@ private static bool IsBasicallyZero(object v) => v == null || v == DBNull.Value
/// When an AggregateConfiguration is used in a cohort identification capacity it can have one or more 'patient index tables' defined e.g.
/// 'Give me all prescriptions for morphine' (Prescribing) 'within 6 months of patient being discharged from hospital' (SMR01). In this case
/// a join is done against the secondary dataset.
- ///
+ ///
/// This property returns all such 'patient index table' AggregateConfigurations which are currently being used by this AggregateConfiguration
/// for building its join.
///
@@ -304,7 +308,7 @@ public AggregateConfiguration()
///
/// Only relevant for AggregateConfigurations that are being used in a cohort identification capacity (See ).
- ///
+ ///
/// The order location of an AggregateConfiguration within its parent (if it has one). This is mostly irrelevant for UNION /
/// INTERSECT operations (other than helping the user viewing the system) but is vital for EXCEPT containers where the first AggregateConfiguration in the container is
/// run producing a dataset and all subsequent AggregateConfigurations are then removed from that patient set.
diff --git a/Rdmp.Core/DataExport/Data/ExtractableCohort.cs b/Rdmp.Core/DataExport/Data/ExtractableCohort.cs
index e082e82a88..46eb9614c6 100644
--- a/Rdmp.Core/DataExport/Data/ExtractableCohort.cs
+++ b/Rdmp.Core/DataExport/Data/ExtractableCohort.cs
@@ -191,13 +191,13 @@ public IExternalCohortDefinitionData GetExternalData(int timeoutInSeconds = -1)
var syntax = db.Server.GetQuerySyntaxHelper();
var sql =
- $@"Select
+ $@"Select
{syntax.EnsureWrapped("projectNumber")},
{syntax.EnsureWrapped("description")},
{syntax.EnsureWrapped("version")},
{syntax.EnsureWrapped("dtCreated")}
-from {ExternalCohortTable.DefinitionTableName}
-where
+from {ExternalCohortTable.DefinitionTableName}
+where
{syntax.EnsureWrapped("id")} = {OriginID}";
if (timeoutInSeconds != -1) db.Server.TestConnection(timeoutInSeconds * 1000);
@@ -227,7 +227,7 @@ public IExternalCohortDefinitionData GetExternalData(int timeoutInSeconds = -1)
private int _originID;
///
- /// Creates a new cohort reference in the data export database. This must resolve (via ) to
+ /// Creates a new cohort reference in the data export database. This must resolve (via ) to
/// a row in the external cohort database ().
///
///
@@ -288,11 +288,12 @@ public DataTable FetchEntireCohort()
var sql = $"SELECT DISTINCT * FROM {cohortTable.GetFullyQualifiedName()} WHERE {WhereSQL()}";
var da = cohortTable.Database.Server.GetDataAdapter(sql, con);
- var dtReturn = new DataTable();
+ DataTable dtReturn = new DataTable();
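+ // suspend constraints and index maintenance while the adapter fills the full cohort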
+ dtReturn.BeginLoadData();
da.Fill(dtReturn);
dtReturn.TableName = cohortTable.GetRuntimeName();
-
+ dtReturn.EndLoadData();
return dtReturn;
}
}
@@ -398,13 +399,13 @@ public static DataTable GetImportableCohortDefinitionsTable(ExternalCohortTable
{
con.Open();
var sql =
- $@"Select
+ $@"Select
{syntax.EnsureWrapped("description")},
{syntax.EnsureWrapped("id")},
{syntax.EnsureWrapped("version")},
{syntax.EnsureWrapped("projectNumber")}
-from {externalSource.DefinitionTableName}
-where
+from {externalSource.DefinitionTableName}
+where
exists (Select 1 from {externalSource.TableName} WHERE {externalSource.DefinitionTableForeignKeyField}=id)";
using (var da = server.GetDataAdapter(sql, con))
@@ -414,8 +415,10 @@ public static DataTable GetImportableCohortDefinitionsTable(ExternalCohortTable
versionMemberName = "version";
projectNumberMemberName = "projectNumber";
- var toReturn = new DataTable();
+ DataTable toReturn = new DataTable();
+ toReturn.BeginLoadData();
da.Fill(toReturn);
+ toReturn.EndLoadData();
return toReturn;
}
}
@@ -535,6 +538,7 @@ public void ReverseAnonymiseDataTable(DataTable toProcess, IDataLoadEventListene
sw2.Start();
//fix values
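+ // suspend change notifications while identifiers are substituted in place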
+ toProcess.BeginLoadData();
foreach (DataRow row in toProcess.Rows)
try
{
@@ -580,6 +584,8 @@ public void ReverseAnonymiseDataTable(DataTable toProcess, IDataLoadEventListene
$"Substituted {substitutions} release identifiers for private identifiers in input data table (input data table contained {toProcess.Rows.Count} rows)"));
toProcess.Columns[releaseIdentifier].ColumnName = privateIdentifier;
+
+ toProcess.EndLoadData();
}
///
diff --git a/Rdmp.Core/DataExport/DataExtraction/ExtractionTimeValidator.cs b/Rdmp.Core/DataExport/DataExtraction/ExtractionTimeValidator.cs
index 32c3c9a428..ff8fb3ba59 100644
--- a/Rdmp.Core/DataExport/DataExtraction/ExtractionTimeValidator.cs
+++ b/Rdmp.Core/DataExport/DataExtraction/ExtractionTimeValidator.cs
@@ -18,10 +18,10 @@ namespace Rdmp.Core.DataExport.DataExtraction;
///
/// Applies Catalogue.ValidationXML to rows extracted during a Data Extraction Pipeline (See ExecuteDatasetExtractionSource). Because the columns which
/// are extracted can be a subset of the columns in the Catalogue and can include transforms the validation rules have to be adjusted (some are not applied).
-///
+///
/// A count of the number of rows failing validation is stored in VerboseValidationResults (divided by column) and is available for writing to the word
/// metadata document that accompanies the extracted records (See WordDataWriter).
-///
+///
/// This is similar to CatalogueConstraintReport (DQE) but is applied to a researcher's extract instead of the Catalogue as a whole.
///
public class ExtractionTimeValidator
@@ -53,7 +53,7 @@ public void Validate(DataTable dt, string validationColumnToPopulateIfAny)
{
if (!_initialized)
Initialize(dt);
-
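+ // suspend notifications while rows are validated and any consequence column is populated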
+ dt.BeginLoadData();
foreach (DataRow r in dt.Rows)
{
//additive validation results, Results is a class that wraps DictionaryOfFailure which is an array of columns and each element is another array of consequences (with a row count for each consequence)
@@ -64,6 +64,7 @@ public void Validate(DataTable dt, string validationColumnToPopulateIfAny)
if (validationColumnToPopulateIfAny != null)
r[validationColumnToPopulateIfAny] = consequenceOnLastRowProcessed;
}
+ dt.EndLoadData();
}
private void Initialize(DataTable dt)
diff --git a/Rdmp.Core/DataExport/DataExtraction/Pipeline/Sources/ExecuteDatasetExtractionSource.cs b/Rdmp.Core/DataExport/DataExtraction/Pipeline/Sources/ExecuteDatasetExtractionSource.cs
index 3657c35e63..3712f9998a 100644
--- a/Rdmp.Core/DataExport/DataExtraction/Pipeline/Sources/ExecuteDatasetExtractionSource.cs
+++ b/Rdmp.Core/DataExport/DataExtraction/Pipeline/Sources/ExecuteDatasetExtractionSource.cs
@@ -555,7 +555,9 @@ public virtual DataTable TryGetPreview()
var da = server.GetDataAdapter(Request.QueryBuilder.SQL, con);
//get up to 1000 records
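+ // suspend constraints while the preview rows are loaded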
+ toReturn.BeginLoadData();
da.Fill(0, 1000, toReturn);
+ toReturn.EndLoadData();
con.Close();
}
diff --git a/Rdmp.Core/DataExport/DataExtraction/Pipeline/Sources/RowPeeker.cs b/Rdmp.Core/DataExport/DataExtraction/Pipeline/Sources/RowPeeker.cs
index 3a897fad35..059fc64702 100644
--- a/Rdmp.Core/DataExport/DataExtraction/Pipeline/Sources/RowPeeker.cs
+++ b/Rdmp.Core/DataExport/DataExtraction/Pipeline/Sources/RowPeeker.cs
@@ -74,10 +74,12 @@ public void AddWhile(IDbDataCommandDataFlowSource source, Func eq
DataRow r;
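+ // suspend index maintenance while matching rows are imported into the chunk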
+ chunk.BeginLoadData();
//while we are still successfully reading rows and those rows have the same release id
while ((r = source.ReadOneRow()) != null)
+ {
if (equalityFunc(r))
- //add it to the current chunk
+ //add it to the current chunk
{
chunk.ImportRow(r);
}
@@ -87,5 +89,7 @@ public void AddWhile(IDbDataCommandDataFlowSource source, Func eq
_peekedRecord = r;
break;
}
+ }
+ chunk.EndLoadData();
}
}
\ No newline at end of file
diff --git a/Rdmp.Core/DataExport/DataExtraction/UserPicks/BundledLookupTable.cs b/Rdmp.Core/DataExport/DataExtraction/UserPicks/BundledLookupTable.cs
index 9f6fc8cbb5..be1d54a828 100644
--- a/Rdmp.Core/DataExport/DataExtraction/UserPicks/BundledLookupTable.cs
+++ b/Rdmp.Core/DataExport/DataExtraction/UserPicks/BundledLookupTable.cs
@@ -40,7 +40,7 @@ public DataTable GetDataTable()
var tbl = TableInfo.Discover(DataAccessContext.DataExport);
var server = tbl.Database.Server;
- var dt = new DataTable();
+ DataTable dt = new DataTable();
using (var con = server.GetConnection())
{
@@ -48,7 +48,9 @@ public DataTable GetDataTable()
using (var da = server.GetDataAdapter(
server.GetCommand(GetDataTableFetchSql(), con)))
{
+ dt.BeginLoadData();
da.Fill(dt);
+ dt.EndLoadData();
}
}
diff --git a/Rdmp.Core/DataLoad/Engine/Pipeline/Components/CleanStrings.cs b/Rdmp.Core/DataLoad/Engine/Pipeline/Components/CleanStrings.cs
index 6808f81680..bcbb1e3b70 100644
--- a/Rdmp.Core/DataLoad/Engine/Pipeline/Components/CleanStrings.cs
+++ b/Rdmp.Core/DataLoad/Engine/Pipeline/Components/CleanStrings.cs
@@ -31,8 +31,8 @@ public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener
GracefulCancellationToken cancellationToken)
{
timer.Start();
-
- StartAgain:
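+ // suspend notifications while string values are cleaned in place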
+ toProcess.BeginLoadData();
+ StartAgain:
foreach (DataRow row in toProcess.Rows)
{
for (var i = 0; i < columnsToClean.Count; i++)
@@ -85,7 +85,7 @@ public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener
job.OnProgress(this,
new ProgressEventArgs(_taskDescription, new ProgressMeasurement(_rowsProcessed, ProgressType.Records),
timer.Elapsed));
-
+ toProcess.EndLoadData();
return toProcess;
}
diff --git a/Rdmp.Core/DataLoad/Engine/Pipeline/Sources/DbDataCommandDataFlowSource.cs b/Rdmp.Core/DataLoad/Engine/Pipeline/Sources/DbDataCommandDataFlowSource.cs
index 755e94ddd5..6c4c0939ab 100644
--- a/Rdmp.Core/DataLoad/Engine/Pipeline/Sources/DbDataCommandDataFlowSource.cs
+++ b/Rdmp.Core/DataLoad/Engine/Pipeline/Sources/DbDataCommandDataFlowSource.cs
@@ -75,8 +75,8 @@ public DataTable GetChunk(IDataLoadEventListener job, GracefulCancellationToken
timer.Start();
try
{
- var chunk = GetChunkSchema(_reader);
-
+ DataTable chunk = GetChunkSchema(_reader);
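+ // suspend notifications and index maintenance while the batch is read from the data reader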
+ chunk.BeginLoadData();
while (_reader.HasRows && _reader.Read())
{
cancellationToken.ThrowIfCancellationRequested();
@@ -86,8 +86,12 @@ public DataTable GetChunk(IDataLoadEventListener job, GracefulCancellationToken
//we reached batch limit
if (readThisBatch == BatchSize)
+ {
+ chunk.EndLoadData();
return chunk;
+ }
}
+ chunk.EndLoadData();
//if data was read
if (readThisBatch > 0)
diff --git a/Rdmp.Core/DataLoad/Modules/Attachers/FixedWidthFormatFile.cs b/Rdmp.Core/DataLoad/Modules/Attachers/FixedWidthFormatFile.cs
index ce3d1ddc8d..309622c356 100644
--- a/Rdmp.Core/DataLoad/Modules/Attachers/FixedWidthFormatFile.cs
+++ b/Rdmp.Core/DataLoad/Modules/Attachers/FixedWidthFormatFile.cs
@@ -40,7 +40,7 @@ public FixedWidthFormatFile(FileInfo pathToFormatFile)
//now add values
for (var index = 0; index < readAllLines.Length - 1; index++)
{
- //skip header line
+ //skip header line
var cellsOnRowAsSplitString = readAllLines[index + 1].Split(',');
FormatColumns[index].From = int.Parse(cellsOnRowAsSplitString[0]);
@@ -53,7 +53,7 @@ public FixedWidthFormatFile(FileInfo pathToFormatFile)
FormatColumns[index].DateFormat =
cellsOnRowAsSplitString[4]
.Replace("ccyy",
- "yyyy"); //some people think that ccyy is a valid way of expressing year formats... they are wrong
+ "yyyy"); //some people think that ccyy is a valid way of expressing year formats... they are wrong
if (FormatColumns[index].From + FormatColumns[index].Size - 1 != FormatColumns[index].To)
throw new FlatFileLoadException(
@@ -77,9 +77,9 @@ public FixedWidthFormatFile(FileInfo pathToFormatFile)
public DataTable GetDataTableFromFlatFile(FileInfo f)
{
//setup the table
- var toReturn = new DataTable();
-
+ DataTable toReturn = new DataTable();
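+ // suspend constraints while fixed width records are parsed into the table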
+ toReturn.BeginLoadData();
foreach (var fixedWidthColumn in FormatColumns)
{
var dataColumn = toReturn.Columns.Add(fixedWidthColumn.Field);
@@ -115,7 +115,7 @@ public DataTable GetDataTableFromFlatFile(FileInfo f)
if (string.IsNullOrWhiteSpace(value))
dataRow[fixedWidthColumn.Field] = DBNull.Value;
else
- //it is a date column
+ //it is a date column
if (!string.IsNullOrWhiteSpace(fixedWidthColumn.DateFormat))
try
{
@@ -132,6 +132,7 @@ public DataTable GetDataTableFromFlatFile(FileInfo f)
}
}
+ toReturn.EndLoadData();
return toReturn;
}
diff --git a/Rdmp.Core/DataLoad/Modules/Attachers/KVPAttacher.cs b/Rdmp.Core/DataLoad/Modules/Attachers/KVPAttacher.cs
index 15a0753f7f..962cd276e1 100644
--- a/Rdmp.Core/DataLoad/Modules/Attachers/KVPAttacher.cs
+++ b/Rdmp.Core/DataLoad/Modules/Attachers/KVPAttacher.cs
@@ -23,9 +23,9 @@ namespace Rdmp.Core.DataLoad.Modules.Attachers;
///
/// Data load component for loading very wide files into RAW tables by translating columns into key value pairs. Relies on a user configured pipeline for
/// reading from the file (so it can support csv, fixed width, excel etc). Once the user configured pipeline has read a DataTable from the file (which is
-/// expected to have lots of columns which might be sparsely populated or otherwise suitable for key value pair representation rather than traditional
+/// expected to have lots of columns which might be sparsely populated or otherwise suitable for key value pair representation rather than traditional
/// relational/flat format).
-///
+///
/// Component converts each DataTable row into one or more rows in the format pk,key,value where pk are the column(s) which uniquely identify the source
/// row (e.g. Labnumber). See KVPAttacher.docx for a full explanation.
///
@@ -113,7 +113,7 @@ protected override int IterativelyBatchLoadDataIntoDataTable(DataTable dt, int m
var currentBatch = BatchesReadyForProcessing[0];
var recordsGenerated = 0;
-
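+ // suspend notifications while key/value pair rows are generated for this batch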
+ dt.BeginLoadData();
foreach (DataRow batchRow in currentBatch.Rows)
{
var pkValues = new Dictionary();
@@ -138,7 +138,7 @@ protected override int IterativelyBatchLoadDataIntoDataTable(DataTable dt, int m
recordsGenerated++;
}
}
-
+ dt.EndLoadData();
BatchesReadyForProcessing.Remove(currentBatch);
return recordsGenerated;
diff --git a/Rdmp.Core/DataLoad/Modules/DataFlowOperations/CohortSampler.cs b/Rdmp.Core/DataLoad/Modules/DataFlowOperations/CohortSampler.cs
index f08fb7de23..edca0b3ce1 100644
--- a/Rdmp.Core/DataLoad/Modules/DataFlowOperations/CohortSampler.cs
+++ b/Rdmp.Core/DataLoad/Modules/DataFlowOperations/CohortSampler.cs
@@ -107,11 +107,13 @@ public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener
throw new Exception(
$"Cohort only contains {chosen.Count} unique identifiers. This is less than the requested sample size of {SampleSize} and {nameof(FailIfNotEnoughIdentifiers)} is true");
- var dtToReturn = new DataTable();
+ DataTable dtToReturn = new DataTable();
+ dtToReturn.BeginLoadData();
dtToReturn.Columns.Add(expectedFieldName);
foreach (var val in chosen) dtToReturn.Rows.Add(val);
+ dtToReturn.EndLoadData();
return dtToReturn;
}
diff --git a/Rdmp.Core/DataLoad/Modules/DataFlowOperations/Transposer.cs b/Rdmp.Core/DataLoad/Modules/DataFlowOperations/Transposer.cs
index 80458c62fb..b2318882e2 100644
--- a/Rdmp.Core/DataLoad/Modules/DataFlowOperations/Transposer.cs
+++ b/Rdmp.Core/DataLoad/Modules/DataFlowOperations/Transposer.cs
@@ -17,9 +17,9 @@ namespace Rdmp.Core.DataLoad.Modules.DataFlowOperations;
///
/// Pipeline component which rotates DataTables flowing through it by 90 degrees such that the first column becomes the new headers. Only use this if you have
-/// been given a file in which proper headers are vertical down the first column and records are subsequent columns (i.e. adding new records results in the
+/// been given a file in which proper headers are vertical down the first column and records are subsequent columns (i.e. adding new records results in the
/// DataTable growing horizontally).
-///
+///
/// IMPORTANT: Only works with a single load batch; if you have a chunked pipeline you cannot use this component unless you set the chunk size large enough
/// to read the entire file in one go
///
@@ -65,8 +65,8 @@ public void Check(ICheckNotifier notifier)
private DataTable GenerateTransposedTable(DataTable inputTable)
{
- var outputTable = new DataTable();
-
+ DataTable outputTable = new DataTable();
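+ // suspend notifications while the transposed columns and rows are built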
+ outputTable.BeginLoadData();
// Add columns by looping rows
// Header row's first column is same as in inputTable
@@ -83,7 +83,7 @@ private DataTable GenerateTransposedTable(DataTable inputTable)
outputTable.Columns.Add(newColName);
}
- // Add rows by looping columns
+ // Add rows by looping columns
for (var rCount = 1; rCount <= inputTable.Columns.Count - 1; rCount++)
{
var newRow = outputTable.NewRow();
@@ -98,7 +98,7 @@ private DataTable GenerateTransposedTable(DataTable inputTable)
outputTable.Rows.Add(newRow);
}
-
+ outputTable.EndLoadData();
return outputTable;
}
}
\ No newline at end of file
diff --git a/Rdmp.Core/DataLoad/Modules/DataFlowSources/ExcelDataFlowSource.cs b/Rdmp.Core/DataLoad/Modules/DataFlowSources/ExcelDataFlowSource.cs
index 98e5c3c912..58e2a6cb05 100644
--- a/Rdmp.Core/DataLoad/Modules/DataFlowSources/ExcelDataFlowSource.cs
+++ b/Rdmp.Core/DataLoad/Modules/DataFlowSources/ExcelDataFlowSource.cs
@@ -83,7 +83,6 @@ private DataTable GetAllData(IDataLoadEventListener listener, GracefulCancellati
wb = new XSSFWorkbook(fs);
DataTable toReturn;
-
try
{
var worksheet =
@@ -113,7 +112,6 @@ private DataTable GetAllData(IDataLoadEventListener listener, GracefulCancellati
{
wb.Close();
}
-
return toReturn;
}
}
@@ -127,7 +125,7 @@ private DataTable GetAllData(IDataLoadEventListener listener, GracefulCancellati
public DataTable GetAllData(ISheet worksheet, IDataLoadEventListener listener)
{
var toReturn = new DataTable();
-
+ toReturn.BeginLoadData();
var rowEnumerator = worksheet.GetRowEnumerator();
var nColumns = -1;
@@ -191,7 +189,7 @@ public DataTable GetAllData(ISheet worksheet, IDataLoadEventListener listener)
if (!gotAtLeastOneGoodValue)
toReturn.Rows.Remove(r);
}
-
+ toReturn.EndLoadData();
return toReturn;
}
diff --git a/Rdmp.Core/DataLoad/Modules/DataFlowSources/SubComponents/FlatFileToDataTablePusher.cs b/Rdmp.Core/DataLoad/Modules/DataFlowSources/SubComponents/FlatFileToDataTablePusher.cs
index 4665ffa1ef..f1547756e3 100644
--- a/Rdmp.Core/DataLoad/Modules/DataFlowSources/SubComponents/FlatFileToDataTablePusher.cs
+++ b/Rdmp.Core/DataLoad/Modules/DataFlowSources/SubComponents/FlatFileToDataTablePusher.cs
@@ -278,7 +278,7 @@ public DataTable StronglyTypeTable(DataTable workingTable, ExplicitTypingCollect
factory.Settings.ExplicitDateFormats = new[] { _explicitDateTimeFormat };
var dtCloned = workingTable.Clone();
-
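+ // suspend constraints while rows are copied into the strongly typed clone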
+ dtCloned.BeginLoadData();
var typeChangeNeeded = false;
foreach (DataColumn col in workingTable.Columns)
@@ -311,7 +311,7 @@ public DataTable StronglyTypeTable(DataTable workingTable, ExplicitTypingCollect
foreach (DataRow row in workingTable.Rows)
dtCloned.Rows.Add(row.ItemArray.Select((v, idx) =>
deciders.TryGetValue(idx, out var decider) && v is string s ? decider.Parse(s) : v).ToArray());
-
+ dtCloned.EndLoadData();
return dtCloned;
}
diff --git a/Rdmp.Core/DataLoad/Triggers/DiffDatabaseDataFetcher.cs b/Rdmp.Core/DataLoad/Triggers/DiffDatabaseDataFetcher.cs
index 9716a266d7..8062b32113 100644
--- a/Rdmp.Core/DataLoad/Triggers/DiffDatabaseDataFetcher.cs
+++ b/Rdmp.Core/DataLoad/Triggers/DiffDatabaseDataFetcher.cs
@@ -192,12 +192,12 @@ private void GetUpdatetData(DiscoveredServer server, DiscoveredDatabase database
SELECT top {{0}}
{{6}},
{{7}}
-FROM {{1}}
+FROM {{1}}
CROSS APPLY
(
SELECT TOP 1 {{2}}.*
FROM {{2}}
- WHERE
+ WHERE
{{3}}
order by {syntaxHelper.EnsureWrapped(SpecialFieldNames.ValidFrom)} desc
) {{8}}
@@ -215,7 +215,7 @@ SELECT TOP 1 {{2}}.*
SELECT
{{6}},
{{7}}
-FROM
+FROM
{{1}}
Join
{{2}} {{8}} on {whereStatement.Replace(archiveTableName, archive)}
@@ -319,7 +319,9 @@ private void FillTableWithQueryIfUserConsents(DataTable dt, string sql, ICheckNo
cmd.CommandTimeout = _timeout;
using (var da = server.GetDataAdapter(cmd))
{
+ dt.BeginLoadData();
da.Fill(dt);
+ dt.EndLoadData();
}
}
}
diff --git a/Rdmp.Core/Logging/LogManager.cs b/Rdmp.Core/Logging/LogManager.cs
index 9fd25d453b..c48c4ed84b 100644
--- a/Rdmp.Core/Logging/LogManager.cs
+++ b/Rdmp.Core/Logging/LogManager.cs
@@ -23,12 +23,12 @@ namespace Rdmp.Core.Logging;
///
/// Entry point for the RDMP relational logging database. This class must be pointed at an existing logging database with the correct schema (Defined
/// in HIC.Logging.Database - See DatabaseCreation.exe for how to do this). See Logging.cd for the full hierarchy of concepts.
-///
+///
/// You can both create new logging records and fetch old ones. New logging objects are generally maintained for future use e.g. when you want to record
/// that a new table is being loaded during a given load (DataLoadInfo) you must pass the load log object (DataLoadInfo). Live logging objects generally
/// must be closed to indicate that they are completed (successfully or otherwise); if you do not close a logging object then the EndTime will be left
/// blank and it will be unclear if a process blue screened or if it all went fine (other than the ongoing accumulation of log events, errors etc).
-///
+///
/// Fetching old records is done based on ID, Task Name etc and is also handled by this class. The objects returned will be ArchivalDataLoadInfo objects
/// which are immutable and include the full hierarchy of sub concepts (errors, progress messages, which tables were loaded with how many records etc -
/// See Logging.cd).
@@ -109,7 +109,7 @@ public DataTable GetTable(LogViewerFilter filter, int? topX, bool sortDesc)
private DataTable GetAsTable(string sql)
{
- var dt = new DataTable();
+ DataTable dt = new DataTable();
using (var con = Server.GetConnection())
{
@@ -118,7 +118,9 @@ private DataTable GetAsTable(string sql)
using (var cmd = Server.GetCommand(sql, con))
using (var da = Server.GetDataAdapter(cmd))
{
+ dt.BeginLoadData();
da.Fill(dt);
+ dt.EndLoadData();
}
return dt;
diff --git a/Rdmp.Core/Reports/MetadataReport.cs b/Rdmp.Core/Reports/MetadataReport.cs
index 6b9f261a19..aa15f822a0 100644
--- a/Rdmp.Core/Reports/MetadataReport.cs
+++ b/Rdmp.Core/Reports/MetadataReport.cs
@@ -267,8 +267,9 @@ private static DataTable GetLookupTableInfoContentsFromDatabase(TableInfo lookup
using (var da = DatabaseCommandHelper.GetDataAdapter(cmd))
{
var dt = new DataTable();
+ dt.BeginLoadData();
da.Fill(dt);
-
+ dt.EndLoadData();
return dt;
}
}