Commit

#356 update deprecated mda scala reference
cwoods-cpointe committed Sep 25, 2024
1 parent e4ac6a3 commit 084d813
Showing 6 changed files with 11 additions and 11 deletions.
build-parent/pom.xml: 2 changes (0 additions, 2 deletions)
@@ -350,8 +350,6 @@
<testRelease>${maven.compiler.target}</testRelease>
<compilerArgs>
<arg>-Xlint:unchecked</arg>
-<!-- <arg>&#45;&#45;add-exports</arg>-->
-<!-- <arg>java.base/sun.nio.ch=ALL-UNNAMED</arg>-->
</compilerArgs>
</configuration>
</plugin>
@@ -47,7 +47,7 @@ public Map<String, String> getProperties() {
Map<String, String> map = new HashMap<>();
for (String name : pipelineProperties.stringPropertyNames()) {
String reactiveName = mapToReactiveProperty(name);
-String value = pipelineProperties.getProperty(reactiveName);
+String value = pipelineProperties.getProperty(name);
map.put(reactiveName, value);
}
return map;
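The change above fixes a lookup bug: each value must be read from the source Properties under its original key, while the mapped reactive name is only used as the key in the returned map. Fetching by the mapped name typically returns null, since that key does not exist in the source Properties. A minimal, self-contained sketch of the corrected loop follows; the mapToReactiveProperty rule and the sample property are illustrative assumptions, not code from this repository.

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class ReactivePropertyMapperSketch {

    // Hypothetical mapping rule: prefix raw pipeline keys with a reactive-messaging namespace.
    private static String mapToReactiveProperty(String name) {
        return "mp.messaging." + name;
    }

    public static Map<String, String> toReactiveProperties(Properties pipelineProperties) {
        Map<String, String> map = new HashMap<>();
        for (String name : pipelineProperties.stringPropertyNames()) {
            String reactiveName = mapToReactiveProperty(name);
            // Fetch by the original name; the mapped name is only the key of the result map.
            String value = pipelineProperties.getProperty(name);
            map.put(reactiveName, value);
        }
        return map;
    }

    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("OverrideTestStep.in.connector", "smallrye-kafka");
        // Prints {mp.messaging.OverrideTestStep.in.connector=smallrye-kafka}
        System.out.println(toReactiveProperties(props));
    }
}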
@@ -18,3 +18,4 @@ NoChannelTestStep.in.connector=smallrye-kafka
# these are overridden to in-memory in test/pipeline-messaging.properties
OverrideTestStep.in.connector=smallrye-kafka
OverrideTestStep.out.connector=smallrye-kafka

@@ -9,7 +9,7 @@ Feature: Pipeline messaging steps can be configured
When messages flow from the external system
Then the configuration directs the messaging to the step
And the configuration directs the step result to the external system
-
+#
Scenario: I can configure a step with only incoming messaging
Given a step with incoming messaging
And a pipeline configuration for the step
@@ -35,4 +35,4 @@ Feature: Pipeline messaging steps can be configured
And a pipeline configuration for the step
When messages flow from the external system
Then the configuration directs the messaging to the step
-And the configuration directs the step result to the external system
+And the configuration directs the step result to the external system
@@ -1,8 +1,8 @@
@data-delivery @object-store-validation
-Feature: Object store credentials can be verified for read and write access
-I want to use configured credentials to connect to an object store and
+Feature: Object store credentials can be verified for read and write access
+I want to use configured credentials to connect to an object store and
verify read and write access

Scenario: I can retrieve credentials from a Krausening file
Given a properties file exists
-Then the credentials are used to verify object store connectivity
+Then the credentials are used to verify object store connectivity
@@ -13,7 +13,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.spark.sql.Column;
-import scala.collection.JavaConversions;
+import scala.collection.JavaConverters;
import scala.collection.Seq;

import com.boozallen.aiops.data.delivery.spark.SparkSchema;
@@ -122,7 +122,7 @@ public abstract class ${record.capitalizedName}SchemaBase extends SparkSchema {
#end ;

Column filterSchema = null;
-List<String> validationColumns = new ArrayList();
+List<String> validationColumns = new ArrayList<>();
Collections.addAll(validationColumns, dataWithValidations.columns());
validationColumns.removeAll(Arrays.asList(data.columns()));
for (String columnName : validationColumns) {
@@ -139,7 +139,8 @@ public abstract class ${record.capitalizedName}SchemaBase extends SparkSchema {
}

// Remove validation columns from valid data
-Seq<String> columnsToDrop = JavaConversions.asScalaBuffer(validationColumns).toSeq();
+Seq<String> columnsToDrop =
+    JavaConverters.collectionAsScalaIterableConverter(validationColumns).asScala().toSeq();
validData = validData.drop(columnsToDrop);

return validData;
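For context on the change above: scala.collection.JavaConversions, which provided implicit conversions such as asScalaBuffer, was deprecated in Scala 2.12 and removed in Scala 2.13, while scala.collection.JavaConverters offers explicit converter objects and remains available (itself superseded by scala.jdk.CollectionConverters in 2.13). Below is a standalone sketch of the converter call used in the updated template, assuming a Scala 2.12 JavaConverters on the classpath and illustrative column names.

import java.util.Arrays;
import java.util.List;

import scala.collection.JavaConverters;
import scala.collection.Seq;

public class ScalaSeqConversionSketch {

    public static void main(String[] args) {
        // Hypothetical validation columns appended during Spark schema validation.
        List<String> validationColumns = Arrays.asList("colA_IS_VALID", "colB_IS_VALID");

        // Deprecated style (no longer compiles against Scala 2.13):
        // Seq<String> columnsToDrop = JavaConversions.asScalaBuffer(validationColumns).toSeq();

        // Explicit converter style, mirroring the updated template line:
        Seq<String> columnsToDrop =
                JavaConverters.collectionAsScalaIterableConverter(validationColumns).asScala().toSeq();

        System.out.println(columnsToDrop);
    }
}

The resulting Seq satisfies Spark's Scala varargs drop(colNames: String*) overload, which is why the generated Java code needs an explicit conversion at all.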
