Skip to content

Commit

Permalink
[2.29.x] Forward port transforms exports 2.26 (#6809)
Browse files Browse the repository at this point in the history
Forward Ports

Provide better transform failure message 390db3f
#6771

DDF-6386 Add support for source id and metacard type for csv metacard transforms
367e426
#6387

Adds Gmd QueryResponseTransformer
89877a4
#6781

Use UTC dates when exporting metacards in CSV format
27ed601
#6501

Fix CSV transformer output when no columnOrder given
fef3cb1
#6653

Updated the CsvTransformer to remove attributes that have empty or null values
fbb2b4d
#6738

Xlsx column filtering
0335485
#6747

Updated RTF transformer formatting; it now omits null attributes from the output
64f8e5d
#6744

Fix NPE in the RTF transformer
41fe113
#6750

Updated CSV and XLSX transformers to maintain the order specified in the columnOrder argument
506791d
#6757

Dynamic rtf
83bcf56
#6762

Fix multi-value exports for RTF
dd7bc91
#6767

---------

Co-authored-by: derekwilhelm <[email protected]>
  • Loading branch information
malmgrens4 and derekwilhelm authored Nov 7, 2024
1 parent 788d818 commit 2f68b0b
Show file tree
Hide file tree
Showing 34 changed files with 1,774 additions and 1,808 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@
import ddf.catalog.operation.impl.UpdateRequestImpl;
import ddf.catalog.plugin.OAuthPluginException;
import ddf.catalog.resource.DataUsageLimitExceededException;
import ddf.catalog.resource.ResourceNotFoundException;
import ddf.catalog.source.IngestException;
import ddf.catalog.source.InternalIngestException;
import ddf.catalog.source.SourceUnavailableException;
Expand Down Expand Up @@ -263,6 +264,12 @@ public BinaryContent getHeaders(
throw new InternalServerErrorException(exceptionMessage);
} catch (CatalogTransformerException e) {
String exceptionMessage = "Unable to transform Metacard. Try different transformer: ";
Throwable cause = e.getCause();
if (cause instanceof ResourceNotFoundException) {
exceptionMessage = "Resource file is not available";
} else if (cause instanceof IOException) {
exceptionMessage = "Unable to read resource file";
}
LOGGER.info(exceptionMessage, e);
throw new InternalServerErrorException(exceptionMessage);
} catch (SourceUnavailableException e) {
Expand Down Expand Up @@ -462,6 +469,12 @@ public BinaryContent getDocument(
throw new InternalServerErrorException(exceptionMessage);
} catch (CatalogTransformerException e) {
String exceptionMessage = "Unable to transform Metacard. Try different transformer: ";
Throwable cause = e.getCause();
if (cause instanceof ResourceNotFoundException) {
exceptionMessage = "Resource file is not available";
} else if (cause instanceof IOException) {
exceptionMessage = "Unable to read resource file";
}
LOGGER.info(exceptionMessage, e);
throw new InternalServerErrorException(exceptionMessage);
} catch (SourceUnavailableException e) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,10 @@
import ddf.catalog.data.BinaryContent;
import ddf.catalog.data.Metacard;
import ddf.catalog.data.impl.BinaryContentImpl;
import ddf.catalog.operation.SourceResponse;
import ddf.catalog.transform.CatalogTransformerException;
import ddf.catalog.transform.MetacardTransformer;
import ddf.catalog.transform.QueryResponseTransformer;
import java.io.ByteArrayInputStream;
import java.io.Serializable;
import java.io.StringWriter;
Expand All @@ -36,20 +38,33 @@
import org.codice.ddf.spatial.ogc.csw.catalog.common.GmdConstants;
import org.codice.ddf.spatial.ogc.csw.catalog.converter.CswRecordConverter;

public class AbstractGmdTransformer implements MetacardTransformer {
public class AbstractGmdTransformer implements MetacardTransformer, QueryResponseTransformer {

public static final String GML_PREFIX = "gml:";

public static final String GCO_PREFIX = GmdConstants.GCO_PREFIX + ":";

private Supplier<Converter> converterSupplier;

protected static final String TRANSFORM_EXCEPTION_MSG =
"Unable to transform from GMD Metadata to Metacard";

/**
 * Creates a transformer backed by the given converter factory.
 *
 * <p>A fresh {@code Converter} is obtained from the supplier for each transform call rather than
 * being cached here, so the supplier controls converter lifecycle/thread-safety.
 *
 * @param converterSupplier supplies the XStream converter used to marshal metacards; must be
 *     non-null (validated eagerly so misconfiguration fails at construction time)
 */
public AbstractGmdTransformer(Supplier<Converter> converterSupplier) {
notNull(converterSupplier, "converterSupplier must be non-null");
this.converterSupplier = converterSupplier;
}

/**
 * Adapts the single-metacard transform to the {@code QueryResponseTransformer} contract.
 *
 * <p>GMD output represents exactly one metadata record, so the response must contain exactly one
 * result; anything else (null result list, empty, or multiple results) is rejected.
 *
 * @param sourceResponse response whose sole result is transformed
 * @param map transformer arguments, forwarded unchanged to the metacard transform
 * @return the transformed binary content
 * @throws CatalogTransformerException if the response does not hold exactly one result
 */
@Override
public BinaryContent transform(SourceResponse sourceResponse, Map<String, Serializable> map)
    throws CatalogTransformerException {
  // Guard clause: bail out unless there is exactly one result to transform.
  if (sourceResponse.getResults() == null || sourceResponse.getResults().size() != 1) {
    throw new CatalogTransformerException(TRANSFORM_EXCEPTION_MSG);
  }
  return transform(sourceResponse.getResults().get(0).getMetacard(), map);
}

@Override
public BinaryContent transform(Metacard metacard, Map<String, Serializable> arguments)
throws CatalogTransformerException {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -85,9 +85,6 @@ public class GmdTransformer extends AbstractGmdTransformer implements InputTrans

private static final Logger LOGGER = LoggerFactory.getLogger(GmdTransformer.class);

private static final String TRANSFORM_EXCEPTION_MSG =
"Unable to transform from GMD Metadata to Metacard";

private static final Pattern WHITE_SPACE_PATTER = Pattern.compile("(\\s)+");

private static final Pattern NEW_LINE_PATTERN = Pattern.compile("(\\n)+");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -211,5 +211,20 @@
<argument ref="writerProvider"/>
</bean>

<!-- Registers the GMD transformer as an OSGi QueryResponseTransformer service so query
     responses can be exported as gmd:MD_Metadata (ISO 19115/19139) XML.
     NOTE(review): the ref "gmdInputTransformerTranformer" contains a typo ("Tranformer");
     it must match the bean id declared elsewhere in this blueprint, so confirm that id
     before renaming either side. -->
<service ref="gmdInputTransformerTranformer"
interface="ddf.catalog.transform.QueryResponseTransformer">
<service-properties>
<entry key="id" value="gmd:MD_Metadata"/>
<entry key="mime-type" >
<list>
<value>text/xml</value>
<value>application/xml</value>
</list>
</entry>
<entry key="schema" value="http://www.isotc211.org/2005/gmd"/>
<entry key="displayName" value="GMD Metadata"/>

</service-properties>
</service>

</blueprint>
2 changes: 1 addition & 1 deletion catalog/transformer/catalog-transformer-csv-common/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@
<limit implementation="org.codice.jacoco.LenientLimit">
<counter>COMPLEXITY</counter>
<value>COVEREDRATIO</value>
<minimum>0.83</minimum>
<minimum>0.81</minimum>
</limit>
</limits>
</rule>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ public int compare(AttributeDescriptor descriptor1, AttributeDescriptor descript

return new CompareToBuilder()
.append(getAttributeIndex(descriptorName1), getAttributeIndex(descriptorName2))
.append(descriptorName1, descriptorName2)
.toComparison();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,18 +16,23 @@
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toSet;

import ddf.catalog.data.Attribute;
import ddf.catalog.data.AttributeDescriptor;
import ddf.catalog.data.AttributeType;
import ddf.catalog.data.BinaryContent;
import ddf.catalog.data.Metacard;
import ddf.catalog.data.MetacardType;
import ddf.catalog.data.impl.BinaryContentImpl;
import ddf.catalog.operation.QueryResponse;
import ddf.catalog.transform.CatalogTransformerException;
import ddf.catalog.transform.MetacardTransformer;
import ddf.catalog.transform.QueryResponseTransformer;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
Expand All @@ -37,6 +42,7 @@
import javax.activation.MimeTypeParseException;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down Expand Up @@ -170,11 +176,51 @@ public static Set<AttributeDescriptor> getAllCsvAttributeDescriptors(
*/
/**
 * Returns the attribute descriptors for the requested attribute names, restricted to attributes
 * that hold a non-empty value on at least one of the given metacards.
 *
 * @param metacards metacards whose attribute values are inspected
 * @param requestedAttributes attribute names the caller asked for
 * @return descriptors whose names appear in {@code requestedAttributes} and are non-empty
 */
public static Set<AttributeDescriptor> getOnlyRequestedAttributes(
    final List<Metacard> metacards, final Set<String> requestedAttributes) {
  // Keep only the descriptors the caller explicitly requested.
  return getNonEmptyValueAttributes(metacards).stream()
      .filter(descriptor -> requestedAttributes.contains(descriptor.getName()))
      .collect(toSet());
}

/**
 * Scans the given metacards and keeps only those CSV-eligible attribute descriptors for which at
 * least one metacard carries a non-empty value.
 *
 * @param metacards metacards whose attribute values are inspected
 * @return the set of descriptors that have a non-empty value on some metacard
 */
public static Set<AttributeDescriptor> getNonEmptyValueAttributes(
    final List<Metacard> metacards) {
  // A descriptor survives if any metacard has a usable value for it; emptiness per format is
  // decided by isNonEmptyValue.
  return getAllCsvAttributeDescriptors(metacards).stream()
      .filter(descriptor -> metacards.stream().anyMatch(m -> isNonEmptyValue(m, descriptor)))
      .collect(toSet());
}

/**
 * Tells whether the metacard carries a usable value for the attribute named by {@code descriptor}:
 * non-null for scalar formats, and additionally non-empty for text-like formats. Formats not
 * listed below (e.g. BINARY, OBJECT) are always reported as empty, keeping them out of CSV output.
 */
private static boolean isNonEmptyValue(Metacard metacard, AttributeDescriptor descriptor) {
  final Attribute attribute = metacard.getAttribute(descriptor.getName());
  // Missing attribute or null value is empty regardless of format.
  if (attribute == null || attribute.getValue() == null) {
    return false;
  }
  switch (descriptor.getType().getAttributeFormat()) {
    case STRING:
    case XML:
    case GEOMETRY:
      // Text-like formats must also be non-empty, not merely non-null.
      return StringUtils.isNotEmpty((String) attribute.getValue());
    case INTEGER:
    case LONG:
    case DOUBLE:
    case FLOAT:
    case SHORT:
    case DATE:
    case BOOLEAN:
      return true;
    default:
      return false;
  }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@
import ddf.catalog.data.types.Core;
import java.io.Serializable;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
Expand All @@ -39,10 +42,9 @@
*
* @see java.util.Iterator
*/
class MetacardIterator implements Iterator<Serializable> {
public class MetacardIterator implements Iterator<Serializable> {

private static final Logger LOGGER = LoggerFactory.getLogger(MetacardIterator.class);

private static final String MULTIVALUE_DELIMITER = "\n";

private final List<AttributeDescriptor> attributeDescriptorList;
Expand All @@ -51,12 +53,14 @@ class MetacardIterator implements Iterator<Serializable> {

private int index;

private DateTimeFormatter formatter = DateTimeFormatter.ISO_OFFSET_DATE_TIME;

/**
* @param metacard the metacard to be iterated over.
* @param attributeDescriptorList the list of attributeDescriptors used to determine which
* metacard attributes to return.
*/
MetacardIterator(
public MetacardIterator(
final Metacard metacard, final List<AttributeDescriptor> attributeDescriptorList) {
this.metacard = metacard;
this.attributeDescriptorList = Collections.unmodifiableList(attributeDescriptorList);
Expand Down Expand Up @@ -118,7 +122,9 @@ private Serializable convertValue(
return null;
}
Instant instant = ((Date) value).toInstant();
return instant.toString();
ZoneId zoneId = ZoneId.of("UTC");
ZonedDateTime zonedDateTime = instant.atZone(zoneId);
return zonedDateTime.format(formatter);
case BINARY:
byte[] bytes = (byte[]) value;
return DatatypeConverter.printBase64Binary(bytes);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ public void testDescriptorComparator() {
assertThat(comparison, is(1));

comparison = comparator.compare(nonExistendAttribute1, nonExistendAttribute2);
assertThat(comparison, is(0));
assertThat(comparison, is(-1));

comparison = comparator.compare(nonExistendAttribute1, null);
assertThat(comparison, is(1));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,10 @@
package ddf.catalog.transformer.csv.common;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
Expand Down Expand Up @@ -51,15 +53,8 @@
public class CsvTransformerTest {

private static final List<AttributeDescriptor> ATTRIBUTE_DESCRIPTOR_LIST = new ArrayList<>();

private static Map<String, Attribute> metacardDataMap = new HashMap<>();

private static List<Metacard> metacardList = new ArrayList<>();

private static final int METACARD_COUNT = 2;

private static final String CSV_ITEM_SEPARATOR_REGEX = "[\\n\\r,]";

private static final List<ImmutableTriple<Object, Object, Object>> ATTRIBUTE_DATA =
Arrays.asList(
new ImmutableTriple<Object, Object, Object>(
Expand All @@ -76,7 +71,10 @@ public class CsvTransformerTest {
new ImmutableTriple<Object, Object, Object>(
"attribute7", "OBJECT", BasicTypes.OBJECT_TYPE),
new ImmutableTriple<Object, Object, Object>(
"attribute8", "BINARY", BasicTypes.BINARY_TYPE));
"attribute8", "BINARY", BasicTypes.BINARY_TYPE),
new ImmutableTriple<Object, Object, Object>("attribute9", "", BasicTypes.STRING_TYPE));
private static Map<String, Attribute> metacardDataMap = new HashMap<>();
private static List<Metacard> metacardList = new ArrayList<>();

@Before
public void setup() {
Expand All @@ -89,13 +87,19 @@ public void setup() {
public void getAllCsvAttributeDescriptors() {
Set<AttributeDescriptor> allAttributes =
CsvTransformer.getAllCsvAttributeDescriptors(metacardList);
assertThat(allAttributes, hasSize(6));
assertThat(allAttributes, hasSize(7));
Set<String> allAttributeNames =
allAttributes.stream().map(AttributeDescriptor::getName).collect(Collectors.toSet());
// Binary and Object types are filtered
final Set<String> expectedAttributes =
Sets.newHashSet(
"attribute1", "attribute2", "attribute3", "attribute4", "attribute5", "attribute6");
"attribute1",
"attribute2",
"attribute3",
"attribute4",
"attribute5",
"attribute6",
"attribute9");
assertThat(allAttributeNames, is(expectedAttributes));
}

Expand Down Expand Up @@ -175,6 +179,29 @@ public void writeSearchResultsToCsvWithAliasMap() throws CatalogTransformerExcep
assertThat(scanner.hasNext(), is(false));
}

// Verifies that getNonEmptyValueAttributes drops attributes whose values are empty or of
// non-CSV formats: attribute7 (OBJECT) and attribute8 (BINARY) are filtered by format, and
// attribute9 is filtered because its only value is the empty string.
@Test
public void filterEmptyValueAttributes() {
// Build a metacard type that also declares attribute9 (string-typed).
// NOTE(review): ATTRIBUTE_DATA already lists attribute9, so setup() may have added it to
// ATTRIBUTE_DESCRIPTOR_LIST already — confirm whether this extra add is redundant.
AttributeDescriptor attr9 = buildAttributeDescriptor("attribute9", BasicTypes.STRING_TYPE);
Set<AttributeDescriptor> attributeDescriptors = new HashSet<>(ATTRIBUTE_DESCRIPTOR_LIST);
attributeDescriptors.add(attr9);
MetacardType metacardType = new MetacardTypeImpl("", attributeDescriptors);
Metacard metacard = new MetacardImpl(metacardType);
// Populate the metacard with the shared fixture attributes, then give attribute9 an empty
// string value so it should be treated as "empty" and excluded.
for (Attribute a : metacardDataMap.values()) {
metacard.setAttribute(a);
}
metacard.setAttribute(new AttributeImpl("attribute9", ""));
List<Metacard> metacards = new ArrayList<>(metacardList);
metacards.add(metacard);
Set<AttributeDescriptor> nonEmptyValues = CsvTransformer.getNonEmptyValueAttributes(metacards);

// Six descriptors remain; attribute7/8/9 are excluded as explained above.
assertThat(nonEmptyValues, hasSize(6));
Set<String> nonEmptyAttributes =
nonEmptyValues.stream().map(AttributeDescriptor::getName).collect(Collectors.toSet());
assertThat(nonEmptyAttributes, not(hasItem("attribute7")));
assertThat(nonEmptyAttributes, not(hasItem("attribute8")));
assertThat(nonEmptyAttributes, not(hasItem("attribute9")));
}

private Metacard buildMetacard() {
MetacardType metacardType = new MetacardTypeImpl("", new HashSet<>(ATTRIBUTE_DESCRIPTOR_LIST));
Metacard metacard = new MetacardImpl(metacardType);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
package ddf.catalog.transformer.csv;

import static ddf.catalog.transformer.csv.common.CsvTransformer.createResponse;
import static ddf.catalog.transformer.csv.common.CsvTransformer.getAllCsvAttributeDescriptors;
import static ddf.catalog.transformer.csv.common.CsvTransformer.getNonEmptyValueAttributes;
import static ddf.catalog.transformer.csv.common.CsvTransformer.getOnlyRequestedAttributes;
import static ddf.catalog.transformer.csv.common.CsvTransformer.sortAttributes;
import static ddf.catalog.transformer.csv.common.CsvTransformer.writeMetacardsToCsv;
Expand Down Expand Up @@ -66,7 +66,7 @@ static BinaryContent transformWithArguments(

final Set<AttributeDescriptor> requestedAttributeDescriptors =
requestedFields.isEmpty()
? getAllCsvAttributeDescriptors(metacards)
? getNonEmptyValueAttributes(metacards)
: getOnlyRequestedAttributes(metacards, requestedFields);

if (shouldInjectMetacardType(requestedFields)) {
Expand Down
Loading

0 comments on commit 2f68b0b

Please sign in to comment.