diff --git a/src/main/java/org/dependencytrack/common/MdcKeys.java b/src/main/java/org/dependencytrack/common/MdcKeys.java index def1a3088..34fcb40bd 100644 --- a/src/main/java/org/dependencytrack/common/MdcKeys.java +++ b/src/main/java/org/dependencytrack/common/MdcKeys.java @@ -23,12 +23,19 @@ */ public final class MdcKeys { + public static final String MDC_BOM_FORMAT = "bomFormat"; + public static final String MDC_BOM_SERIAL_NUMBER = "bomSerialNumber"; + public static final String MDC_BOM_SPEC_VERSION = "bomSpecVersion"; + public static final String MDC_BOM_UPLOAD_TOKEN = "bomUploadToken"; + public static final String MDC_BOM_VERSION = "bomVersion"; public static final String MDC_COMPONENT_UUID = "componentUuid"; public static final String MDC_KAFKA_RECORD_TOPIC = "kafkaRecordTopic"; public static final String MDC_KAFKA_RECORD_PARTITION = "kafkaRecordPartition"; public static final String MDC_KAFKA_RECORD_OFFSET = "kafkaRecordOffset"; public static final String MDC_KAFKA_RECORD_KEY = "kafkaRecordKey"; + public static final String MDC_PROJECT_NAME = "projectName"; public static final String MDC_PROJECT_UUID = "projectUuid"; + public static final String MDC_PROJECT_VERSION = "projectVersion"; public static final String MDC_SCAN_TOKEN = "scanToken"; private MdcKeys() { diff --git a/src/main/java/org/dependencytrack/event/IntegrityAnalysisEvent.java b/src/main/java/org/dependencytrack/event/IntegrityAnalysisEvent.java index 6c980e992..97952f1a8 100644 --- a/src/main/java/org/dependencytrack/event/IntegrityAnalysisEvent.java +++ b/src/main/java/org/dependencytrack/event/IntegrityAnalysisEvent.java @@ -23,13 +23,15 @@ import java.util.UUID; -public class IntegrityAnalysisEvent implements Event { +import static org.dependencytrack.util.PersistenceUtil.assertNonPersistent; - private UUID uuid; +public class IntegrityAnalysisEvent implements Event { - private IntegrityMetaComponent integrityMetaComponent; + private final UUID uuid; + private final IntegrityMetaComponent integrityMetaComponent; public IntegrityAnalysisEvent(UUID uuid, IntegrityMetaComponent integrityMetaComponent) { + assertNonPersistent(integrityMetaComponent, "integrityMetaComponent must not be persistent"); this.uuid = uuid; this.integrityMetaComponent = integrityMetaComponent; } diff --git a/src/main/java/org/dependencytrack/model/License.java b/src/main/java/org/dependencytrack/model/License.java index 5a8596548..33d4998ab 100644 --- a/src/main/java/org/dependencytrack/model/License.java +++ b/src/main/java/org/dependencytrack/model/License.java @@ -92,6 +92,7 @@ public enum FetchGroup { } private static final long serialVersionUID = -1707920279688859358L; + public static final License UNRESOLVED = new License(); @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.NATIVE) diff --git a/src/main/java/org/dependencytrack/model/Project.java b/src/main/java/org/dependencytrack/model/Project.java index 3893c3478..8a4c0d9a6 100644 --- a/src/main/java/org/dependencytrack/model/Project.java +++ b/src/main/java/org/dependencytrack/model/Project.java @@ -300,6 +300,8 @@ public enum FetchGroup { @ApiModelProperty(accessMode = ApiModelProperty.AccessMode.READ_ONLY) private ProjectMetadata metadata; + private transient String bomRef; + private transient ProjectMetrics metrics; private transient List versions; @@ -511,6 +513,14 @@ public void setActive(Boolean active) { this.active = active; } + public String getBomRef() { + return bomRef; + } + + public void setBomRef(String bomRef) { + this.bomRef = bomRef; + } + public 
ProjectMetrics getMetrics() { return metrics; } diff --git a/src/main/java/org/dependencytrack/parser/cyclonedx/util/ModelConverter.java b/src/main/java/org/dependencytrack/parser/cyclonedx/util/ModelConverter.java index 225799ebd..a94fafb8a 100644 --- a/src/main/java/org/dependencytrack/parser/cyclonedx/util/ModelConverter.java +++ b/src/main/java/org/dependencytrack/parser/cyclonedx/util/ModelConverter.java @@ -21,7 +21,10 @@ import alpine.common.logging.Logger; import com.github.packageurl.MalformedPackageURLException; import com.github.packageurl.PackageURL; +import org.apache.commons.collections4.MultiValuedMap; +import org.apache.commons.collections4.multimap.HashSetValuedHashMap; import org.apache.commons.lang3.StringUtils; +import org.cyclonedx.model.BomReference; import org.cyclonedx.model.Dependency; import org.cyclonedx.model.Hash; import org.cyclonedx.model.LicenseChoice; @@ -129,15 +132,16 @@ public static Project convertToProject(final org.cyclonedx.model.Metadata cdxMet public static Project convertToProject(final org.cyclonedx.model.Component cdxComponent) { final var project = new Project(); + project.setBomRef(useOrGenerateRandomBomRef(cdxComponent.getBomRef())); project.setAuthor(trimToNull(cdxComponent.getAuthor())); project.setPublisher(trimToNull(cdxComponent.getPublisher())); + project.setSupplier(convert(cdxComponent.getSupplier())); project.setClassifier(convertClassifier(cdxComponent.getType()).orElse(Classifier.APPLICATION)); project.setGroup(trimToNull(cdxComponent.getGroup())); project.setName(trimToNull(cdxComponent.getName())); project.setVersion(trimToNull(cdxComponent.getVersion())); project.setDescription(trimToNull(cdxComponent.getDescription())); project.setExternalReferences(convertExternalReferences(cdxComponent.getExternalReferences())); - project.setSupplier(ModelConverter.convert(cdxComponent.getSupplier())); if (cdxComponent.getPurl() != null) { try { @@ -165,6 +169,7 @@ public static List convertComponents(final List convertServices(final List convertDependencyGraph(final List cdxDependencies) { + final var dependencyGraph = new HashSetValuedHashMap(); + if (cdxDependencies == null || cdxDependencies.isEmpty()) { + return dependencyGraph; + } + + for (final Dependency cdxDependency : cdxDependencies) { + if (cdxDependency.getDependencies() == null || cdxDependency.getDependencies().isEmpty()) { + continue; + } + + final List directDependencies = cdxDependency.getDependencies().stream() + .map(BomReference::getRef).toList(); + dependencyGraph.putAll(cdxDependency.getRef(), directDependencies); + } + + return dependencyGraph; + } + private static Optional convertClassifier(final org.cyclonedx.model.Component.Type cdxComponentType) { return Optional.ofNullable(cdxComponentType) .map(Enum::name) @@ -482,6 +506,12 @@ private static List convertDataClassification(final List UUID.randomUUID().toString()); + } + public static List flatten(final Collection items, final Function> childrenGetter, final BiConsumer> childrenSetter) { diff --git a/src/main/java/org/dependencytrack/persistence/ComponentQueryManager.java b/src/main/java/org/dependencytrack/persistence/ComponentQueryManager.java index 73ac307f0..20de28b1d 100644 --- a/src/main/java/org/dependencytrack/persistence/ComponentQueryManager.java +++ b/src/main/java/org/dependencytrack/persistence/ComponentQueryManager.java @@ -656,65 +656,47 @@ public void recursivelyDelete(Component component, boolean commitIndex) { /** * Returns a component by matching its identity information. *

- * Note that this method employs a stricter matching logic than {@link #matchIdentity(Project, ComponentIdentity)} - * and {@link #matchIdentity(ComponentIdentity)}. For example, if {@code purl} of the given {@link ComponentIdentity} - * is {@code null}, this method will use a query that explicitly checks for the {@code purl} column to be {@code null}. + * Note that this method employs a stricter matching logic than {@link #matchIdentity(Project, ComponentIdentity)}. + * For example, if {@code purl} of the given {@link ComponentIdentity} is {@code null}, this method will use a + * query that explicitly checks for the {@code purl} column to be {@code null}. * Other methods, by contrast, will simply not include {@code purl} in the query in such cases. * * @param project the Project the component is a dependency of * @param cid the identity values of the component * @return a Component object, or null if not found + * @since 4.11.0 */ - public Component matchSingleIdentity(final Project project, final ComponentIdentity cid) { - var filterParts = new ArrayList(); - final var params = new HashMap(); - - if (cid.getPurl() != null) { - filterParts.add("(purl != null && purl == :purl)"); - params.put("purl", cid.getPurl().canonicalize()); - } else { - filterParts.add("purl == null"); - } - - if (cid.getCpe() != null) { - filterParts.add("(cpe != null && cpe == :cpe)"); - params.put("cpe", cid.getCpe()); - } else { - filterParts.add("cpe == null"); - } - - if (cid.getSwidTagId() != null) { - filterParts.add("(swidTagId != null && swidTagId == :swidTagId)"); - params.put("swidTagId", cid.getSwidTagId()); - } else { - filterParts.add("swidTagId == null"); - } - - var coordinatesFilter = "("; - if (cid.getGroup() != null) { - coordinatesFilter += "group == :group"; - params.put("group", cid.getGroup()); - } else { - coordinatesFilter += "group == null"; - } - coordinatesFilter += " && name == :name"; - params.put("name", cid.getName()); - if (cid.getVersion() != null) { - coordinatesFilter += " && version == :version"; - params.put("version", cid.getVersion()); - } else { - coordinatesFilter += " && version == null"; + public Component matchSingleIdentityExact(final Project project, final ComponentIdentity cid) { + final Pair> queryFilterParamsPair = buildExactComponentIdentityQuery(project, cid); + final Query query = pm.newQuery(Component.class, queryFilterParamsPair.getKey()); + query.setNamedParameters(queryFilterParamsPair.getRight()); + try { + return query.executeUnique(); + } finally { + query.closeAll(); } - coordinatesFilter += ")"; - filterParts.add(coordinatesFilter); - - final var filter = "project == :project && (" + String.join(" && ", filterParts) + ")"; - params.put("project", project); + } - final Query query = pm.newQuery(Component.class, filter); - query.setNamedParameters(params); + /** + * Returns the first component matching a given {@link ComponentIdentity} in a {@link Project}. 
+ * + * @param project the Project the component is a dependency of + * @param cid the identity values of the component + * @return a Component object, or null if not found + * @since 4.11.0 + */ + public Component matchFirstIdentityExact(final Project project, final ComponentIdentity cid) { + final Pair> queryFilterParamsPair = buildExactComponentIdentityQuery(project, cid); + final Query query = pm.newQuery(Component.class, queryFilterParamsPair.getKey()); + query.setNamedParameters(queryFilterParamsPair.getRight()); + query.setRange(0, 1); try { - return query.executeUnique(); + final List result = query.executeList(); + if (result.isEmpty()) { + return null; + } + + return result.getFirst(); } finally { query.closeAll(); } @@ -810,6 +792,55 @@ private static Pair> buildComponentIdentityQuery(fin return Pair.of(filter, params); } + private static Pair> buildExactComponentIdentityQuery(final Project project, final ComponentIdentity cid) { + var filterParts = new ArrayList(); + final var params = new HashMap(); + + if (cid.getPurl() != null) { + filterParts.add("(purl != null && purl == :purl)"); + params.put("purl", cid.getPurl().canonicalize()); + } else { + filterParts.add("purl == null"); + } + + if (cid.getCpe() != null) { + filterParts.add("(cpe != null && cpe == :cpe)"); + params.put("cpe", cid.getCpe()); + } else { + filterParts.add("cpe == null"); + } + + if (cid.getSwidTagId() != null) { + filterParts.add("(swidTagId != null && swidTagId == :swidTagId)"); + params.put("swidTagId", cid.getSwidTagId()); + } else { + filterParts.add("swidTagId == null"); + } + + var coordinatesFilter = "("; + if (cid.getGroup() != null) { + coordinatesFilter += "group == :group"; + params.put("group", cid.getGroup()); + } else { + coordinatesFilter += "group == null"; + } + coordinatesFilter += " && name == :name"; + params.put("name", cid.getName()); + if (cid.getVersion() != null) { + coordinatesFilter += " && version == :version"; + params.put("version", cid.getVersion()); + } else { + coordinatesFilter += " && version == null"; + } + coordinatesFilter += ")"; + filterParts.add(coordinatesFilter); + + final var filter = "project == :project && (" + String.join(" && ", filterParts) + ")"; + params.put("project", project); + + return Pair.of(filter, params); + } + /** * Intelligently adds dependencies for components that are not already a dependency * of the specified project and removes the dependency relationship for components diff --git a/src/main/java/org/dependencytrack/persistence/QueryManager.java b/src/main/java/org/dependencytrack/persistence/QueryManager.java index 6a0850305..6aa03351e 100644 --- a/src/main/java/org/dependencytrack/persistence/QueryManager.java +++ b/src/main/java/org/dependencytrack/persistence/QueryManager.java @@ -983,8 +983,12 @@ public List getAllVulnerableSoftware(final String cpePart, f return getVulnerableSoftwareQueryManager().getAllVulnerableSoftware(cpePart, cpeVendor, cpeProduct, purl); } - public Component matchSingleIdentity(final Project project, final ComponentIdentity cid) { - return getComponentQueryManager().matchSingleIdentity(project, cid); + public Component matchSingleIdentityExact(final Project project, final ComponentIdentity cid) { + return getComponentQueryManager().matchSingleIdentityExact(project, cid); + } + + public Component matchFirstIdentityExact(final Project project, final ComponentIdentity cid) { + return getComponentQueryManager().matchFirstIdentityExact(project, cid); } public List matchIdentity(final Project project, final 
ComponentIdentity cid) { diff --git a/src/main/java/org/dependencytrack/resources/v1/ComponentResource.java b/src/main/java/org/dependencytrack/resources/v1/ComponentResource.java index fbe54a931..006f2c5e8 100644 --- a/src/main/java/org/dependencytrack/resources/v1/ComponentResource.java +++ b/src/main/java/org/dependencytrack/resources/v1/ComponentResource.java @@ -52,7 +52,7 @@ import org.dependencytrack.model.VulnerabilityScan; import org.dependencytrack.persistence.QueryManager; import org.dependencytrack.proto.repometaanalysis.v1.FetchMeta; -import org.dependencytrack.util.InternalComponentIdentificationUtil; +import org.dependencytrack.util.InternalComponentIdentifier; import org.dependencytrack.util.PurlUtil; import javax.validation.Validator; @@ -382,7 +382,7 @@ public Response createComponent(@PathParam("uuid") String uuid, Component jsonCo component.setClassifier(jsonComponent.getClassifier()); component.setPurl(jsonComponent.getPurl()); component.setPurlCoordinates(PurlUtil.silentPurlCoordinatesOnly(jsonComponent.getPurl())); - component.setInternal(InternalComponentIdentificationUtil.isInternalComponent(component, qm)); + component.setInternal(new InternalComponentIdentifier().isInternal(component)); component.setCpe(StringUtils.trimToNull(jsonComponent.getCpe())); component.setSwidTagId(StringUtils.trimToNull(jsonComponent.getSwidTagId())); component.setCopyright(StringUtils.trimToNull(jsonComponent.getCopyright())); @@ -489,7 +489,7 @@ public Response updateComponent(Component jsonComponent) { component.setClassifier(jsonComponent.getClassifier()); component.setPurl(jsonComponent.getPurl()); component.setPurlCoordinates(PurlUtil.silentPurlCoordinatesOnly(component.getPurl())); - component.setInternal(InternalComponentIdentificationUtil.isInternalComponent(component, qm)); + component.setInternal(new InternalComponentIdentifier().isInternal(component)); component.setCpe(StringUtils.trimToNull(jsonComponent.getCpe())); component.setSwidTagId(StringUtils.trimToNull(jsonComponent.getSwidTagId())); component.setCopyright(StringUtils.trimToNull(jsonComponent.getCopyright())); diff --git a/src/main/java/org/dependencytrack/tasks/BomUploadProcessingTask.java b/src/main/java/org/dependencytrack/tasks/BomUploadProcessingTask.java index 363c2c54a..d753f67e6 100644 --- a/src/main/java/org/dependencytrack/tasks/BomUploadProcessingTask.java +++ b/src/main/java/org/dependencytrack/tasks/BomUploadProcessingTask.java @@ -28,12 +28,12 @@ import alpine.notification.NotificationLevel; import org.apache.commons.collections4.MultiValuedMap; import org.apache.commons.collections4.multimap.HashSetValuedHashMap; -import org.apache.kafka.clients.producer.RecordMetadata; +import org.apache.commons.lang3.exception.ExceptionUtils; import org.cyclonedx.BomParserFactory; import org.cyclonedx.exception.ParseException; -import org.cyclonedx.model.Dependency; import org.cyclonedx.parsers.Parser; import org.datanucleus.flush.FlushMode; +import org.datanucleus.store.query.QueryNotUniqueException; import org.dependencytrack.common.ConfigKey; import org.dependencytrack.event.BomUploadEvent; import org.dependencytrack.event.ComponentRepositoryMetaAnalysisEvent; @@ -61,22 +61,23 @@ import org.dependencytrack.notification.NotificationScope; import org.dependencytrack.notification.vo.BomConsumedOrProcessed; import org.dependencytrack.notification.vo.BomProcessingFailed; -import org.dependencytrack.persistence.FlushHelper; import org.dependencytrack.persistence.QueryManager; -import 
org.dependencytrack.proto.repometaanalysis.v1.FetchMeta; +import org.dependencytrack.util.InternalComponentIdentifier; import org.dependencytrack.util.WaitingLockConfiguration; import org.json.JSONArray; +import org.slf4j.MDC; +import javax.jdo.JDOUserException; import javax.jdo.PersistenceManager; import javax.jdo.Query; -import javax.jdo.Transaction; -import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.StandardOpenOption; import java.time.Duration; import java.time.Instant; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; @@ -86,24 +87,34 @@ import java.util.Set; import java.util.UUID; import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; import java.util.function.Predicate; -import java.util.stream.Stream; import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.trim; import static org.apache.commons.lang3.StringUtils.trimToNull; +import static org.apache.commons.lang3.time.DurationFormatUtils.formatDurationHMS; import static org.datanucleus.PropertyNames.PROPERTY_FLUSH_MODE; import static org.datanucleus.PropertyNames.PROPERTY_PERSISTENCE_BY_REACHABILITY_AT_COMMIT; import static org.datanucleus.PropertyNames.PROPERTY_RETAIN_VALUES; -import static org.dependencytrack.common.ConfigKey.BOM_UPLOAD_PROCESSING_TRX_FLUSH_THRESHOLD; +import static org.dependencytrack.common.MdcKeys.MDC_BOM_FORMAT; +import static org.dependencytrack.common.MdcKeys.MDC_BOM_SERIAL_NUMBER; +import static org.dependencytrack.common.MdcKeys.MDC_BOM_SPEC_VERSION; +import static org.dependencytrack.common.MdcKeys.MDC_BOM_UPLOAD_TOKEN; +import static org.dependencytrack.common.MdcKeys.MDC_BOM_VERSION; +import static org.dependencytrack.common.MdcKeys.MDC_PROJECT_NAME; +import static org.dependencytrack.common.MdcKeys.MDC_PROJECT_UUID; +import static org.dependencytrack.common.MdcKeys.MDC_PROJECT_VERSION; import static org.dependencytrack.event.kafka.componentmeta.RepoMetaConstants.SUPPORTED_PACKAGE_URLS_FOR_INTEGRITY_CHECK; import static org.dependencytrack.event.kafka.componentmeta.RepoMetaConstants.TIME_SPAN; import static org.dependencytrack.parser.cyclonedx.util.ModelConverter.convertComponents; +import static org.dependencytrack.parser.cyclonedx.util.ModelConverter.convertDependencyGraph; import static org.dependencytrack.parser.cyclonedx.util.ModelConverter.convertServices; import static org.dependencytrack.parser.cyclonedx.util.ModelConverter.convertToProject; import static org.dependencytrack.parser.cyclonedx.util.ModelConverter.convertToProjectMetadata; import static org.dependencytrack.parser.cyclonedx.util.ModelConverter.flatten; -import static org.dependencytrack.util.InternalComponentIdentificationUtil.isInternalComponent; +import static org.dependencytrack.proto.repometaanalysis.v1.FetchMeta.FETCH_META_INTEGRITY_DATA_AND_LATEST_VERSION; +import static org.dependencytrack.proto.repometaanalysis.v1.FetchMeta.FETCH_META_LATEST_VERSION; import static org.dependencytrack.util.LockProvider.executeWithLockWaiting; import static org.dependencytrack.util.PersistenceUtil.applyIfChanged; import static org.dependencytrack.util.PersistenceUtil.assertPersistent; @@ -117,8 +128,27 @@ */ public class BomUploadProcessingTask implements Subscriber { + private static final class Context { + + private final UUID token; + private final Project project; + private final Bom.Format 
bomFormat; + private final long startTimeNs; + private String bomSpecVersion; + private String bomSerialNumber; + private Date bomTimestamp; + private Integer bomVersion; + + private Context(final UUID token, final Project project) { + this.token = token; + this.project = project; + this.bomFormat = Bom.Format.CYCLONEDX; + this.startTimeNs = System.nanoTime(); + } + + } + private static final Logger LOGGER = Logger.getLogger(BomUploadProcessingTask.class); - private static final int FLUSH_THRESHOLD = Config.getInstance().getPropertyAsInt(BOM_UPLOAD_PROCESSING_TRX_FLUSH_THRESHOLD); private final KafkaEventDispatcher kafkaEventDispatcher; private final boolean delayBomProcessedNotification; @@ -136,81 +166,90 @@ public BomUploadProcessingTask() { * {@inheritDoc} */ public void inform(final Event e) { - if (e instanceof final BomUploadEvent event) { - final var ctx = new Context(event.getProject(), event.getChainIdentifier()); - try { - // Prevent BOMs for the same project to be processed concurrently. - // Note that this is an edge case, we're not expecting any lock waits under normal circumstances. - final WaitingLockConfiguration lockConfiguration = createLockConfiguration(ctx); - executeWithLockWaiting(lockConfiguration, () -> processBom(ctx, event.getFile())); - - LOGGER.info("BOM processed successfully (%s)".formatted(ctx)); - updateState(ctx, WorkflowStep.BOM_PROCESSING, WorkflowStatus.COMPLETED); - if (!delayBomProcessedNotification) { - dispatchBomProcessedNotification(ctx); - } else { - // The notification will be dispatched by the Kafka Streams topology, - // when it detects that the vulnerability scan completed. - LOGGER.warn("Not dispatching %s notification, because %s is enabled (%s)" - .formatted(NotificationGroup.BOM_PROCESSED, ConfigKey.TMP_DELAY_BOM_PROCESSED_NOTIFICATION.getPropertyName(), ctx)); - } - } catch (Throwable throwable) { - if (throwable instanceof BomConsumptionException ex) { - LOGGER.error("BOM consumption failed (%s)".formatted(ex.ctx), ex); - updateStateAndCancelDescendants(ctx, WorkflowStep.BOM_CONSUMPTION, WorkflowStatus.FAILED, ex.getMessage()); - kafkaEventDispatcher.dispatchNotification(new Notification() - .scope(NotificationScope.PORTFOLIO) - .group(NotificationGroup.BOM_PROCESSING_FAILED) - .level(NotificationLevel.ERROR) - .title(NotificationConstants.Title.BOM_PROCESSING_FAILED) - .content("An error occurred while processing a BOM") - // TODO: Look into adding more fields to BomProcessingFailed, to also cover upload token, serial number, version, etc. - // FIXME: Add reference to BOM after we have dedicated BOM server - .subject(new BomProcessingFailed(ctx.uploadToken, ctx.project, /* bom */ "(Omitted)", ex.getMessage(), ex.ctx.bomFormat, ex.ctx.bomSpecVersion))); - } else if (throwable instanceof final BomProcessingException ex) { - LOGGER.error("BOM processing failed (%s)".formatted(ex.ctx), ex); - updateStateAndCancelDescendants(ctx, WorkflowStep.BOM_PROCESSING, WorkflowStatus.FAILED, ex.getMessage()); - kafkaEventDispatcher.dispatchNotification(new Notification() - .scope(NotificationScope.PORTFOLIO) - .group(NotificationGroup.BOM_PROCESSING_FAILED) - .level(NotificationLevel.ERROR) - .title(NotificationConstants.Title.BOM_PROCESSING_FAILED) - .content("An error occurred while processing a BOM") - // TODO: Look into adding more fields to BomProcessingFailed, to also cover upload token, serial number, version, etc. - // Thanks to ctx we now have more information about the BOM that may be useful to consumers downstream. 
- // FIXME: Add reference to BOM after we have dedicated BOM server - .subject(new BomProcessingFailed(ctx.uploadToken, ctx.project, /* bom */ "(Omitted)", ex.getMessage(), ex.ctx.bomFormat, ex.ctx.bomSpecVersion))); - } else { - LOGGER.error("BOM processing failed unexpectedly (%s)".formatted(ctx), throwable); - updateStateAndCancelDescendants(ctx, WorkflowStep.BOM_PROCESSING, WorkflowStatus.FAILED, throwable.getMessage()); - kafkaEventDispatcher.dispatchNotification(new Notification() - .scope(NotificationScope.PORTFOLIO) - .group(NotificationGroup.BOM_PROCESSING_FAILED) - .level(NotificationLevel.ERROR) - .title(NotificationConstants.Title.BOM_PROCESSING_FAILED) - .content("An error occurred while processing a BOM") - // FIXME: Add reference to BOM after we have dedicated BOM server - .subject(new BomProcessingFailed(ctx.uploadToken, ctx.project, /* bom */ "(Omitted)", throwable.getMessage(), ctx.bomFormat /* (may be null) */, ctx.bomSpecVersion /* (may be null) */))); - } - } + if (!(e instanceof final BomUploadEvent event)) { + return; + } + + final var ctx = new Context(event.getChainIdentifier(), event.getProject()); + try (var ignoredMdcProjectUuid = MDC.putCloseable(MDC_PROJECT_UUID, ctx.project.getUuid().toString()); + var ignoredMdcProjectName = MDC.putCloseable(MDC_PROJECT_NAME, ctx.project.getName()); + var ignoredMdcProjectVersion = MDC.putCloseable(MDC_PROJECT_VERSION, ctx.project.getVersion()); + var ignoredMdcBomUploadToken = MDC.putCloseable(MDC_BOM_UPLOAD_TOKEN, ctx.token.toString())) { + processEvent(ctx, event); } } - private void processBom(final Context ctx, final File bomFile) throws BomConsumptionException, BomProcessingException { - LOGGER.info("Consuming uploaded BOM (%s)".formatted(ctx)); - WorkflowState bomConsumptionState = null; - try (final var qm = new QueryManager()) { - WorkflowState consumptionState = qm.getWorkflowStateByTokenAndStep(ctx.uploadToken, WorkflowStep.BOM_CONSUMPTION); - if (consumptionState != null) { - consumptionState.setStartedAt(Date.from(Instant.now())); - bomConsumptionState = qm.persist(consumptionState); - } else { - //TODO change the log level to error and throw exception once the workflow has been migrated completely - LOGGER.warn("Workflow state for BOM_CONSUMPTION not found in database so cannot be updated for context: (%s)".formatted(ctx)); + private void processEvent(final Context ctx, final BomUploadEvent event) { + startBomConsumptionWorkflowStep(ctx); + + final ConsumedBom consumedBom; + try (final var bomFileInputStream = Files.newInputStream(event.getFile().toPath(), StandardOpenOption.DELETE_ON_CLOSE)) { + final byte[] cdxBomBytes = bomFileInputStream.readAllBytes(); + final Parser parser = BomParserFactory.createParser(cdxBomBytes); + final org.cyclonedx.model.Bom cdxBom = parser.parse(cdxBomBytes); + + ctx.bomSpecVersion = cdxBom.getSpecVersion(); + if (cdxBom.getSerialNumber() != null) { + ctx.bomSerialNumber = cdxBom.getSerialNumber().replaceFirst("urn:uuid:", ""); } + if (cdxBom.getMetadata() != null && cdxBom.getMetadata().getTimestamp() != null) { + ctx.bomTimestamp = cdxBom.getMetadata().getTimestamp(); + } + ctx.bomVersion = cdxBom.getVersion(); + + consumedBom = consumeBom(cdxBom); + } catch (IOException | ParseException | RuntimeException e) { + failWorkflowStepAndCancelDescendants(ctx, WorkflowStep.BOM_CONSUMPTION, e); + dispatchBomProcessingFailedNotification(ctx, e); + return; + } + + startBomProcessingWorkflowStep(ctx); + dispatchBomConsumedNotification(ctx); + + final ProcessedBom processedBom; + try (var 
ignoredMdcBomFormat = MDC.putCloseable(MDC_BOM_FORMAT, ctx.bomFormat.getFormatShortName()); + var ignoredMdcBomSpecVersion = MDC.putCloseable(MDC_BOM_SPEC_VERSION, ctx.bomSpecVersion); + var ignoredMdcBomSerialNumber = MDC.putCloseable(MDC_BOM_SERIAL_NUMBER, ctx.bomSerialNumber); + var ignoredMdcBomVersion = MDC.putCloseable(MDC_BOM_VERSION, String.valueOf(ctx.bomVersion))) { + // Prevent BOMs for the same project from being processed concurrently. + // Note that this is an edge case; we're not expecting any lock waits under normal circumstances. + final WaitingLockConfiguration lockConfiguration = createLockConfiguration(ctx); + processedBom = executeWithLockWaiting(lockConfiguration, () -> processBom(ctx, consumedBom)); + } catch (Throwable e) { + failWorkflowStepAndCancelDescendants(ctx, WorkflowStep.BOM_PROCESSING, e); + dispatchBomProcessingFailedNotification(ctx, e); + return; + } - final org.cyclonedx.model.Bom cdxBom = parseBom(ctx, bomFile); + completeBomProcessingWorkflowStep(ctx); + final var processingDurationMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - ctx.startTimeNs); + LOGGER.info("BOM processed successfully in %s".formatted(formatDurationHMS(processingDurationMs))); + if (!delayBomProcessedNotification) { + dispatchBomProcessedNotification(ctx); + } + + final List vulnAnalysisEvents = createVulnAnalysisEvents(ctx, processedBom.components()); + final List repoMetaAnalysisEvents = createRepoMetaAnalysisEvents(processedBom.components()); + + final var dispatchedEvents = new ArrayList>(vulnAnalysisEvents.size() + repoMetaAnalysisEvents.size()); + dispatchedEvents.addAll(initiateVulnerabilityAnalysis(ctx, vulnAnalysisEvents)); + dispatchedEvents.addAll(initiateRepoMetaAnalysis(repoMetaAnalysisEvents)); + CompletableFuture.allOf(dispatchedEvents.toArray(new CompletableFuture[0])).join(); + } + + private record ConsumedBom( + Project project, + ProjectMetadata projectMetadata, + List components, + List services, + MultiValuedMap dependencyGraph, + Map identitiesByBomRef, + MultiValuedMap bomRefsByIdentity + ) { + } + + private ConsumedBom consumeBom(final org.cyclonedx.model.Bom cdxBom) { // Keep track of which BOM ref points to which component identity. // During component and service de-duplication, we'll potentially drop // some BOM refs, which can break the dependency graph. 
@@ -230,50 +269,45 @@ private void processBom(final Context ctx, final File bomFile) throws BomConsump if (cdxBom.getMetadata() != null && cdxBom.getMetadata().getComponent() != null) { components.addAll(convertComponents(cdxBom.getMetadata().getComponent().getComponents())); } + components.addAll(convertComponents(cdxBom.getComponents())); components = flatten(components, Component::getChildren, Component::setChildren); final int numComponentsTotal = components.size(); - components = components.stream() - .filter(distinctComponentsByIdentity(identitiesByBomRef, bomRefsByIdentity)) - .toList(); + List services = convertServices(cdxBom.getServices()); services = flatten(services, ServiceComponent::getChildren, ServiceComponent::setChildren); final int numServicesTotal = services.size(); - services = services.stream() - .filter(distinctServicesByIdentity(identitiesByBomRef, bomRefsByIdentity)) - .toList(); - - LOGGER.info("Consumed %d components (%d before de-duplication) and %d services (%d before de-duplication) from uploaded BOM (%s)" - .formatted(components.size(), numComponentsTotal, services.size(), numServicesTotal, ctx)); - - //complete the Bom consumption state and start the processing state - if (bomConsumptionState != null) { - try (var qm = new QueryManager()) { - bomConsumptionState.setStatus(WorkflowStatus.COMPLETED); - bomConsumptionState.setUpdatedAt(Date.from(Instant.now())); - qm.updateWorkflowState(bomConsumptionState); - - WorkflowState processingState = qm.getWorkflowStateByTokenAndStep(ctx.uploadToken, WorkflowStep.BOM_PROCESSING); - processingState.setStartedAt(Date.from(Instant.now())); - qm.persist(processingState); - } - } - - kafkaEventDispatcher.dispatchNotification(new Notification() - .scope(NotificationScope.PORTFOLIO) - .group(NotificationGroup.BOM_CONSUMED) - .level(NotificationLevel.INFORMATIONAL) - .title(NotificationConstants.Title.BOM_CONSUMED) - .content("A %s BOM was consumed and will be processed".formatted(ctx.bomFormat.getFormatShortName())) - .subject(new BomConsumedOrProcessed(ctx.uploadToken, ctx.project, /* bom */ "(Omitted)", ctx.bomFormat, ctx.bomSpecVersion))); + final MultiValuedMap dependencyGraph = convertDependencyGraph(cdxBom.getDependencies()); + final int numDependencyGraphEntries = dependencyGraph.asMap().size(); + + components = components.stream().filter(distinctComponentsByIdentity(identitiesByBomRef, bomRefsByIdentity)).toList(); + services = services.stream().filter(distinctServicesByIdentity(identitiesByBomRef, bomRefsByIdentity)).toList(); + LOGGER.info(""" + Consumed %d components (%d before de-duplication), %d services (%d before de-duplication), \ + and %d dependency graph entries""".formatted(components.size(), numComponentsTotal, + services.size(), numServicesTotal, numDependencyGraphEntries)); + + return new ConsumedBom( + project, + projectMetadata, + components, + services, + dependencyGraph, + identitiesByBomRef, + bomRefsByIdentity + ); + } - final var vulnAnalysisEvents = new ArrayList(); - final var repoMetaAnalysisEvents = new ArrayList(); + private record ProcessedBom( + Project project, + Collection components, + Collection services + ) { + } + private ProcessedBom processBom(final Context ctx, final ConsumedBom bom) { try (final var qm = new QueryManager()) { - final PersistenceManager pm = qm.getPersistenceManager(); - // Disable reachability checks on commit. 
// See https://www.datanucleus.org/products/accessplatform_4_1/jdo/performance_tuning.html // @@ -286,7 +320,7 @@ private void processBom(final Context ctx, final File bomFile) throws BomConsump // See: // https://www.datanucleus.org/products/accessplatform_6_0/jdo/persistence.html#cascading // https://www.datanucleus.org/products/accessplatform_6_0/jdo/persistence.html#_managing_relationships - pm.setProperty(PROPERTY_PERSISTENCE_BY_REACHABILITY_AT_COMMIT, "false"); + qm.getPersistenceManager().setProperty(PROPERTY_PERSISTENCE_BY_REACHABILITY_AT_COMMIT, "false"); // Save some database round-trips by only flushing changes every FLUSH_THRESHOLD write operations. // See https://www.datanucleus.org/products/accessplatform_4_1/jdo/performance_tuning.html @@ -302,7 +336,7 @@ private void processBom(final Context ctx, final File bomFile) throws BomConsump // Still, QUERY may be a nice middle-ground between AUTO and MANUAL. // // BomUploadProcessingTaskTest#informWithBloatedBomTest can be used to profile the impact on large BOMs. - pm.setProperty(PROPERTY_FLUSH_MODE, FlushMode.MANUAL.name()); + qm.getPersistenceManager().setProperty(PROPERTY_FLUSH_MODE, FlushMode.MANUAL.name()); // Prevent object fields from being unloaded upon commit. // @@ -312,190 +346,34 @@ private void processBom(final Context ctx, final File bomFile) throws BomConsump // See https://www.datanucleus.org/products/accessplatform_6_0/jdo/persistence.html#lifecycle qm.getPersistenceManager().setProperty(PROPERTY_RETAIN_VALUES, "true"); - LOGGER.info("Processing %d components and %d services from BOM (%s)" - .formatted(components.size(), services.size(), ctx)); + return qm.runInTransaction(() -> { + final Project persistentProject = processProject(ctx, qm, bom.project(), bom.projectMetadata()); - final Transaction trx = pm.currentTransaction(); - try { - trx.begin(); - final Project persistentProject = processProject(ctx, pm, project, projectMetadata); - final Map persistentComponents = - processComponents(qm, persistentProject, components, identitiesByBomRef, bomRefsByIdentity); - final Map persistentServices = - processServices(qm, persistentProject, services, identitiesByBomRef, bomRefsByIdentity); - processDependencyGraph(ctx, pm, cdxBom, persistentProject, persistentComponents, persistentServices, identitiesByBomRef); - Date bomGeneratedTimestamp = null; - if (cdxBom.getMetadata() != null && cdxBom.getMetadata().getTimestamp() != null) { - bomGeneratedTimestamp = cdxBom.getMetadata().getTimestamp(); - } - recordBomImport(ctx, pm, persistentProject, bomGeneratedTimestamp); - // BOM ref <-> ComponentIdentity indexes are no longer needed. - // Let go of their contents to make it eligible for GC sooner. - identitiesByBomRef.clear(); - bomRefsByIdentity.clear(); - - for (final Component component : persistentComponents.values()) { - // Note: component does not need to be detached. - // The constructors of ComponentRepositoryMetaAnalysisEvent and ComponentVulnerabilityAnalysisEvent - // merely call a few getters on it, but the component object itself is not passed around. - // Detaching would imply additional database interactions that we'd rather not do. 
- if (component.getPurl() != null) { - if (SUPPORTED_PACKAGE_URLS_FOR_INTEGRITY_CHECK.contains(component.getPurl().getType())) { - repoMetaAnalysisEvents.add(new ComponentRepositoryMetaAnalysisEvent(component.getUuid(), - component.getPurl().canonicalize(), component.isInternal(), FetchMeta.FETCH_META_INTEGRITY_DATA_AND_LATEST_VERSION)); - } else { - repoMetaAnalysisEvents.add(new ComponentRepositoryMetaAnalysisEvent(component.getUuid(), - component.getPurlCoordinates().toString(), component.isInternal(), FetchMeta.FETCH_META_LATEST_VERSION)); - } - } - vulnAnalysisEvents.add(new ComponentVulnerabilityAnalysisEvent( - ctx.uploadToken, component, VulnerabilityAnalysisLevel.BOM_UPLOAD_ANALYSIS, component.isNew())); - } + LOGGER.info("Processing %d components".formatted(bom.components().size())); + final Map persistentComponentsByIdentity = + processComponents(qm, persistentProject, bom.components(), bom.identitiesByBomRef(), bom.bomRefsByIdentity()); - trx.commit(); - } finally { - if (trx.isActive()) { - trx.rollback(); - } - } + LOGGER.info("Processing %d services".formatted(bom.services().size())); + final Map persistentServicesByIdentity = + processServices(qm, persistentProject, bom.services(), bom.identitiesByBomRef(), bom.bomRefsByIdentity()); - // Clear the PersistenceManager's L1 cache. - // Lessens some overhead of DataNucleus-internal housekeeping during - // the following persistence operations. - pm.evictAll(); - - final var dispatchedEvents = new ArrayList>(); - final var vulnAnalysisState = qm.getWorkflowStateByTokenAndStep(ctx.uploadToken, WorkflowStep.VULN_ANALYSIS); - if (!vulnAnalysisEvents.isEmpty()) { - qm.createVulnerabilityScan(TargetType.PROJECT, ctx.project.getUuid(), ctx.uploadToken.toString(), vulnAnalysisEvents.size()); - // Initiate vuln-analysis workflow for the token - if (vulnAnalysisState != null) { - vulnAnalysisState.setStartedAt(Date.from(Instant.now())); - qm.persist(vulnAnalysisState); - } + LOGGER.info("Processing %d dependency graph entries".formatted(bom.dependencyGraph().asMap().size())); + processDependencyGraph(qm, persistentProject, bom.dependencyGraph(), persistentComponentsByIdentity, bom.identitiesByBomRef()); - for (final ComponentVulnerabilityAnalysisEvent event : vulnAnalysisEvents) { - final CompletableFuture future = kafkaEventDispatcher.dispatchEvent(event) - .whenComplete((ignored, throwable) -> { - if (throwable != null) { - // Include context in the log message to make log correlation easier. - LOGGER.error("Failed to produce %s to Kafka (%s)".formatted(event, ctx), throwable); - } - }); - dispatchedEvents.add(future); - } - } else { - // No components to be sent for vulnerability analysis. - // If the BOM_PROCESSED notification was delayed, dispatch it now. 
- if (delayBomProcessedNotification) { - dispatchBomProcessedNotification(ctx); - } + recordBomImport(ctx, qm, persistentProject); - if (vulnAnalysisState != null) { - vulnAnalysisState.setStatus(WorkflowStatus.NOT_APPLICABLE); - vulnAnalysisState.setUpdatedAt(Date.from(Instant.now())); - qm.updateWorkflowState(vulnAnalysisState); - // make only policy evaluation state NA - var policyEvaluationState = qm.getWorkflowStateByTokenAndStep(ctx.uploadToken, WorkflowStep.POLICY_EVALUATION); - policyEvaluationState.setStatus(WorkflowStatus.NOT_APPLICABLE); - policyEvaluationState.setUpdatedAt(Date.from(Instant.now())); - qm.updateWorkflowState(policyEvaluationState); - // Trigger project metrics update no matter if vuln analysis is applicable or not - final ChainableEvent metricsUpdateEvent = new ProjectMetricsUpdateEvent(ctx.project.getUuid()); - metricsUpdateEvent.setChainIdentifier(ctx.uploadToken); - Event.dispatch(metricsUpdateEvent); - } - } - - for (final ComponentRepositoryMetaAnalysisEvent event : repoMetaAnalysisEvents) { - final ComponentRepositoryMetaAnalysisEvent eventToSend; - if (event.fetchMeta() == FetchMeta.FETCH_META_INTEGRITY_DATA_AND_LATEST_VERSION) { - final boolean shouldFetchIntegrityData = qm.runInTransaction(() -> prepareIntegrityMetaComponent(event, qm)); - if (shouldFetchIntegrityData) { - eventToSend = event; - } else { - // If integrity metadata was fetched recently, we don't want to fetch it again - // as it's unlikely to change frequently. Fall back to fetching only the latest - // version information. - eventToSend = new ComponentRepositoryMetaAnalysisEvent(null, event.purlCoordinates(), - event.internal(), FetchMeta.FETCH_META_LATEST_VERSION); - } - } else { - eventToSend = event; - } - - final CompletableFuture future = kafkaEventDispatcher.dispatchEvent(eventToSend) - .whenComplete((ignored, throwable) -> { - if (throwable != null) { - // Include context in the log message to make log correlation easier. - LOGGER.error("Failed to produce %s to Kafka (%s)".formatted(eventToSend, ctx), throwable); - } - }); - dispatchedEvents.add(future); - } - - // Before proceeding, wait for all events to be delivered successfully. 
- CompletableFuture.allOf(dispatchedEvents.toArray(new CompletableFuture[0])).join(); + return new ProcessedBom( + persistentProject, + persistentComponentsByIdentity.values(), + persistentServicesByIdentity.values() + ); + }); } } - private static void updateStateAndCancelDescendants(final Context ctx, WorkflowStep transientStep, WorkflowStatus transientStatus, String failureReason) { - try (var qm = new QueryManager()) { - WorkflowState workflowState = qm.getWorkflowStateByTokenAndStep(ctx.uploadToken, transientStep); - if (workflowState != null) { - workflowState.setStatus(transientStatus); - workflowState.setFailureReason(failureReason); - workflowState.setUpdatedAt(Date.from(Instant.now())); - WorkflowState updatedState = qm.updateWorkflowState(workflowState); - qm.updateAllDescendantStatesOfParent(updatedState, WorkflowStatus.CANCELLED, Date.from(Instant.now())); - } - } - } - - private static void updateState(final Context ctx, WorkflowStep transientStep, WorkflowStatus transientStatus) { - try (var qm = new QueryManager()) { - WorkflowState workflowState = qm.getWorkflowStateByTokenAndStep(ctx.uploadToken, transientStep); - if (workflowState != null) { - workflowState.setStatus(transientStatus); - workflowState.setUpdatedAt(Date.from(Instant.now())); - qm.updateWorkflowState(workflowState); - } - } - } - - - private static org.cyclonedx.model.Bom parseBom(final Context ctx, final File bomFile) throws BomConsumptionException { - final byte[] bomBytes; - try (final var bomFileInputStream = Files.newInputStream(bomFile.toPath(), StandardOpenOption.DELETE_ON_CLOSE)) { - bomBytes = bomFileInputStream.readAllBytes(); - } catch (IOException e) { - throw new BomConsumptionException(ctx, "Failed to read BOM file", e); - } - - // The file is verified to contain valid CycloneDX upon upload. 
- ctx.bomFormat = Bom.Format.CYCLONEDX; - - final org.cyclonedx.model.Bom bom; - try { - final Parser parser = BomParserFactory.createParser(bomBytes); - bom = parser.parse(bomBytes); - } catch (ParseException e) { - throw new BomConsumptionException(ctx, "Failed to parse BOM", e); - } - - ctx.bomSpecVersion = bom.getSpecVersion(); - if (bom.getSerialNumber() != null) { - ctx.bomSerialNumber = bom.getSerialNumber().replaceFirst("urn:uuid:", ""); - } - ctx.bomVersion = bom.getVersion(); - - - return bom; - } - - private static Project processProject(final Context ctx, final PersistenceManager pm, - final Project project, final ProjectMetadata projectMetadata) throws BomProcessingException { - final Query query = pm.newQuery(Project.class); + private static Project processProject(final Context ctx, final QueryManager qm, + final Project project, final ProjectMetadata projectMetadata) { + final Query query = qm.getPersistenceManager().newQuery(Project.class); query.setFilter("uuid == :uuid"); query.setParameters(ctx.project.getUuid()); @@ -506,11 +384,12 @@ private static Project processProject(final Context ctx, final PersistenceManage query.closeAll(); } if (persistentProject == null) { - throw new BomProcessingException(ctx, "Project does not exist"); + throw new IllegalStateException("Project does not exist"); } boolean hasChanged = false; if (project != null) { + persistentProject.setBomRef(project.getBomRef()); // Transient hasChanged |= applyIfChanged(persistentProject, project, Project::getAuthor, persistentProject::setAuthor); hasChanged |= applyIfChanged(persistentProject, project, Project::getPublisher, persistentProject::setPublisher); hasChanged |= applyIfChanged(persistentProject, project, Project::getClassifier, persistentProject::setClassifier); @@ -530,7 +409,7 @@ private static Project processProject(final Context ctx, final PersistenceManage if (projectMetadata != null) { if (persistentProject.getMetadata() == null) { projectMetadata.setProject(persistentProject); - pm.makePersistent(projectMetadata); + qm.getPersistenceManager().makePersistent(projectMetadata); hasChanged = true; } else { hasChanged |= applyIfChanged(persistentProject.getMetadata(), projectMetadata, ProjectMetadata::getAuthors, @@ -541,7 +420,7 @@ private static Project processProject(final Context ctx, final PersistenceManage } if (hasChanged) { - pm.flush(); + qm.getPersistenceManager().flush(); } return persistentProject; @@ -554,11 +433,9 @@ private static Map processComponents(final QueryMa final MultiValuedMap bomRefsByIdentity) { assertPersistent(project, "Project must be persistent"); - final PersistenceManager pm = qm.getPersistenceManager(); - // Fetch IDs of all components that exist in the project already. // We'll need them later to determine which components to delete. - final Set oldComponentIds = getAllComponentIds(pm, project, Component.class); + final Set idsOfComponentsToDelete = getAllComponentIds(qm, project, Component.class); // Avoid redundant queries by caching resolved licenses. // It is likely that if license IDs were present in a BOM, @@ -569,118 +446,82 @@ private static Map processComponents(final QueryMa // To avoid any conflicts with license IDs, cache those separately. 
final var customLicenseCache = new HashMap(); + final var internalComponentIdentifier = new InternalComponentIdentifier(); final var persistentComponents = new HashMap(); - try (final var flushHelper = new FlushHelper(qm, FLUSH_THRESHOLD)) { - for (final Component component : components) { - component.setInternal(isInternalComponent(component, qm)); - - // CycloneDX components can declare multiple licenses, but we currently - // only support one. We assume that the licenseCandidates list is ordered - // by priority, and simply take the first resolvable candidate. - for (final org.cyclonedx.model.License licenseCandidate : component.getLicenseCandidates()) { - if (isNotBlank(licenseCandidate.getId())) { - final License resolvedLicense = resolveLicense(pm, licenseCache, licenseCandidate.getId()); - if (resolvedLicense != null) { - component.setResolvedLicense(resolvedLicense); - component.setLicenseUrl(trimToNull(licenseCandidate.getUrl())); - break; - } - } - - if (isNotBlank(licenseCandidate.getName())) { - final License resolvedCustomLicense = resolveCustomLicense(pm, customLicenseCache, licenseCandidate.getName()); - if (resolvedCustomLicense != null) { - component.setResolvedLicense(resolvedCustomLicense); - component.setLicenseUrl(trimToNull(licenseCandidate.getUrl())); - break; - } - } - } + for (final Component component : components) { + component.setInternal(internalComponentIdentifier.isInternal(component)); + resolveAndApplyLicense(qm, component, licenseCache, customLicenseCache); - // If we were unable to resolve any license by its ID, at least - // populate the license name. Again assuming order by priority. - if (component.getResolvedLicense() == null) { - component.getLicenseCandidates().stream() - .filter(license -> isNotBlank(license.getName())) - .findFirst() - .ifPresent(license -> { - component.setLicense(trim(license.getName())); - component.setLicenseUrl(trimToNull(license.getUrl())); - }); - } - - final boolean isNewOrUpdated; - final var componentIdentity = new ComponentIdentity(component); - Component persistentComponent = qm.matchSingleIdentity(project, componentIdentity); - if (persistentComponent == null) { - component.setProject(project); - persistentComponent = pm.makePersistent(component); - persistentComponent.setNew(true); // transient - isNewOrUpdated = true; - } else { - var changed = false; - changed |= applyIfChanged(persistentComponent, component, Component::getAuthor, persistentComponent::setAuthor); - changed |= applyIfChanged(persistentComponent, component, Component::getPublisher, persistentComponent::setPublisher); - changed |= applyIfChanged(persistentComponent, component, Component::getSupplier, persistentComponent::setSupplier); - changed |= applyIfChanged(persistentComponent, component, Component::getClassifier, persistentComponent::setClassifier); - changed |= applyIfChanged(persistentComponent, component, Component::getGroup, persistentComponent::setGroup); - changed |= applyIfChanged(persistentComponent, component, Component::getName, persistentComponent::setName); - changed |= applyIfChanged(persistentComponent, component, Component::getVersion, persistentComponent::setVersion); - changed |= applyIfChanged(persistentComponent, component, Component::getDescription, persistentComponent::setDescription); - changed |= applyIfChanged(persistentComponent, component, Component::getCopyright, persistentComponent::setCopyright); - changed |= applyIfChanged(persistentComponent, component, Component::getCpe, persistentComponent::setCpe); - changed |= 
applyIfChanged(persistentComponent, component, Component::getPurl, persistentComponent::setPurl); - changed |= applyIfChanged(persistentComponent, component, Component::getSwidTagId, persistentComponent::setSwidTagId); - changed |= applyIfChanged(persistentComponent, component, Component::getMd5, persistentComponent::setMd5); - changed |= applyIfChanged(persistentComponent, component, Component::getSha1, persistentComponent::setSha1); - changed |= applyIfChanged(persistentComponent, component, Component::getSha256, persistentComponent::setSha256); - changed |= applyIfChanged(persistentComponent, component, Component::getSha384, persistentComponent::setSha384); - changed |= applyIfChanged(persistentComponent, component, Component::getSha512, persistentComponent::setSha512); - changed |= applyIfChanged(persistentComponent, component, Component::getSha3_256, persistentComponent::setSha3_256); - changed |= applyIfChanged(persistentComponent, component, Component::getSha3_384, persistentComponent::setSha3_384); - changed |= applyIfChanged(persistentComponent, component, Component::getSha3_512, persistentComponent::setSha3_512); - changed |= applyIfChanged(persistentComponent, component, Component::getBlake2b_256, persistentComponent::setBlake2b_256); - changed |= applyIfChanged(persistentComponent, component, Component::getBlake2b_384, persistentComponent::setBlake2b_384); - changed |= applyIfChanged(persistentComponent, component, Component::getBlake2b_512, persistentComponent::setBlake2b_512); - changed |= applyIfChanged(persistentComponent, component, Component::getBlake3, persistentComponent::setBlake3); - changed |= applyIfChanged(persistentComponent, component, Component::getResolvedLicense, persistentComponent::setResolvedLicense); - changed |= applyIfChanged(persistentComponent, component, Component::getLicense, persistentComponent::setLicense); - changed |= applyIfChanged(persistentComponent, component, Component::getLicenseUrl, persistentComponent::setLicenseUrl); - changed |= applyIfChanged(persistentComponent, component, Component::isInternal, persistentComponent::setInternal); - changed |= applyIfChanged(persistentComponent, component, Component::getExternalReferences, persistentComponent::setExternalReferences); - isNewOrUpdated = changed; - - // BOM ref is transient and thus doesn't count towards the changed status. - persistentComponent.setBomRef(component.getBomRef()); - - // Exclude from components to delete. - if (!oldComponentIds.isEmpty()) { - oldComponentIds.remove(persistentComponent.getId()); - } + final var componentIdentity = new ComponentIdentity(component); + Component persistentComponent; + try { + persistentComponent = qm.matchSingleIdentityExact(project, componentIdentity); + } catch (JDOUserException e) { + if (!(ExceptionUtils.getRootCause(e) instanceof QueryNotUniqueException)) { + throw e; } - // Update component identities in our Identity->BOMRef map, - // as after persisting the components, their identities now include UUIDs. - // Applications like the frontend rely on UUIDs being there. 
- final var newIdentity = new ComponentIdentity(persistentComponent); - final ComponentIdentity oldIdentity = identitiesByBomRef.put(persistentComponent.getBomRef(), newIdentity); - for (final String bomRef : bomRefsByIdentity.get(oldIdentity)) { - identitiesByBomRef.put(bomRef, newIdentity); - } - persistentComponents.put(newIdentity, persistentComponent); + LOGGER.warn(""" + More than one existing component matches the identity %s; \ + proceeding with the first match; others will be deleted\ + """.formatted(componentIdentity.toJSON())); + persistentComponent = qm.matchFirstIdentityExact(project, componentIdentity); + } + if (persistentComponent == null) { + component.setProject(project); + persistentComponent = qm.getPersistenceManager().makePersistent(component); + persistentComponent.setNew(true); // Transient + } else { + persistentComponent.setBomRef(component.getBomRef()); // Transient + applyIfChanged(persistentComponent, component, Component::getAuthor, persistentComponent::setAuthor); + applyIfChanged(persistentComponent, component, Component::getPublisher, persistentComponent::setPublisher); + applyIfChanged(persistentComponent, component, Component::getSupplier, persistentComponent::setSupplier); + applyIfChanged(persistentComponent, component, Component::getClassifier, persistentComponent::setClassifier); + applyIfChanged(persistentComponent, component, Component::getGroup, persistentComponent::setGroup); + applyIfChanged(persistentComponent, component, Component::getName, persistentComponent::setName); + applyIfChanged(persistentComponent, component, Component::getVersion, persistentComponent::setVersion); + applyIfChanged(persistentComponent, component, Component::getDescription, persistentComponent::setDescription); + applyIfChanged(persistentComponent, component, Component::getCopyright, persistentComponent::setCopyright); + applyIfChanged(persistentComponent, component, Component::getCpe, persistentComponent::setCpe); + applyIfChanged(persistentComponent, component, Component::getPurl, persistentComponent::setPurl); + applyIfChanged(persistentComponent, component, Component::getSwidTagId, persistentComponent::setSwidTagId); + applyIfChanged(persistentComponent, component, Component::getMd5, persistentComponent::setMd5); + applyIfChanged(persistentComponent, component, Component::getSha1, persistentComponent::setSha1); + applyIfChanged(persistentComponent, component, Component::getSha256, persistentComponent::setSha256); + applyIfChanged(persistentComponent, component, Component::getSha384, persistentComponent::setSha384); + applyIfChanged(persistentComponent, component, Component::getSha512, persistentComponent::setSha512); + applyIfChanged(persistentComponent, component, Component::getSha3_256, persistentComponent::setSha3_256); + applyIfChanged(persistentComponent, component, Component::getSha3_384, persistentComponent::setSha3_384); + applyIfChanged(persistentComponent, component, Component::getSha3_512, persistentComponent::setSha3_512); + applyIfChanged(persistentComponent, component, Component::getBlake2b_256, persistentComponent::setBlake2b_256); + applyIfChanged(persistentComponent, component, Component::getBlake2b_384, persistentComponent::setBlake2b_384); + applyIfChanged(persistentComponent, component, Component::getBlake2b_512, persistentComponent::setBlake2b_512); + applyIfChanged(persistentComponent, component, Component::getBlake3, persistentComponent::setBlake3); + applyIfChanged(persistentComponent, component, Component::getResolvedLicense, 
persistentComponent::setResolvedLicense); + applyIfChanged(persistentComponent, component, Component::getLicense, persistentComponent::setLicense); + applyIfChanged(persistentComponent, component, Component::getLicenseUrl, persistentComponent::setLicenseUrl); + applyIfChanged(persistentComponent, component, Component::isInternal, persistentComponent::setInternal); + applyIfChanged(persistentComponent, component, Component::getExternalReferences, persistentComponent::setExternalReferences); + idsOfComponentsToDelete.remove(persistentComponent.getId()); + } - if (isNewOrUpdated) { // Flushing is only necessary when something changed - flushHelper.maybeFlush(); - } + // Update component identities in our Identity->BOMRef map, + // as after persisting the components, their identities now include UUIDs. + final var newIdentity = new ComponentIdentity(persistentComponent); + final ComponentIdentity oldIdentity = identitiesByBomRef.put(persistentComponent.getBomRef(), newIdentity); + for (final String bomRef : bomRefsByIdentity.get(oldIdentity)) { + identitiesByBomRef.put(bomRef, newIdentity); } + + persistentComponents.put(newIdentity, persistentComponent); } - // License cache is no longer needed; Let go of it. - licenseCache.clear(); - customLicenseCache.clear(); + qm.getPersistenceManager().flush(); - // Delete components that existed before this BOM import, but do not exist anymore. - deleteComponentsById(pm, oldComponentIds); + final long componentsDeleted = deleteComponentsById(qm, idsOfComponentsToDelete); + if (componentsDeleted > 0) { + qm.getPersistenceManager().flush(); + } return persistentComponents; } @@ -692,118 +533,102 @@ private static Map processServices(final Qu final MultiValuedMap bomRefsByIdentity) { assertPersistent(project, "Project must be persistent"); - final PersistenceManager pm = qm.getPersistenceManager(); - // Fetch IDs of all services that exist in the project already. // We'll need them later to determine which services to delete. 
- final Set oldServiceIds = getAllComponentIds(pm, project, ServiceComponent.class); + final Set idsOfServicesToDelete = getAllComponentIds(qm, project, ServiceComponent.class); final var persistentServices = new HashMap(); - try (final var flushHelper = new FlushHelper(qm, FLUSH_THRESHOLD)) { - for (final ServiceComponent service : services) { - final boolean isNewOrUpdated; - final var componentIdentity = new ComponentIdentity(service); - ServiceComponent persistentService = qm.matchServiceIdentity(project, componentIdentity); - if (persistentService == null) { - service.setProject(project); - persistentService = pm.makePersistent(service); - isNewOrUpdated = true; - } else { - var changed = false; - changed |= applyIfChanged(persistentService, service, ServiceComponent::getGroup, persistentService::setGroup); - changed |= applyIfChanged(persistentService, service, ServiceComponent::getName, persistentService::setName); - changed |= applyIfChanged(persistentService, service, ServiceComponent::getVersion, persistentService::setVersion); - changed |= applyIfChanged(persistentService, service, ServiceComponent::getDescription, persistentService::setDescription); - changed |= applyIfChanged(persistentService, service, ServiceComponent::getAuthenticated, persistentService::setAuthenticated); - changed |= applyIfChanged(persistentService, service, ServiceComponent::getCrossesTrustBoundary, persistentService::setCrossesTrustBoundary); - changed |= applyIfChanged(persistentService, service, ServiceComponent::getExternalReferences, persistentService::setExternalReferences); - changed |= applyIfChanged(persistentService, service, ServiceComponent::getProvider, persistentService::setProvider); - changed |= applyIfChanged(persistentService, service, ServiceComponent::getData, persistentService::setData); - changed |= applyIfChanged(persistentService, service, ServiceComponent::getEndpoints, persistentService::setEndpoints); - isNewOrUpdated = changed; - - // BOM ref is transient and thus doesn't count towards the changed status. - persistentService.setBomRef(service.getBomRef()); - - // Exclude from components to delete. - if (!oldServiceIds.isEmpty()) { - oldServiceIds.remove(persistentService.getId()); - } - } - - // Update component identities in our Identity->BOMRef map, - // as after persisting the services, their identities now include UUIDs. - // Applications like the frontend rely on UUIDs being there. 
- final var newIdentity = new ComponentIdentity(persistentService); - final ComponentIdentity oldIdentity = identitiesByBomRef.put(service.getBomRef(), newIdentity); - for (final String bomRef : bomRefsByIdentity.get(oldIdentity)) { - identitiesByBomRef.put(bomRef, newIdentity); - } - persistentServices.put(newIdentity, persistentService); + for (final ServiceComponent service : services) { + final var componentIdentity = new ComponentIdentity(service); + ServiceComponent persistentService = qm.matchServiceIdentity(project, componentIdentity); + if (persistentService == null) { + service.setProject(project); + persistentService = qm.getPersistenceManager().makePersistent(service); + } else { + persistentService.setBomRef(service.getBomRef()); // Transient + applyIfChanged(persistentService, service, ServiceComponent::getGroup, persistentService::setGroup); + applyIfChanged(persistentService, service, ServiceComponent::getName, persistentService::setName); + applyIfChanged(persistentService, service, ServiceComponent::getVersion, persistentService::setVersion); + applyIfChanged(persistentService, service, ServiceComponent::getDescription, persistentService::setDescription); + applyIfChanged(persistentService, service, ServiceComponent::getAuthenticated, persistentService::setAuthenticated); + applyIfChanged(persistentService, service, ServiceComponent::getCrossesTrustBoundary, persistentService::setCrossesTrustBoundary); + applyIfChanged(persistentService, service, ServiceComponent::getExternalReferences, persistentService::setExternalReferences); + applyIfChanged(persistentService, service, ServiceComponent::getProvider, persistentService::setProvider); + applyIfChanged(persistentService, service, ServiceComponent::getData, persistentService::setData); + applyIfChanged(persistentService, service, ServiceComponent::getEndpoints, persistentService::setEndpoints); + idsOfServicesToDelete.remove(persistentService.getId()); + } - if (isNewOrUpdated) { // Flushing is only necessary when something changed - flushHelper.maybeFlush(); - } + // Update component identities in our Identity->BOMRef map, + // as after persisting the services, their identities now include UUIDs. + final var newIdentity = new ComponentIdentity(persistentService); + final ComponentIdentity oldIdentity = identitiesByBomRef.put(service.getBomRef(), newIdentity); + for (final String bomRef : bomRefsByIdentity.get(oldIdentity)) { + identitiesByBomRef.put(bomRef, newIdentity); } + + persistentServices.put(newIdentity, persistentService); } - // Delete components that existed before this BOM import, but do not exist anymore. 
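
The identity re-mapping above is easiest to see with concrete values. A self-contained illustration using plain Strings in place of ComponentIdentity (refs and identity values are made up):

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.commons.collections4.MultiValuedMap;
    import org.apache.commons.collections4.multimap.HashSetValuedHashMap;

    class BomRefRemapSketch {
        public static void main(String[] args) {
            final MultiValuedMap<String, String> bomRefsByIdentity = new HashSetValuedHashMap<>();
            final Map<String, String> identitiesByBomRef = new HashMap<>();

            // During de-duplication, two BOM refs collapse onto the same identity.
            bomRefsByIdentity.put("acme-lib@1.0", "ref-1");
            bomRefsByIdentity.put("acme-lib@1.0", "ref-2");
            identitiesByBomRef.put("ref-1", "acme-lib@1.0");
            identitiesByBomRef.put("ref-2", "acme-lib@1.0");

            // After persisting, the identity gains a UUID; every ref that pointed at
            // the old identity is re-pointed, mirroring the loop in the task above.
            final String newIdentity = "acme-lib@1.0/uuid=9a1f";
            final String oldIdentity = identitiesByBomRef.put("ref-1", newIdentity);
            for (final String bomRef : bomRefsByIdentity.get(oldIdentity)) {
                identitiesByBomRef.put(bomRef, newIdentity);
            }
        }
    }
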
- deleteServicesById(pm, oldServiceIds); + qm.getPersistenceManager().flush(); + + final long servicesDeleted = deleteServicesById(qm, idsOfServicesToDelete); + if (servicesDeleted > 0) { + qm.getPersistenceManager().flush(); + } return persistentServices; } - private static void processDependencyGraph(final Context ctx, final PersistenceManager pm, final org.cyclonedx.model.Bom cdxBom, - final Project project, final Map componentsByIdentity, - @SuppressWarnings("unused") final Map servicesByIdentity, - final Map identitiesByBomRef) { + private void processDependencyGraph(final QueryManager qm, + final Project project, + final MultiValuedMap dependencyGraph, + final Map componentsByIdentity, + final Map identitiesByBomRef) { assertPersistent(project, "Project must be persistent"); - if (cdxBom.getMetadata() != null - && cdxBom.getMetadata().getComponent() != null - && cdxBom.getMetadata().getComponent().getBomRef() != null) { - final org.cyclonedx.model.Dependency dependency = - findDependencyByBomRef(cdxBom.getDependencies(), cdxBom.getMetadata().getComponent().getBomRef()); - final String directDependenciesJson = resolveDirectDependenciesJson(ctx, dependency, identitiesByBomRef); + if (project.getBomRef() != null) { + final Collection directDependencyBomRefs = dependencyGraph.get(project.getBomRef()); + final String directDependenciesJson = resolveDirectDependenciesJson(project.getBomRef(), directDependencyBomRefs, identitiesByBomRef); if (!Objects.equals(directDependenciesJson, project.getDirectDependencies())) { project.setDirectDependencies(directDependenciesJson); - pm.flush(); + qm.getPersistenceManager().flush(); } } else { // Make sure we don't retain stale data from previous BOM uploads. if (project.getDirectDependencies() != null) { project.setDirectDependencies(null); - pm.flush(); + qm.getPersistenceManager().flush(); } } - try (final var flushHelper = new FlushHelper(pm, FLUSH_THRESHOLD)) { - for (final Map.Entry entry : identitiesByBomRef.entrySet()) { - final org.cyclonedx.model.Dependency dependency = findDependencyByBomRef(cdxBom.getDependencies(), entry.getKey()); - final String directDependenciesJson = resolveDirectDependenciesJson(ctx, dependency, identitiesByBomRef); - - final ComponentIdentity dependencyIdentity = identitiesByBomRef.get(entry.getKey()); - final Component component = componentsByIdentity.get(dependencyIdentity); - // TODO: Check servicesByIdentity when persistentComponent is null - // We do not currently store directDependencies for ServiceComponent - if (component != null) { - assertPersistent(component, "Component must be persistent"); - if (!Objects.equals(directDependenciesJson, component.getDirectDependencies())) { - component.setDirectDependencies(directDependenciesJson); - flushHelper.maybeFlush(); - } - } else { - LOGGER.warn(""" - Unable to resolve component identity %s to a persistent component; \ - As a result, the dependency graph of project %s will likely be incomplete (%s)""" - .formatted(dependencyIdentity.toJSON(), ctx.project.getUuid(), ctx)); + for (final Map.Entry entry : identitiesByBomRef.entrySet()) { + final String componentBomRef = entry.getKey(); + final Collection directDependencyBomRefs = dependencyGraph.get(componentBomRef); + final String directDependenciesJson = resolveDirectDependenciesJson(componentBomRef, directDependencyBomRefs, identitiesByBomRef); + + final ComponentIdentity dependencyIdentity = identitiesByBomRef.get(entry.getKey()); + final Component component = componentsByIdentity.get(dependencyIdentity); + // 
TODO: Check servicesByIdentity when persistentComponent is null
+            // We do not currently store directDependencies for ServiceComponent
+            if (component != null) {
+                assertPersistent(component, "Component must be persistent");
+                if (!Objects.equals(directDependenciesJson, component.getDirectDependencies())) {
+                    component.setDirectDependencies(directDependenciesJson);
                 }
+            } else {
+                LOGGER.warn("""
+                        Unable to resolve component identity %s to a persistent component; \
+                        As a result, the dependency graph will likely be incomplete\
+                        """.formatted(dependencyIdentity.toJSON()));
             }
         }
+
+        qm.getPersistenceManager().flush();
     }

-    private static void recordBomImport(final Context ctx, final PersistenceManager pm, final Project project, Date bomGeneratedTimestamp) {
+    private static void recordBomImport(final Context ctx, final QueryManager qm, final Project project) {
         assertPersistent(project, "Project must be persistent");

         final var bomImportDate = new Date();
@@ -815,166 +640,140 @@ private static void recordBomImport(final Context ctx, final PersistenceManager
         bom.setSerialNumber(ctx.bomSerialNumber);
         bom.setBomVersion(ctx.bomVersion);
         bom.setImported(bomImportDate);
-        bom.setGenerated(bomGeneratedTimestamp);
-        pm.makePersistent(bom);
+        bom.setGenerated(ctx.bomTimestamp);
+        qm.getPersistenceManager().makePersistent(bom);
         project.setLastBomImport(bomImportDate);
         project.setLastBomImportFormat("%s %s".formatted(ctx.bomFormat.getFormatShortName(), ctx.bomSpecVersion));
     }

-    private static String resolveDirectDependenciesJson(final Context ctx,
-                                                        final org.cyclonedx.model.Dependency dependency,
-                                                        final Map<String, ComponentIdentity> identitiesByBomRef) {
-        final var jsonDependencies = new JSONArray();
+    private String resolveDirectDependenciesJson(final String dependencyBomRef,
+                                                 final Collection<String> directDependencyBomRefs,
+                                                 final Map<String, ComponentIdentity> identitiesByBomRef) {
+        if (directDependencyBomRefs == null || directDependencyBomRefs.isEmpty()) {
+            return null;
+        }

-        if (dependency != null && dependency.getDependencies() != null) {
-            for (final org.cyclonedx.model.Dependency subDependency : dependency.getDependencies()) {
-                final ComponentIdentity subDependencyIdentity = identitiesByBomRef.get(subDependency.getRef());
-                if (subDependencyIdentity != null) {
-                    jsonDependencies.put(subDependencyIdentity.toJSON());
-                } else {
-                    LOGGER.warn("""
-                            Unable to resolve BOM ref %s to a component identity; \
-                            As a result, the dependency graph of project %s will likely be incomplete (%s)"""
-                            .formatted(dependency.getRef(), ctx.project.getUuid(), ctx));
-                }
+        final var jsonDependencies = new JSONArray();
+        for (final String directDependencyBomRef : directDependencyBomRefs) {
+            final ComponentIdentity directDependencyIdentity = identitiesByBomRef.get(directDependencyBomRef);
+            if (directDependencyIdentity != null) {
+                jsonDependencies.put(directDependencyIdentity.toJSON());
+            } else {
+                LOGGER.warn("""
+                        Unable to resolve BOM ref %s to a component identity while processing direct \
+                        dependencies of BOM ref %s; As a result, the dependency graph will likely be incomplete\
+                        """.formatted(directDependencyBomRef, dependencyBomRef));
             }
         }

         return jsonDependencies.isEmpty() ? null : jsonDependencies.toString();
     }

-    /**
-     * Re-implementation of {@link QueryManager#recursivelyDelete(Component, boolean)} that does not use multiple
-     * small {@link Transaction}s, but relies on an already active one instead. Instead of committing, it uses
-     * {@link FlushHelper} to flush changes every {@link #FLUSH_THRESHOLD} write operations.
-     * <p>
- * TODO: Move to {@link QueryManager}; Implement for {@link Project}s as well. - * When working on #636. - * - * @param pm The {@link PersistenceManager} to use - * @param componentIds IDs of {@link Component}s to delete - */ - private static void deleteComponentsById(final PersistenceManager pm, final Set componentIds) { + private static long deleteComponentsById(final QueryManager qm, final Collection componentIds) { if (componentIds.isEmpty()) { - return; + return 0; } - try (final var flushHelper = new FlushHelper(pm, FLUSH_THRESHOLD)) { - for (final Long componentId : componentIds) { - // Note: Bulk DELETE queries are executed directly in the database and do not need to be flushed. - pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.AnalysisComment WHERE analysis.component.id == :cid").execute(componentId); - pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.Analysis WHERE component.id == :cid").execute(componentId); - pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.ViolationAnalysisComment WHERE violationAnalysis.component.id == :cid").execute(componentId); - pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.ViolationAnalysis WHERE component.id == :cid").execute(componentId); - pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.DependencyMetrics WHERE component.id == :cid").execute(componentId); - pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.FindingAttribution WHERE component.id == :cid").execute(componentId); - pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.PolicyViolation WHERE component.id == :cid").execute(componentId); - pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.IntegrityAnalysis WHERE component.id == :cid").execute(componentId); - - // Can't use bulk DELETE for the component itself, as it doesn't remove entries from - // relationship tables like COMPONENTS_VULNERABILITIES. deletePersistentAll does, but - // it will also fetch the component prior to deleting it, which is slightly inefficient. 
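
The replacement below swaps this per-ID loop for set-based JDOQL deletes using collection-valued parameters. The pattern in isolation (entity class from this codebase, IDs illustrative):

    import java.util.Set;
    import javax.jdo.PersistenceManager;
    import javax.jdo.Query;

    class BulkDeleteSketch {
        // One DELETE statement covering all IDs at once, instead of one query per ID.
        static void deleteAnalyses(final PersistenceManager pm, final Set<Long> componentIds) {
            pm.newQuery(Query.JDOQL,
                    "DELETE FROM org.dependencytrack.model.Analysis WHERE :ids.contains(component.id)")
                    .execute(componentIds);
        }
    }
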
- pm.newQuery(Component.class, "id == :cid").deletePersistentAll(componentId); - flushHelper.maybeFlush(); - } - } + final PersistenceManager pm = qm.getPersistenceManager(); + LOGGER.info("Deleting %d component(s) that are no longer part of the project".formatted(componentIds.size())); + pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.AnalysisComment WHERE :ids.contains(analysis.component.id)").execute(componentIds); + pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.Analysis WHERE :ids.contains(component.id)").execute(componentIds); + pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.ViolationAnalysisComment WHERE :ids.contains(violationAnalysis.component.id)").execute(componentIds); + pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.ViolationAnalysis WHERE :ids.contains(component.id)").execute(componentIds); + pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.DependencyMetrics WHERE :ids.contains(component.id)").execute(componentIds); + pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.FindingAttribution WHERE :ids.contains(component.id)").execute(componentIds); + pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.PolicyViolation WHERE :ids.contains(component.id)").execute(componentIds); + pm.newQuery(Query.JDOQL, "DELETE FROM org.dependencytrack.model.IntegrityAnalysis WHERE :ids.contains(component.id)").execute(componentIds); + return pm.newQuery(Component.class, ":ids.contains(id)").deletePersistentAll(componentIds); } - /** - * Re-implementation of {@link QueryManager#recursivelyDelete(ServiceComponent, boolean)} that does not use multiple - * small {@link Transaction}s, but relies on an already active one instead. Instead of committing, it uses - * {@link FlushHelper} to flush changes every {@link #FLUSH_THRESHOLD} write operations. - * - * @param pm The {@link PersistenceManager} to use - * @param serviceIds IDs of {@link ServiceComponent}s to delete - */ - private static void deleteServicesById(final PersistenceManager pm, final Set serviceIds) { + private static long deleteServicesById(final QueryManager qm, final Collection serviceIds) { if (serviceIds.isEmpty()) { - return; + return 0; } - try (final var flushHelper = new FlushHelper(pm, FLUSH_THRESHOLD)) { - for (final Long serviceId : serviceIds) { - // Can't use bulk DELETE for the component itself, as it doesn't remove entries from - // relationship tables like COMPONENTS_VULNERABILITIES. deletePersistentAll does, but - // it will also fetch the component prior to deleting it, which is slightly inefficient. - pm.newQuery(ServiceComponent.class, "id == :cid").deletePersistentAll(serviceId); - flushHelper.maybeFlush(); + final PersistenceManager pm = qm.getPersistenceManager(); + LOGGER.info("Deleting %d service(s) that are no longer part of the project".formatted(serviceIds.size())); + return pm.newQuery(ServiceComponent.class, ":ids.contains(id)").deletePersistentAll(serviceIds); + } + + private static void resolveAndApplyLicense(final QueryManager qm, + final Component component, + final Map licenseCache, + final Map customLicenseCache) { + // CycloneDX components can declare multiple licenses, but we currently + // only support one. We assume that the licenseCandidates list is ordered + // by priority, and simply take the first resolvable candidate. 
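
A note on why the caches here store License.UNRESOLVED rather than null: Map.computeIfAbsent treats a null result as "no mapping", so failed lookups would never be cached and the database would be queried once per component. A compact illustration (the record is a stand-in for the real model class):

    import java.util.HashMap;
    import java.util.Map;

    class LicenseCacheSketch {
        record License(String id) {
            static final License UNRESOLVED = new License(null);
        }

        public static void main(String[] args) {
            final Map<String, License> cache = new HashMap<>();
            // The mapping function must never return null, otherwise nothing is cached
            // and the (expensive) lookup would run again for every component.
            final License license = cache.computeIfAbsent("Apache-2.0", id -> License.UNRESOLVED);
            if (license != License.UNRESOLVED) {
                // use the resolved license
            }
        }
    }
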
+ for (final org.cyclonedx.model.License licenseCandidate : component.getLicenseCandidates()) { + if (isNotBlank(licenseCandidate.getId())) { + final License resolvedLicense = licenseCache.computeIfAbsent(licenseCandidate.getId(), + licenseId -> resolveLicense(qm, licenseId)); + if (resolvedLicense != License.UNRESOLVED) { + component.setResolvedLicense(resolvedLicense); + component.setLicenseUrl(trimToNull(licenseCandidate.getUrl())); + break; + } + } + + if (isNotBlank(licenseCandidate.getName())) { + final License resolvedLicense = licenseCache.computeIfAbsent(licenseCandidate.getName(), + licenseName -> resolveLicense(qm, licenseName)); + if (resolvedLicense != License.UNRESOLVED) { + component.setResolvedLicense(resolvedLicense); + component.setLicenseUrl(trimToNull(licenseCandidate.getUrl())); + break; + } + + final License resolvedCustomLicense = customLicenseCache.computeIfAbsent(licenseCandidate.getName(), + licenseName -> resolveCustomLicense(qm, licenseName)); + if (resolvedCustomLicense != License.UNRESOLVED) { + component.setResolvedLicense(resolvedCustomLicense); + component.setLicenseUrl(trimToNull(licenseCandidate.getUrl())); + break; + } } } - } - /** - * Lookup a {@link License} by its ID, and cache the result in {@code cache}. - * - * @param pm The {@link PersistenceManager} to use - * @param cache A {@link Map} to use for caching - * @param licenseId The {@link License} ID to lookup - * @return The resolved {@link License}, or {@code null} if no {@link License} was found - */ - private static License resolveLicense(final PersistenceManager pm, final Map cache, final String licenseId) { - if (cache.containsKey(licenseId)) { - return cache.get(licenseId); + // If we were unable to resolve any license by its ID, at least + // populate the license name. Again assuming order by priority. + if (component.getResolvedLicense() == null) { + component.getLicenseCandidates().stream() + .filter(license -> isNotBlank(license.getName())) + .findFirst() + .ifPresent(license -> { + component.setLicense(trim(license.getName())); + component.setLicenseUrl(trimToNull(license.getUrl())); + }); } + } - final Query query = pm.newQuery(License.class); - query.setFilter("licenseId == :licenseId"); - query.setParameters(licenseId); - final License license; + private static License resolveLicense(final QueryManager qm, final String licenseIdOrName) { + final Query query = qm.getPersistenceManager().newQuery(License.class); + query.setFilter("licenseId == :licenseIdOrName || name == :licenseIdOrName"); + query.setNamedParameters(Map.of("licenseIdOrName", licenseIdOrName)); try { - license = query.executeUnique(); + final License license = query.executeUnique(); + return license != null ? license : License.UNRESOLVED; } finally { query.closeAll(); } - - cache.put(licenseId, license); - return license; } - /** - * Lookup a custom {@link License} by its name, and cache the result in {@code cache}. 
- * - * @param pm The {@link PersistenceManager} to use - * @param cache A {@link Map} to use for caching - * @param licenseName The {@link License} name to lookup - * @return The resolved {@link License}, or {@code null} if no {@link License} was found - */ - private static License resolveCustomLicense(final PersistenceManager pm, final Map cache, final String licenseName) { - if (cache.containsKey(licenseName)) { - return cache.get(licenseName); - } - - final Query query = pm.newQuery(License.class); + private static License resolveCustomLicense(final QueryManager qm, final String licenseName) { + final Query query = qm.getPersistenceManager().newQuery(License.class); query.setFilter("name == :name && customLicense == true"); query.setParameters(licenseName); - final License license; try { - license = query.executeUnique(); + final License license = query.executeUnique(); + return license != null ? license : License.UNRESOLVED; } finally { query.closeAll(); } - - cache.put(licenseName, license); - return license; } - private static org.cyclonedx.model.Dependency findDependencyByBomRef(final List dependencies, final String bomRef) { - if (dependencies == null || dependencies.isEmpty() || bomRef == null) { - return null; - } - - for (final org.cyclonedx.model.Dependency dependency : dependencies) { - if (bomRef.equals(dependency.getRef())) { - return dependency; - } - } - - return null; - } - - private static Set getAllComponentIds(final PersistenceManager pm, final Project project, final Class clazz) { - final Query query = pm.newQuery(clazz); + private static Set getAllComponentIds(final QueryManager qm, final Project project, final Class clazz) { + final Query query = qm.getPersistenceManager().newQuery(clazz); query.setFilter("project == :project"); query.setParameters(project); query.setResult("id"); @@ -986,131 +785,270 @@ private static Set getAllComponentIds(final PersistenceManager pm, fin } } - /** - * Factory for a stateful {@link Predicate} for de-duplicating {@link Component}s based on their {@link ComponentIdentity}. - *

- * The predicate will populate {@code identitiesByBomRef} and {@code bomRefsByIdentity}. - * - * @param identitiesByBomRef The mapping of BOM refs to {@link ComponentIdentity}s to populate - * @param bomRefsByIdentity The mapping of {@link ComponentIdentity}s to BOM refs to populate - * @return A {@link Predicate} to use in {@link Stream#filter(Predicate)} - */ private static Predicate distinctComponentsByIdentity(final Map identitiesByBomRef, final MultiValuedMap bomRefsByIdentity) { final var identitiesSeen = new HashSet(); - return component -> { final var componentIdentity = new ComponentIdentity(component); - identitiesByBomRef.putIfAbsent(component.getBomRef(), componentIdentity); + + final boolean isBomRefUnique = identitiesByBomRef.putIfAbsent(component.getBomRef(), componentIdentity) == null; + if (!isBomRefUnique) { + LOGGER.warn(""" + BOM ref %s is associated with multiple components in the BOM; \ + BOM refs are required to be unique; Please report this to the vendor \ + of the tool that generated the BOM""".formatted(component.getBomRef())); + } + bomRefsByIdentity.put(componentIdentity, component.getBomRef()); - return identitiesSeen.add(componentIdentity); + + final boolean isSeenBefore = !identitiesSeen.add(componentIdentity); + if (LOGGER.isDebugEnabled() && isSeenBefore) { + LOGGER.debug("Filtering component with BOM ref %s and identity %s due to duplicate identity" + .formatted(component.getBomRef(), componentIdentity.toJSON())); + } + + return !isSeenBefore; }; } - /** - * Factory for a stateful {@link Predicate} for de-duplicating {@link ServiceComponent}s based on their {@link ComponentIdentity}. - *

- * The predicate will populate {@code identitiesByBomRef} and {@code bomRefsByIdentity}. - * - * @param identitiesByBomRef The mapping of BOM refs to {@link ComponentIdentity}s to populate - * @param bomRefsByIdentity The mapping of {@link ComponentIdentity}s to BOM refs to populate - * @return A {@link Predicate} to use in {@link Stream#filter(Predicate)} - */ private static Predicate distinctServicesByIdentity(final Map identitiesByBomRef, final MultiValuedMap bomRefsByIdentity) { final var identitiesSeen = new HashSet(); - return service -> { final var componentIdentity = new ComponentIdentity(service); identitiesByBomRef.putIfAbsent(service.getBomRef(), componentIdentity); bomRefsByIdentity.put(componentIdentity, service.getBomRef()); - return identitiesSeen.add(componentIdentity); + final boolean isSeenBefore = !identitiesSeen.add(componentIdentity); + if (LOGGER.isDebugEnabled() && isSeenBefore) { + LOGGER.debug("Filtering service with BOM ref %s and identity %s due to duplicate identity" + .formatted(service.getBomRef(), componentIdentity.toJSON())); + } + + return !isSeenBefore; }; } - private void dispatchBomProcessedNotification(final Context ctx) { - kafkaEventDispatcher.dispatchNotification(new Notification() - .scope(NotificationScope.PORTFOLIO) - .group(NotificationGroup.BOM_PROCESSED) - .level(NotificationLevel.INFORMATIONAL) - .title(NotificationConstants.Title.BOM_PROCESSED) - .content("A %s BOM was processed".formatted(ctx.bomFormat.getFormatShortName())) - // FIXME: Add reference to BOM after we have dedicated BOM server - .subject(new BomConsumedOrProcessed(ctx.uploadToken, ctx.project, /* bom */ "(Omitted)", ctx.bomFormat, ctx.bomSpecVersion))); + private static void startBomConsumptionWorkflowStep(final Context ctx) { + // TODO: This should be a single UPDATE query. + try (final var qm = new QueryManager()) { + qm.runInTransaction(() -> { + final WorkflowState bomConsumptionState = + qm.getWorkflowStateByTokenAndStep(ctx.token, WorkflowStep.BOM_CONSUMPTION); + bomConsumptionState.setStartedAt(Date.from(Instant.now())); + }); + } } - /** - * An {@link Exception} that signals failures during BOM processing. - */ - private static final class BomProcessingException extends Exception { - - private final Context ctx; + private static void startBomProcessingWorkflowStep(final Context ctx) { + // TODO: This should be a batched UPDATE query. + try (var qm = new QueryManager()) { + qm.runInTransaction(() -> { + final WorkflowState bomConsumptionState = + qm.getWorkflowStateByTokenAndStep(ctx.token, WorkflowStep.BOM_CONSUMPTION); + bomConsumptionState.setStatus(WorkflowStatus.COMPLETED); + bomConsumptionState.setUpdatedAt(Date.from(Instant.now())); - private BomProcessingException(final Context ctx, final String message, final Throwable cause) { - super(message, cause); - this.ctx = ctx; + final WorkflowState bomProcessingState = + qm.getWorkflowStateByTokenAndStep(ctx.token, WorkflowStep.BOM_PROCESSING); + bomProcessingState.setStartedAt(Date.from(Instant.now())); + }); } + } - private BomProcessingException(final Context ctx, final String message) { - this(ctx, message, null); + private static void completeBomProcessingWorkflowStep(final Context ctx) { + // TODO: This should be a single UPDATE query. 
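+        // A possible shape for that single UPDATE (hypothetical JDOQL bulk-update
+        // sketch, not part of this patch; DataNucleus offers bulk updates as an
+        // extension to standard JDOQL):
+        //
+        //   qm.getPersistenceManager()
+        //           .newQuery(Query.JDOQL, """
+        //                   UPDATE org.dependencytrack.model.WorkflowState
+        //                      SET status = :status, updatedAt = :now
+        //                    WHERE token == :token && step == :step""")
+        //           .executeWithMap(Map.of(
+        //                   "status", WorkflowStatus.COMPLETED,
+        //                   "now", new Date(),
+        //                   "token", ctx.token,
+        //                   "step", WorkflowStep.BOM_PROCESSING));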
+ try (final var qm = new QueryManager()) { + qm.runInTransaction(() -> { + final WorkflowState bomProcessingState = + qm.getWorkflowStateByTokenAndStep(ctx.token, WorkflowStep.BOM_PROCESSING); + bomProcessingState.setStatus(WorkflowStatus.COMPLETED); + bomProcessingState.setUpdatedAt(new Date()); + }); } + } + private static void failWorkflowStepAndCancelDescendants(final Context ctx, final WorkflowStep step, final Throwable failureCause) { + try (var qm = new QueryManager()) { + qm.runInTransaction(() -> { + final var now = new Date(); + final WorkflowState workflowState = qm.getWorkflowStateByTokenAndStep(ctx.token, step); + workflowState.setStatus(WorkflowStatus.FAILED); + workflowState.setFailureReason(failureCause.getMessage()); + workflowState.setUpdatedAt(now); + qm.updateAllDescendantStatesOfParent(workflowState, WorkflowStatus.CANCELLED, now); + }); + } } - /** - * An {@link Exception} that signals failures during BOM consumption. - */ - private static final class BomConsumptionException extends Exception { + private List> initiateVulnerabilityAnalysis(final Context ctx, final Collection events) { + if (events.isEmpty()) { + // No components to be sent for vulnerability analysis. + // If the BOM_PROCESSED notification was delayed, dispatch it now. + if (delayBomProcessedNotification) { + dispatchBomProcessedNotification(ctx); + } + + try (final var qm = new QueryManager()) { + qm.runInTransaction(() -> { + final WorkflowState vulnAnalysisWorkflowState = + qm.getWorkflowStateByTokenAndStep(ctx.token, WorkflowStep.VULN_ANALYSIS); + vulnAnalysisWorkflowState.setStatus(WorkflowStatus.NOT_APPLICABLE); + vulnAnalysisWorkflowState.setUpdatedAt(new Date()); + + final WorkflowState policyEvalWorkflowState = + qm.getWorkflowStateByTokenAndStep(ctx.token, WorkflowStep.POLICY_EVALUATION); + policyEvalWorkflowState.setStatus(WorkflowStatus.NOT_APPLICABLE); + policyEvalWorkflowState.setUpdatedAt(new Date()); + }); + } - private final Context ctx; + // Trigger project metrics update no matter if vuln analysis is applicable or not. + final ChainableEvent metricsUpdateEvent = new ProjectMetricsUpdateEvent(ctx.project.getUuid()); + metricsUpdateEvent.setChainIdentifier(ctx.token); + Event.dispatch(metricsUpdateEvent); - private BomConsumptionException(final Context ctx, final String message, final Throwable cause) { - super(message, cause); - this.ctx = ctx; + return Collections.emptyList(); } - private BomConsumptionException(final Context ctx, final String message) { - this(ctx, message, null); + try (final var qm = new QueryManager()) { + // TODO: Creation of the scan, and starting of the workflow step, should happen in the same transaction. + // Requires a bit of refactoring in QueryManager#createVulnerabilityScan. + + qm.createVulnerabilityScan( + TargetType.PROJECT, + ctx.project.getUuid(), + ctx.token.toString(), + events.size() + ); + + qm.runInTransaction(() -> { + final WorkflowState vulnAnalysisWorkflowState = + qm.getWorkflowStateByTokenAndStep(ctx.token, WorkflowStep.VULN_ANALYSIS); + vulnAnalysisWorkflowState.setStartedAt(new Date()); + }); } + return events.stream() + .>map(event -> kafkaEventDispatcher.dispatchEvent(event).whenComplete( + (ignored, throwable) -> { + if (throwable != null) { + // Include context in the log message to make log correlation easier. + LOGGER.error("Failed to produce %s to Kafka".formatted(event), throwable); + } + } + )) + .toList(); } - /** - * Context holder for identifiers and metadata that describe a processing execution. 
- * Intended to be passed around and enriched during various stages of processing. - */ - private static final class Context { + private List> initiateRepoMetaAnalysis(final Collection events) { + return events.stream() + .>map(event -> kafkaEventDispatcher.dispatchEvent(event).whenComplete( + (ignored, throwable) -> { + if (throwable != null) { + // Include context in the log message to make log correlation easier. + LOGGER.error("Failed to produce %s to Kafka".formatted(event), throwable); + } + } + )) + .toList(); + } - private final Project project; - private final UUID uploadToken; - private Bom.Format bomFormat; - private String bomSpecVersion; - private String bomSerialNumber; - private Integer bomVersion; + private void dispatchBomConsumedNotification(final Context ctx) { + kafkaEventDispatcher.dispatchNotification(new Notification() + .scope(NotificationScope.PORTFOLIO) + .group(NotificationGroup.BOM_CONSUMED) + .level(NotificationLevel.INFORMATIONAL) + .title(NotificationConstants.Title.BOM_CONSUMED) + .content("A %s BOM was consumed and will be processed".formatted(ctx.bomFormat.getFormatShortName())) + .subject(new BomConsumedOrProcessed(ctx.token, ctx.project, /* bom */ "(Omitted)", ctx.bomFormat, ctx.bomSpecVersion))); + } - private Context(final Project project, final UUID uploadToken) { - this.project = project; - this.uploadToken = uploadToken; - } + private void dispatchBomProcessedNotification(final Context ctx) { + kafkaEventDispatcher.dispatchNotification(new Notification() + .scope(NotificationScope.PORTFOLIO) + .group(NotificationGroup.BOM_PROCESSED) + .level(NotificationLevel.INFORMATIONAL) + .title(NotificationConstants.Title.BOM_PROCESSED) + .content("A %s BOM was processed".formatted(ctx.bomFormat.getFormatShortName())) + // FIXME: Add reference to BOM after we have dedicated BOM server + .subject(new BomConsumedOrProcessed(ctx.token, ctx.project, /* bom */ "(Omitted)", ctx.bomFormat, ctx.bomSpecVersion))); + } + + private void dispatchBomProcessingFailedNotification(final Context ctx, final Throwable throwable) { + kafkaEventDispatcher.dispatchNotification(new Notification() + .scope(NotificationScope.PORTFOLIO) + .group(NotificationGroup.BOM_PROCESSING_FAILED) + .level(NotificationLevel.ERROR) + .title(NotificationConstants.Title.BOM_PROCESSING_FAILED) + .content("An error occurred while processing a BOM") + // TODO: Look into adding more fields to BomProcessingFailed, to also cover serial number, version, etc. 
+ // FIXME: Add reference to BOM after we have dedicated BOM server + .subject(new BomProcessingFailed(ctx.token, ctx.project, /* bom */ "(Omitted)", throwable.getMessage(), ctx.bomFormat, ctx.bomSpecVersion))); + } + + private static List createVulnAnalysisEvents(final Context ctx, final Collection components) { + return components.stream() + .map(component -> new ComponentVulnerabilityAnalysisEvent( + ctx.token, + component, + VulnerabilityAnalysisLevel.BOM_UPLOAD_ANALYSIS, + component.isNew() + )) + .toList(); + } + + private static List createRepoMetaAnalysisEvents(final Collection components) { + final var events = new ArrayList(components.size()); + // TODO: This should be more efficient (https://github.com/DependencyTrack/hyades/issues/1306) + + try (final var qm = new QueryManager()) { + qm.getPersistenceManager().setProperty(PROPERTY_PERSISTENCE_BY_REACHABILITY_AT_COMMIT, "false"); + qm.getPersistenceManager().setProperty(PROPERTY_RETAIN_VALUES, "true"); - @Override - public String toString() { - return "Context{" + - "project=" + project.getUuid() + - ", uploadToken=" + uploadToken + - ", bomFormat=" + bomFormat + - ", bomSpecVersion=" + bomSpecVersion + - ", bomSerialNumber=" + bomSerialNumber + - ", bomVersion=" + bomVersion + - '}'; + for (final Component component : components) { + if (component.getPurl() == null) { + continue; + } + + if (!SUPPORTED_PACKAGE_URLS_FOR_INTEGRITY_CHECK.contains(component.getPurl().getType())) { + events.add(new ComponentRepositoryMetaAnalysisEvent( + /* componentUuid */ null, + component.getPurlCoordinates().toString(), + component.isInternal(), + FETCH_META_LATEST_VERSION + )); + continue; + } + + final boolean shouldFetchIntegrityData = qm.runInTransaction(() -> prepareIntegrityMetaComponent(qm, component)); + if (shouldFetchIntegrityData) { + events.add(new ComponentRepositoryMetaAnalysisEvent( + component.getUuid(), + component.getPurl().toString(), + component.isInternal(), + FETCH_META_INTEGRITY_DATA_AND_LATEST_VERSION + )); + } else { + // If integrity metadata was fetched recently, we don't want to fetch it again + // as it's unlikely to change frequently. Fall back to fetching only the latest + // version information. 
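+                // Routing summary for createRepoMetaAnalysisEvents (mirrors the
+                // branches in this method):
+                //   purl == null                               -> no event
+                //   purl type unsupported for integrity checks -> FETCH_META_LATEST_VERSION (componentUuid omitted)
+                //   integrity metadata missing or stale        -> FETCH_META_INTEGRITY_DATA_AND_LATEST_VERSION
+                //   integrity metadata PROCESSED/NOT_AVAILABLE -> IntegrityAnalysisEvent re-published locally,
+                //                                                 then FETCH_META_LATEST_VERSION (componentUuid omitted)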
+ events.add(new ComponentRepositoryMetaAnalysisEvent( + /* componentUuid */ null, + component.getPurlCoordinates().toString(), + component.isInternal(), + FETCH_META_LATEST_VERSION + )); + } + } } + return events; } - private static boolean prepareIntegrityMetaComponent(ComponentRepositoryMetaAnalysisEvent event, QueryManager qm) { - final IntegrityMetaComponent integrityMetaComponent = qm.getIntegrityMetaComponent(event.purlCoordinates()); + private static boolean prepareIntegrityMetaComponent(final QueryManager qm, final Component component) { + final IntegrityMetaComponent integrityMetaComponent = qm.getIntegrityMetaComponent(component.getPurlCoordinates().toString()); if (integrityMetaComponent == null) { - qm.createIntegrityMetaHandlingConflict(AbstractMetaHandler.createIntegrityMetaComponent(event.purlCoordinates())); + qm.createIntegrityMetaHandlingConflict(AbstractMetaHandler.createIntegrityMetaComponent(component.getPurlCoordinates().toString())); return true; } else if (integrityMetaComponent.getStatus() == null || (integrityMetaComponent.getStatus() == FetchStatus.IN_PROGRESS @@ -1118,7 +1056,8 @@ private static boolean prepareIntegrityMetaComponent(ComponentRepositoryMetaAnal integrityMetaComponent.setLastFetch(Date.from(Instant.now())); return true; } else if (integrityMetaComponent.getStatus() == FetchStatus.PROCESSED || integrityMetaComponent.getStatus() == FetchStatus.NOT_AVAILABLE) { - EventService.getInstance().publish(new IntegrityAnalysisEvent(event.componentUuid(), integrityMetaComponent)); + qm.getPersistenceManager().makeTransient(integrityMetaComponent); + EventService.getInstance().publish(new IntegrityAnalysisEvent(component.getUuid(), integrityMetaComponent)); return false; } //don't send event because integrity metadata would be sent recently and don't want to send again diff --git a/src/main/java/org/dependencytrack/tasks/InternalComponentIdentificationTask.java b/src/main/java/org/dependencytrack/tasks/InternalComponentIdentificationTask.java index c8feceebc..650a68208 100644 --- a/src/main/java/org/dependencytrack/tasks/InternalComponentIdentificationTask.java +++ b/src/main/java/org/dependencytrack/tasks/InternalComponentIdentificationTask.java @@ -26,11 +26,10 @@ import net.javacrumbs.shedlock.core.LockingTaskExecutor; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.time.DateFormatUtils; -import org.datanucleus.PropertyNames; import org.dependencytrack.event.InternalComponentIdentificationEvent; import org.dependencytrack.model.Component; import org.dependencytrack.persistence.QueryManager; -import org.dependencytrack.util.InternalComponentIdentificationUtil; +import org.dependencytrack.util.InternalComponentIdentifier; import org.dependencytrack.util.LockProvider; import javax.jdo.PersistenceManager; @@ -57,7 +56,7 @@ public class InternalComponentIdentificationTask implements Subscriber { public void inform(final Event e) { if (e instanceof InternalComponentIdentificationEvent) { try { - LockProvider.executeWithLock(INTERNAL_COMPONENT_IDENTIFICATION_TASK_LOCK, (LockingTaskExecutor.Task) () -> analyze()); + LockProvider.executeWithLock(INTERNAL_COMPONENT_IDENTIFICATION_TASK_LOCK, (LockingTaskExecutor.Task) this::analyze); } catch (Throwable ex) { LOGGER.error("Error in acquiring lock and executing internal component identification task", ex); } @@ -71,11 +70,7 @@ private void analyze() throws Exception { try (final var qm = new QueryManager()) { final PersistenceManager pm = qm.getPersistenceManager(); - // Disable the 
DataNucleus L2 cache for this persistence manager.
-            // The cache will hold references to the queried objects, preventing them
-            // from being garbage collected. This is not required the case of this task.
-            pm.setProperty(PropertyNames.PROPERTY_CACHE_L2_TYPE, "none");
-
+            final var internalComponentIdentifier = new InternalComponentIdentifier();
             List<Component> components = fetchNextComponentsPage(pm, null);
             while (!components.isEmpty()) {
                 //Extend the lock by 5 min everytime we have a page.
@@ -93,7 +88,7 @@ private void analyze() throws Exception {
                         coordinates = component.getGroup() + ":" + coordinates;
                     }

-                    final boolean internal = InternalComponentIdentificationUtil.isInternalComponent(component, qm);
+                    final boolean internal = internalComponentIdentifier.isInternal(component);
                     if (internal) {
                         LOGGER.debug("Component " + coordinates + " (" + component.getUuid() + ") was identified to be internal");
                     }
@@ -122,7 +117,7 @@ private void analyze() throws Exception {
                 }
             }

-            final long lastId = components.get(components.size() - 1).getId();
+            final long lastId = components.getLast().getId();
             components = fetchNextComponentsPage(pm, lastId);
         }
     }
diff --git a/src/main/java/org/dependencytrack/util/InternalComponentIdentificationUtil.java b/src/main/java/org/dependencytrack/util/InternalComponentIdentificationUtil.java
deleted file mode 100644
index d91f08e67..000000000
--- a/src/main/java/org/dependencytrack/util/InternalComponentIdentificationUtil.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * This file is part of Dependency-Track.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * Copyright (c) OWASP Foundation. All Rights Reserved.
- */ -package org.dependencytrack.util; - -import alpine.model.ConfigProperty; -import org.apache.commons.lang3.StringUtils; -import org.dependencytrack.model.Component; -import org.dependencytrack.model.ConfigPropertyConstants; -import org.dependencytrack.persistence.QueryManager; - -import java.util.regex.Pattern; - -/** - * @author nscuro - * @since 3.7.0 - */ -public final class InternalComponentIdentificationUtil { - - private InternalComponentIdentificationUtil() { - } - - public static boolean isInternalComponent(final Component component, final QueryManager qm) { - return isInternalGroup(component.getGroup(), qm) || isInternalName(component.getName(), qm); - } - - private static boolean isInternalGroup(final String group, final QueryManager qm) { - if (StringUtils.trimToNull(group) == null) { - return false; - } - - final ConfigProperty internalGroupsRegexProperty = qm.getConfigProperty( - ConfigPropertyConstants.INTERNAL_COMPONENTS_GROUPS_REGEX.getGroupName(), - ConfigPropertyConstants.INTERNAL_COMPONENTS_GROUPS_REGEX.getPropertyName() - ); - if (internalGroupsRegexProperty == null || StringUtils.trimToNull(internalGroupsRegexProperty.getPropertyValue()) == null) { - return false; - } - - return Pattern.matches(StringUtils.trimToNull(internalGroupsRegexProperty.getPropertyValue()), group); - } - - private static boolean isInternalName(final String name, final QueryManager qm) { - if (StringUtils.trimToNull(name) == null) { - return false; - } - - final ConfigProperty internalNamesRegexProperty = qm.getConfigProperty( - ConfigPropertyConstants.INTERNAL_COMPONENTS_NAMES_REGEX.getGroupName(), - ConfigPropertyConstants.INTERNAL_COMPONENTS_NAMES_REGEX.getPropertyName() - ); - if (internalNamesRegexProperty == null || StringUtils.trimToNull(internalNamesRegexProperty.getPropertyValue()) == null) { - return false; - } - - return Pattern.matches(StringUtils.trimToNull(internalNamesRegexProperty.getPropertyValue()), name); - } - -} diff --git a/src/main/java/org/dependencytrack/util/InternalComponentIdentifier.java b/src/main/java/org/dependencytrack/util/InternalComponentIdentifier.java new file mode 100644 index 000000000..27e1da132 --- /dev/null +++ b/src/main/java/org/dependencytrack/util/InternalComponentIdentifier.java @@ -0,0 +1,103 @@ +/* + * This file is part of Dependency-Track. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) OWASP Foundation. All Rights Reserved. 
+ */ +package org.dependencytrack.util; + +import alpine.model.ConfigProperty; +import com.google.common.base.Supplier; +import com.google.common.base.Suppliers; +import org.apache.commons.lang3.StringUtils; +import org.dependencytrack.model.Component; +import org.dependencytrack.persistence.QueryManager; + +import java.util.Optional; +import java.util.regex.Pattern; + +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.dependencytrack.model.ConfigPropertyConstants.INTERNAL_COMPONENTS_GROUPS_REGEX; +import static org.dependencytrack.model.ConfigPropertyConstants.INTERNAL_COMPONENTS_NAMES_REGEX; + +/** + * Utility class to identify internal components based on the configured group and name regular expressions. + *

+ * RegEx patterns are loaded and compiled once upon first invocation of {@link #isInternal(Component)}, + * and then re-used for the lifetime of the {@link InternalComponentIdentifier} instance. + * + * @since 4.11.0 + */ +public class InternalComponentIdentifier { + + private record Patterns(Pattern groupPattern, Pattern namePattern) { + + private boolean hasPattern() { + return groupPattern != null || namePattern != null; + } + + } + + private final Supplier patternsSupplier = Suppliers.memoize(InternalComponentIdentifier::loadPatterns); + + public boolean isInternal(final Component component) { + final Patterns patterns = patternsSupplier.get(); + if (!patterns.hasPattern()) { + return false; + } + + final boolean matchesGroup; + if (isNotBlank(component.getGroup()) && patterns.groupPattern() != null) { + matchesGroup = patterns.groupPattern().matcher(component.getGroup()).matches(); + } else { + matchesGroup = false; + } + + final boolean matchesName; + if (isNotBlank(component.getName()) && patterns.namePattern() != null) { + matchesName = patterns.namePattern().matcher(component.getName()).matches(); + } else { + matchesName = false; + } + + return matchesGroup || matchesName; + } + + private static Patterns loadPatterns() { + try (final var qm = new QueryManager()) { + final ConfigProperty groupsRegexProperty = qm.getConfigProperty( + INTERNAL_COMPONENTS_GROUPS_REGEX.getGroupName(), + INTERNAL_COMPONENTS_GROUPS_REGEX.getPropertyName() + ); + final ConfigProperty namesRegexProperty = qm.getConfigProperty( + INTERNAL_COMPONENTS_NAMES_REGEX.getGroupName(), + INTERNAL_COMPONENTS_NAMES_REGEX.getPropertyName() + ); + + return new Patterns( + tryCompilePattern(groupsRegexProperty).orElse(null), + tryCompilePattern(namesRegexProperty).orElse(null) + ); + } + } + + private static Optional tryCompilePattern(final ConfigProperty property) { + return Optional.ofNullable(property) + .map(ConfigProperty::getPropertyValue) + .map(StringUtils::trimToNull) + .map(Pattern::compile); + } + +} \ No newline at end of file diff --git a/src/main/java/org/dependencytrack/util/LockProvider.java b/src/main/java/org/dependencytrack/util/LockProvider.java index d2f8258d2..9b401ec05 100644 --- a/src/main/java/org/dependencytrack/util/LockProvider.java +++ b/src/main/java/org/dependencytrack/util/LockProvider.java @@ -85,11 +85,19 @@ public static void executeWithLock(LockName lockName, LockingTaskExecutor.Task t public static void executeWithLockWaiting(final WaitingLockConfiguration lockConfiguration, final LockingTaskExecutor.Task task) throws Throwable { + executeWithLockWaiting(lockConfiguration, () -> { + task.call(); + return null; + }); + } + + public static T executeWithLockWaiting(final WaitingLockConfiguration lockConfiguration, + final LockingTaskExecutor.TaskWithResult task) throws Throwable { final JdbcLockProvider jdbcLockProvider = getJdbcLockProviderInstance(); final var waitingLockProvider = new WaitingLockProvider(jdbcLockProvider, lockConfiguration.getPollInterval(), lockConfiguration.getWaitTimeout()); final var executor = new DefaultLockingTaskExecutor(waitingLockProvider); - executor.executeWithLock(task, lockConfiguration); + return executor.executeWithLock(task, lockConfiguration).getResult(); } private static JdbcLockProvider getJdbcLockProviderInstance() { diff --git a/src/test/java/org/dependencytrack/tasks/BomUploadProcessingTaskTest.java b/src/test/java/org/dependencytrack/tasks/BomUploadProcessingTaskTest.java index 555fd422a..1794fffbf 100644 --- 
a/src/test/java/org/dependencytrack/tasks/BomUploadProcessingTaskTest.java +++ b/src/test/java/org/dependencytrack/tasks/BomUploadProcessingTaskTest.java @@ -109,9 +109,9 @@ public void informTest() throws Exception { assertThat(kafkaMockProducer.history()).satisfiesExactly( event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_CREATED.name()), event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()), + event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()), event -> assertThat(event.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_COMMAND.name()), - event -> assertThat(event.topic()).isEqualTo(KafkaTopics.REPO_META_ANALYSIS_COMMAND.name()), - event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()) + event -> assertThat(event.topic()).isEqualTo(KafkaTopics.REPO_META_ANALYSIS_COMMAND.name()) ); qm.getPersistenceManager().refresh(project); assertThat(project.getClassifier()).isEqualTo(Classifier.APPLICATION); @@ -235,9 +235,9 @@ public void informTestWithComponentAlreadyExistsForIntegrityCheck() throws Excep assertThat(kafkaMockProducer.history()).satisfiesExactly( event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_CREATED.name()), event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()), + event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()), event -> assertThat(event.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_COMMAND.name()), - event -> assertThat(event.topic()).isEqualTo(KafkaTopics.REPO_META_ANALYSIS_COMMAND.name()), - event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()) + event -> assertThat(event.topic()).isEqualTo(KafkaTopics.REPO_META_ANALYSIS_COMMAND.name()) ); qm.getPersistenceManager().refresh(project); assertThat(project.getClassifier()).isEqualTo(Classifier.APPLICATION); @@ -400,7 +400,7 @@ public void informWithInvalidBomTest() throws Exception { state -> { assertThat(state.getStep()).isEqualTo(BOM_CONSUMPTION); assertThat(state.getStatus()).isEqualTo(FAILED); - assertThat(state.getFailureReason()).isEqualTo("Failed to parse BOM"); + assertThat(state.getFailureReason()).isEqualTo("Unable to parse BOM from byte array"); assertThat(state.getUpdatedAt()).isBefore(Date.from(Instant.now())); }, state -> { @@ -492,6 +492,7 @@ public void informWithBloatedBomTest() throws Exception { final var project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-bloated.json")); + qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); new BomUploadProcessingTask().inform(bomUploadEvent); assertBomProcessedNotification(); @@ -586,6 +587,7 @@ public void informIssue2519Test() throws Exception { // Ensure processing does not fail, and the number of components ingested doesn't change. 
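         // (Re-uploading the same BOM exercises the upsert path: existing components are
         // matched by identity and updated in place, so the component count stays stable.)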
for (int i = 0; i < 3; i++) { var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-issue2519.xml")); + qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); new BomUploadProcessingTask().inform(bomUploadEvent); assertBomProcessedNotification(); kafkaMockProducer.clear(); @@ -599,8 +601,11 @@ public void informIssue2519Test() throws Exception { public void informIssue2859Test() { final Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - assertThatNoException() - .isThrownBy(() -> new BomUploadProcessingTask().inform(new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-issue2859.xml")))); + assertThatNoException().isThrownBy(() -> { + final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-issue2859.xml")); + qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); + new BomUploadProcessingTask().inform(bomUploadEvent); + }); } @Test // https://github.com/DependencyTrack/dependency-track/issues/1905 @@ -609,6 +614,7 @@ public void informIssue1905Test() throws Exception { for (int i = 0; i < 3; i++) { var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-issue1905.json")); + qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); new BomUploadProcessingTask().inform(bomUploadEvent); assertBomProcessedNotification(); @@ -651,6 +657,7 @@ public void informIssue3309Test() throws Exception { }; var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-issue3309.json")); + qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); new BomUploadProcessingTask().inform(bomUploadEvent); assertBomProcessedNotification(); assertProjectAuthors.run(); @@ -658,6 +665,7 @@ public void informIssue3309Test() throws Exception { kafkaMockProducer.clear(); bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-issue3309.json")); + qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); new BomUploadProcessingTask().inform(bomUploadEvent); assertBomProcessedNotification(); assertProjectAuthors.run(); @@ -668,6 +676,7 @@ public void informWithComponentsUnderMetadataBomTest() throws Exception { final var project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-metadata-components.json")); + qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); new BomUploadProcessingTask().inform(bomUploadEvent); assertThat(kafkaMockProducer.history()) @@ -768,9 +777,9 @@ public void informWithComponentWithoutPurl() throws Exception { assertThat(kafkaMockProducer.history()).satisfiesExactly( event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_CREATED.name()), event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()), - event -> assertThat(event.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_COMMAND.name()), + event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()), + event -> assertThat(event.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_COMMAND.name()) // (No REPO_META_ANALYSIS_COMMAND event because the component doesn't have a PURL) - event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()) ); assertThat(qm.getAllComponents(project)) 
@@ -793,10 +802,10 @@ public void informWithCustomLicenseResolutionTest() throws Exception { assertThat(kafkaMockProducer.history()).satisfiesExactly( event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_CREATED.name()), event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()), + event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()), event -> assertThat(event.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_COMMAND.name()), event -> assertThat(event.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_COMMAND.name()), - event -> assertThat(event.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_COMMAND.name()), - event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()) + event -> assertThat(event.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_COMMAND.name()) ); assertThat(qm.getAllComponents(project)).satisfiesExactly( @@ -831,8 +840,8 @@ public void informWithBomContainingLicenseExpressionTest() throws Exception { assertThat(kafkaMockProducer.history()).satisfiesExactly( event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_CREATED.name()), event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()), - event -> assertThat(event.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_COMMAND.name()), - event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()) + event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()), + event -> assertThat(event.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_COMMAND.name()) ); assertThat(qm.getAllComponents(project)).satisfiesExactly(component -> { @@ -859,8 +868,8 @@ public void informWithBomContainingLicenseExpressionWithSingleIdTest() throws Ex assertThat(kafkaMockProducer.history()).satisfiesExactly( event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_CREATED.name()), event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()), - event -> assertThat(event.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_COMMAND.name()), - event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()) + event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()), + event -> assertThat(event.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_COMMAND.name()) ); assertThat(qm.getAllComponents(project)).satisfiesExactly(component -> { @@ -883,8 +892,8 @@ public void informWithBomContainingInvalidLicenseExpressionTest() throws Excepti assertThat(kafkaMockProducer.history()).satisfiesExactly( event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_CREATED.name()), event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()), - event -> assertThat(event.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_COMMAND.name()), - event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()) + event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()), + event -> assertThat(event.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_COMMAND.name()) ); assertThat(qm.getAllComponents(project)).satisfiesExactly(component -> { @@ -906,8 +915,8 @@ public void informWithBomContainingServiceTest() throws Exception { assertThat(kafkaMockProducer.history()).satisfiesExactly( event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_CREATED.name()), event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()), - event -> 
-                event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name())
+                event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name()),
+                event -> assertThat(event.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_COMMAND.name())
         );
 
         assertThat(qm.getAllComponents(project)).isNotEmpty();
diff --git a/src/test/java/org/dependencytrack/tasks/IntegrityAnalysisTaskTest.java b/src/test/java/org/dependencytrack/tasks/IntegrityAnalysisTaskTest.java
index 1c5429a78..d77381913 100644
--- a/src/test/java/org/dependencytrack/tasks/IntegrityAnalysisTaskTest.java
+++ b/src/test/java/org/dependencytrack/tasks/IntegrityAnalysisTaskTest.java
@@ -75,7 +75,7 @@ public void shouldPerformIntegrityAnalysisIfMetaDataExists() {
         integrityMetaComponent.setLastFetch(date);
         IntegrityMetaComponent integrityData = qm.persist(integrityMetaComponent);
 
-        new IntegrityAnalysisTask().inform(new IntegrityAnalysisEvent(c.getUuid(), integrityData));
+        new IntegrityAnalysisTask().inform(new IntegrityAnalysisEvent(c.getUuid(), qm.detach(IntegrityMetaComponent.class, integrityData.getId())));
         IntegrityAnalysis integrityResult = qm.getIntegrityAnalysisByComponentUuid(c.getUuid());
         assertThat(integrityResult).isNotNull();
         assertThat(integrityResult.getIntegrityCheckStatus()).isEqualTo(HASH_MATCH_PASSED);
@@ -107,7 +107,7 @@ public void shouldNotPerformAnalysisIfComponentUuidIsMissing() {
         integrityMetaComponent.setLastFetch(date);
         IntegrityMetaComponent integrityData = qm.persist(integrityMetaComponent);
 
-        new IntegrityAnalysisTask().inform(new IntegrityAnalysisEvent(null, integrityData));
+        new IntegrityAnalysisTask().inform(new IntegrityAnalysisEvent(null, qm.detach(IntegrityMetaComponent.class, integrityData.getId())));
         IntegrityAnalysis integrityResult = qm.getIntegrityAnalysisByComponentUuid(c.getUuid());
         assertThat(integrityResult).isNull();
     }
diff --git a/src/test/java/org/dependencytrack/util/InternalComponentIdentificationUtilTest.java b/src/test/java/org/dependencytrack/util/InternalComponentIdentifierTest.java
similarity index 63%
rename from src/test/java/org/dependencytrack/util/InternalComponentIdentificationUtilTest.java
rename to src/test/java/org/dependencytrack/util/InternalComponentIdentifierTest.java
index e022f1533..5368ddb43 100644
--- a/src/test/java/org/dependencytrack/util/InternalComponentIdentificationUtilTest.java
+++ b/src/test/java/org/dependencytrack/util/InternalComponentIdentifierTest.java
@@ -18,11 +18,8 @@
  */
 package org.dependencytrack.util;
 
-import alpine.model.ConfigProperty;
+import org.dependencytrack.PersistenceCapableTest;
 import org.dependencytrack.model.Component;
-import org.dependencytrack.model.ConfigPropertyConstants;
-import org.dependencytrack.persistence.QueryManager;
-import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -30,29 +27,22 @@
 import java.util.Arrays;
 import java.util.Collection;
 
+import static org.dependencytrack.model.ConfigPropertyConstants.INTERNAL_COMPONENTS_GROUPS_REGEX;
+import static org.dependencytrack.model.ConfigPropertyConstants.INTERNAL_COMPONENTS_NAMES_REGEX;
 import static org.junit.Assert.assertEquals;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
 
 @RunWith(Parameterized.class)
-public class InternalComponentIdentificationUtilTest {
+public class InternalComponentIdentifierTest extends PersistenceCapableTest {
 
     private final String groupsRegexProperty;
     private final String componentGroup;
     private final String namesRegexProperty;
     private final String componentName;
     private final boolean shouldBeInternal;
 
-    private QueryManager queryManagerMock;
-
-    @Before
-    public void setUp() {
-        queryManagerMock = mock(QueryManager.class);
-    }
     @Parameterized.Parameters(name = "[{index}] groupsRegexProperty={0} componentGroup={1} " +
             "namesRegexProperty={2} componentName={3} shouldBeInternal={4}")
-    public static Collection<Object[]> testParameters() {
+    public static Collection<Object[]> testParameters() {
         return Arrays.asList(new Object[][]{
                 // neither regexes nor group / name provided
                 {"", "", "", "", false},
@@ -79,9 +69,9 @@ public static Collection<Object[]> testParameters() {
         });
     }
 
-    public InternalComponentIdentificationUtilTest(final String groupsRegexProperty, final String componentGroup,
-                                                   final String namesRegexProperty, final String componentName,
-                                                   final boolean shouldBeInternal) {
+    public InternalComponentIdentifierTest(final String groupsRegexProperty, final String componentGroup,
+                                           final String namesRegexProperty, final String componentName,
+                                           final boolean shouldBeInternal) {
         this.groupsRegexProperty = groupsRegexProperty;
         this.componentGroup = componentGroup;
         this.namesRegexProperty = namesRegexProperty;
@@ -91,27 +81,26 @@ public InternalComponentIdentificationUtilTest(final String groupsRegexProperty,
 
     @Test
    public void testIsInternal() {
-        final ConfigProperty groupConfigProperty = new ConfigProperty();
-        groupConfigProperty.setPropertyValue(groupsRegexProperty);
-
-        final ConfigProperty nameConfigProperty = new ConfigProperty();
-        nameConfigProperty.setPropertyValue(namesRegexProperty);
-
-        doReturn(groupConfigProperty).when(queryManagerMock)
-                .getConfigProperty(
-                        eq(ConfigPropertyConstants.INTERNAL_COMPONENTS_GROUPS_REGEX.getGroupName()),
-                        eq(ConfigPropertyConstants.INTERNAL_COMPONENTS_GROUPS_REGEX.getPropertyName()));
-
-        doReturn(nameConfigProperty).when(queryManagerMock)
-                .getConfigProperty(
-                        eq(ConfigPropertyConstants.INTERNAL_COMPONENTS_NAMES_REGEX.getGroupName()),
-                        eq(ConfigPropertyConstants.INTERNAL_COMPONENTS_NAMES_REGEX.getPropertyName()));
+        qm.createConfigProperty(
+                INTERNAL_COMPONENTS_GROUPS_REGEX.getGroupName(),
+                INTERNAL_COMPONENTS_GROUPS_REGEX.getPropertyName(),
+                groupsRegexProperty,
+                INTERNAL_COMPONENTS_GROUPS_REGEX.getPropertyType(),
+                INTERNAL_COMPONENTS_GROUPS_REGEX.getDescription()
+        );
+        qm.createConfigProperty(
+                INTERNAL_COMPONENTS_NAMES_REGEX.getGroupName(),
+                INTERNAL_COMPONENTS_NAMES_REGEX.getPropertyName(),
+                namesRegexProperty,
+                INTERNAL_COMPONENTS_NAMES_REGEX.getPropertyType(),
+                INTERNAL_COMPONENTS_NAMES_REGEX.getDescription()
+        );
 
         final Component component = new Component();
         component.setGroup(componentGroup);
         component.setName(componentName);
 
-        assertEquals(shouldBeInternal, InternalComponentIdentificationUtil.isInternalComponent(component, queryManagerMock));
+        assertEquals(shouldBeInternal, new InternalComponentIdentifier().isInternal(component));
     }
 
-}
+}
\ No newline at end of file