Externalize BOM ingestion pipeline
Signed-off-by: nscuro <[email protected]>
nscuro committed Jul 24, 2024
1 parent f78a300 commit 81206ad
Showing 23 changed files with 811 additions and 325 deletions.
@@ -150,6 +150,7 @@ public void contextInitialized(final ServletContextEvent event) {
}

final var topicsToCreate = new ArrayList<>(List.of(
new NewTopic(KafkaTopics.EVENT_BOM_UPLOADED.name(), 1, (short) 1),
new NewTopic(KafkaTopics.NEW_EPSS.name(), 1, (short) 1).configs(Map.of(CLEANUP_POLICY_CONFIG, CLEANUP_POLICY_COMPACT)),
new NewTopic(KafkaTopics.NEW_VULNERABILITY.name(), 1, (short) 1).configs(Map.of(CLEANUP_POLICY_CONFIG, CLEANUP_POLICY_COMPACT)),
new NewTopic(KafkaTopics.NOTIFICATION_ANALYZER.name(), 1, (short) 1),
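The new `dtrack.event.bom-uploaded` topic is provisioned here alongside the existing ones with a single partition and replication factor 1 (development defaults). A minimal sketch of equivalent provisioning with the plain Kafka `AdminClient` — the broker address is an assumption:

```java
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

import java.util.List;
import java.util.Map;

class TopicProvisioningSketch {
    public static void main(final String[] args) throws Exception {
        // Same shape as the initializer above: 1 partition, replication factor 1.
        try (AdminClient adminClient = AdminClient.create(Map.of(
                AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"))) { // assumed address
            adminClient.createTopics(List.of(
                            new NewTopic("dtrack.event.bom-uploaded", 1, (short) 1)))
                    .all().get(); // blocks until the broker acknowledges creation
        }
    }
}
```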
9 changes: 1 addition & 8 deletions src/main/java/org/dependencytrack/event/BomUploadEvent.java
@@ -21,8 +21,6 @@
import alpine.event.framework.AbstractChainableEvent;
import org.dependencytrack.model.Project;

import java.io.File;

/**
* Defines an event triggered when a bill-of-material (bom) document is submitted.
*
@@ -32,18 +30,13 @@
public class BomUploadEvent extends AbstractChainableEvent {

private final Project project;
private final File file;

public BomUploadEvent(final Project project, final File file) {
public BomUploadEvent(final Project project) {
this.project = project;
this.file = file;
}

public Project getProject() {
return project;
}

public File getFile() {
return file;
}
}
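With the `File` gone, the event now only identifies the project; the BOM bytes themselves are persisted out of band, keyed by the event's chain identifier (the upload token). A sketch of what an upload site might now look like — the store and dispatch method names are assumptions, since this commit only shows the consuming side:

```java
import org.dependencytrack.event.BomUploadEvent;
import org.dependencytrack.event.kafka.KafkaEventDispatcher;
import org.dependencytrack.model.Project;
import org.dependencytrack.storage.BomUploadStorageProvider;

class BomUploadSketch {
    void upload(final Project project, final byte[] bomBytes,
                final BomUploadStorageProvider storage,
                final KafkaEventDispatcher dispatcher) {
        final var event = new BomUploadEvent(project);
        // Persist the BOM under the upload token before announcing it.
        storage.storeBomByToken(event.getChainIdentifier(), bomBytes); // assumed method
        dispatcher.dispatchEvent(event); // assumed method; routed via KafkaEventConverter
    }
}
```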
src/main/java/org/dependencytrack/event/EventSubsystemInitializer.java
@@ -23,7 +23,6 @@
import alpine.event.LdapSyncEvent;
import alpine.event.framework.EventService;
import org.dependencytrack.common.ConfigKey;
import org.dependencytrack.tasks.BomUploadProcessingTask;
import org.dependencytrack.tasks.CallbackTask;
import org.dependencytrack.tasks.CloneProjectTask;
import org.dependencytrack.tasks.DefectDojoUploadTask;
@@ -42,12 +41,12 @@
import org.dependencytrack.tasks.TaskScheduler;
import org.dependencytrack.tasks.VexUploadProcessingTask;
import org.dependencytrack.tasks.VulnerabilityAnalysisTask;
import org.dependencytrack.tasks.vulnerabilitypolicy.VulnerabilityPolicyFetchTask;
import org.dependencytrack.tasks.VulnerabilityScanCleanupTask;
import org.dependencytrack.tasks.WorkflowStateCleanupTask;
import org.dependencytrack.tasks.metrics.PortfolioMetricsUpdateTask;
import org.dependencytrack.tasks.metrics.ProjectMetricsUpdateTask;
import org.dependencytrack.tasks.metrics.VulnerabilityMetricsUpdateTask;
import org.dependencytrack.tasks.vulnerabilitypolicy.VulnerabilityPolicyFetchTask;

import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
@@ -75,7 +74,6 @@ public class EventSubsystemInitializer implements ServletContextListener {
public void contextInitialized(final ServletContextEvent event) {
LOGGER.info("Initializing asynchronous event subsystem");

EVENT_SERVICE.subscribe(BomUploadEvent.class, BomUploadProcessingTask.class);
EVENT_SERVICE.subscribe(VexUploadEvent.class, VexUploadProcessingTask.class);
EVENT_SERVICE.subscribe(LdapSyncEvent.class, LdapSyncTaskWrapper.class);
EVENT_SERVICE.subscribe(GitHubAdvisoryMirrorEvent.class, GitHubAdvisoryMirrorTask.class);
@@ -114,7 +112,6 @@ public void contextDestroyed(final ServletContextEvent event) {
LOGGER.info("Shutting down asynchronous event subsystem");
TaskScheduler.getInstance().shutdown();

EVENT_SERVICE.unsubscribe(BomUploadProcessingTask.class);
EVENT_SERVICE.unsubscribe(VexUploadProcessingTask.class);
EVENT_SERVICE.unsubscribe(LdapSyncTaskWrapper.class);
EVENT_SERVICE.unsubscribe(GitHubAdvisoryMirrorTask.class);
src/main/java/org/dependencytrack/event/kafka/KafkaEventConverter.java
@@ -21,6 +21,7 @@
import alpine.event.framework.Event;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import org.dependencytrack.event.BomUploadEvent;
import org.dependencytrack.event.ComponentRepositoryMetaAnalysisEvent;
import org.dependencytrack.event.ComponentVulnerabilityAnalysisEvent;
import org.dependencytrack.event.EpssMirrorEvent;
@@ -30,6 +31,7 @@
import org.dependencytrack.event.kafka.KafkaTopics.Topic;
import org.dependencytrack.model.Vulnerability;
import org.dependencytrack.parser.dependencytrack.NotificationModelConverter;
import org.dependencytrack.proto.event.v1alpha1.BomUploadedEvent;
import org.dependencytrack.proto.notification.v1.BomConsumedOrProcessedSubject;
import org.dependencytrack.proto.notification.v1.BomProcessingFailedSubject;
import org.dependencytrack.proto.notification.v1.NewVulnerabilitySubject;
@@ -67,6 +69,7 @@ private KafkaEventConverter() {

static KafkaEvent<?, ?> convert(final Event event) {
return switch (event) {
case BomUploadEvent e -> convert(e);
case ComponentRepositoryMetaAnalysisEvent e -> convert(e);
case ComponentVulnerabilityAnalysisEvent e -> convert(e);
case GitHubAdvisoryMirrorEvent e -> convert(e);
@@ -104,6 +107,22 @@ private KafkaEventConverter() {
return kafkaEvents;
}

static KafkaEvent<UUID, BomUploadedEvent> convert(final BomUploadEvent event) {
final BomUploadedEvent.Project.Builder projectBuilder = BomUploadedEvent.Project.newBuilder()
.setUuid(event.getProject().getUuid().toString())
.setName(event.getProject().getName());
Optional.ofNullable(event.getProject().getVersion()).ifPresent(projectBuilder::setVersion);

return new KafkaEvent<>(
KafkaTopics.EVENT_BOM_UPLOADED,
event.getProject().getUuid(),
BomUploadedEvent.newBuilder()
.setToken(event.getChainIdentifier().toString())
.setProject(projectBuilder)
.build()
);
}

static KafkaEvent<ScanKey, ScanCommand> convert(final ComponentVulnerabilityAnalysisEvent event) {
final var componentBuilder = Component.newBuilder()
.setUuid(event.uuid().toString());
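The converter keys the record by project UUID and carries the upload token plus a trimmed-down project reference in the value. A sketch of the payload round-trip, assuming the generated `v1alpha1` protobuf classes (field values are illustrative):

```java
import com.google.protobuf.InvalidProtocolBufferException;
import org.dependencytrack.proto.event.v1alpha1.BomUploadedEvent;

class PayloadRoundTripSketch {
    static void demo() throws InvalidProtocolBufferException {
        // Build the same payload convert(BomUploadEvent) assembles above.
        final BomUploadedEvent event = BomUploadedEvent.newBuilder()
                .setToken("7c0b7b2e-6f6a-4e32-9a3b-2f1d7a9c0e11") // upload token (illustrative)
                .setProject(BomUploadedEvent.Project.newBuilder()
                        .setUuid("3f8c3d9a-1a2b-4c5d-8e9f-0a1b2c3d4e5f")
                        .setName("acme-app")
                        .setVersion("1.2.3"))
                .build();

        final byte[] wire = event.toByteArray();                          // what the value serde writes
        final BomUploadedEvent parsed = BomUploadedEvent.parseFrom(wire); // what the processor reads back
        assert parsed.getProject().getName().equals("acme-app");
    }
}
```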
src/main/java/org/dependencytrack/event/kafka/KafkaTopics.java
@@ -24,6 +24,7 @@
import org.cyclonedx.proto.v1_6.Bom;
import org.dependencytrack.common.ConfigKey;
import org.dependencytrack.event.kafka.serialization.KafkaProtobufSerde;
import org.dependencytrack.proto.event.v1alpha1.BomUploadedEvent;
import org.dependencytrack.proto.mirror.v1.EpssItem;
import org.dependencytrack.proto.notification.v1.Notification;
import org.dependencytrack.proto.repometaanalysis.v1.AnalysisCommand;
@@ -32,8 +33,11 @@
import org.dependencytrack.proto.vulnanalysis.v1.ScanKey;
import org.dependencytrack.proto.vulnanalysis.v1.ScanResult;

import java.util.UUID;

public final class KafkaTopics {

public static final Topic<UUID, BomUploadedEvent> EVENT_BOM_UPLOADED;
public static final Topic<String, Notification> NOTIFICATION_ANALYZER;
public static final Topic<String, Notification> NOTIFICATION_BOM;
public static final Topic<String, Notification> NOTIFICATION_CONFIGURATION;
@@ -61,6 +65,7 @@ public final class KafkaTopics {
private static final Serde<Notification> NOTIFICATION_SERDE = new KafkaProtobufSerde<>(Notification.parser());

static {
EVENT_BOM_UPLOADED = new Topic<>("dtrack.event.bom-uploaded", Serdes.UUID(), new KafkaProtobufSerde<>(BomUploadedEvent.parser()));
NOTIFICATION_ANALYZER = new Topic<>("dtrack.notification.analyzer", Serdes.String(), NOTIFICATION_SERDE);
NOTIFICATION_BOM = new Topic<>("dtrack.notification.bom", Serdes.String(), NOTIFICATION_SERDE);
NOTIFICATION_CONFIGURATION = new Topic<>("dtrack.notification.configuration", Serdes.String(), NOTIFICATION_SERDE);
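Each `Topic<K, V>` constant binds a topic name to its key and value serdes, so producer and consumer sides agree on the wire format. A hypothetical consumer wiring that mirrors the `Serdes.UUID()` / `KafkaProtobufSerde` pairing above (bootstrap address and group id are assumptions):

```java
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.UUIDDeserializer;

import java.util.List;
import java.util.Properties;
import java.util.UUID;

class ConsumerWiringSketch {
    static KafkaConsumer<UUID, byte[]> subscribe() {
        final Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed address
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "dtrack-apiserver");        // assumed group id
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, UUIDDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
        final KafkaConsumer<UUID, byte[]> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(List.of("dtrack.event.bom-uploaded")); // values parse via BomUploadedEvent.parseFrom
        return consumer;
    }
}
```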
src/main/java/org/dependencytrack/event/kafka/processor/BomUploadProcessor.java (renamed from src/main/java/org/dependencytrack/tasks/BomUploadProcessingTask.java)
@@ -16,32 +16,34 @@
* SPDX-License-Identifier: Apache-2.0
* Copyright (c) OWASP Foundation. All Rights Reserved.
*/
package org.dependencytrack.tasks;
package org.dependencytrack.event.kafka.processor;

import alpine.Config;
import alpine.common.logging.Logger;
import alpine.event.framework.ChainableEvent;
import alpine.event.framework.Event;
import alpine.event.framework.EventService;
import alpine.event.framework.Subscriber;
import alpine.model.ConfigProperty;
import alpine.notification.Notification;
import alpine.notification.NotificationLevel;
import org.apache.commons.collections4.MultiValuedMap;
import org.apache.commons.collections4.multimap.HashSetValuedHashMap;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.cyclonedx.exception.ParseException;
import org.cyclonedx.parsers.BomParserFactory;
import org.cyclonedx.parsers.Parser;
import org.datanucleus.flush.FlushMode;
import org.datanucleus.store.query.QueryNotUniqueException;
import org.dependencytrack.common.ConfigKey;
import org.dependencytrack.event.BomUploadEvent;
import org.dependencytrack.event.ComponentRepositoryMetaAnalysisEvent;
import org.dependencytrack.event.ComponentVulnerabilityAnalysisEvent;
import org.dependencytrack.event.IntegrityAnalysisEvent;
import org.dependencytrack.event.ProjectMetricsUpdateEvent;
import org.dependencytrack.event.kafka.KafkaEventDispatcher;
import org.dependencytrack.event.kafka.componentmeta.AbstractMetaHandler;
import org.dependencytrack.event.kafka.processor.api.Processor;
import org.dependencytrack.event.kafka.processor.exception.ProcessingException;
import org.dependencytrack.model.Bom;
import org.dependencytrack.model.Component;
import org.dependencytrack.model.ComponentIdentity;
@@ -52,7 +54,7 @@
import org.dependencytrack.model.ProjectMetadata;
import org.dependencytrack.model.ServiceComponent;
import org.dependencytrack.model.VulnerabilityAnalysisLevel;
import org.dependencytrack.model.VulnerabilityScan.TargetType;
import org.dependencytrack.model.VulnerabilityScan;
import org.dependencytrack.model.WorkflowState;
import org.dependencytrack.model.WorkflowStatus;
import org.dependencytrack.model.WorkflowStep;
@@ -62,18 +64,16 @@
import org.dependencytrack.notification.vo.BomConsumedOrProcessed;
import org.dependencytrack.notification.vo.BomProcessingFailed;
import org.dependencytrack.persistence.QueryManager;
import org.dependencytrack.proto.event.v1alpha1.BomUploadedEvent;
import org.dependencytrack.storage.BomUploadStorageProvider;
import org.dependencytrack.util.InternalComponentIdentifier;
import org.dependencytrack.util.WaitingLockConfiguration;
import org.json.JSONArray;
import org.slf4j.MDC;

import javax.jdo.JDOUserException;
import javax.jdo.PersistenceManager;
import javax.jdo.Query;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardOpenOption;
import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
@@ -107,6 +107,7 @@
import static org.dependencytrack.common.MdcKeys.MDC_PROJECT_VERSION;
import static org.dependencytrack.event.kafka.componentmeta.RepoMetaConstants.SUPPORTED_PACKAGE_URLS_FOR_INTEGRITY_CHECK;
import static org.dependencytrack.event.kafka.componentmeta.RepoMetaConstants.TIME_SPAN;
import static org.dependencytrack.model.ConfigPropertyConstants.BOM_UPLOAD_STORAGE_PROVIDER;
import static org.dependencytrack.parser.cyclonedx.util.ModelConverter.convertComponents;
import static org.dependencytrack.parser.cyclonedx.util.ModelConverter.convertDependencyGraph;
import static org.dependencytrack.parser.cyclonedx.util.ModelConverter.convertServices;
@@ -115,18 +116,17 @@
import static org.dependencytrack.parser.cyclonedx.util.ModelConverter.flatten;
import static org.dependencytrack.proto.repometaanalysis.v1.FetchMeta.FETCH_META_INTEGRITY_DATA_AND_LATEST_VERSION;
import static org.dependencytrack.proto.repometaanalysis.v1.FetchMeta.FETCH_META_LATEST_VERSION;
import static org.dependencytrack.util.LockProvider.executeWithLockWaiting;
import static org.dependencytrack.util.PersistenceUtil.applyIfChanged;
import static org.dependencytrack.util.PersistenceUtil.assertPersistent;

/**
* Subscriber task that performs processing of bill-of-material (bom)
* when it is uploaded.
*
* @author Steve Springett
* @since 3.0.0
* @since 5.6.0
*/
public class BomUploadProcessingTask implements Subscriber {
public class BomUploadProcessor implements Processor<UUID, BomUploadedEvent> {

private static final Logger LOGGER = Logger.getLogger(BomUploadProcessor.class);

static final String PROCESSOR_NAME = "bom.upload";

private static final class Context {

@@ -139,52 +139,71 @@ private static final class Context {
private Date bomTimestamp;
private Integer bomVersion;

private Context(final UUID token, final Project project) {
private Context(final UUID token, final BomUploadedEvent.Project project) {
this.token = token;
this.project = project;
this.project = new Project();
this.project.setUuid(UUID.fromString(project.getUuid()));
this.project.setName(project.getName());
this.project.setVersion(project.getVersion());
this.bomFormat = Bom.Format.CYCLONEDX;
this.startTimeNs = System.nanoTime();
}

}

private static final Logger LOGGER = Logger.getLogger(BomUploadProcessingTask.class);

private final KafkaEventDispatcher kafkaEventDispatcher;
private final boolean delayBomProcessedNotification;

public BomUploadProcessingTask() {
public BomUploadProcessor() {
this(new KafkaEventDispatcher(), Config.getInstance().getPropertyAsBoolean(ConfigKey.TMP_DELAY_BOM_PROCESSED_NOTIFICATION));
}

BomUploadProcessingTask(final KafkaEventDispatcher kafkaEventDispatcher, final boolean delayBomProcessedNotification) {
BomUploadProcessor(final KafkaEventDispatcher kafkaEventDispatcher, final boolean delayBomProcessedNotification) {
this.kafkaEventDispatcher = kafkaEventDispatcher;
this.delayBomProcessedNotification = delayBomProcessedNotification;
}

/**
* {@inheritDoc}
*/
public void inform(final Event e) {
if (!(e instanceof final BomUploadEvent event)) {
return;
@Override
public void process(final ConsumerRecord<UUID, BomUploadedEvent> record) throws ProcessingException {
final BomUploadedEvent event = record.value();

final BomUploadStorageProvider storageProvider;
try (final var qm = new QueryManager()) {
final ConfigProperty storageProviderProperty = qm.getConfigProperty(
BOM_UPLOAD_STORAGE_PROVIDER.getGroupName(),
BOM_UPLOAD_STORAGE_PROVIDER.getPropertyName()
);
final String storageProviderClassName = storageProviderProperty != null
? storageProviderProperty.getPropertyValue()
: BOM_UPLOAD_STORAGE_PROVIDER.getDefaultPropertyValue();
storageProvider = BomUploadStorageProvider.getForClassName(storageProviderClassName);
}

final var ctx = new Context(event.getChainIdentifier(), event.getProject());
final var ctx = new Context(UUID.fromString(event.getToken()), event.getProject());
try (var ignoredMdcProjectUuid = MDC.putCloseable(MDC_PROJECT_UUID, ctx.project.getUuid().toString());
var ignoredMdcProjectName = MDC.putCloseable(MDC_PROJECT_NAME, ctx.project.getName());
var ignoredMdcProjectVersion = MDC.putCloseable(MDC_PROJECT_VERSION, ctx.project.getVersion());
var ignoredMdcBomUploadToken = MDC.putCloseable(MDC_BOM_UPLOAD_TOKEN, ctx.token.toString())) {
processEvent(ctx, event);
processEvent(ctx, storageProvider);
} finally {
try {
storageProvider.deleteBomByToken(ctx.token);
} catch (IOException | RuntimeException e) {
LOGGER.warn("Failed to delete BOM from storage", e);
}
}
}

private void processEvent(final Context ctx, final BomUploadEvent event) {
private void processEvent(final Context ctx, final BomUploadStorageProvider storageProvider) {
startBomConsumptionWorkflowStep(ctx);

final ConsumedBom consumedBom;
try (final var bomFileInputStream = Files.newInputStream(event.getFile().toPath(), StandardOpenOption.DELETE_ON_CLOSE)) {
final byte[] cdxBomBytes = bomFileInputStream.readAllBytes();
try {
final byte[] cdxBomBytes = storageProvider.getBomByToken(ctx.token);
if (cdxBomBytes == null) {
LOGGER.warn("No BOM found in storage");
return;
}

final Parser parser = BomParserFactory.createParser(cdxBomBytes);
final org.cyclonedx.model.Bom cdxBom = parser.parse(cdxBomBytes);

@@ -199,6 +218,7 @@ private void processEvent(final Context ctx, final BomUploadEvent event) {

consumedBom = consumeBom(cdxBom);
} catch (IOException | ParseException | RuntimeException e) {
LOGGER.error("Failed to consume BOM", e);
failWorkflowStepAndCancelDescendants(ctx, WorkflowStep.BOM_CONSUMPTION, e);
dispatchBomProcessingFailedNotification(ctx, e);
return;
@@ -212,11 +232,9 @@
var ignoredMdcBomSpecVersion = MDC.putCloseable(MDC_BOM_SPEC_VERSION, ctx.bomSpecVersion);
var ignoredMdcBomSerialNumber = MDC.putCloseable(MDC_BOM_SERIAL_NUMBER, ctx.bomSerialNumber);
var ignoredMdcBomVersion = MDC.putCloseable(MDC_BOM_VERSION, String.valueOf(ctx.bomVersion))) {
// Prevent BOMs for the same project to be processed concurrently.
// Note that this is an edge case, we're not expecting any lock waits under normal circumstances.
final WaitingLockConfiguration lockConfiguration = createLockConfiguration(ctx);
processedBom = executeWithLockWaiting(lockConfiguration, () -> processBom(ctx, consumedBom));
processedBom = processBom(ctx, consumedBom);
} catch (Throwable e) {
LOGGER.error("Failed to process BOM", e);
failWorkflowStepAndCancelDescendants(ctx, WorkflowStep.BOM_PROCESSING, e);
dispatchBomProcessingFailedNotification(ctx, e);
return;
@@ -949,7 +967,7 @@ private List<CompletableFuture<?>> initiateVulnerabilityAnalysis(
// Requires a bit of refactoring in QueryManager#createVulnerabilityScan.

qm.createVulnerabilityScan(
TargetType.PROJECT,
VulnerabilityScan.TargetType.PROJECT,
ctx.project.getUuid(),
ctx.token.toString(),
events.size()
@@ -1101,15 +1119,4 @@ private static boolean prepareIntegrityMetaComponent(final QueryManager qm, fina
return false;
}

private static WaitingLockConfiguration createLockConfiguration(final Context ctx) {
return new WaitingLockConfiguration(
/* createdAt */ Instant.now(),
/* name */ "%s-%s".formatted(BomUploadProcessingTask.class.getSimpleName(), ctx.project.getUuid()),
/* lockAtMostFor */ Duration.ofMinutes(5),
/* lockAtLeastFor */ Duration.ZERO,
/* pollInterval */ Duration.ofMillis(100),
/* waitTimeout */ Duration.ofMinutes(5)
);
}

}
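Two things stand out in the new processor. First, the storage provider is resolved per event from the `BOM_UPLOAD_STORAGE_PROVIDER` config property, and the stored BOM is deleted in the `finally` block regardless of outcome. A minimal in-memory provider, inferred from the calls in this diff (`getBomByToken`, `deleteBomByToken`; the store method and exact interface signatures are assumptions):

```java
import org.dependencytrack.storage.BomUploadStorageProvider;

import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative only; a real provider would persist to a database or object
// store so that any API server instance can serve the processor's reads.
class InMemoryBomUploadStorageProvider implements BomUploadStorageProvider {

    private final Map<UUID, byte[]> bomsByToken = new ConcurrentHashMap<>();

    public void storeBomByToken(final UUID token, final byte[] bom) { // assumed method
        bomsByToken.put(token, bom);
    }

    @Override
    public byte[] getBomByToken(final UUID token) {
        return bomsByToken.get(token); // null signals "no BOM found in storage"
    }

    @Override
    public void deleteBomByToken(final UUID token) {
        bomsByToken.remove(token);
    }
}
```

Second, the `WaitingLockConfiguration` / `executeWithLockWaiting` guard is gone. Since the Kafka record is keyed by project UUID, all uploads for a project hash to the same partition and are consumed sequentially, which appears to make the previous per-project lock redundant.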
src/main/java/org/dependencytrack/event/kafka/processor/ProcessorInitializer.java
@@ -35,6 +35,8 @@ public class ProcessorInitializer implements ServletContextListener {
public void contextInitialized(final ServletContextEvent event) {
LOGGER.info("Initializing processors");

PROCESSOR_MANAGER.registerProcessor(BomUploadProcessor.PROCESSOR_NAME,
KafkaTopics.EVENT_BOM_UPLOADED, new BomUploadProcessor());
PROCESSOR_MANAGER.registerProcessor(VulnerabilityMirrorProcessor.PROCESSOR_NAME,
KafkaTopics.NEW_VULNERABILITY, new VulnerabilityMirrorProcessor());
PROCESSOR_MANAGER.registerProcessor(RepositoryMetaResultProcessor.PROCESSOR_NAME,
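Registration pairs a processor name, the topic it consumes, and a processor instance. Going by this diff, the `Processor` contract is a single callback per decoded record; a sketch of a custom processor under that assumption:

```java
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.dependencytrack.event.kafka.processor.api.Processor;
import org.dependencytrack.event.kafka.processor.exception.ProcessingException;
import org.dependencytrack.proto.event.v1alpha1.BomUploadedEvent;

import java.util.UUID;

// Records arrive already deserialized according to the topic's serdes;
// failures are surfaced as ProcessingException, as in BomUploadProcessor.
class LoggingBomUploadProcessor implements Processor<UUID, BomUploadedEvent> {

    static final String PROCESSOR_NAME = "bom.upload.logging"; // illustrative name

    @Override
    public void process(final ConsumerRecord<UUID, BomUploadedEvent> record) throws ProcessingException {
        System.out.printf("BOM uploaded for project %s (token %s)%n",
                record.value().getProject().getUuid(), record.value().getToken());
    }
}
```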
(The remaining 16 changed files are not shown.)
