From 81206ad51686a98b27ca64da87c999ddeae8ac82 Mon Sep 17 00:00:00 2001 From: nscuro Date: Mon, 22 Jul 2024 22:56:36 +0200 Subject: [PATCH] Externalize BOM ingestion pipeline Signed-off-by: nscuro --- .../dev/DevServicesInitializer.java | 1 + .../dependencytrack/event/BomUploadEvent.java | 9 +- .../event/EventSubsystemInitializer.java | 5 +- .../event/kafka/KafkaEventConverter.java | 19 + .../event/kafka/KafkaTopics.java | 5 + .../kafka/processor/BomUploadProcessor.java} | 105 +++--- .../kafka/processor/ProcessorInitializer.java | 2 + .../model/ConfigPropertyConstants.java | 6 + .../persistence/jdbi/BomDao.java | 62 +++ .../v1/AbstractConfigPropertyResource.java | 20 + .../resources/v1/BomResource.java | 62 +-- .../storage/BomUploadStorageProvider.java | 75 ++++ .../DatabaseBomUploadStorageProvider.java | 55 +++ .../LocalBomUploadStorageProvider.java | 120 ++++++ .../event/v1alpha1/event.proto | 19 + ...encytrack.storage.BomUploadStorageProvider | 2 + src/main/resources/application.properties | 42 +++ .../resources/migration/changelog-main.xml | 1 + .../resources/migration/changelog-v5.6.0.xml | 25 ++ .../event/BomUploadEventTest.java | 7 +- .../processor/BomUploadProcessorTest.java} | 355 +++++++++--------- .../policy/cel/CelPolicyEngineTest.java | 45 --- .../v1/ConfigPropertyResourceTest.java | 94 ++++- 23 files changed, 811 insertions(+), 325 deletions(-) rename src/main/java/org/dependencytrack/{tasks/BomUploadProcessingTask.java => event/kafka/processor/BomUploadProcessor.java} (95%) create mode 100644 src/main/java/org/dependencytrack/persistence/jdbi/BomDao.java create mode 100644 src/main/java/org/dependencytrack/storage/BomUploadStorageProvider.java create mode 100644 src/main/java/org/dependencytrack/storage/DatabaseBomUploadStorageProvider.java create mode 100644 src/main/java/org/dependencytrack/storage/LocalBomUploadStorageProvider.java create mode 100644 src/main/proto/org/dependencytrack/event/v1alpha1/event.proto create mode 100644 src/main/resources/META-INF/services/org.dependencytrack.storage.BomUploadStorageProvider create mode 100644 src/main/resources/migration/changelog-v5.6.0.xml rename src/test/java/org/dependencytrack/{tasks/BomUploadProcessingTaskTest.java => event/kafka/processor/BomUploadProcessorTest.java} (82%) diff --git a/src/main/java/org/dependencytrack/dev/DevServicesInitializer.java b/src/main/java/org/dependencytrack/dev/DevServicesInitializer.java index 486993664..95d9e27f0 100644 --- a/src/main/java/org/dependencytrack/dev/DevServicesInitializer.java +++ b/src/main/java/org/dependencytrack/dev/DevServicesInitializer.java @@ -150,6 +150,7 @@ public void contextInitialized(final ServletContextEvent event) { } final var topicsToCreate = new ArrayList<>(List.of( + new NewTopic(KafkaTopics.EVENT_BOM_UPLOADED.name(), 1, (short) 1), new NewTopic(KafkaTopics.NEW_EPSS.name(), 1, (short) 1).configs(Map.of(CLEANUP_POLICY_CONFIG, CLEANUP_POLICY_COMPACT)), new NewTopic(KafkaTopics.NEW_VULNERABILITY.name(), 1, (short) 1).configs(Map.of(CLEANUP_POLICY_CONFIG, CLEANUP_POLICY_COMPACT)), new NewTopic(KafkaTopics.NOTIFICATION_ANALYZER.name(), 1, (short) 1), diff --git a/src/main/java/org/dependencytrack/event/BomUploadEvent.java b/src/main/java/org/dependencytrack/event/BomUploadEvent.java index fdf7f7e8b..a772c1b89 100644 --- a/src/main/java/org/dependencytrack/event/BomUploadEvent.java +++ b/src/main/java/org/dependencytrack/event/BomUploadEvent.java @@ -21,8 +21,6 @@ import alpine.event.framework.AbstractChainableEvent; import org.dependencytrack.model.Project; 
-import java.io.File; - /** * Defines an event triggered when a bill-of-material (bom) document is submitted. * @@ -32,18 +30,13 @@ public class BomUploadEvent extends AbstractChainableEvent { private final Project project; - private final File file; - public BomUploadEvent(final Project project, final File file) { + public BomUploadEvent(final Project project) { this.project = project; - this.file = file; } public Project getProject() { return project; } - public File getFile() { - return file; - } } diff --git a/src/main/java/org/dependencytrack/event/EventSubsystemInitializer.java b/src/main/java/org/dependencytrack/event/EventSubsystemInitializer.java index ee170d5ce..0b6f4b6af 100644 --- a/src/main/java/org/dependencytrack/event/EventSubsystemInitializer.java +++ b/src/main/java/org/dependencytrack/event/EventSubsystemInitializer.java @@ -23,7 +23,6 @@ import alpine.event.LdapSyncEvent; import alpine.event.framework.EventService; import org.dependencytrack.common.ConfigKey; -import org.dependencytrack.tasks.BomUploadProcessingTask; import org.dependencytrack.tasks.CallbackTask; import org.dependencytrack.tasks.CloneProjectTask; import org.dependencytrack.tasks.DefectDojoUploadTask; @@ -42,12 +41,12 @@ import org.dependencytrack.tasks.TaskScheduler; import org.dependencytrack.tasks.VexUploadProcessingTask; import org.dependencytrack.tasks.VulnerabilityAnalysisTask; -import org.dependencytrack.tasks.vulnerabilitypolicy.VulnerabilityPolicyFetchTask; import org.dependencytrack.tasks.VulnerabilityScanCleanupTask; import org.dependencytrack.tasks.WorkflowStateCleanupTask; import org.dependencytrack.tasks.metrics.PortfolioMetricsUpdateTask; import org.dependencytrack.tasks.metrics.ProjectMetricsUpdateTask; import org.dependencytrack.tasks.metrics.VulnerabilityMetricsUpdateTask; +import org.dependencytrack.tasks.vulnerabilitypolicy.VulnerabilityPolicyFetchTask; import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; @@ -75,7 +74,6 @@ public class EventSubsystemInitializer implements ServletContextListener { public void contextInitialized(final ServletContextEvent event) { LOGGER.info("Initializing asynchronous event subsystem"); - EVENT_SERVICE.subscribe(BomUploadEvent.class, BomUploadProcessingTask.class); EVENT_SERVICE.subscribe(VexUploadEvent.class, VexUploadProcessingTask.class); EVENT_SERVICE.subscribe(LdapSyncEvent.class, LdapSyncTaskWrapper.class); EVENT_SERVICE.subscribe(GitHubAdvisoryMirrorEvent.class, GitHubAdvisoryMirrorTask.class); @@ -114,7 +112,6 @@ public void contextDestroyed(final ServletContextEvent event) { LOGGER.info("Shutting down asynchronous event subsystem"); TaskScheduler.getInstance().shutdown(); - EVENT_SERVICE.unsubscribe(BomUploadProcessingTask.class); EVENT_SERVICE.unsubscribe(VexUploadProcessingTask.class); EVENT_SERVICE.unsubscribe(LdapSyncTaskWrapper.class); EVENT_SERVICE.unsubscribe(GitHubAdvisoryMirrorTask.class); diff --git a/src/main/java/org/dependencytrack/event/kafka/KafkaEventConverter.java b/src/main/java/org/dependencytrack/event/kafka/KafkaEventConverter.java index e2cd33e17..3e4dab7c8 100644 --- a/src/main/java/org/dependencytrack/event/kafka/KafkaEventConverter.java +++ b/src/main/java/org/dependencytrack/event/kafka/KafkaEventConverter.java @@ -21,6 +21,7 @@ import alpine.event.framework.Event; import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.Message; +import org.dependencytrack.event.BomUploadEvent; import org.dependencytrack.event.ComponentRepositoryMetaAnalysisEvent; import 
org.dependencytrack.event.ComponentVulnerabilityAnalysisEvent; import org.dependencytrack.event.EpssMirrorEvent; @@ -30,6 +31,7 @@ import org.dependencytrack.event.kafka.KafkaTopics.Topic; import org.dependencytrack.model.Vulnerability; import org.dependencytrack.parser.dependencytrack.NotificationModelConverter; +import org.dependencytrack.proto.event.v1alpha1.BomUploadedEvent; import org.dependencytrack.proto.notification.v1.BomConsumedOrProcessedSubject; import org.dependencytrack.proto.notification.v1.BomProcessingFailedSubject; import org.dependencytrack.proto.notification.v1.NewVulnerabilitySubject; @@ -67,6 +69,7 @@ private KafkaEventConverter() { static KafkaEvent<?, ?> convert(final Event event) { return switch (event) { + case BomUploadEvent e -> convert(e); case ComponentRepositoryMetaAnalysisEvent e -> convert(e); case ComponentVulnerabilityAnalysisEvent e -> convert(e); case GitHubAdvisoryMirrorEvent e -> convert(e); @@ -104,6 +107,22 @@ private KafkaEventConverter() { return kafkaEvents; } + static KafkaEvent<UUID, BomUploadedEvent> convert(final BomUploadEvent event) { + final BomUploadedEvent.Project.Builder projectBuilder = BomUploadedEvent.Project.newBuilder() + .setUuid(event.getProject().getUuid().toString()) + .setName(event.getProject().getName()); + Optional.ofNullable(event.getProject().getVersion()).ifPresent(projectBuilder::setVersion); + + return new KafkaEvent<>( + KafkaTopics.EVENT_BOM_UPLOADED, + event.getProject().getUuid(), + BomUploadedEvent.newBuilder() + .setToken(event.getChainIdentifier().toString()) + .setProject(projectBuilder) + .build() + ); + } + static KafkaEvent<ScanKey, ScanCommand> convert(final ComponentVulnerabilityAnalysisEvent event) { final var componentBuilder = Component.newBuilder() .setUuid(event.uuid().toString()); diff --git a/src/main/java/org/dependencytrack/event/kafka/KafkaTopics.java b/src/main/java/org/dependencytrack/event/kafka/KafkaTopics.java index f0356e458..ebc1188fc 100644 --- a/src/main/java/org/dependencytrack/event/kafka/KafkaTopics.java +++ b/src/main/java/org/dependencytrack/event/kafka/KafkaTopics.java @@ -24,6 +24,7 @@ import org.cyclonedx.proto.v1_6.Bom; import org.dependencytrack.common.ConfigKey; import org.dependencytrack.event.kafka.serialization.KafkaProtobufSerde; +import org.dependencytrack.proto.event.v1alpha1.BomUploadedEvent; import org.dependencytrack.proto.mirror.v1.EpssItem; import org.dependencytrack.proto.notification.v1.Notification; import org.dependencytrack.proto.repometaanalysis.v1.AnalysisCommand; @@ -32,8 +33,11 @@ import org.dependencytrack.proto.vulnanalysis.v1.ScanKey; import org.dependencytrack.proto.vulnanalysis.v1.ScanResult; +import java.util.UUID; + public final class KafkaTopics { + public static final Topic<UUID, BomUploadedEvent> EVENT_BOM_UPLOADED; public static final Topic<String, Notification> NOTIFICATION_ANALYZER; public static final Topic<String, Notification> NOTIFICATION_BOM; public static final Topic<String, Notification> NOTIFICATION_CONFIGURATION; @@ -61,6 +65,7 @@ public final class KafkaTopics { private static final Serde<Notification> NOTIFICATION_SERDE = new KafkaProtobufSerde<>(Notification.parser()); static { + EVENT_BOM_UPLOADED = new Topic<>("dtrack.event.bom-uploaded", Serdes.UUID(), new KafkaProtobufSerde<>(BomUploadedEvent.parser())); NOTIFICATION_ANALYZER = new Topic<>("dtrack.notification.analyzer", Serdes.String(), NOTIFICATION_SERDE); NOTIFICATION_BOM = new Topic<>("dtrack.notification.bom", Serdes.String(), NOTIFICATION_SERDE); NOTIFICATION_CONFIGURATION = new Topic<>("dtrack.notification.configuration", Serdes.String(), NOTIFICATION_SERDE);
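For orientation, a minimal sketch of how a payload on the new dtrack.event.bom-uploaded topic round-trips through the serde wiring declared above: the key travels as a UUID via Serdes.UUID(), the value as protobuf. Standalone usage shown here, with made-up project values:

    import org.dependencytrack.event.kafka.serialization.KafkaProtobufSerde;
    import org.dependencytrack.proto.event.v1alpha1.BomUploadedEvent;

    import java.util.UUID;

    class BomUploadedEventRoundTrip {

        public static void main(String[] args) {
            // Build the event the same way KafkaEventConverter#convert(BomUploadEvent) does.
            final BomUploadedEvent event = BomUploadedEvent.newBuilder()
                    .setToken(UUID.randomUUID().toString()) // workflow chain identifier
                    .setProject(BomUploadedEvent.Project.newBuilder()
                            .setUuid(UUID.randomUUID().toString())
                            .setName("acme-app")   // hypothetical project
                            .setVersion("1.0.0"))
                    .build();

            // Serialize and deserialize with the same serde the topic declares.
            final var serde = new KafkaProtobufSerde<>(BomUploadedEvent.parser());
            final byte[] wire = serde.serializer().serialize("dtrack.event.bom-uploaded", event);
            final BomUploadedEvent parsed = serde.deserializer().deserialize("dtrack.event.bom-uploaded", wire);
            assert parsed.getToken().equals(event.getToken());
        }
    }

diff --git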
a/src/main/java/org/dependencytrack/tasks/BomUploadProcessingTask.java b/src/main/java/org/dependencytrack/event/kafka/processor/BomUploadProcessor.java similarity index 95% rename from src/main/java/org/dependencytrack/tasks/BomUploadProcessingTask.java rename to src/main/java/org/dependencytrack/event/kafka/processor/BomUploadProcessor.java index 7dbc60a1e..4e5734d46 100644 --- a/src/main/java/org/dependencytrack/tasks/BomUploadProcessingTask.java +++ b/src/main/java/org/dependencytrack/event/kafka/processor/BomUploadProcessor.java @@ -16,32 +16,34 @@ * SPDX-License-Identifier: Apache-2.0 * Copyright (c) OWASP Foundation. All Rights Reserved. */ -package org.dependencytrack.tasks; +package org.dependencytrack.event.kafka.processor; import alpine.Config; import alpine.common.logging.Logger; import alpine.event.framework.ChainableEvent; import alpine.event.framework.Event; import alpine.event.framework.EventService; -import alpine.event.framework.Subscriber; +import alpine.model.ConfigProperty; import alpine.notification.Notification; import alpine.notification.NotificationLevel; import org.apache.commons.collections4.MultiValuedMap; import org.apache.commons.collections4.multimap.HashSetValuedHashMap; import org.apache.commons.lang3.exception.ExceptionUtils; +import org.apache.kafka.clients.consumer.ConsumerRecord; import org.cyclonedx.exception.ParseException; import org.cyclonedx.parsers.BomParserFactory; import org.cyclonedx.parsers.Parser; import org.datanucleus.flush.FlushMode; import org.datanucleus.store.query.QueryNotUniqueException; import org.dependencytrack.common.ConfigKey; -import org.dependencytrack.event.BomUploadEvent; import org.dependencytrack.event.ComponentRepositoryMetaAnalysisEvent; import org.dependencytrack.event.ComponentVulnerabilityAnalysisEvent; import org.dependencytrack.event.IntegrityAnalysisEvent; import org.dependencytrack.event.ProjectMetricsUpdateEvent; import org.dependencytrack.event.kafka.KafkaEventDispatcher; import org.dependencytrack.event.kafka.componentmeta.AbstractMetaHandler; +import org.dependencytrack.event.kafka.processor.api.Processor; +import org.dependencytrack.event.kafka.processor.exception.ProcessingException; import org.dependencytrack.model.Bom; import org.dependencytrack.model.Component; import org.dependencytrack.model.ComponentIdentity; @@ -52,7 +54,7 @@ import org.dependencytrack.model.ProjectMetadata; import org.dependencytrack.model.ServiceComponent; import org.dependencytrack.model.VulnerabilityAnalysisLevel; -import org.dependencytrack.model.VulnerabilityScan.TargetType; +import org.dependencytrack.model.VulnerabilityScan; import org.dependencytrack.model.WorkflowState; import org.dependencytrack.model.WorkflowStatus; import org.dependencytrack.model.WorkflowStep; @@ -62,8 +64,9 @@ import org.dependencytrack.notification.vo.BomConsumedOrProcessed; import org.dependencytrack.notification.vo.BomProcessingFailed; import org.dependencytrack.persistence.QueryManager; +import org.dependencytrack.proto.event.v1alpha1.BomUploadedEvent; +import org.dependencytrack.storage.BomUploadStorageProvider; import org.dependencytrack.util.InternalComponentIdentifier; -import org.dependencytrack.util.WaitingLockConfiguration; import org.json.JSONArray; import org.slf4j.MDC; @@ -71,9 +74,6 @@ import javax.jdo.PersistenceManager; import javax.jdo.Query; import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.StandardOpenOption; -import java.time.Duration; import java.time.Instant; import java.util.ArrayList; import 
java.util.Collection; @@ -107,6 +107,7 @@ import static org.dependencytrack.common.MdcKeys.MDC_PROJECT_VERSION; import static org.dependencytrack.event.kafka.componentmeta.RepoMetaConstants.SUPPORTED_PACKAGE_URLS_FOR_INTEGRITY_CHECK; import static org.dependencytrack.event.kafka.componentmeta.RepoMetaConstants.TIME_SPAN; +import static org.dependencytrack.model.ConfigPropertyConstants.BOM_UPLOAD_STORAGE_PROVIDER; import static org.dependencytrack.parser.cyclonedx.util.ModelConverter.convertComponents; import static org.dependencytrack.parser.cyclonedx.util.ModelConverter.convertDependencyGraph; import static org.dependencytrack.parser.cyclonedx.util.ModelConverter.convertServices; @@ -115,18 +116,17 @@ import static org.dependencytrack.parser.cyclonedx.util.ModelConverter.flatten; import static org.dependencytrack.proto.repometaanalysis.v1.FetchMeta.FETCH_META_INTEGRITY_DATA_AND_LATEST_VERSION; import static org.dependencytrack.proto.repometaanalysis.v1.FetchMeta.FETCH_META_LATEST_VERSION; -import static org.dependencytrack.util.LockProvider.executeWithLockWaiting; import static org.dependencytrack.util.PersistenceUtil.applyIfChanged; import static org.dependencytrack.util.PersistenceUtil.assertPersistent; /** - * Subscriber task that performs processing of bill-of-material (bom) - * when it is uploaded. - * - * @author Steve Springett - * @since 3.0.0 + * @since 5.6.0 */ -public class BomUploadProcessingTask implements Subscriber { +public class BomUploadProcessor implements Processor<UUID, BomUploadedEvent> { + + private static final Logger LOGGER = Logger.getLogger(BomUploadProcessor.class); + + static final String PROCESSOR_NAME = "bom.upload"; private static final class Context { @@ -139,52 +139,71 @@ private static final class Context { private Date bomTimestamp; private Integer bomVersion; - private Context(final UUID token, final Project project) { + private Context(final UUID token, final BomUploadedEvent.Project project) { this.token = token; - this.project = project; + this.project = new Project(); + this.project.setUuid(UUID.fromString(project.getUuid())); + this.project.setName(project.getName()); + this.project.setVersion(project.getVersion()); this.bomFormat = Bom.Format.CYCLONEDX; this.startTimeNs = System.nanoTime(); } } - private static final Logger LOGGER = Logger.getLogger(BomUploadProcessingTask.class); - private final KafkaEventDispatcher kafkaEventDispatcher; private final boolean delayBomProcessedNotification; - public BomUploadProcessingTask() { + public BomUploadProcessor() { this(new KafkaEventDispatcher(), Config.getInstance().getPropertyAsBoolean(ConfigKey.TMP_DELAY_BOM_PROCESSED_NOTIFICATION)); } - - BomUploadProcessingTask(final KafkaEventDispatcher kafkaEventDispatcher, final boolean delayBomProcessedNotification) { + BomUploadProcessor(final KafkaEventDispatcher kafkaEventDispatcher, final boolean delayBomProcessedNotification) { this.kafkaEventDispatcher = kafkaEventDispatcher; this.delayBomProcessedNotification = delayBomProcessedNotification; } - /** - * {@inheritDoc} - */ - public void inform(final Event e) { - if (!(e instanceof final BomUploadEvent event)) { - return; + @Override + public void process(final ConsumerRecord<UUID, BomUploadedEvent> record) throws ProcessingException { + final BomUploadedEvent event = record.value(); + + final BomUploadStorageProvider storageProvider; + try (final var qm = new QueryManager()) { + final ConfigProperty storageProviderProperty = qm.getConfigProperty( + BOM_UPLOAD_STORAGE_PROVIDER.getGroupName(), + BOM_UPLOAD_STORAGE_PROVIDER.getPropertyName() + ); + final
String storageProviderClassName = storageProviderProperty != null + ? storageProviderProperty.getPropertyValue() + : BOM_UPLOAD_STORAGE_PROVIDER.getDefaultPropertyValue(); + storageProvider = BomUploadStorageProvider.getForClassName(storageProviderClassName); } - final var ctx = new Context(event.getChainIdentifier(), event.getProject()); + final var ctx = new Context(UUID.fromString(event.getToken()), event.getProject()); try (var ignoredMdcProjectUuid = MDC.putCloseable(MDC_PROJECT_UUID, ctx.project.getUuid().toString()); var ignoredMdcProjectName = MDC.putCloseable(MDC_PROJECT_NAME, ctx.project.getName()); var ignoredMdcProjectVersion = MDC.putCloseable(MDC_PROJECT_VERSION, ctx.project.getVersion()); var ignoredMdcBomUploadToken = MDC.putCloseable(MDC_BOM_UPLOAD_TOKEN, ctx.token.toString())) { - processEvent(ctx, event); + processEvent(ctx, storageProvider); + } finally { + try { + storageProvider.deleteBomByToken(ctx.token); + } catch (IOException | RuntimeException e) { + LOGGER.warn("Failed to delete BOM from storage", e); + } } } - private void processEvent(final Context ctx, final BomUploadEvent event) { + private void processEvent(final Context ctx, final BomUploadStorageProvider storageProvider) { startBomConsumptionWorkflowStep(ctx); final ConsumedBom consumedBom; - try (final var bomFileInputStream = Files.newInputStream(event.getFile().toPath(), StandardOpenOption.DELETE_ON_CLOSE)) { - final byte[] cdxBomBytes = bomFileInputStream.readAllBytes(); + try { + final byte[] cdxBomBytes = storageProvider.getBomByToken(ctx.token); + if (cdxBomBytes == null) { + LOGGER.warn("No BOM found in storage"); + return; + } + final Parser parser = BomParserFactory.createParser(cdxBomBytes); final org.cyclonedx.model.Bom cdxBom = parser.parse(cdxBomBytes); @@ -199,6 +218,7 @@ private void processEvent(final Context ctx, final BomUploadEvent event) { consumedBom = consumeBom(cdxBom); } catch (IOException | ParseException | RuntimeException e) { + LOGGER.error("Failed to consume BOM", e); failWorkflowStepAndCancelDescendants(ctx, WorkflowStep.BOM_CONSUMPTION, e); dispatchBomProcessingFailedNotification(ctx, e); return; @@ -212,11 +232,9 @@ private void processEvent(final Context ctx, final BomUploadEvent event) { var ignoredMdcBomSpecVersion = MDC.putCloseable(MDC_BOM_SPEC_VERSION, ctx.bomSpecVersion); var ignoredMdcBomSerialNumber = MDC.putCloseable(MDC_BOM_SERIAL_NUMBER, ctx.bomSerialNumber); var ignoredMdcBomVersion = MDC.putCloseable(MDC_BOM_VERSION, String.valueOf(ctx.bomVersion))) { - // Prevent BOMs for the same project to be processed concurrently. - // Note that this is an edge case, we're not expecting any lock waits under normal circumstances. - final WaitingLockConfiguration lockConfiguration = createLockConfiguration(ctx); - processedBom = executeWithLockWaiting(lockConfiguration, () -> processBom(ctx, consumedBom)); + processedBom = processBom(ctx, consumedBom); } catch (Throwable e) { + LOGGER.error("Failed to process BOM", e); failWorkflowStepAndCancelDescendants(ctx, WorkflowStep.BOM_PROCESSING, e); dispatchBomProcessingFailedNotification(ctx, e); return; @@ -949,7 +967,7 @@ private List> initiateVulnerabilityAnalysis( // Requires a bit of refactoring in QueryManager#createVulnerabilityScan. 
qm.createVulnerabilityScan( - TargetType.PROJECT, + VulnerabilityScan.TargetType.PROJECT, ctx.project.getUuid(), ctx.token.toString(), events.size() @@ -1101,15 +1119,4 @@ private static boolean prepareIntegrityMetaComponent(final QueryManager qm, fina return false; } - private static WaitingLockConfiguration createLockConfiguration(final Context ctx) { - return new WaitingLockConfiguration( - /* createdAt */ Instant.now(), - /* name */ "%s-%s".formatted(BomUploadProcessingTask.class.getSimpleName(), ctx.project.getUuid()), - /* lockAtMostFor */ Duration.ofMinutes(5), - /* lockAtLeastFor */ Duration.ZERO, - /* pollInterval */ Duration.ofMillis(100), - /* waitTimeout */ Duration.ofMinutes(5) - ); - } - } diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessorInitializer.java b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessorInitializer.java index 2f6738a3f..2f85c1ae3 100644 --- a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessorInitializer.java +++ b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessorInitializer.java @@ -35,6 +35,8 @@ public class ProcessorInitializer implements ServletContextListener { public void contextInitialized(final ServletContextEvent event) { LOGGER.info("Initializing processors"); + PROCESSOR_MANAGER.registerProcessor(BomUploadProcessor.PROCESSOR_NAME, + KafkaTopics.EVENT_BOM_UPLOADED, new BomUploadProcessor()); PROCESSOR_MANAGER.registerProcessor(VulnerabilityMirrorProcessor.PROCESSOR_NAME, KafkaTopics.NEW_VULNERABILITY, new VulnerabilityMirrorProcessor()); PROCESSOR_MANAGER.registerProcessor(RepositoryMetaResultProcessor.PROCESSOR_NAME, diff --git a/src/main/java/org/dependencytrack/model/ConfigPropertyConstants.java b/src/main/java/org/dependencytrack/model/ConfigPropertyConstants.java index 29b9d9864..9e8ff1f85 100644 --- a/src/main/java/org/dependencytrack/model/ConfigPropertyConstants.java +++ b/src/main/java/org/dependencytrack/model/ConfigPropertyConstants.java @@ -20,10 +20,13 @@ import alpine.model.IConfigProperty; import alpine.model.IConfigProperty.PropertyType; +import com.github.luben.zstd.Zstd; import org.apache.commons.lang3.SystemUtils; +import org.dependencytrack.storage.DatabaseBomUploadStorageProvider; import java.util.Arrays; import java.util.UUID; +import java.util.concurrent.TimeUnit; public enum ConfigPropertyConstants { @@ -68,6 +71,9 @@ public enum ConfigPropertyConstants { VULNERABILITY_SOURCE_EPSS_FEEDS_URL("vuln-source", "epss.feeds.url", "https://epss.cyentia.com", PropertyType.URL, "A base URL pointing to the hostname and path of the EPSS feeds", ConfigPropertyAccessMode.READ_WRITE), ACCEPT_ARTIFACT_CYCLONEDX("artifact", "cyclonedx.enabled", "true", PropertyType.BOOLEAN, "Flag to enable/disable the systems ability to accept CycloneDX uploads", ConfigPropertyAccessMode.READ_WRITE), BOM_VALIDATION_ENABLED("artifact", "bom.validation.enabled", "true", PropertyType.BOOLEAN, "Flag to control bom validation", ConfigPropertyAccessMode.READ_WRITE), + BOM_UPLOAD_STORAGE_PROVIDER("artifact", "bom.upload.storage.provider", DatabaseBomUploadStorageProvider.class.getName(), PropertyType.STRING, "", ConfigPropertyAccessMode.READ_WRITE), + BOM_UPLOAD_STORAGE_COMPRESSION_LEVEL("artifact", "bom.upload.storage.compression.level", String.valueOf(Zstd.defaultCompressionLevel()), PropertyType.INTEGER, "", ConfigPropertyAccessMode.READ_WRITE), + BOM_UPLOAD_STORAGE_RETENTION_MS("artifact", "bom.upload.storage.retention.ms", String.valueOf(TimeUnit.HOURS.toMillis(1)), 
PropertyType.INTEGER, "", ConfigPropertyAccessMode.READ_WRITE), FORTIFY_SSC_ENABLED("integrations", "fortify.ssc.enabled", "false", PropertyType.BOOLEAN, "Flag to enable/disable Fortify SSC integration", ConfigPropertyAccessMode.READ_WRITE), FORTIFY_SSC_SYNC_CADENCE("integrations", "fortify.ssc.sync.cadence", "60", PropertyType.INTEGER, "The cadence (in minutes) to upload to Fortify SSC", ConfigPropertyAccessMode.READ_WRITE), FORTIFY_SSC_URL("integrations", "fortify.ssc.url", null, PropertyType.URL, "Base URL to Fortify SSC", ConfigPropertyAccessMode.READ_WRITE), diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/BomDao.java b/src/main/java/org/dependencytrack/persistence/jdbi/BomDao.java new file mode 100644 index 000000000..684437f45 --- /dev/null +++ b/src/main/java/org/dependencytrack/persistence/jdbi/BomDao.java @@ -0,0 +1,62 @@ +/* + * This file is part of Dependency-Track. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) OWASP Foundation. All Rights Reserved. + */ +package org.dependencytrack.persistence.jdbi; + +import org.jdbi.v3.sqlobject.SingleValue; +import org.jdbi.v3.sqlobject.customizer.Bind; +import org.jdbi.v3.sqlobject.statement.SqlQuery; +import org.jdbi.v3.sqlobject.statement.SqlUpdate; + +import java.time.Duration; +import java.util.UUID; + +/** + * @since 5.6.0 + */ +public interface BomDao { + + @SqlUpdate(""" + INSERT INTO "BOM_UPLOAD" ("TOKEN", "UPLOADED_AT", "BOM") + VALUES (:token, NOW(), :bomBytes) + """) + void createUpload(@Bind UUID token, @Bind byte[] bomBytes); + + @SqlQuery(""" + SELECT "BOM" + FROM "BOM_UPLOAD" + WHERE "TOKEN" = :token + """) + @SingleValue + byte[] getUploadByToken(@Bind UUID token); + + @SqlUpdate(""" + DELETE + FROM "BOM_UPLOAD" + WHERE "TOKEN" = :token + """) + boolean deleteUploadByToken(@Bind UUID token); + + @SqlUpdate(""" + DELETE + FROM "BOM_UPLOAD" + WHERE "UPLOADED_AT" < (NOW() - :duration) + """) + int deleteAllUploadsForRetentionDuration(@Bind Duration duration); + +}
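Since BomDao is new, a short usage sketch: retention-based cleanup is presumably driven by a periodic job honoring bom.upload.storage.retention.ms. The job class below is hypothetical; the JdbiFactory helper and DAO method are the ones introduced in this patch:

    import org.dependencytrack.persistence.jdbi.BomDao;

    import java.time.Duration;

    import static org.dependencytrack.persistence.jdbi.JdbiFactory.inJdbiTransaction;

    class BomUploadRetentionJob { // hypothetical scheduled job

        void runCleanup() {
            // Default retention mirrors BOM_UPLOAD_STORAGE_RETENTION_MS (1 hour).
            final Duration retention = Duration.ofHours(1);
            final int deleted = inJdbiTransaction(handle ->
                    handle.attach(BomDao.class).deleteAllUploadsForRetentionDuration(retention));
            // 'deleted' is the number of expired BOM_UPLOAD rows removed.
        }
    }

diff --git a/src/main/java/org/dependencytrack/resources/v1/AbstractConfigPropertyResource.java b/src/main/java/org/dependencytrack/resources/v1/AbstractConfigPropertyResource.java index 45a7c7027..b35a676dd 100644 --- a/src/main/java/org/dependencytrack/resources/v1/AbstractConfigPropertyResource.java +++ b/src/main/java/org/dependencytrack/resources/v1/AbstractConfigPropertyResource.java @@ -24,9 +24,11 @@ import alpine.model.IConfigProperty; import alpine.security.crypto.DataEncryption; import alpine.server.resources.AlpineResource; +import com.github.luben.zstd.Zstd; import org.dependencytrack.model.ConfigPropertyAccessMode; import org.dependencytrack.model.ConfigPropertyConstants; import org.dependencytrack.persistence.QueryManager; +import org.dependencytrack.storage.BomUploadStorageProvider; import org.owasp.security.logging.SecurityMarkers; import javax.ws.rs.core.Response; @@ -65,6 +67,24 @@ private Response updatePropertyValueInternal(IConfigProperty json, IConfigProperty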
.build(); } + if (wellKnownProperty == ConfigPropertyConstants.BOM_UPLOAD_STORAGE_PROVIDER + && !BomUploadStorageProvider.exists(json.getPropertyValue())) { + return Response + .status(Response.Status.BAD_REQUEST) + .entity("%s is not a known storage provider".formatted(json.getPropertyValue())) + .build(); + } else if (wellKnownProperty == ConfigPropertyConstants.BOM_UPLOAD_STORAGE_COMPRESSION_LEVEL + && json.getPropertyValue() != null) { + final int compressionLevel = Integer.parseInt(json.getPropertyValue()); + if (compressionLevel < Zstd.minCompressionLevel() || compressionLevel > Zstd.maxCompressionLevel()) { + return Response + .status(Response.Status.BAD_REQUEST) + .entity("Compression level %d is out of the valid [%d..%d] range" + .formatted(compressionLevel, Zstd.minCompressionLevel(), Zstd.maxCompressionLevel())) + .build(); + } + } + if (property.getPropertyType() == IConfigProperty.PropertyType.BOOLEAN) { boolean propertyValue = BooleanUtil.valueOf(json.getPropertyValue()); if (ConfigPropertyConstants.CUSTOM_RISK_SCORE_HISTORY_ENABLED.getPropertyName().equals(json.getPropertyName())){ diff --git a/src/main/java/org/dependencytrack/resources/v1/BomResource.java b/src/main/java/org/dependencytrack/resources/v1/BomResource.java index fb37f1a06..44ff105a5 100644 --- a/src/main/java/org/dependencytrack/resources/v1/BomResource.java +++ b/src/main/java/org/dependencytrack/resources/v1/BomResource.java @@ -19,7 +19,7 @@ package org.dependencytrack.resources.v1; import alpine.common.logging.Logger; -import alpine.event.framework.Event; +import alpine.model.ConfigProperty; import alpine.server.auth.PermissionRequired; import alpine.server.resources.AlpineResource; import io.swagger.annotations.Api; @@ -35,6 +35,7 @@ import org.cyclonedx.exception.GeneratorException; import org.dependencytrack.auth.Permissions; import org.dependencytrack.event.BomUploadEvent; +import org.dependencytrack.event.kafka.KafkaEventDispatcher; import org.dependencytrack.model.Component; import org.dependencytrack.model.Project; import org.dependencytrack.model.WorkflowState; @@ -49,6 +50,7 @@ import org.dependencytrack.resources.v1.vo.BomSubmitRequest; import org.dependencytrack.resources.v1.vo.BomUploadResponse; import org.dependencytrack.resources.v1.vo.IsTokenBeingProcessedResponse; +import org.dependencytrack.storage.BomUploadStorageProvider; import org.glassfish.jersey.media.multipart.BodyPartEntity; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataParam; @@ -67,17 +69,16 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.io.ByteArrayInputStream; -import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.StandardOpenOption; import java.security.Principal; import java.util.Base64; import java.util.List; import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; +import static org.dependencytrack.model.ConfigPropertyConstants.BOM_UPLOAD_STORAGE_COMPRESSION_LEVEL; +import static org.dependencytrack.model.ConfigPropertyConstants.BOM_UPLOAD_STORAGE_PROVIDER; import static org.dependencytrack.model.ConfigPropertyConstants.BOM_VALIDATION_ENABLED; /** @@ -429,21 +430,21 @@ private Response process(QueryManager qm, Project project, String encodedBomData return Response.status(Response.Status.FORBIDDEN).entity("Access to the specified project is forbidden").build(); } - final File bomFile; + final var bomUploadEvent = new 
BomUploadEvent(qm.detach(Project.class, project.getId())); try (final var encodedInputStream = new ByteArrayInputStream(encodedBomData.getBytes(StandardCharsets.UTF_8)); final var decodedInputStream = Base64.getDecoder().wrap(encodedInputStream); final var byteOrderMarkInputStream = new BOMInputStream(decodedInputStream)) { - bomFile = validateAndStoreBom(IOUtils.toByteArray(byteOrderMarkInputStream), project); + final byte[] bomBytes = IOUtils.toByteArray(byteOrderMarkInputStream); + validateAndStoreBom(qm, bomUploadEvent.getChainIdentifier(), bomBytes); } catch (IOException e) { LOGGER.error("An unexpected error occurred while validating or storing a BOM uploaded to project: " + project.getUuid(), e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build(); } - final BomUploadEvent bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), bomFile); qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - Event.dispatch(bomUploadEvent); + new KafkaEventDispatcher().dispatchEvent(bomUploadEvent)/* .join() */; - BomUploadResponse bomUploadResponse = new BomUploadResponse(); + final var bomUploadResponse = new BomUploadResponse(); bomUploadResponse.setToken(bomUploadEvent.getChainIdentifier()); return Response.ok(bomUploadResponse).build(); } else { @@ -462,21 +463,18 @@ private Response process(QueryManager qm, Project project, List<FormDataBodyPart> artifactParts) {
diff --git a/src/main/java/org/dependencytrack/storage/BomUploadStorageProvider.java b/src/main/java/org/dependencytrack/storage/BomUploadStorageProvider.java new file mode 100644 --- /dev/null +++ b/src/main/java/org/dependencytrack/storage/BomUploadStorageProvider.java @@ -0,0 +1,75 @@ +/* + * This file is part of Dependency-Track. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) OWASP Foundation. All Rights Reserved. + */ +package org.dependencytrack.storage; + +import java.io.IOException; +import java.time.Duration; +import java.util.NoSuchElementException; +import java.util.ServiceLoader; +import java.util.UUID; + +/** + * @since 5.6.0 + */ +public interface BomUploadStorageProvider { + + void storeBom(final UUID token, final byte[] bom) throws IOException; + + byte[] getBomByToken(final UUID token) throws IOException; + + boolean deleteBomByToken(final UUID token) throws IOException; + + int deleteBomsForRetentionDuration(final Duration duration) throws IOException; + + static BomUploadStorageProvider getForClassName(final String providerClassName) { + final var serviceLoader = ServiceLoader.load(BomUploadStorageProvider.class); + return serviceLoader.stream() + .filter(provider -> provider.type().getName().equals(providerClassName)) + .findFirst() + .map(ServiceLoader.Provider::get) + .orElseThrow(() -> new NoSuchElementException("%s is not a known storage provider".formatted(providerClassName))); + } + + static boolean exists(final String providerClassName) { + final var serviceLoader = ServiceLoader.load(BomUploadStorageProvider.class); + return serviceLoader.stream().anyMatch(provider -> provider.type().getName().equals(providerClassName)); + } + +}
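Because implementations are discovered through java.util.ServiceLoader, the storage backend is pluggable. A sketch of a hypothetical custom provider follows; to be selectable via bom.upload.storage.provider it would also need its fully-qualified name listed in a META-INF/services/org.dependencytrack.storage.BomUploadStorageProvider entry:

    package com.example.dtrack; // hypothetical extension package

    import org.dependencytrack.storage.BomUploadStorageProvider;

    import java.time.Duration;
    import java.util.Map;
    import java.util.UUID;
    import java.util.concurrent.ConcurrentHashMap;

    // Toy in-memory provider; a real one would persist to durable storage.
    public class InMemoryBomUploadStorageProvider implements BomUploadStorageProvider {

        private final Map<UUID, byte[]> bomsByToken = new ConcurrentHashMap<>();

        @Override
        public void storeBom(final UUID token, final byte[] bom) {
            bomsByToken.put(token, bom);
        }

        @Override
        public byte[] getBomByToken(final UUID token) {
            return bomsByToken.get(token); // null signals "not found" to the processor
        }

        @Override
        public boolean deleteBomByToken(final UUID token) {
            return bomsByToken.remove(token) != null;
        }

        @Override
        public int deleteBomsForRetentionDuration(final Duration duration) {
            return 0; // this toy example tracks no timestamps, so nothing expires
        }
    }

diff --git a/src/main/java/org/dependencytrack/storage/DatabaseBomUploadStorageProvider.java b/src/main/java/org/dependencytrack/storage/DatabaseBomUploadStorageProvider.java new file mode 100644 index 000000000..c40a3ccfc --- /dev/null +++ b/src/main/java/org/dependencytrack/storage/DatabaseBomUploadStorageProvider.java @@ -0,0 +1,55 @@ +/* + * This file is part of Dependency-Track. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) OWASP Foundation. All Rights Reserved.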
+ */ +package org.dependencytrack.storage; + +import org.dependencytrack.persistence.jdbi.BomDao; + +import java.time.Duration; +import java.util.UUID; + +import static org.dependencytrack.persistence.jdbi.JdbiFactory.inJdbiTransaction; +import static org.dependencytrack.persistence.jdbi.JdbiFactory.useJdbiTransaction; +import static org.dependencytrack.persistence.jdbi.JdbiFactory.withJdbiHandle; + +/** + * @since 5.6.0 + */ +public class DatabaseBomUploadStorageProvider implements BomUploadStorageProvider { + + @Override + public void storeBom(final UUID token, final byte[] bom) { + useJdbiTransaction(handle -> handle.attach(BomDao.class).createUpload(token, bom)); + } + + @Override + public byte[] getBomByToken(final UUID token) { + return withJdbiHandle(handle -> handle.attach(BomDao.class).getUploadByToken(token)); + } + + @Override + public boolean deleteBomByToken(final UUID token) { + return inJdbiTransaction(handle -> handle.attach(BomDao.class).deleteUploadByToken(token)); + } + + @Override + public int deleteBomsForRetentionDuration(final Duration duration) { + return inJdbiTransaction(handle -> handle.attach(BomDao.class).deleteAllUploadsForRetentionDuration(duration)); + } + +} diff --git a/src/main/java/org/dependencytrack/storage/LocalBomUploadStorageProvider.java b/src/main/java/org/dependencytrack/storage/LocalBomUploadStorageProvider.java new file mode 100644 index 000000000..8a5ac3d30 --- /dev/null +++ b/src/main/java/org/dependencytrack/storage/LocalBomUploadStorageProvider.java @@ -0,0 +1,120 @@ +/* + * This file is part of Dependency-Track. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) OWASP Foundation. All Rights Reserved. + */ +package org.dependencytrack.storage; + +import alpine.Config; +import alpine.common.logging.Logger; +import org.dependencytrack.common.ClusterInfo; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.attribute.BasicFileAttributes; +import java.time.Duration; +import java.time.Instant; +import java.util.UUID; + +/** + * @since 5.6.0 + */ +public class LocalBomUploadStorageProvider implements BomUploadStorageProvider { + + private static final Logger LOGGER = Logger.getLogger(LocalBomUploadStorageProvider.class); + + private final Path baseDirPath; + + @SuppressWarnings("unused") // Used by ServiceLoader. 
+ public LocalBomUploadStorageProvider() { + this(defaultBaseDirPath()); + } + + LocalBomUploadStorageProvider(final Path baseDirPath) { + this.baseDirPath = baseDirPath; + } + + @Override + public void storeBom(final UUID token, final byte[] bom) throws IOException { + final Path outputFilePath = baseDirPath.resolve(token.toString()); + LOGGER.info("Storing BOM at %s".formatted(outputFilePath)); + + try (final var fileOutputStream = Files.newOutputStream(outputFilePath); + final var bufferedOutputStream = new BufferedOutputStream(fileOutputStream)) { + bufferedOutputStream.write(bom); + } + } + + @Override + public byte[] getBomByToken(final UUID token) throws IOException { + final Path inputFilePath = baseDirPath.resolve(token.toString()); + LOGGER.info("Retrieving BOM from %s".formatted(inputFilePath)); + + return Files.readAllBytes(inputFilePath); + } + + @Override + public boolean deleteBomByToken(final UUID token) throws IOException { + final Path bomFilePath = baseDirPath.resolve(token.toString()); + LOGGER.info("Deleting BOM from %s".formatted(bomFilePath)); + + return Files.deleteIfExists(bomFilePath); + } + + @Override + public int deleteBomsForRetentionDuration(final Duration duration) throws IOException { + final File[] bomFiles = baseDirPath.toFile().listFiles(); + if (bomFiles == null || bomFiles.length == 0) { + return 0; + } + + final Instant retentionCutoff = Instant.now().minus(duration); + + int bomFilesDeleted = 0; + for (final File file : bomFiles) { + final Path filePath = file.toPath(); + + // TODO: Is this reliable for all filesystems? + // TODO: Is this problematic for network volumes in other timezones? + final var attributes = Files.readAttributes(filePath, BasicFileAttributes.class); + if (retentionCutoff.isAfter(attributes.lastModifiedTime().toInstant())) { + LOGGER.info("Deleting BOM from %s".formatted(filePath)); + Files.delete(filePath); + bomFilesDeleted++; + } + } + + return bomFilesDeleted; + } + + private static Path defaultBaseDirPath() { + // TODO: Use separate, operator-controllable directory. + final Path dataDirPath = Config.getInstance().getDataDirectorty().toPath(); + final Path bomUploadsDirPath = dataDirPath.resolve("bom-uploads").resolve(ClusterInfo.getClusterId()); + + try { + return Files.createDirectories(bomUploadsDirPath); + } catch (IOException e) { + throw new RuntimeException(""" + Failed to create directory for BOM upload storage at %s\ + """.formatted(bomUploadsDirPath), e); + } + } + +} diff --git a/src/main/proto/org/dependencytrack/event/v1alpha1/event.proto b/src/main/proto/org/dependencytrack/event/v1alpha1/event.proto new file mode 100644 index 000000000..3772a471c --- /dev/null +++ b/src/main/proto/org/dependencytrack/event/v1alpha1/event.proto @@ -0,0 +1,19 @@ +syntax = "proto3"; + +package org.dependencytrack.event.v1alpha1; + +option java_multiple_files = true; +option java_package = "org.dependencytrack.proto.event.v1alpha1"; + +// Notifies that a BOM was uploaded. +message BomUploadedEvent { + string token = 1; // Token (workflow chain identifier) of the upload. + Project project = 2; // Project the BOM was uploaded to. + + message Project { + string uuid = 1; // UUID of the project. + string name = 2; // Name of the project. + string version = 3; // Version of the project. + } + +} \ No newline at end of file
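Note that the BOM bytes themselves never travel over Kafka; only the token and project coordinates in the message above do, and the bytes are exchanged through the storage provider. A minimal sketch of that store/read/delete lifecycle, assuming the local provider and placeholder BOM bytes:

    import org.dependencytrack.storage.BomUploadStorageProvider;

    import java.nio.charset.StandardCharsets;
    import java.util.UUID;

    class StorageLifecycleSketch {

        public static void main(String[] args) throws Exception {
            // Resolve the provider by class name, as BomUploadProcessor does from config.
            final BomUploadStorageProvider storage = BomUploadStorageProvider.getForClassName(
                    "org.dependencytrack.storage.LocalBomUploadStorageProvider");

            final UUID token = UUID.randomUUID(); // the workflow chain identifier
            final byte[] bomBytes = "{\"bomFormat\":\"CycloneDX\"}".getBytes(StandardCharsets.UTF_8);

            storage.storeBom(token, bomBytes);                      // API server, on upload
            final byte[] retrieved = storage.getBomByToken(token);  // processor, on consume
            storage.deleteBomByToken(token);                        // processor, in finally block
        }
    }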
diff --git a/src/main/resources/META-INF/services/org.dependencytrack.storage.BomUploadStorageProvider b/src/main/resources/META-INF/services/org.dependencytrack.storage.BomUploadStorageProvider new file mode 100644 index 000000000..79dc18e13 --- /dev/null +++ b/src/main/resources/META-INF/services/org.dependencytrack.storage.BomUploadStorageProvider @@ -0,0 +1,2 @@ +org.dependencytrack.storage.DatabaseBomUploadStorageProvider +org.dependencytrack.storage.LocalBomUploadStorageProvider \ No newline at end of file
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index a4f79a217..a9e3d82cb 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -699,6 +699,48 @@ kafka.topic.prefix= # Refer to https://kafka.apache.org/documentation/#consumerconfigs for available options. # alpine.kafka.processor.<name>.consumer.<key>= +# @category: Kafka +# @type: integer +# @required +alpine.kafka.processor.bom.upload.max.concurrency=-1 + +# @category: Kafka +# @type: enum +# @valid-values: [key, partition, unordered] +# @required +alpine.kafka.processor.bom.upload.processing.order=key + +# @category: Kafka +# @type: integer +# @required +alpine.kafka.processor.bom.upload.retry.initial.delay.ms=3000 + +# @category: Kafka +# @type: integer +# @required +alpine.kafka.processor.bom.upload.retry.multiplier=2 + +# @category: Kafka +# @type: double +# @required +alpine.kafka.processor.bom.upload.retry.randomization.factor=0.3 + +# @category: Kafka +# @type: integer +# @required +alpine.kafka.processor.bom.upload.retry.max.delay.ms=180000 + +# @category: Kafka +# @type: string +# @required +alpine.kafka.processor.bom.upload.consumer.group.id=dtrack-apiserver-processor + +# @category: Kafka +# @type: enum +# @valid-values: [earliest, latest, none] +# @required +alpine.kafka.processor.bom.upload.consumer.auto.offset.reset=earliest + # @category: Kafka # @type: integer # @required
diff --git a/src/main/resources/migration/changelog-main.xml b/src/main/resources/migration/changelog-main.xml index 5e7614397..e55e42262 100644 --- a/src/main/resources/migration/changelog-main.xml +++ b/src/main/resources/migration/changelog-main.xml @@ -12,5 +12,6 @@ +    <include file="changelog-v5.6.0.xml" relativeToChangelogFile="true"/> </databaseChangeLog> \ No newline at end of file
diff --git a/src/main/resources/migration/changelog-v5.6.0.xml b/src/main/resources/migration/changelog-v5.6.0.xml new file mode 100644 index 000000000..53f909fd0 --- /dev/null +++ b/src/main/resources/migration/changelog-v5.6.0.xml @@ -0,0 +1,25 @@ +<?xml version="1.0" encoding="UTF-8"?> +<databaseChangeLog +        xmlns="http://www.liquibase.org/xml/ns/dbchangelog" +        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" +        xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-latest.xsd" +        objectQuotingStrategy="QUOTE_ALL_OBJECTS"> +    <changeSet id="v5.6.0-1" author="nscuro"> +        <createTable tableName="BOM_UPLOAD"> +            <column name="TOKEN" type="UUID"> +                <constraints primaryKey="true" nullable="false"/> +            </column> +            <column name="UPLOADED_AT" type="TIMESTAMP WITH TIME ZONE"> +                <constraints nullable="false"/> +            </column> +            <column name="BOM" type="BYTEA"> +                <constraints nullable="false"/> +            </column> +        </createTable> +    </changeSet> +</databaseChangeLog> \ No newline at end of file
diff --git a/src/test/java/org/dependencytrack/event/BomUploadEventTest.java b/src/test/java/org/dependencytrack/event/BomUploadEventTest.java index 343f7f617..143b30000 100644 --- a/src/test/java/org/dependencytrack/event/BomUploadEventTest.java +++ b/src/test/java/org/dependencytrack/event/BomUploadEventTest.java @@ -18,21 +18,16 @@ */ package org.dependencytrack.event; -import alpine.common.util.SystemUtil; import org.dependencytrack.model.Project; import org.junit.Assert; import org.junit.Test; -import java.io.File; - public class BomUploadEventTest { @Test public void testFileConstructor() { Project project = new Project(); - File bitBucket = new File(SystemUtil.getBitBucket()); - BomUploadEvent event = new BomUploadEvent(project, bitBucket); + BomUploadEvent event = new BomUploadEvent(project); Assert.assertEquals(project, event.getProject()); - Assert.assertEquals(bitBucket, event.getFile()); } }
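The renamed test below now drives BomUploadProcessor directly with a Kafka ConsumerRecord instead of dispatching a BomUploadEvent on the internal event bus. In outline (aConsumerRecord comes from the AbstractProcessorTest base class; createEvent is a test helper that presumably stores the BOM bytes under the token and returns the matching protobuf event):

    // Inside BomUploadProcessorTest; qm, aConsumerRecord(..) and createEvent(..)
    // come from the test's base classes and helpers referenced in this diff.
    @Test
    public void informSketchTest() throws Exception {
        final Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false);
        final UUID token = UUID.randomUUID();

        // createEvent(..) presumably persists the BOM under the token and
        // builds the BomUploadedEvent message for it.
        final BomUploadedEvent event = createEvent(token, project, "bom-1.xml");

        qm.createWorkflowSteps(token);
        new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), event).build());
    }

diff --git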
a/src/test/java/org/dependencytrack/tasks/BomUploadProcessingTaskTest.java b/src/test/java/org/dependencytrack/event/kafka/processor/BomUploadProcessorTest.java similarity index 82% rename from src/test/java/org/dependencytrack/tasks/BomUploadProcessingTaskTest.java rename to src/test/java/org/dependencytrack/event/kafka/processor/BomUploadProcessorTest.java index 7bad328a5..d50206425 100644 --- a/src/test/java/org/dependencytrack/tasks/BomUploadProcessingTaskTest.java +++ b/src/test/java/org/dependencytrack/event/kafka/processor/BomUploadProcessorTest.java @@ -16,20 +16,16 @@ * SPDX-License-Identifier: Apache-2.0 * Copyright (c) OWASP Foundation. All Rights Reserved. */ -package org.dependencytrack.tasks; +package org.dependencytrack.event.kafka.processor; import alpine.model.IConfigProperty.PropertyType; -import com.github.packageurl.PackageURL; import org.apache.kafka.clients.producer.ProducerRecord; -import org.dependencytrack.PersistenceCapableTest; -import org.dependencytrack.event.BomUploadEvent; import org.dependencytrack.event.kafka.KafkaEventDispatcher; import org.dependencytrack.event.kafka.KafkaTopics; import org.dependencytrack.model.Bom; import org.dependencytrack.model.Classifier; import org.dependencytrack.model.Component; import org.dependencytrack.model.ComponentProperty; -import org.dependencytrack.model.ConfigPropertyConstants; import org.dependencytrack.model.FetchStatus; import org.dependencytrack.model.IntegrityMetaComponent; import org.dependencytrack.model.License; @@ -37,36 +33,28 @@ import org.dependencytrack.model.VulnerabilityScan; import org.dependencytrack.model.WorkflowStep; import org.dependencytrack.persistence.DefaultObjectGenerator; +import org.dependencytrack.persistence.jdbi.BomDao; +import org.dependencytrack.proto.event.v1alpha1.BomUploadedEvent; import org.dependencytrack.proto.notification.v1.BomProcessingFailedSubject; import org.dependencytrack.proto.notification.v1.Group; import org.dependencytrack.proto.notification.v1.Notification; import org.junit.Before; import org.junit.Test; -import java.io.File; import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; import java.time.Instant; import java.time.temporal.ChronoUnit; -import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.UUID; -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; -import static org.apache.commons.io.IOUtils.resourceToURL; +import static org.apache.commons.io.IOUtils.resourceToByteArray; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatNoException; import static org.assertj.core.api.Assertions.fail; +import static org.dependencytrack.model.ConfigPropertyConstants.ACCEPT_ARTIFACT_CYCLONEDX; import static org.dependencytrack.model.WorkflowStatus.CANCELLED; import static org.dependencytrack.model.WorkflowStatus.COMPLETED; import static org.dependencytrack.model.WorkflowStatus.FAILED; @@ -77,6 +65,7 @@ import static org.dependencytrack.model.WorkflowStep.METRICS_UPDATE; import static org.dependencytrack.model.WorkflowStep.POLICY_EVALUATION; import static org.dependencytrack.model.WorkflowStep.VULN_ANALYSIS; +import static 
org.dependencytrack.persistence.jdbi.JdbiFactory.useJdbiTransaction; import static org.dependencytrack.proto.notification.v1.Group.GROUP_BOM_PROCESSED; import static org.dependencytrack.proto.notification.v1.Group.GROUP_BOM_PROCESSING_FAILED; import static org.dependencytrack.proto.notification.v1.Level.LEVEL_ERROR; @@ -84,17 +73,21 @@ import static org.dependencytrack.util.KafkaTestUtil.deserializeKey; import static org.dependencytrack.util.KafkaTestUtil.deserializeValue; -public class BomUploadProcessingTaskTest extends PersistenceCapableTest { +public class BomUploadProcessorTest extends AbstractProcessorTest { @Before @Override public void before() throws Exception { super.before(); + // Enable processing of CycloneDX BOMs - qm.createConfigProperty(ConfigPropertyConstants.ACCEPT_ARTIFACT_CYCLONEDX.getGroupName(), - ConfigPropertyConstants.ACCEPT_ARTIFACT_CYCLONEDX.getPropertyName(), "true", - ConfigPropertyConstants.ACCEPT_ARTIFACT_CYCLONEDX.getPropertyType(), - ConfigPropertyConstants.ACCEPT_ARTIFACT_CYCLONEDX.getDescription()); + qm.createConfigProperty( + ACCEPT_ARTIFACT_CYCLONEDX.getGroupName(), + ACCEPT_ARTIFACT_CYCLONEDX.getPropertyName(), + "true", + ACCEPT_ARTIFACT_CYCLONEDX.getPropertyType(), + ACCEPT_ARTIFACT_CYCLONEDX.getDescription() + ); } @Test @@ -104,10 +97,11 @@ public void informTest() throws Exception { Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-1.xml")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); + final UUID token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-1.xml"); + qm.createWorkflowSteps(token); - new BomUploadProcessingTask().inform(bomUploadEvent); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertThat(kafkaMockProducer.history()).satisfiesExactly( event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_CREATED.name()), @@ -160,7 +154,7 @@ public void informTest() throws Exception { final List<Component> components = qm.getAllComponents(project); assertThat(components).hasSize(1); - final Component component = components.get(0); + final Component component = components.getFirst(); assertThat(component.getAuthor()).isEqualTo("Sometimes this field is long because it is composed of a list of authors......................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................"); assertThat(component.getPublisher()).isEqualTo("Example Incorporated"); assertThat(component.getSupplier().getName()).isEqualTo("Foo Incorporated"); @@ -206,7 +200,7 @@ public void informTest() throws Exception { } ); - assertThat(qm.getAllWorkflowStatesForAToken(bomUploadEvent.getChainIdentifier())).satisfiesExactlyInAnyOrder( + assertThat(qm.getAllWorkflowStatesForAToken(token)).satisfiesExactlyInAnyOrder( state -> {
assertThat(state.getStep()).isEqualTo(BOM_CONSUMPTION); assertThat(state.getStatus()).isEqualTo(COMPLETED); @@ -243,9 +237,9 @@ public void informTest() throws Exception { assertThat(state.getUpdatedAt()).isBefore(Date.from(Instant.now())); } ); - final VulnerabilityScan vulnerabilityScan = qm.getVulnerabilityScan(bomUploadEvent.getChainIdentifier().toString()); + final VulnerabilityScan vulnerabilityScan = qm.getVulnerabilityScan(token.toString()); assertThat(vulnerabilityScan).isNotNull(); - var workflowStatus = qm.getWorkflowStateByTokenAndStep(bomUploadEvent.getChainIdentifier(), WorkflowStep.VULN_ANALYSIS); + var workflowStatus = qm.getWorkflowStateByTokenAndStep(token, WorkflowStep.VULN_ANALYSIS); assertThat(workflowStatus.getStartedAt()).isNotNull(); } @@ -256,15 +250,15 @@ public void informTestWithComponentAlreadyExistsForIntegrityCheck() throws Excep Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-1.xml")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - PackageURL packageUrl = new PackageURL("pkg:maven/com.example/xmlutil@1.0.0?download_url=https%3A%2F%2Fon-premises.url%2Frepository%2Fnpm%2F%40babel%2Fhelper-split-export-declaration%2Fhelper-split-export-declaration%2Fhelper-split-export-declaration%2Fhelper-split-export-declaration%2Fhelper-split-export-declaration-7.18.6.tgz"); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-1.xml"); + qm.createWorkflowSteps(token); var integrityMeta = new IntegrityMetaComponent(); integrityMeta.setPurl("pkg:maven/com.example/xmlutil@1.0.0?download_url=https%3A%2F%2Fon-premises.url%2Frepository%2Fnpm%2F%40babel%2Fhelper-split-export-declaration%2Fhelper-split-export-declaration%2Fhelper-split-export-declaration%2Fhelper-split-export-declaration%2Fhelper-split-export-declaration-7.18.6.tgz"); integrityMeta.setStatus(FetchStatus.IN_PROGRESS); integrityMeta.setLastFetch(Date.from(Instant.now().minus(2, ChronoUnit.HOURS))); qm.createIntegrityMetaComponent(integrityMeta); - new BomUploadProcessingTask().inform(bomUploadEvent); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertThat(kafkaMockProducer.history()).satisfiesExactly( event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_CREATED.name()), @@ -283,7 +277,7 @@ public void informTestWithComponentAlreadyExistsForIntegrityCheck() throws Excep final List<Component> components = qm.getAllComponents(project); assertThat(components).hasSize(1); - final Component component = components.get(0); + final Component component = components.getFirst(); assertThat(component.getAuthor()).isEqualTo("Sometimes this field is long because it is composed of a list of
authors......................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................"); assertThat(component.getPublisher()).isEqualTo("Example Incorporated"); assertThat(component.getGroup()).isEqualTo("com.example"); @@ -297,7 +291,7 @@ public void informTestWithComponentAlreadyExistsForIntegrityCheck() throws Excep assertThat(component.getLicense()).isNull(); assertThat(component.getLicenseUrl()).isEqualTo("https://www.apache.org/licenses/LICENSE-2.0.txt"); - assertThat(qm.getAllWorkflowStatesForAToken(bomUploadEvent.getChainIdentifier())).satisfiesExactlyInAnyOrder( + assertThat(qm.getAllWorkflowStatesForAToken(token)).satisfiesExactlyInAnyOrder( state -> { assertThat(state.getStep()).isEqualTo(BOM_CONSUMPTION); assertThat(state.getStatus()).isEqualTo(COMPLETED); @@ -334,9 +328,9 @@ public void informTestWithComponentAlreadyExistsForIntegrityCheck() throws Excep assertThat(state.getUpdatedAt()).isBefore(Date.from(Instant.now())); } ); - final VulnerabilityScan vulnerabilityScan = qm.getVulnerabilityScan(bomUploadEvent.getChainIdentifier().toString()); + final VulnerabilityScan vulnerabilityScan = qm.getVulnerabilityScan(token.toString()); assertThat(vulnerabilityScan).isNotNull(); - var workflowStatus = qm.getWorkflowStateByTokenAndStep(bomUploadEvent.getChainIdentifier(), WorkflowStep.VULN_ANALYSIS); + var workflowStatus = qm.getWorkflowStateByTokenAndStep(token, WorkflowStep.VULN_ANALYSIS); assertThat(workflowStatus.getStartedAt()).isNotNull(); } @@ -344,9 +338,10 @@ public void informTestWithComponentAlreadyExistsForIntegrityCheck() throws Excep public void informWithEmptyBomTest() throws Exception { Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-empty.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-empty.json"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertThat(kafkaMockProducer.history()).satisfiesExactly( event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_CREATED.name()), @@ -358,7 +353,7 @@ public void informWithEmptyBomTest() throws Exception { assertThat(project.getClassifier()).isNull(); assertThat(project.getLastBomImport()).isNotNull(); - assertThat(qm.getAllWorkflowStatesForAToken(bomUploadEvent.getChainIdentifier())).satisfiesExactlyInAnyOrder( + assertThat(qm.getAllWorkflowStatesForAToken(token)).satisfiesExactlyInAnyOrder( state -> { assertThat(state.getStep()).isEqualTo(BOM_CONSUMPTION); assertThat(state.getStatus()).isEqualTo(COMPLETED); @@ -396,16 +391,17 @@ public void informWithEmptyBomTest() throws Exception { final List<Component> components =
qm.getAllComponents(project); assertThat(components).isEmpty(); - final VulnerabilityScan vulnerabilityScan = qm.getVulnerabilityScan(bomUploadEvent.getChainIdentifier().toString()); + final VulnerabilityScan vulnerabilityScan = qm.getVulnerabilityScan(token.toString()); assertThat(vulnerabilityScan).isNull(); } @Test public void informWithInvalidBomTest() throws Exception { Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-invalid.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-invalid.json"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertThat(kafkaMockProducer.history()).satisfiesExactly( event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_CREATED.name()), @@ -430,7 +426,7 @@ public void informWithInvalidBomTest() throws Exception { qm.getPersistenceManager().refresh(project); - assertThat(qm.getAllWorkflowStatesForAToken(bomUploadEvent.getChainIdentifier())).satisfiesExactlyInAnyOrder( + assertThat(qm.getAllWorkflowStatesForAToken(token)).satisfiesExactlyInAnyOrder( state -> { assertThat(state.getStep()).isEqualTo(BOM_CONSUMPTION); assertThat(state.getStatus()).isEqualTo(FAILED); @@ -480,11 +476,13 @@ public void testBomProcessingShouldFailIfProjectDoesNotExists() throws Exception project.setUuid(UUID.randomUUID()); project.setName("test-project"); project.setId(1); - var bomUploadEvent = new BomUploadEvent(project, createTempBomFile("bom-1.xml")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); - assertThat(qm.getAllWorkflowStatesForAToken(bomUploadEvent.getChainIdentifier())).satisfiesExactlyInAnyOrder( + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-1.xml"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); + + assertThat(qm.getAllWorkflowStatesForAToken(token)).satisfiesExactlyInAnyOrder( state -> { assertThat(state.getStep()).isEqualTo(BOM_CONSUMPTION); assertThat(state.getStatus()).isEqualTo(COMPLETED); @@ -525,9 +523,10 @@ public void testBomProcessingShouldFailIfProjectDoesNotExists() throws Exception public void informWithBloatedBomTest() throws Exception { final var project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-bloated.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-bloated.json"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertThat(kafkaMockProducer.history()) @@ -551,7 +550,7 @@ public void informWithBloatedBomTest() throws Exception { final List<Bom> boms = qm.getAllBoms(project); assertThat(boms).hasSize(1); - final Bom bom =
boms.get(0); + final Bom bom = boms.getFirst(); assertThat(bom.getBomFormat()).isEqualTo("CycloneDX"); assertThat(bom.getSpecVersion()).isEqualTo("1.3"); assertThat(bom.getBomVersion()).isEqualTo(1); @@ -591,7 +590,7 @@ public void informWithBloatedBomTest() throws Exception { assertThat(componentsWithoutDirectDependencies).isEqualTo(6378); // A VulnerabilityScan should've been initiated properly. - final VulnerabilityScan vulnerabilityScan = qm.getVulnerabilityScan(bomUploadEvent.getChainIdentifier().toString()); + final VulnerabilityScan vulnerabilityScan = qm.getVulnerabilityScan(token.toString()); assertThat(vulnerabilityScan).isNotNull(); assertThat(vulnerabilityScan.getTargetType()).isEqualTo(VulnerabilityScan.TargetType.PROJECT); assertThat(vulnerabilityScan.getTargetIdentifier()).isEqualTo(project.getUuid()); @@ -620,9 +619,10 @@ public void informIssue2519Test() throws Exception { // Upload the same BOM again a few times. // Ensure processing does not fail, and the number of components ingested doesn't change. for (int i = 0; i < 3; i++) { - var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-issue2519.xml")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-issue2519.xml"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); kafkaMockProducer.clear(); @@ -636,9 +636,10 @@ public void informIssue2859Test() { final Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); assertThatNoException().isThrownBy(() -> { - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-issue2859.xml")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-issue2859.xml"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); }); } @@ -647,9 +648,10 @@ public void informIssue1905Test() throws Exception { final var project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); for (int i = 0; i < 3; i++) { - var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-issue1905.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-issue1905.json"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); kafkaMockProducer.clear(); @@ -690,17 +692,19 @@ public void informIssue3309Test() throws Exception { }); }; - var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-issue3309.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var tokenA = UUID.randomUUID(); + BomUploadedEvent bomUploadedEvent = 
createEvent(tokenA, project, "bom-issue3309.json"); + qm.createWorkflowSteps(tokenA); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertProjectAuthors.run(); kafkaMockProducer.clear(); - bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-issue3309.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var tokenB = UUID.randomUUID(); + bomUploadedEvent = createEvent(tokenB, project, "bom-issue3309.json"); + qm.createWorkflowSteps(tokenB); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertProjectAuthors.run(); } @@ -709,9 +713,10 @@ public void informWithComponentsUnderMetadataBomTest() throws Exception { final var project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-metadata-components.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-metadata-components.json"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertThat(kafkaMockProducer.history()) .anySatisfy(record -> { @@ -734,7 +739,7 @@ public void informWithComponentsUnderMetadataBomTest() throws Exception { final List<Bom> boms = qm.getAllBoms(project); assertThat(boms).hasSize(1); - final Bom bom = boms.get(0); + final Bom bom = boms.getFirst(); assertThat(bom.getBomFormat()).isEqualTo("CycloneDX"); assertThat(bom.getSpecVersion()).isEqualTo("1.4"); assertThat(bom.getBomVersion()).isEqualTo(1); @@ -758,10 +763,12 @@ public void informWithComponentsUnderMetadataBomTest() throws Exception { public void informWithDelayedBomProcessedNotification() throws Exception { Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-1.xml")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-1.xml"); + qm.createWorkflowSteps(token); - new BomUploadProcessingTask(new KafkaEventDispatcher(), /* delayBomProcessedNotification */ true).inform(bomUploadEvent); + new BomUploadProcessor(new KafkaEventDispatcher(), /* delayBomProcessedNotification */ true) + .process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertThat(kafkaMockProducer.history()).satisfiesExactly( event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_CREATED.name()), event -> { @@ -779,10 +786,12 @@ public void informWithDelayedBomProcessedNotification() throws Exception { public void informWithDelayedBomProcessedNotificationAndNoComponents() throws Exception { Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-empty.json")); -
qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-empty.json"); + qm.createWorkflowSteps(token); - new BomUploadProcessingTask(new KafkaEventDispatcher(), /* delayBomProcessedNotification */ true).inform(bomUploadEvent); + new BomUploadProcessor(new KafkaEventDispatcher(), /* delayBomProcessedNotification */ true) + .process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertThat(kafkaMockProducer.history()).satisfiesExactly( event -> assertThat(event.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_CREATED.name()), @@ -803,9 +812,10 @@ public void informWithDelayedBomProcessedNotificationAndNoComponents() throws Ex public void informWithComponentWithoutPurl() throws Exception { Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-no-purl.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-no-purl.json"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertThat(kafkaMockProducer.history()).satisfiesExactly( @@ -828,9 +838,10 @@ public void informWithCustomLicenseResolutionTest() throws Exception { final Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-custom-license.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-custom-license.json"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertThat(kafkaMockProducer.history()).satisfiesExactly( @@ -866,9 +877,10 @@ public void informWithCustomLicenseResolutionTest() throws Exception { public void informWithBomContainingLicenseExpressionTest() throws Exception { final Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-license-expression.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-license-expression.json"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertThat(kafkaMockProducer.history()).satisfiesExactly( @@ -894,9 +906,10 @@ public void informWithBomContainingLicenseExpressionWithSingleIdTest() throws Ex final Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new 
BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-license-expression-single-license.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-license-expression-single-license.json"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertThat(kafkaMockProducer.history()).satisfiesExactly( @@ -918,9 +931,10 @@ public void informWithBomContainingLicenseExpressionWithSingleIdTest() throws Ex public void informWithBomContainingInvalidLicenseExpressionTest() throws Exception { final Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-invalid-license-expression.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-invalid-license-expression.json"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertThat(kafkaMockProducer.history()).satisfiesExactly( @@ -970,9 +984,10 @@ public void informIssue3433Test() throws Exception { } """.getBytes(StandardCharsets.UTF_8); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile(bomBytes)); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, bomBytes); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertThat(qm.getAllComponents(project)).satisfiesExactly(component -> { @@ -1022,9 +1037,10 @@ public void informUpdateExistingLicenseTest() throws Exception { } """.getBytes(StandardCharsets.UTF_8); - var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile(existingBomBytes)); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var tokenA = UUID.randomUUID(); + BomUploadedEvent bomUploadedEvent = createEvent(tokenA, project, existingBomBytes); + qm.createWorkflowSteps(tokenA); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertThat(qm.getAllComponents(project)).satisfiesExactly(component -> { @@ -1058,9 +1074,10 @@ public void informUpdateExistingLicenseTest() throws Exception { } """.getBytes(StandardCharsets.UTF_8); - bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile(updatedBomBytes)); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var tokenB = UUID.randomUUID(); + bomUploadedEvent = createEvent(tokenB, project, updatedBomBytes); + qm.createWorkflowSteps(tokenB); + new
BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); qm.getPersistenceManager().evictAll(); @@ -1107,9 +1124,10 @@ public void informDeleteExistingLicenseTest() throws Exception { } """.getBytes(StandardCharsets.UTF_8); - var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile(existingBomBytes)); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var tokenA = UUID.randomUUID(); + BomUploadedEvent bomUploadedEvent = createEvent(tokenA, project, existingBomBytes); + qm.createWorkflowSteps(tokenA); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertThat(qm.getAllComponents(project)).satisfiesExactly(component -> { @@ -1137,9 +1155,10 @@ public void informDeleteExistingLicenseTest() throws Exception { } """.getBytes(StandardCharsets.UTF_8); - bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile(updatedBomBytes)); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var tokenB = UUID.randomUUID(); + bomUploadedEvent = createEvent(tokenB, project, updatedBomBytes); + qm.createWorkflowSteps(tokenB); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); qm.getPersistenceManager().evictAll(); @@ -1155,9 +1174,10 @@ public void informDeleteExistingLicenseTest() throws Exception { public void informWithBomContainingServiceTest() throws Exception { final Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-service.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-service.json"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); assertThat(kafkaMockProducer.history()).satisfiesExactly( @@ -1175,9 +1195,10 @@ public void informWithBomContainingServiceTest() throws Exception { public void informWithBomContainingMetadataToolsDeprecatedTest() throws Exception { final Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-metadata-tool-deprecated.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-metadata-tool-deprecated.json"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); qm.getPersistenceManager().refresh(project); @@ -1198,9 +1219,10 @@ public void informWithBomContainingMetadataToolsDeprecatedTest() throws Exceptio public void informWithBomContainingMetadataToolsTest() throws Exception { final Project project = 
qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-metadata-tool.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-metadata-tool.json"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); qm.getPersistenceManager().refresh(project); @@ -1231,53 +1253,15 @@ public void informWithBomContainingMetadataToolsTest() throws Exception { @Test public void informWithBomContainingTimestampTest() throws Exception { final Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-metadata-timestamp.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-metadata-timestamp.json"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); var boms = qm.getAllBoms(project); - assertThat(boms.get(0).getGenerated()).isEqualTo("2021-02-09T20:40:32Z"); - } - - @Test - public void informWithLockingTest() throws Exception { - final Project project = qm.createProject("Acme Example", null, "1.0", null, null, null, true, false); - final Project detachedProject = qm.detach(Project.class, project.getId()); - - final ExecutorService executor = Executors.newFixedThreadPool(5); - final var countDownLatch = new CountDownLatch(1); - - final var events = new ArrayList(25); - for (int i = 0; i < 25; i++) { - final var bomUploadEvent = new BomUploadEvent(detachedProject, createTempBomFile("bom-1.xml")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - events.add(bomUploadEvent); - } - - final var exceptions = new ArrayBlockingQueue(25); - for (final BomUploadEvent bomUploadEvent : events) { - executor.submit(() -> { - try { - countDownLatch.await(); - } catch (InterruptedException e) { - exceptions.offer(e); - return; - } - - try { - new BomUploadProcessingTask().inform(bomUploadEvent); - } catch (Exception e) { - exceptions.offer(e); - } - }); - } - - countDownLatch.countDown(); - executor.shutdown(); - assertThat(executor.awaitTermination(15, TimeUnit.SECONDS)).isTrue(); - - assertThat(exceptions).isEmpty(); + assertThat(boms.getFirst().getGenerated()).isEqualTo("2021-02-09T20:40:32Z"); } @Test @@ -1300,7 +1284,8 @@ public void informWithExistingComponentPropertiesAndBomWithoutComponentPropertie componentProperty.setPropertyType(PropertyType.STRING); qm.persist(componentProperty); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile(""" + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, """ { "$schema": "http://cyclonedx.org/schema/bom-1.4.schema.json", "bomFormat": "CycloneDX", @@ -1314,9 +1299,9 @@ public void informWithExistingComponentPropertiesAndBomWithoutComponentPropertie } ] } - """.getBytes())); - 
qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + """.getBytes()); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); qm.getPersistenceManager().refresh(component); @@ -1342,9 +1327,10 @@ public void informWithExistingComponentPropertiesAndBomWithComponentProperties() componentProperty.setPropertyType(PropertyType.STRING); qm.persist(componentProperty); - final var bomUploadEvent = new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-component-property.json")); - qm.createWorkflowSteps(bomUploadEvent.getChainIdentifier()); - new BomUploadProcessingTask().inform(bomUploadEvent); + final var token = UUID.randomUUID(); + final BomUploadedEvent bomUploadedEvent = createEvent(token, project, "bom-component-property.json"); + qm.createWorkflowSteps(token); + new BomUploadProcessor().process(aConsumerRecord(project.getUuid(), bomUploadedEvent).build()); assertBomProcessedNotification(); qm.getPersistenceManager().refresh(component); @@ -1378,20 +1364,25 @@ private void assertBomProcessedNotification() throws Exception { } } - private static File createTempBomFile(final String testFileName) throws Exception { - // The task will delete the input file after processing it, - // so create a temporary copy to not impact other tests. - final Path bomFilePath = Files.createTempFile(null, null); - Files.copy(Paths.get(resourceToURL("/unit/" + testFileName).toURI()), bomFilePath, StandardCopyOption.REPLACE_EXISTING); - return bomFilePath.toFile(); + private static BomUploadedEvent createEvent(final UUID token, final Project project, final String bomFileName) throws Exception { + final byte[] bomBytes = resourceToByteArray("/unit/" + bomFileName); + return createEvent(token, project, bomBytes); + } + + private static BomUploadedEvent createEvent(final UUID token, final Project project, final byte[] bomBytes) { + createBomUpload(token, bomBytes); + + return BomUploadedEvent.newBuilder() + .setToken(token.toString()) + .setProject(BomUploadedEvent.Project.newBuilder() + .setUuid(project.getUuid().toString()) + .setName(project.getName()) + .setVersion(project.getVersion() != null ? project.getVersion() : "")) + .build(); } - private static File createTempBomFile(final byte[] bomBytes) throws Exception { - // The task will delete the input file after processing it, - // so create a temporary copy to not impact other tests. 
- final Path bomFilePath = Files.createTempFile(null, null); - Files.write(bomFilePath, bomBytes); - return bomFilePath.toFile(); + // Stages the BOM bytes in the database up front; the processor is expected to fetch them through the configured storage provider. + private static void createBomUpload(final UUID token, final byte[] bomBytes) { + useJdbiTransaction(handle -> handle.attach(BomDao.class).createUpload(token, bomBytes)); + } } diff --git a/src/test/java/org/dependencytrack/policy/cel/CelPolicyEngineTest.java b/src/test/java/org/dependencytrack/policy/cel/CelPolicyEngineTest.java index 61dea456f..2dbc31b4b 100644 --- a/src/test/java/org/dependencytrack/policy/cel/CelPolicyEngineTest.java +++ b/src/test/java/org/dependencytrack/policy/cel/CelPolicyEngineTest.java @@ -22,7 +22,6 @@ import com.github.packageurl.MalformedPackageURLException; import com.github.packageurl.PackageURL; import org.dependencytrack.PersistenceCapableTest; -import org.dependencytrack.event.BomUploadEvent; import org.dependencytrack.model.AnalyzerIdentity; import org.dependencytrack.model.Bom; import org.dependencytrack.model.Classifier; @@ -47,19 +46,11 @@ import org.dependencytrack.model.ViolationAnalysisState; import org.dependencytrack.model.Vulnerability; import org.dependencytrack.model.VulnerabilityAlias; -import org.dependencytrack.persistence.DefaultObjectGenerator; -import org.dependencytrack.tasks.BomUploadProcessingTask; import org.junit.Assert; import org.junit.Before; -import org.junit.Ignore; import org.junit.Test; -import java.io.File; import java.math.BigDecimal; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; import java.time.Instant; import java.util.ArrayList; import java.util.Collections; @@ -67,7 +58,6 @@ import java.util.List; import java.util.UUID; -import static org.apache.commons.io.IOUtils.resourceToURL; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatNoException; @@ -1940,39 +1930,4 @@ public void testEvaluateProjectWithNoLongerApplicableViolationWithAnalysis() { assertThat(violation.getPolicyCondition().getPolicy().getName()).isEqualTo("Policy A")); } - @Test - @Ignore // Un-ignore for manual profiling purposes. - public void testWithBloatedBom() throws Exception { - // Import all default objects (includes licenses and license groups). - new DefaultObjectGenerator().contextInitialized(null); - - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.2.3"); - qm.persist(project); - - // Create a policy that will be violated by the vast majority (>8000) components. - final var policy = qm.createPolicy("policy", Policy.Operator.ANY, Policy.ViolationState.FAIL); - final PolicyCondition policyConditionA = qm.createPolicyCondition(policy, - PolicyCondition.Subject.EXPRESSION, PolicyCondition.Operator.MATCHES, """ - component.resolved_license.groups.exists(lg, lg.name == "Permissive") - """); - policyConditionA.setViolationType(PolicyViolation.Type.OPERATIONAL); - qm.persist(policyConditionA); - - // Import the bloated BOM. - new BomUploadProcessingTask().inform(new BomUploadEvent(qm.detach(Project.class, project.getId()), createTempBomFile("bom-bloated.json"))); - - // Evaluate policies on the project. - new CelPolicyEngine().evaluateProject(project.getUuid()); - } - - private static File createTempBomFile(final String testFileName) throws Exception { - // The task will delete the input file after processing it, - // so create a temporary copy to not impact other tests.
- final Path bomFilePath = Files.createTempFile(null, null); - Files.copy(Paths.get(resourceToURL("/unit/" + testFileName).toURI()), bomFilePath, StandardCopyOption.REPLACE_EXISTING); - return bomFilePath.toFile(); - } - } \ No newline at end of file diff --git a/src/test/java/org/dependencytrack/resources/v1/ConfigPropertyResourceTest.java b/src/test/java/org/dependencytrack/resources/v1/ConfigPropertyResourceTest.java index 95d620457..8b92517a5 100644 --- a/src/test/java/org/dependencytrack/resources/v1/ConfigPropertyResourceTest.java +++ b/src/test/java/org/dependencytrack/resources/v1/ConfigPropertyResourceTest.java @@ -37,12 +37,12 @@ import javax.ws.rs.core.Response; import java.util.Arrays; +import static net.javacrumbs.jsonunit.assertj.JsonAssertions.assertThatJson; import static org.assertj.core.api.Assertions.assertThat; - import static org.dependencytrack.model.ConfigPropertyConstants.CUSTOM_RISK_SCORE_CRITICAL; import static org.dependencytrack.model.ConfigPropertyConstants.CUSTOM_RISK_SCORE_HIGH; -import static org.dependencytrack.model.ConfigPropertyConstants.CUSTOM_RISK_SCORE_MEDIUM; import static org.dependencytrack.model.ConfigPropertyConstants.CUSTOM_RISK_SCORE_LOW; +import static org.dependencytrack.model.ConfigPropertyConstants.CUSTOM_RISK_SCORE_MEDIUM; import static org.dependencytrack.model.ConfigPropertyConstants.CUSTOM_RISK_SCORE_UNASSIGNED; public class ConfigPropertyResourceTest extends ResourceTest { @@ -240,6 +240,96 @@ public void updateConfigPropertyReadOnlyTest() { assertThat(getPlainTextBody(response)).isEqualTo("The property internal.cluster.id can not be modified"); } + @Test + public void updateConfigPropertyBomStorageProviderTest() { + qm.createConfigProperty( + ConfigPropertyConstants.BOM_UPLOAD_STORAGE_PROVIDER.getGroupName(), + ConfigPropertyConstants.BOM_UPLOAD_STORAGE_PROVIDER.getPropertyName(), + ConfigPropertyConstants.BOM_UPLOAD_STORAGE_PROVIDER.getDefaultPropertyValue(), + ConfigPropertyConstants.BOM_UPLOAD_STORAGE_PROVIDER.getPropertyType(), + ConfigPropertyConstants.BOM_UPLOAD_STORAGE_PROVIDER.getDescription() + ); + + Response response = jersey.target(V1_CONFIG_PROPERTY).request() + .header(X_API_KEY, apiKey) + .post(Entity.entity(""" + { + "groupName": "artifact", + "propertyName": "bom.upload.storage.provider", + "propertyValue": "foobar" + } + """, MediaType.APPLICATION_JSON)); + + assertThat(response.getStatus()).isEqualTo(400); + assertThat(getPlainTextBody(response)).isEqualTo("foobar is not a known storage provider"); + + response = jersey.target(V1_CONFIG_PROPERTY).request() + .header(X_API_KEY, apiKey) + .post(Entity.entity(""" + { + "groupName": "artifact", + "propertyName": "bom.upload.storage.provider", + "propertyValue": "org.dependencytrack.storage.LocalBomUploadStorageProvider" + } + """, MediaType.APPLICATION_JSON)); + + assertThat(response.getStatus()).isEqualTo(200); + assertThatJson(getPlainTextBody(response)).isEqualTo(""" + { + "groupName": "artifact", + "propertyName": "bom.upload.storage.provider", + "propertyValue": "org.dependencytrack.storage.LocalBomUploadStorageProvider", + "propertyType": "STRING", + "description": "" + } + """); + } + + @Test + public void updateConfigPropertyBomStorageCompressionLevelTest() { + qm.createConfigProperty( + ConfigPropertyConstants.BOM_UPLOAD_STORAGE_COMPRESSION_LEVEL.getGroupName(), + ConfigPropertyConstants.BOM_UPLOAD_STORAGE_COMPRESSION_LEVEL.getPropertyName(), + ConfigPropertyConstants.BOM_UPLOAD_STORAGE_COMPRESSION_LEVEL.getDefaultPropertyValue(), + 
ConfigPropertyConstants.BOM_UPLOAD_STORAGE_COMPRESSION_LEVEL.getPropertyType(), + ConfigPropertyConstants.BOM_UPLOAD_STORAGE_COMPRESSION_LEVEL.getDescription() + ); + + Response response = jersey.target(V1_CONFIG_PROPERTY).request() + .header(X_API_KEY, apiKey) + .post(Entity.entity(""" + { + "groupName": "artifact", + "propertyName": "bom.upload.storage.compression.level", + "propertyValue": "999" + } + """, MediaType.APPLICATION_JSON)); + + assertThat(response.getStatus()).isEqualTo(400); + assertThat(getPlainTextBody(response)).isEqualTo("Compression level 999 is out of the valid [-131072..22] range"); + + response = jersey.target(V1_CONFIG_PROPERTY).request() + .header(X_API_KEY, apiKey) + .post(Entity.entity(""" + { + "groupName": "artifact", + "propertyName": "bom.upload.storage.compression.level", + "propertyValue": "11" + } + """, MediaType.APPLICATION_JSON)); + + assertThat(response.getStatus()).isEqualTo(200); + assertThatJson(getPlainTextBody(response)).isEqualTo(""" + { + "groupName": "artifact", + "propertyName": "bom.upload.storage.compression.level", + "propertyValue": "11", + "propertyType": "INTEGER", + "description": "" + } + """); + } + @Test public void testRiskScoreInvalid(){ qm.createConfigProperty(