From 528c5e9a71188f69552b9f8387fd2c9a292723b5 Mon Sep 17 00:00:00 2001 From: nscuro Date: Tue, 26 Mar 2024 15:51:37 +0100 Subject: [PATCH 01/24] Migrate `VulnerabilityScanResultProcessor` from Kafka Streams to Parallel Consumer Signed-off-by: nscuro --- .../org/dependencytrack/common/MdcKeys.java | 2 + .../event/kafka/KafkaEventConverter.java | 4 +- .../event/kafka/KafkaTopics.java | 2 + ...essedVulnerabilityScanResultProcessor.java | 182 +++ .../kafka/processor/ProcessorInitializer.java | 4 + .../VulnerabilityScanResultProcessor.java | 1226 +++++++++++++++++ .../dependencytrack/model/WorkflowState.java | 10 + .../jdbi/VulnerabilityScanDao.java | 61 + .../persistence/jdbi/WorkflowDao.java | 81 ++ src/main/resources/application.properties | 23 + src/main/webapp/WEB-INF/web.xml | 4 +- 11 files changed, 1595 insertions(+), 4 deletions(-) create mode 100644 src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java create mode 100644 src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java create mode 100644 src/main/java/org/dependencytrack/persistence/jdbi/VulnerabilityScanDao.java create mode 100644 src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java diff --git a/src/main/java/org/dependencytrack/common/MdcKeys.java b/src/main/java/org/dependencytrack/common/MdcKeys.java index 6ef2f5730..b5c531d4c 100644 --- a/src/main/java/org/dependencytrack/common/MdcKeys.java +++ b/src/main/java/org/dependencytrack/common/MdcKeys.java @@ -23,10 +23,12 @@ */ public final class MdcKeys { + public static final String MDC_COMPONENT_UUID = "componentUuid"; public static final String MDC_KAFKA_RECORD_TOPIC = "kafkaRecordTopic"; public static final String MDC_KAFKA_RECORD_PARTITION = "kafkaRecordPartition"; public static final String MDC_KAFKA_RECORD_OFFSET = "kafkaRecordOffset"; public static final String MDC_KAFKA_RECORD_KEY = "kafkaRecordKey"; + public static final String MDC_SCAN_TOKEN = "scanToken"; private MdcKeys() { } diff --git a/src/main/java/org/dependencytrack/event/kafka/KafkaEventConverter.java b/src/main/java/org/dependencytrack/event/kafka/KafkaEventConverter.java index ab9316b8f..445bf1ec3 100644 --- a/src/main/java/org/dependencytrack/event/kafka/KafkaEventConverter.java +++ b/src/main/java/org/dependencytrack/event/kafka/KafkaEventConverter.java @@ -57,7 +57,7 @@ * Utility class to convert {@link alpine.event.framework.Event}s and {@link alpine.notification.Notification}s * to {@link KafkaEvent}s. 
*/ -final class KafkaEventConverter { +public final class KafkaEventConverter { private KafkaEventConverter() { } @@ -78,7 +78,7 @@ private KafkaEventConverter() { return convert(protoNotification); } - static KafkaEvent convert(final Notification notification) { + public static KafkaEvent convert(final Notification notification) { final Topic topic = extractDestinationTopic(notification); final String recordKey; diff --git a/src/main/java/org/dependencytrack/event/kafka/KafkaTopics.java b/src/main/java/org/dependencytrack/event/kafka/KafkaTopics.java index 3f738f143..f31ccd26b 100644 --- a/src/main/java/org/dependencytrack/event/kafka/KafkaTopics.java +++ b/src/main/java/org/dependencytrack/event/kafka/KafkaTopics.java @@ -52,6 +52,7 @@ public final class KafkaTopics { public static final Topic REPO_META_ANALYSIS_RESULT; public static final Topic VULN_ANALYSIS_COMMAND; public static final Topic VULN_ANALYSIS_RESULT; + public static final Topic VULN_ANALYSIS_RESULT_PROCESSED; public static final Topic NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE; private static final Serde NOTIFICATION_SERDE = new KafkaProtobufSerde<>(Notification.parser()); @@ -76,6 +77,7 @@ public final class KafkaTopics { REPO_META_ANALYSIS_RESULT = new Topic<>("dtrack.repo-meta-analysis.result", Serdes.String(), new KafkaProtobufSerde<>(AnalysisResult.parser())); VULN_ANALYSIS_COMMAND = new Topic<>("dtrack.vuln-analysis.component", new KafkaProtobufSerde<>(ScanKey.parser()), new KafkaProtobufSerde<>(ScanCommand.parser())); VULN_ANALYSIS_RESULT = new Topic<>("dtrack.vuln-analysis.result", new KafkaProtobufSerde<>(ScanKey.parser()), new KafkaProtobufSerde<>(ScanResult.parser())); + VULN_ANALYSIS_RESULT_PROCESSED = new Topic<>("dtrack.vuln-analysis.result.processed", Serdes.String(), new KafkaProtobufSerde<>(ScanResult.parser())); NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE = new Topic<>("dtrack.notification.project-vuln-analysis-complete", Serdes.String(), NOTIFICATION_SERDE); } diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java new file mode 100644 index 000000000..2874be511 --- /dev/null +++ b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java @@ -0,0 +1,182 @@ +/* + * This file is part of Dependency-Track. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) OWASP Foundation. All Rights Reserved. 
+ */
+package org.dependencytrack.event.kafka.processor;
+
+import alpine.common.logging.Logger;
+import alpine.event.framework.Event;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.dependencytrack.event.ComponentMetricsUpdateEvent;
+import org.dependencytrack.event.ComponentPolicyEvaluationEvent;
+import org.dependencytrack.event.ProjectMetricsUpdateEvent;
+import org.dependencytrack.event.ProjectPolicyEvaluationEvent;
+import org.dependencytrack.event.kafka.processor.api.BatchProcessor;
+import org.dependencytrack.event.kafka.processor.exception.ProcessingException;
+import org.dependencytrack.model.VulnerabilityScan;
+import org.dependencytrack.model.WorkflowState;
+import org.dependencytrack.model.WorkflowStatus;
+import org.dependencytrack.model.WorkflowStep;
+import org.dependencytrack.persistence.QueryManager;
+import org.dependencytrack.persistence.jdbi.VulnerabilityScanDao;
+import org.dependencytrack.persistence.jdbi.WorkflowDao;
+import org.dependencytrack.proto.vulnanalysis.v1.ScanResult;
+import org.dependencytrack.proto.vulnanalysis.v1.ScanStatus;
+import org.jdbi.v3.core.Handle;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+import static java.lang.Math.toIntExact;
+import static org.dependencytrack.persistence.jdbi.JdbiFactory.jdbi;
+
+public class ProcessedVulnerabilityScanResultProcessor implements BatchProcessor<String, ScanResult> {
+
+    static final String PROCESSOR_NAME = "processed.vuln.scan.result";
+
+    private static final Logger LOGGER = Logger.getLogger(ProcessedVulnerabilityScanResultProcessor.class);
+
+    @Override
+    public void process(final List<ConsumerRecord<String, ScanResult>> records) throws ProcessingException {
+        LOGGER.debug("Processing %d records".formatted(records.size()));
+
+        final List<VulnerabilityScan> completedVulnScans = processScanResults(records);
+        LOGGER.debug("Detected completion of %d vulnerability scans".formatted(completedVulnScans.size()));
+
+        for (final VulnerabilityScan completedVulnScan : completedVulnScans) {
+            switch (completedVulnScan.getTargetType()) {
+                case COMPONENT -> {
+                    LOGGER.debug("Triggering policy evaluation for component %s".formatted(completedVulnScan.getTargetIdentifier()));
+                    final var metricsUpdateEvent = new ComponentMetricsUpdateEvent(completedVulnScan.getTargetIdentifier());
+                    final var policyEvalEvent = new ComponentPolicyEvaluationEvent(completedVulnScan.getTargetIdentifier());
+                    policyEvalEvent.onFailure(metricsUpdateEvent);
+                    policyEvalEvent.onSuccess(metricsUpdateEvent);
+                    Event.dispatch(policyEvalEvent);
+                }
+                case PROJECT -> {
+                    LOGGER.debug("Triggering policy evaluation for project %s".formatted(completedVulnScan.getTargetIdentifier()));
+                    final var metricsUpdateEvent = new ProjectMetricsUpdateEvent(completedVulnScan.getTargetIdentifier());
+                    final var policyEvalEvent = new ProjectPolicyEvaluationEvent(completedVulnScan.getTargetIdentifier());
+                    policyEvalEvent.onFailure(metricsUpdateEvent);
+                    policyEvalEvent.onSuccess(metricsUpdateEvent);
+                    Event.dispatch(policyEvalEvent);
+                }
+            }
+        }
+    }
+
+    private static List<VulnerabilityScan> processScanResults(final List<ConsumerRecord<String, ScanResult>> records) {
+        try (final var qm = new QueryManager()) {
+            return jdbi(qm).inTransaction(jdbiHandle -> {
+                final List<VulnerabilityScan> completedVulnScans = recordScanResults(jdbiHandle, records);
+                updateWorkflowSteps(jdbiHandle, completedVulnScans);
+
+                return completedVulnScans;
+            });
+        }
+    }
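+    // Note: the callback chaining in process() builds on Alpine's chainable event API,
+    // used above via onSuccess/onFailure. A minimal sketch of the pattern, where
+    // "targetUuid" is a placeholder:
+    //
+    //   final var metricsUpdateEvent = new ProjectMetricsUpdateEvent(targetUuid);
+    //   final var policyEvalEvent = new ProjectPolicyEvaluationEvent(targetUuid);
+    //   policyEvalEvent.onSuccess(metricsUpdateEvent);
+    //   policyEvalEvent.onFailure(metricsUpdateEvent);
+    //   Event.dispatch(policyEvalEvent); // the metrics update follows either way
+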
scans".formatted(records.size(), aggregatesByToken.size())); + + final int numAggregates = aggregatesByToken.size(); + final var tokens = new ArrayList(numAggregates); + final var resultsTotal = new ArrayList(numAggregates); + final var scannerResultsTotal = new ArrayList(numAggregates); + final var scannerResultsFailed = new ArrayList(numAggregates); + + for (final Map.Entry entry : aggregatesByToken.entrySet()) { + tokens.add(entry.getKey()); + resultsTotal.add(entry.getValue().resultsTotal); + scannerResultsTotal.add(entry.getValue().scannerResultsTotal); + scannerResultsFailed.add(entry.getValue().scannerResultsFailed); + } + + final var vulnScanDao = jdbiHandle.attach(VulnerabilityScanDao.class); + final List updatedVulnScans = + vulnScanDao.updateAll(tokens, resultsTotal, scannerResultsTotal, scannerResultsFailed); + + return updatedVulnScans.stream() + // Unfortunately we can't perform this filtering in SQL, as RETURNING + // does not allow a WHERE clause. Tried using a CTE as workaround: + // WITH "CTE" AS (UPDATE ... RETURNING ...) SELECT * FROM "CTE" + // but that didn't return any results at all. + // The good news is that the query typically modifies only a handful + // of scans, so we're wasting not too many resources here. + .filter(vulnScan -> vulnScan.getStatus() == VulnerabilityScan.Status.COMPLETED || vulnScan.getStatus() == VulnerabilityScan.Status.FAILED) + .toList(); + } + + private static void updateWorkflowSteps(final Handle jdbiHandle, final List completedVulnScans) { + final int numScans = completedVulnScans.size(); + final var tokens = new ArrayList(numScans); + final var statuses = new ArrayList(numScans); + final var failureReasons = new ArrayList(numScans); + + for (final VulnerabilityScan completedVulnScan : completedVulnScans) { + tokens.add(completedVulnScan.getToken()); + statuses.add(switch (completedVulnScan.getStatus()) { + case COMPLETED -> WorkflowStatus.COMPLETED; + case FAILED -> WorkflowStatus.FAILED; + default -> throw new IllegalStateException(""); + }); + failureReasons.add(completedVulnScan.getFailureReason()); + } + + final var workflowDao = jdbiHandle.attach(WorkflowDao.class); + final List updatedWorkflowSteps = + workflowDao.updateAllSteps(WorkflowStep.VULN_ANALYSIS, tokens, statuses, failureReasons); + + final List failedStepTokens = updatedWorkflowSteps.stream() + .filter(step -> step.getStatus() == WorkflowStatus.FAILED) + .map(WorkflowState::getToken) + .map(UUID::toString) + .toList(); + if (!failedStepTokens.isEmpty()) { + LOGGER.debug("Cancelling children of %d failed workflow steps".formatted(failedStepTokens.size())); + workflowDao.cancelAllChildren(WorkflowStep.VULN_ANALYSIS, failedStepTokens); + } + } + + private static class Aggregate { + private int resultsTotal; + private int scannerResultsTotal; + private int scannerResultsFailed; + } + + private static Map aggregateScanResults(final List> records) { + final var aggregatesByToken = new HashMap(); + + for (final ConsumerRecord record : records) { + aggregatesByToken.compute(record.key(), (token, existingAggregate) -> { + final Aggregate aggregate = existingAggregate != null ? 
existingAggregate : new Aggregate(); + aggregate.resultsTotal++; + aggregate.scannerResultsTotal += record.value().getScannerResultsCount(); + aggregate.scannerResultsFailed += toIntExact(record.value().getScannerResultsList().stream() + .filter(scannerResult -> scannerResult.getStatus() == ScanStatus.SCAN_STATUS_FAILED) + .count()); + return aggregate; + }); + } + + return aggregatesByToken; + } + +} diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessorInitializer.java b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessorInitializer.java index e4e19344f..0c1ea357f 100644 --- a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessorInitializer.java +++ b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessorInitializer.java @@ -39,6 +39,10 @@ public void contextInitialized(final ServletContextEvent event) { KafkaTopics.NEW_VULNERABILITY, new VulnerabilityMirrorProcessor()); PROCESSOR_MANAGER.registerProcessor(RepositoryMetaResultProcessor.PROCESSOR_NAME, KafkaTopics.REPO_META_ANALYSIS_RESULT, new RepositoryMetaResultProcessor()); + PROCESSOR_MANAGER.registerProcessor(VulnerabilityScanResultProcessor.PROCESSOR_NAME, + KafkaTopics.VULN_ANALYSIS_RESULT, new VulnerabilityScanResultProcessor()); + PROCESSOR_MANAGER.registerBatchProcessor(ProcessedVulnerabilityScanResultProcessor.PROCESSOR_NAME, + KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, new ProcessedVulnerabilityScanResultProcessor()); PROCESSOR_MANAGER.startAll(); } diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java new file mode 100644 index 000000000..8ad802e97 --- /dev/null +++ b/src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java @@ -0,0 +1,1226 @@ +/* + * This file is part of Dependency-Track. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) OWASP Foundation. All Rights Reserved. 
+ */ +package org.dependencytrack.event.kafka.processor; + +import alpine.Config; +import alpine.common.logging.Logger; +import alpine.notification.Notification; +import alpine.notification.NotificationLevel; +import com.google.protobuf.Any; +import com.google.protobuf.Timestamp; +import com.google.protobuf.util.Timestamps; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.producer.RecordMetadata; +import org.dependencytrack.event.PortfolioVulnerabilityAnalysisEvent; +import org.dependencytrack.event.kafka.KafkaEvent; +import org.dependencytrack.event.kafka.KafkaEventConverter; +import org.dependencytrack.event.kafka.KafkaEventDispatcher; +import org.dependencytrack.event.kafka.KafkaEventHeaders; +import org.dependencytrack.event.kafka.KafkaTopics; +import org.dependencytrack.event.kafka.KafkaUtil; +import org.dependencytrack.event.kafka.processor.api.Processor; +import org.dependencytrack.model.AnalysisJustification; +import org.dependencytrack.model.AnalysisResponse; +import org.dependencytrack.model.AnalysisState; +import org.dependencytrack.model.AnalyzerIdentity; +import org.dependencytrack.model.Severity; +import org.dependencytrack.model.Vulnerability; +import org.dependencytrack.model.VulnerabilityAlias; +import org.dependencytrack.model.VulnerabilityAnalysisLevel; +import org.dependencytrack.model.mapping.PolicyProtoMapper; +import org.dependencytrack.notification.NotificationConstants; +import org.dependencytrack.notification.NotificationGroup; +import org.dependencytrack.notification.NotificationScope; +import org.dependencytrack.parser.dependencytrack.ModelConverterCdxToVuln; +import org.dependencytrack.persistence.QueryManager; +import org.dependencytrack.persistence.jdbi.NotificationSubjectDao; +import org.dependencytrack.policy.vulnerability.VulnerabilityPolicy; +import org.dependencytrack.policy.vulnerability.VulnerabilityPolicyEvaluator; +import org.dependencytrack.policy.vulnerability.VulnerabilityPolicyRating; +import org.dependencytrack.proto.notification.v1.Group; +import org.dependencytrack.proto.vulnanalysis.v1.ScanKey; +import org.dependencytrack.proto.vulnanalysis.v1.ScanResult; +import org.dependencytrack.proto.vulnanalysis.v1.ScanStatus; +import org.dependencytrack.proto.vulnanalysis.v1.Scanner; +import org.dependencytrack.proto.vulnanalysis.v1.ScannerResult; +import org.dependencytrack.util.AnalysisCommentFormatter.AnalysisCommentField; +import org.dependencytrack.util.PersistenceUtil; +import org.dependencytrack.util.PersistenceUtil.Differ; +import org.jdbi.v3.core.mapper.reflect.ColumnName; +import org.jdbi.v3.sqlobject.config.RegisterBeanMapper; +import org.jdbi.v3.sqlobject.config.RegisterConstructorMapper; +import org.jdbi.v3.sqlobject.customizer.BindBean; +import org.jdbi.v3.sqlobject.customizer.BindMethods; +import org.jdbi.v3.sqlobject.statement.GetGeneratedKeys; +import org.jdbi.v3.sqlobject.statement.SqlBatch; +import org.jdbi.v3.sqlobject.statement.SqlQuery; +import org.slf4j.MDC; + +import javax.jdo.Query; +import javax.ws.rs.core.MultivaluedHashMap; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Objects; +import java.util.ServiceLoader; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; +import java.util.function.Function; +import java.util.stream.Collectors; + 
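+// For orientation: the processor contracts this migration targets, as inferred from
+// the implementations in this patch (simplified sketch, not the actual interface source):
+//
+//   public interface Processor<K, V> {
+//       void process(ConsumerRecord<K, V> record);
+//   }
+//
+//   public interface BatchProcessor<K, V> {
+//       void process(List<ConsumerRecord<K, V>> records) throws ProcessingException;
+//   }
+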
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+import static org.datanucleus.PropertyNames.PROPERTY_PERSISTENCE_BY_REACHABILITY_AT_COMMIT;
+import static org.datanucleus.PropertyNames.PROPERTY_RETAIN_VALUES;
+import static org.dependencytrack.common.ConfigKey.VULNERABILITY_POLICY_ANALYSIS_ENABLED;
+import static org.dependencytrack.common.MdcKeys.MDC_COMPONENT_UUID;
+import static org.dependencytrack.common.MdcKeys.MDC_SCAN_TOKEN;
+import static org.dependencytrack.parser.dependencytrack.ModelConverterCdxToVuln.convert;
+import static org.dependencytrack.persistence.jdbi.JdbiFactory.jdbi;
+import static org.dependencytrack.proto.notification.v1.Group.GROUP_NEW_VULNERABILITY;
+import static org.dependencytrack.proto.notification.v1.Group.GROUP_NEW_VULNERABLE_DEPENDENCY;
+import static org.dependencytrack.proto.notification.v1.Group.GROUP_PROJECT_AUDIT_CHANGE;
+import static org.dependencytrack.proto.notification.v1.Level.LEVEL_INFORMATIONAL;
+import static org.dependencytrack.proto.notification.v1.Scope.SCOPE_PORTFOLIO;
+import static org.dependencytrack.proto.vulnanalysis.v1.ScanStatus.SCAN_STATUS_FAILED;
+import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_INTERNAL;
+import static org.dependencytrack.util.AnalysisCommentFormatter.formatComment;
+import static org.dependencytrack.util.NotificationUtil.generateNotificationContent;
+import static org.dependencytrack.util.NotificationUtil.generateNotificationTitle;
+import static org.dependencytrack.util.NotificationUtil.generateTitle;
+import static org.dependencytrack.util.VulnerabilityUtil.canBeMirrored;
+import static org.dependencytrack.util.VulnerabilityUtil.isAuthoritativeSource;
+import static org.dependencytrack.util.VulnerabilityUtil.isMirroringEnabled;
+
+/**
+ * A {@link Processor} responsible for processing {@link ScanResult}s.
+ */
+public class VulnerabilityScanResultProcessor implements Processor<ScanKey, ScanResult> {
+
+    static final String PROCESSOR_NAME = "vuln.analysis.result";
+
+    private static final Logger LOGGER = Logger.getLogger(VulnerabilityScanResultProcessor.class);
+
+    private final ThreadLocal<List<KafkaEvent<?, ?>>> eventsToDispatch = ThreadLocal.withInitial(ArrayList::new);
+    private final KafkaEventDispatcher eventDispatcher = new KafkaEventDispatcher();
+    private final VulnerabilityPolicyEvaluator vulnPolicyEvaluator;
+
+    public VulnerabilityScanResultProcessor() {
+        this(Config.getInstance().getPropertyAsBoolean(VULNERABILITY_POLICY_ANALYSIS_ENABLED)
+                ?
ServiceLoader.load(VulnerabilityPolicyEvaluator.class).findFirst().orElseThrow() + : null); + } + + VulnerabilityScanResultProcessor(final VulnerabilityPolicyEvaluator vulnPolicyEvaluator) { + this.vulnPolicyEvaluator = vulnPolicyEvaluator; + } + + @Override + public void process(final ConsumerRecord record) { + final ScanKey scanKey = record.key(); + final ScanResult scanResult = record.value(); + final VulnerabilityAnalysisLevel analysisLevel = determineAnalysisLevel(record); + final boolean isNewComponent = determineIsComponentNew(record); + + try (var ignoredMdcComponentUuid = MDC.putCloseable(MDC_COMPONENT_UUID, scanKey.getComponentUuid()); + var ignoredMdcScanToken = MDC.putCloseable(MDC_SCAN_TOKEN, scanKey.getScanToken())) { + maybeQueueResultProcessedEvent(scanKey, scanResult); + + processInternal(scanKey, scanResult, analysisLevel, isNewComponent); + + final List> dispatchedEvents = eventDispatcher.dispatchAll(eventsToDispatch.get()); + CompletableFuture.allOf(dispatchedEvents.toArray(new CompletableFuture[0])).join(); + } finally { + eventsToDispatch.get().clear(); + } + } + + private void processInternal(final ScanKey scanKey, + final ScanResult scanResult, + final VulnerabilityAnalysisLevel analysisLevel, + final boolean isNewComponent) { + try (final var qm = new QueryManager()) { + // Do not unload fields upon commit (why is this even the default WTF). + qm.getPersistenceManager().setProperty(PROPERTY_RETAIN_VALUES, "true"); + qm.getPersistenceManager().setProperty(PROPERTY_PERSISTENCE_BY_REACHABILITY_AT_COMMIT, "false"); + + final Component component = jdbi(qm).withExtension(Dao.class, dao -> dao.getComponentByUuid(UUID.fromString(scanKey.getComponentUuid()))); + if (component == null) { + LOGGER.warn("Received result for component, but it does not exist"); + return; + } + + for (final ScannerResult scannerResult : scanResult.getScannerResultsList()) { + processScannerResult(qm, component, scanKey, scannerResult, analysisLevel, isNewComponent); + } + } + } + + private void processScannerResult(final QueryManager qm, final Component component, + final ScanKey scanKey, final ScannerResult scannerResult, + final VulnerabilityAnalysisLevel analysisLevel, + final boolean isNewComponent) { + if (scannerResult.getStatus() == SCAN_STATUS_FAILED) { + final var message = "Scan of component %s with %s failed (scanKey: %s): %s" + .formatted(component.uuid(), scannerResult.getScanner(), prettyPrint(scanKey), scannerResult.getFailureReason()); + eventDispatcher.dispatchNotification(new Notification() + .scope(NotificationScope.SYSTEM) + .group(NotificationGroup.ANALYZER) + .level(NotificationLevel.ERROR) + .title(NotificationConstants.Title.ANALYZER_ERROR) + .content(message)); + LOGGER.warn(message); + return; + } else if (scannerResult.getStatus() != ScanStatus.SCAN_STATUS_SUCCESSFUL) { + LOGGER.warn("Unable to process results from %s with status %s; Dropping record (scanKey: %s)" + .formatted(scannerResult.getScanner(), scannerResult.getStatus(), prettyPrint(scanKey))); + return; + } + + final Set syncedVulns = syncVulnerabilities(qm, scanKey, scannerResult); + LOGGER.debug("Synchronized %d vulnerabilities reported by %s for %s (scanKey: %s)" + .formatted(syncedVulns.size(), scannerResult.getScanner(), scanKey.getComponentUuid(), prettyPrint(scanKey))); + + final Map matchedPoliciesByVulnUuid = maybeEvaluateVulnPolicies(component, syncedVulns); + LOGGER.debug("Identified policy matches for %d/%d vulnerabilities (scanKey: %s)" + .formatted(matchedPoliciesByVulnUuid.size(), 
syncedVulns.size(), prettyPrint(scanKey))); + + final List newVulnUuids = synchronizeFindingsAndAnalyses(qm, component, syncedVulns, + scannerResult.getScanner(), matchedPoliciesByVulnUuid); + LOGGER.debug("Identified %d new vulnerabilities for %s with %s (scanKey: %s)" + .formatted(newVulnUuids.size(), scanKey.getComponentUuid(), scannerResult.getScanner(), prettyPrint(scanKey))); + + maybeQueueNotifications(qm, component, isNewComponent, analysisLevel, newVulnUuids); + } + + /** + * Synchronize vulnerabilities reported in a given {@link ScannerResult} with the datastore. + * + * @param qm The {@link QueryManager} to use + * @param scanKey The {@link ScanKey} associated with the {@link ScannerResult} + * @param scannerResult The {@link ScannerResult} to synchronize vulnerabilities from + * @return A {@link Set} of synchronized {@link Vulnerability}s + */ + private Set syncVulnerabilities(final QueryManager qm, final ScanKey scanKey, final ScannerResult scannerResult) { + final var syncedVulns = new HashSet(); + + for (final org.cyclonedx.proto.v1_4.Vulnerability reportedVuln : scannerResult.getBom().getVulnerabilitiesList()) { + final Vulnerability vuln; + try { + vuln = ModelConverterCdxToVuln.convert(qm, scannerResult.getBom(), reportedVuln, true); + } catch (RuntimeException e) { + LOGGER.error("Failed to convert vulnerability %s/%s (reported by %s for component %s) to internal model (scanKey: %s)" + .formatted(reportedVuln.getSource(), reportedVuln.getId(), scannerResult.getScanner(), scanKey.getComponentUuid(), prettyPrint(scanKey)), e); + continue; + } + + try { + final Vulnerability syncedVuln = syncVulnerability(qm, vuln, scannerResult.getScanner()); + + // Detach vulnerabilities from JDO persistence context. + // We do not want to trigger any DB interactions by accessing their fields later. + // Note that even PersistenceManager#detachCopy will load / unload fields based + // on the current FetchPlan. But we just want to keep the data we already have, + // and #makeTransientAll does exactly that. + qm.getPersistenceManager().makeTransient(syncedVuln); + + if (vuln.getAliases() != null && !vuln.getAliases().isEmpty()) { + final var syncedAliases = new ArrayList(); + for (VulnerabilityAlias alias : vuln.getAliases()) { + final VulnerabilityAlias syncedAlias = qm.synchronizeVulnerabilityAlias(alias); + qm.getPersistenceManager().makeTransient(syncedAlias); + syncedAliases.add(syncedAlias); + } + syncedVuln.setAliases(syncedAliases); + } + + syncedVulns.add(syncedVuln); + } catch (RuntimeException e) { + // Use a broad catch here, so we can still try to process other + // vulnerabilities, even though processing one of them failed. + + LOGGER.warn("Failed to synchronize vulnerability %s/%s (reported by %s for component %s; scanKey: %s)" + .formatted(vuln.getSource(), vuln.getVulnId(), scannerResult.getScanner(), scanKey.getComponentUuid(), prettyPrint(scanKey)), e); + } + } + + return syncedVulns; + } + + /** + * Synchronize a given {@link Vulnerability} as reported by a given {@link Scanner} with the datastore. + *
<p>
+ * This method differs from {@link QueryManager#synchronizeVulnerability(Vulnerability, boolean)} in that it expects
+ * an active {@link javax.jdo.Transaction}, and only calls setters of existing vulnerabilities when the respective
+ * value actually changed, saving network round-trips.
+ *
+ * @param qm      The {@link QueryManager} to use
+ * @param vuln    The {@link Vulnerability} to synchronize
+ * @param scanner The {@link Scanner} that reported the vulnerability
+ * @return The synchronized {@link Vulnerability}
+ * @throws IllegalStateException  When no {@link javax.jdo.Transaction} is active
+ * @throws NoSuchElementException When the reported vulnerability is internal, but does not exist in the datastore
+ */
+    private Vulnerability syncVulnerability(final QueryManager qm, final Vulnerability vuln, final Scanner scanner) {
+        // TODO: Refactor this to use JDBI instead.
+        // It is possible that the same vulnerability is reported for multiple components in parallel,
+        // causing unique constraint violations when attempting to INSERT into the VULNERABILITY table.
+        // In such cases, we can get away with simply retrying the SELECT or INSERT again.
+        return qm.runInRetryableTransaction(() -> {
+            final Vulnerability existingVuln;
+            final Query<Vulnerability> query = qm.getPersistenceManager().newQuery(Vulnerability.class);
+            try {
+                query.setFilter("vulnId == :vulnId && source == :source");
+                query.setParameters(vuln.getVulnId(), vuln.getSource());
+                existingVuln = query.executeUnique();
+            } finally {
+                query.closeAll();
+            }
+
+            if (existingVuln == null) {
+                if (Vulnerability.Source.INTERNAL.name().equals(vuln.getSource())) {
+                    throw new NoSuchElementException("An internal vulnerability with ID %s does not exist".formatted(vuln.getVulnId()));
+                }
+
+                return qm.getPersistenceManager().makePersistent(vuln);
+            }
+
+            if (canUpdateVulnerability(existingVuln, scanner)) {
+                final var differ = new Differ<>(existingVuln, vuln);
+
+                // TODO: Consider using something like javers to get a rich diff of WHAT changed; https://github.com/javers/javers
+                differ.applyIfChanged("title", Vulnerability::getTitle, existingVuln::setTitle);
+                differ.applyIfChanged("subTitle", Vulnerability::getSubTitle, existingVuln::setSubTitle);
+                differ.applyIfChanged("description", Vulnerability::getDescription, existingVuln::setDescription);
+                differ.applyIfChanged("detail", Vulnerability::getDetail, existingVuln::setDetail);
+                differ.applyIfChanged("recommendation", Vulnerability::getRecommendation, existingVuln::setRecommendation);
+                differ.applyIfChanged("references", Vulnerability::getReferences, existingVuln::setReferences);
+                differ.applyIfChanged("credits", Vulnerability::getCredits, existingVuln::setCredits);
+                differ.applyIfChanged("created", Vulnerability::getCreated, existingVuln::setCreated);
+                differ.applyIfChanged("published", Vulnerability::getPublished, existingVuln::setPublished);
+                differ.applyIfChanged("updated", Vulnerability::getUpdated, existingVuln::setUpdated);
+                differ.applyIfChanged("cwes", Vulnerability::getCwes, existingVuln::setCwes);
+                // Calling setSeverity nulls all CVSS and OWASP RR fields. getSeverity calculates the severity on-the-fly,
+                // and will return UNASSIGNED even when no severity is set explicitly. Thus, calling setSeverity
+                // must happen before CVSS and OWASP RR fields are set, to avoid null-ing them again.
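+                // For example (illustrative only):
+                //   existingVuln.setCvssV3BaseScore(BigDecimal.valueOf(9.8));
+                //   existingVuln.setSeverity(Severity.CRITICAL); // nulls the score set above
+                // which is why "severity" is applied first below.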
+ differ.applyIfChanged("severity", Vulnerability::getSeverity, existingVuln::setSeverity); + differ.applyIfChanged("cvssV2BaseScore", Vulnerability::getCvssV2BaseScore, existingVuln::setCvssV2BaseScore); + differ.applyIfChanged("cvssV2ImpactSubScore", Vulnerability::getCvssV2ImpactSubScore, existingVuln::setCvssV2ImpactSubScore); + differ.applyIfChanged("cvssV2ExploitabilitySubScore", Vulnerability::getCvssV2ExploitabilitySubScore, existingVuln::setCvssV2ExploitabilitySubScore); + differ.applyIfChanged("cvssV2Vector", Vulnerability::getCvssV2Vector, existingVuln::setCvssV2Vector); + differ.applyIfChanged("cvssv3BaseScore", Vulnerability::getCvssV3BaseScore, existingVuln::setCvssV3BaseScore); + differ.applyIfChanged("cvssV3ImpactSubScore", Vulnerability::getCvssV3ImpactSubScore, existingVuln::setCvssV3ImpactSubScore); + differ.applyIfChanged("cvssV3ExploitabilitySubScore", Vulnerability::getCvssV3ExploitabilitySubScore, existingVuln::setCvssV3ExploitabilitySubScore); + differ.applyIfChanged("cvssV3Vector", Vulnerability::getCvssV3Vector, existingVuln::setCvssV3Vector); + differ.applyIfChanged("owaspRRLikelihoodScore", Vulnerability::getOwaspRRLikelihoodScore, existingVuln::setOwaspRRLikelihoodScore); + differ.applyIfChanged("owaspRRTechnicalImpactScore", Vulnerability::getOwaspRRTechnicalImpactScore, existingVuln::setOwaspRRTechnicalImpactScore); + differ.applyIfChanged("owaspRRBusinessImpactScore", Vulnerability::getOwaspRRBusinessImpactScore, existingVuln::setOwaspRRBusinessImpactScore); + differ.applyIfChanged("owaspRRVector", Vulnerability::getOwaspRRVector, existingVuln::setOwaspRRVector); + // Aliases of existingVuln will always be null, as they'd have to be fetched separately. + // Synchronization of aliases is performed after synchronizing the vulnerability. + // updated |= applyIfChanged(existingVuln, vuln, Vulnerability::getAliases, existingVuln::setAliases); + + differ.applyIfChanged("vulnerableVersions", Vulnerability::getVulnerableVersions, existingVuln::setVulnerableVersions); + differ.applyIfChanged("patchedVersions", Vulnerability::getPatchedVersions, existingVuln::setPatchedVersions); + // EPSS is an additional enrichment that no scanner currently provides. + // We don't want EPSS scores of CVEs to be purged just because the CVE information came from e.g. OSS Index. + differ.applyIfNonNullAndChanged("epssScore", Vulnerability::getEpssScore, existingVuln::setEpssScore); + differ.applyIfNonNullAndChanged("epssPercentile", Vulnerability::getEpssPercentile, existingVuln::setEpssPercentile); + + if (!differ.getDiffs().isEmpty()) { + // TODO: Send a notification? 
+ // (But notifications should only be sent if the transaction was committed) + // TODO: Reduce to DEBUG; It's set to INFO for testing + LOGGER.info("Vulnerability %s/%s was updated by %s: %s".formatted(vuln.getSource(), vuln.getVulnId(), scanner, differ.getDiffs())); + } + } + + return existingVuln; + }, PersistenceUtil::isUniqueConstraintViolation); + } + + private Map maybeEvaluateVulnPolicies(final Component component, final Collection vulns) { + if (vulnPolicyEvaluator == null) { + return Collections.emptyMap(); + } + + final var policyProject = org.dependencytrack.proto.policy.v1.Project.newBuilder() + .setUuid(component.projectUuid().toString()) + .build(); + final var policyComponent = org.dependencytrack.proto.policy.v1.Component.newBuilder() + .setUuid(component.uuid().toString()) + .build(); + final List policyVulns = vulns.stream() + .map(PolicyProtoMapper::mapToProto) + .toList(); + + return vulnPolicyEvaluator.evaluate(policyVulns, policyComponent, policyProject); + } + + /** + * Associate a given {@link Collection} of {@link Vulnerability}s with a given {@link Component}, + * evaluate applicable {@link VulnerabilityPolicy}s, and apply the resulting analyses. + *
<p>
+ * If a {@link Vulnerability} was not previously associated with the {@link Component}, + * a {@link FindingAttribution} will be created for the {@link Scanner}. + * + * @param qm The {@link QueryManager} to use + * @param component The {@link Component} to associate with + * @param vulns The {@link Vulnerability}s to associate with + * @param scanner The {@link Scanner} that identified the association + * @param policiesByVulnUuid Matched {@link VulnerabilityPolicy}s grouped by {@link Vulnerability#getUuid()} + * @return A {@link List} of {@link Vulnerability}s, that were not previously associated with the {@link Component}, + * and which have not been suppressed via {@link VulnerabilityPolicy}. + */ + private List synchronizeFindingsAndAnalyses(final QueryManager qm, final Component component, + final Collection vulns, final Scanner scanner, + final Map policiesByVulnUuid) { + return jdbi(qm).inTransaction(jdbiHandle -> { + final var dao = jdbiHandle.attach(Dao.class); + + // Bulk-create new findings and corresponding scanner attributions. + final List newFindingVulnIds = dao.createFindings(component, vulns); + final List findingAttributions = newFindingVulnIds.stream() + .map(vulnId -> new FindingAttribution(vulnId, component.id(), component.projectId(), + convert(scanner).name(), UUID.randomUUID())) + .toList(); + dao.createFindingAttributions(findingAttributions); + + return maybeApplyPolicyAnalyses(qm, dao, component, vulns, newFindingVulnIds, policiesByVulnUuid); + }); + } + + /** + * Apply analyses of matched {@link VulnerabilityPolicy}s. Do nothing when no policies matched. + * + * @param qm + * @param dao The {@link Dao} to use for persistence operations + * @param component The {@link Component} to apply analyses for + * @param vulns The {@link Vulnerability}s identified for the {@link Component} + * @param newFindingVulnIds IDs of {@link Vulnerability}s that newly affect the {@link Component} + * @param policiesByVulnUuid Matched {@link VulnerabilityPolicy}s grouped by {@link Vulnerability#getUuid()} + * @return A {@link List} of {@link Vulnerability}s, that were not previously associated with the {@link Component}, + * and which have not been suppressed via {@link VulnerabilityPolicy}. + */ + private List maybeApplyPolicyAnalyses(QueryManager qm, final Dao dao, final Component component, final Collection vulns, + final List newFindingVulnIds, Map policiesByVulnUuid) { + if (vulns.isEmpty()) { + return Collections.emptyList(); + } + + // Mark vulnerability UUIDs without policy match with an explicit "null" policy. + policiesByVulnUuid = new HashMap<>(policiesByVulnUuid); + for (final Vulnerability vuln : vulns) { + if (!policiesByVulnUuid.containsKey(vuln.getUuid())) { + policiesByVulnUuid.put(vuln.getUuid(), null); + } + } + + // Index vulnerabilities by ID and UUID for more efficient lookups. + final var vulnById = new HashMap(); + final var vulnByUuid = new HashMap(); + for (final Vulnerability vuln : vulns) { + vulnById.put(vuln.getId(), vuln); + vulnByUuid.put(vuln.getUuid(), vuln); + } + + // For all vulnerabilities with matching policies, bulk-fetch existing analyses. + // Index them by vulnerability UUID for more efficient access. 
+ final Map existingAnalyses = dao.getAnalyses(component, policiesByVulnUuid.keySet()).stream() + .collect(Collectors.toMap(Analysis::getVulnUuid, Function.identity())); + + final var analysesToCreateOrUpdate = new ArrayList(); + final var projectAuditChangeNotifications = new ArrayList(); + final var analysisCommentsByVulnId = new MultivaluedHashMap(); + + for (final Map.Entry vulnUuidAndPolicy : policiesByVulnUuid.entrySet()) { + final Vulnerability vuln = vulnByUuid.get(vulnUuidAndPolicy.getKey()); + final VulnerabilityPolicy policy = vulnUuidAndPolicy.getValue(); + final Analysis policyAnalysis; + try { + policyAnalysis = Analysis.fromPolicy(policy); + } catch (IllegalArgumentException e) { + LOGGER.warn("Unable to apply policy %s as it was found to be invalid".formatted(policy.getName()), e); + continue; + } + final Analysis existingAnalysis = existingAnalyses.get(vuln.getUuid()); + if (policy != null && existingAnalysis == null) { + policyAnalysis.setComponentId(component.id()); + policyAnalysis.setProjectId(component.projectId()); + policyAnalysis.setVulnId(vuln.getId()); + policyAnalysis.setVulnUuid(vuln.getUuid()); + + // We'll create comments for analysisId=null for now, as the Analysis we're referring + // to hasn't been created yet. The analysisId is populated later, after bulk upserting + // all analyses. + final var commentFactory = new AnalysisCommentFactory(null, policy); + if (policyAnalysis.getState() != null) { + commentFactory.createComment(formatComment(AnalysisCommentField.STATE, null, policyAnalysis.getState())); + } + if (policyAnalysis.getJustification() != null) { + commentFactory.createComment(formatComment(AnalysisCommentField.JUSTIFICATION, null, policyAnalysis.getJustification())); + } + if (policyAnalysis.getResponse() != null) { + commentFactory.createComment(formatComment(AnalysisCommentField.RESPONSE, null, policyAnalysis.getResponse())); + } + if (policyAnalysis.getDetails() != null) { + commentFactory.createComment(formatComment(AnalysisCommentField.DETAILS, null, policyAnalysis.getDetails())); + } + if (policyAnalysis.getSuppressed()) { + commentFactory.createComment(formatComment(AnalysisCommentField.SUPPRESSED, null, policyAnalysis.getSuppressed())); + } + if (policyAnalysis.getSeverity() != null) { + commentFactory.createComment(formatComment(AnalysisCommentField.SEVERITY, vuln.getSeverity(), policyAnalysis.getSeverity())); + } + if (policyAnalysis.getCvssV2Vector() != null) { + commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV2_VECTOR, null, policyAnalysis.getCvssV2Vector())); + } + if (policyAnalysis.getCvssV2Score() != null) { + commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV2_SCORE, null, policyAnalysis.getCvssV2Score())); + } + if (policyAnalysis.getCvssV3Vector() != null) { + commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV3_VECTOR, null, policyAnalysis.getCvssV3Vector())); + } + if (policyAnalysis.getCvssV3Score() != null) { + commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV3_SCORE, null, policyAnalysis.getCvssV3Score())); + } + if (policyAnalysis.getOwaspVector() != null) { + commentFactory.createComment(formatComment(AnalysisCommentField.OWASP_VECTOR, null, policyAnalysis.getOwaspVector())); + } + if (policyAnalysis.getOwaspScore() != null) { + commentFactory.createComment(formatComment(AnalysisCommentField.OWASP_SCORE, null, policyAnalysis.getOwaspScore())); + } + analysesToCreateOrUpdate.add(policyAnalysis); + 
                analysisCommentsByVulnId.addAll(policyAnalysis.getVulnId(), commentFactory.getComments());
+            } else if (existingAnalysis != null && (policy != null || existingAnalysis.getVulnPolicyName() != null)) {
+                boolean shouldUpdate = false;
+                boolean analysisStateChange = false;
+                boolean suppressionChange = false;
+                final var commentFactory = new AnalysisCommentFactory(existingAnalysis.getId(), policy);
+                if (!Objects.equals(existingAnalysis.getState(), policyAnalysis.getState())) {
+                    commentFactory.createComment(formatComment(AnalysisCommentField.STATE,
+                            existingAnalysis.getState(), policyAnalysis.getState()));
+
+                    existingAnalysis.setState(policyAnalysis.getState());
+                    shouldUpdate = true;
+                    analysisStateChange = true;
+                }
+                if (!Objects.equals(existingAnalysis.getJustification(), policyAnalysis.getJustification())) {
+                    commentFactory.createComment(formatComment(AnalysisCommentField.JUSTIFICATION,
+                            existingAnalysis.getJustification(), policyAnalysis.getJustification()));
+
+                    existingAnalysis.setJustification(policyAnalysis.getJustification());
+                    shouldUpdate = true;
+                }
+                if (!Objects.equals(existingAnalysis.getResponse(), policyAnalysis.getResponse())) {
+                    commentFactory.createComment(formatComment(AnalysisCommentField.RESPONSE,
+                            existingAnalysis.getResponse(), policyAnalysis.getResponse()));
+
+                    existingAnalysis.setResponse(policyAnalysis.getResponse());
+                    shouldUpdate = true;
+                }
+                if (!Objects.equals(existingAnalysis.getDetails(), policyAnalysis.getDetails())) {
+                    commentFactory.createComment(formatComment(AnalysisCommentField.DETAILS,
+                            existingAnalysis.getDetails(), policyAnalysis.getDetails()));
+
+                    existingAnalysis.setDetails(policyAnalysis.getDetails());
+                    shouldUpdate = true;
+                }
+                if (existingAnalysis.getSuppressed() == null || (existingAnalysis.getSuppressed() != policyAnalysis.getSuppressed())) {
+                    commentFactory.createComment(formatComment(AnalysisCommentField.SUPPRESSED,
+                            existingAnalysis.getSuppressed(), policyAnalysis.getSuppressed()));
+
+                    existingAnalysis.setSuppressed(policyAnalysis.getSuppressed());
+                    shouldUpdate = true;
+                    suppressionChange = true;
+                }
+                if (!Objects.equals(existingAnalysis.getSeverity(), policyAnalysis.getSeverity())) {
+                    commentFactory.createComment(formatComment(AnalysisCommentField.SEVERITY,
+                            existingAnalysis.getSeverity(), policyAnalysis.getSeverity()));
+
+                    existingAnalysis.setSeverity(policyAnalysis.getSeverity());
+                    shouldUpdate = true;
+                }
+                if (!Objects.equals(existingAnalysis.getCvssV2Vector(), policyAnalysis.getCvssV2Vector())) {
+                    commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV2_VECTOR,
+                            existingAnalysis.getCvssV2Vector(), policyAnalysis.getCvssV2Vector()));
+
+                    existingAnalysis.setCvssV2Vector(policyAnalysis.getCvssV2Vector());
+                    shouldUpdate = true;
+                }
+                if (!Objects.equals(existingAnalysis.getCvssV2Score(), policyAnalysis.getCvssV2Score())) {
+                    commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV2_SCORE,
+                            existingAnalysis.getCvssV2Score(), policyAnalysis.getCvssV2Score()));
+
+                    existingAnalysis.setCvssV2Score(policyAnalysis.getCvssV2Score());
+                    shouldUpdate = true;
+                }
+                if (!Objects.equals(existingAnalysis.getCvssV3Vector(), policyAnalysis.getCvssV3Vector())) {
+                    commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV3_VECTOR,
+                            existingAnalysis.getCvssV3Vector(), policyAnalysis.getCvssV3Vector()));
+
+                    existingAnalysis.setCvssV3Vector(policyAnalysis.getCvssV3Vector());
+                    shouldUpdate = true;
+                }
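+                // The compare-comment-apply blocks above and below all share the same shape;
+                // a hypothetical helper could collapse them (sketch only, names invented):
+                //
+                //   static <T> boolean applyIfChanged(final T oldVal, final T newVal,
+                //           final Consumer<T> setter, final Runnable commentAction) {
+                //       if (Objects.equals(oldVal, newVal)) return false;
+                //       commentAction.run();
+                //       setter.accept(newVal);
+                //       return true;
+                //   }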
+                if (!Objects.equals(existingAnalysis.getCvssV3Score(), policyAnalysis.getCvssV3Score())) {
+                    commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV3_SCORE,
+                            existingAnalysis.getCvssV3Score(), policyAnalysis.getCvssV3Score()));
+
+                    existingAnalysis.setCvssV3Score(policyAnalysis.getCvssV3Score());
+                    shouldUpdate = true;
+                }
+                if (!Objects.equals(existingAnalysis.getOwaspVector(), policyAnalysis.getOwaspVector())) {
+                    commentFactory.createComment(formatComment(AnalysisCommentField.OWASP_VECTOR,
+                            existingAnalysis.getOwaspVector(), policyAnalysis.getOwaspVector()));
+
+                    existingAnalysis.setOwaspVector(policyAnalysis.getOwaspVector());
+                    shouldUpdate = true;
+                }
+                if (!Objects.equals(existingAnalysis.getOwaspScore(), policyAnalysis.getOwaspScore())) {
+                    commentFactory.createComment(formatComment(AnalysisCommentField.OWASP_SCORE,
+                            existingAnalysis.getOwaspScore(), policyAnalysis.getOwaspScore()));
+
+                    existingAnalysis.setOwaspScore(policyAnalysis.getOwaspScore());
+                    shouldUpdate = true;
+                }
+                if (shouldUpdate) {
+                    existingAnalysis.setVulnPolicyName(policy != null ? policy.getName() : null);
+                    analysesToCreateOrUpdate.add(existingAnalysis);
+                    analysisCommentsByVulnId.addAll(existingAnalysis.getVulnId(), commentFactory.getComments());
+                    maybeQueueProjectAuditChangeNotification(qm, component, vuln, existingAnalysis, analysisStateChange, suppressionChange);
+                }
+            }
+
+            // If the finding was suppressed, do not report it as new.
+            if (Boolean.TRUE.equals(policyAnalysis.getSuppressed())) {
+                newFindingVulnIds.remove(vuln.getId());
+            }
+        }
+
+        if (!analysesToCreateOrUpdate.isEmpty()) {
+            final List<CreatedAnalysis> createdAnalyses = dao.createOrUpdateAnalyses(analysesToCreateOrUpdate);
+            // Comments for new analyses do not have an analysis ID set yet, as that ID was not known prior
+            // to inserting the respective analysis record. Enrich comments with analysis IDs now that we know them.
+ for (final CreatedAnalysis createdAnalysis : createdAnalyses) { + analysisCommentsByVulnId.computeIfPresent(createdAnalysis.vulnId(), + (vulnId, comments) -> comments.stream() + .map(comment -> new AnalysisComment(createdAnalysis.id(), comment.comment(), comment.commenter())) + .toList()); + } + dao.createAnalysisComments(analysisCommentsByVulnId.values().stream().flatMap(Collection::stream).toList()); + } + + return vulnById.entrySet().stream() + .filter(entry -> newFindingVulnIds.contains(entry.getKey())) + .map(Map.Entry::getValue) + .toList(); + } + + private void maybeQueueProjectAuditChangeNotification(final QueryManager qm, final Component component, + final Vulnerability vuln, final Analysis policyAnalysis, + final boolean analysisStateChange, final boolean suppressionChange) { + if (!analysisStateChange && !suppressionChange) { + return; + } + + jdbi(qm) + .withExtension(NotificationSubjectDao.class, + dao -> dao.getForProjectAuditChange(component.uuid(), vuln.getUuid(), policyAnalysis.state, policyAnalysis.suppressed)) + .map(subject -> org.dependencytrack.proto.notification.v1.Notification.newBuilder() + .setScope(SCOPE_PORTFOLIO) + .setGroup(GROUP_PROJECT_AUDIT_CHANGE) + .setLevel(LEVEL_INFORMATIONAL) + .setTimestamp(Timestamps.now()) + .setTitle(generateTitle(policyAnalysis.getState(), policyAnalysis.getSuppressed(), analysisStateChange, suppressionChange)) + .setContent("An analysis decision was made to a finding affecting a project") + .setSubject(Any.pack(subject)) + .build()) + .map(KafkaEventConverter::convert) + .ifPresent(eventsToDispatch.get()::add); + } + + private void maybeQueueResultProcessedEvent(final ScanKey scanKey, final ScanResult scanResult) { + // Vulnerability scans targeting the entire portfolio are currently not tracked. + // There's no point in including results in the following repartition, and querying + // the database for their scan token, given the queries will never return anything anyway. + // Filtering results of portfolio analyses here also reduces the chance of hot partitions. + if (PortfolioVulnerabilityAnalysisEvent.CHAIN_IDENTIFIER.toString().equals(scanKey.getScanToken())) { + return; + } + + // Drop vulnerabilities from scanner results, as they can be rather large, and we don't need them anymore. + // Dropping them will save us some compression and network overhead during the repartition. + // We can remove this step should we ever need access to the vulnerabilities again. + final ScanResult strippedScanResult = scanResult.toBuilder() + .clearScannerResults() + .addAllScannerResults(scanResult.getScannerResultsList().stream() + .map(scannerResult -> scannerResult.toBuilder() + .clearBom() + .build()) + .toList()) + .build(); + + final var event = new KafkaEvent<>(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, scanKey.getScanToken(), strippedScanResult); + eventsToDispatch.get().add(event); + } + + /** + * Send {@link Group#GROUP_NEW_VULNERABLE_DEPENDENCY} and {@link Group#GROUP_NEW_VULNERABILITY} notifications + * for a given {@link Component}, if it was found to have at least one non-suppressed vulnerability. 
+ * + * @param qm The {@link QueryManager} to use + * @param component The {@link Component} to send notifications for + * @param isNewComponent Whether {@code component} is new + * @param analysisLevel The {@link VulnerabilityAnalysisLevel} + * @param newVulns Newly identified {@link Vulnerability}s + */ + private void maybeQueueNotifications(final QueryManager qm, final Component component, final boolean isNewComponent, + final VulnerabilityAnalysisLevel analysisLevel, final List newVulns) { + if (newVulns.isEmpty()) { + return; + } + + final Timestamp notificationTimestamp = Timestamps.now(); + jdbi(qm).useExtension(NotificationSubjectDao.class, dao -> { + if (isNewComponent) { + dao.getForNewVulnerableDependency(component.uuid()) + .map(subject -> org.dependencytrack.proto.notification.v1.Notification.newBuilder() + .setScope(SCOPE_PORTFOLIO) + .setGroup(GROUP_NEW_VULNERABLE_DEPENDENCY) + .setLevel(LEVEL_INFORMATIONAL) + .setTimestamp(notificationTimestamp) + .setTitle(generateNotificationTitle(NotificationConstants.Title.NEW_VULNERABLE_DEPENDENCY, subject.getProject())) + .setContent(generateNotificationContent(subject.getComponent(), subject.getVulnerabilitiesList())) + .setSubject(Any.pack(subject)) + .build()) + .map(KafkaEventConverter::convert) + .ifPresent(eventsToDispatch.get()::add); + } + + dao.getForNewVulnerabilities(component.uuid(), newVulns.stream().map(Vulnerability::getUuid).toList(), analysisLevel).stream() + .map(subject -> org.dependencytrack.proto.notification.v1.Notification.newBuilder() + .setScope(SCOPE_PORTFOLIO) + .setGroup(GROUP_NEW_VULNERABILITY) + .setLevel(LEVEL_INFORMATIONAL) + .setTimestamp(notificationTimestamp) + .setTitle(generateNotificationTitle(NotificationConstants.Title.NEW_VULNERABILITY, subject.getProject())) + .setContent(generateNotificationContent(subject.getVulnerability())) + .setSubject(Any.pack(subject)) + .build()) + .map(KafkaEventConverter::convert) + .forEach(eventsToDispatch.get()::add); + }); + } + + private boolean canUpdateVulnerability(final Vulnerability vuln, final Scanner scanner) { + var canUpdate = true; + + // Results from the internal scanner only contain vulnId and source, nothing else. + // As they only refer to existing vulnerabilities in the database, no update must be performed. + canUpdate &= scanner != SCANNER_INTERNAL; + + // Internal vulnerabilities can only be updated via REST API. + canUpdate &= !Vulnerability.Source.INTERNAL.name().equals(vuln.getSource()); + + // If the scanner is also the authoritative source of the given vulnerability, + // it should be able to update it. This will be the case for the OSS Index scanner + // and sonatype-XXX vulnerabilities for example. + canUpdate &= isAuthoritativeSource(vuln, convert(scanner)) + // Alternatively, if the vulnerability could be mirrored, but mirroring + // is disabled, it is OK to override any existing data. + // + // Ideally, we'd track the data from all sources instead of just overriding + // it, but for now this will have to do it. 
+ || (canBeMirrored(vuln) && !isMirroringEnabled(vuln)); + + return canUpdate; + } + + private static VulnerabilityAnalysisLevel determineAnalysisLevel(final ConsumerRecord record) { + return KafkaUtil.getEventHeader(record.headers(), KafkaEventHeaders.VULN_ANALYSIS_LEVEL) + .map(value -> { + try { + return VulnerabilityAnalysisLevel.valueOf(value); + } catch (IllegalArgumentException e) { + LOGGER.warn("The reported analysis type %s is invalid, assuming %s" + .formatted(value, VulnerabilityAnalysisLevel.PERIODIC_ANALYSIS)); + return VulnerabilityAnalysisLevel.PERIODIC_ANALYSIS; + } + }) + .orElse(VulnerabilityAnalysisLevel.PERIODIC_ANALYSIS); + } + + private static boolean determineIsComponentNew(final ConsumerRecord record) { + return KafkaUtil.getEventHeader(record.headers(), KafkaEventHeaders.IS_NEW_COMPONENT) + .map(Boolean::parseBoolean) + .orElse(false); + } + + private static String prettyPrint(final ScanKey scanKey) { + return "%s/%s".formatted(scanKey.getScanToken(), scanKey.getComponentUuid()); + } + + public interface Dao { + + @SqlQuery(""" + SELECT + "C"."ID" AS "id", + "C"."UUID" AS "uuid", + "P"."ID" AS "projectId", + "P"."UUID" AS "projectUuid" + FROM + "COMPONENT" AS "C" + INNER JOIN + "PROJECT" AS "P" ON "P"."ID" = "C"."PROJECT_ID" + WHERE + "C"."UUID" = (:uuid)::TEXT + """) + @RegisterConstructorMapper(Component.class) + Component getComponentByUuid(final UUID uuid); + + @SqlBatch(""" + INSERT INTO "COMPONENTS_VULNERABILITIES" + ("COMPONENT_ID", "VULNERABILITY_ID") + VALUES + (:component.id, :vuln.id) + ON CONFLICT DO NOTHING + RETURNING "VULNERABILITY_ID" + """) + @GetGeneratedKeys("VULNERABILITY_ID") + List createFindings(@BindMethods("component") final Component component, @BindBean("vuln") final Iterable vuln); + + @SqlBatch(""" + INSERT INTO "FINDINGATTRIBUTION" + ("VULNERABILITY_ID", "COMPONENT_ID", "PROJECT_ID", "ANALYZERIDENTITY", "ATTRIBUTED_ON", "UUID") + VALUES + (:vulnId, :componentId, :projectId, :analyzer, NOW(), (:uuid)::TEXT) + ON CONFLICT ("VULNERABILITY_ID", "COMPONENT_ID") DO NOTHING + """) + void createFindingAttributions(@BindMethods final Iterable attribution); + + @SqlQuery(""" + SELECT + "V"."ID" AS "vulnId", + "V"."UUID" AS "vulnUuid", + "A"."ID" AS "id", + "A"."COMPONENT_ID" AS "componentId", + "A"."PROJECT_ID" AS "projectId", + "A"."STATE" AS "state", + "A"."JUSTIFICATION" AS "justification", + "A"."RESPONSE" AS "response", + "A"."DETAILS" AS "details", + "A"."SUPPRESSED" AS "suppressed", + "A"."SEVERITY" AS "severity", + "A"."CVSSV2VECTOR" AS "cvssV2Vector", + "A"."CVSSV2SCORE" AS "cvssV2Score", + "A"."CVSSV3VECTOR" AS "cvssV3Vector", + "A"."CVSSV3SCORE" AS "cvssV3Score", + "A"."OWASPVECTOR" AS "owaspVector", + "A"."OWASPSCORE" AS "owaspScore", + "VP"."NAME" AS "vulnPolicyName" + FROM + "VULNERABILITY" AS "V" + INNER JOIN + "ANALYSIS" AS "A" ON "A"."VULNERABILITY_ID" = "V"."ID" + LEFT JOIN + "VULNERABILITY_POLICY" AS "VP" ON "VP"."ID" = "A"."VULNERABILITY_POLICY_ID" + WHERE + "A"."COMPONENT_ID" = :component.id + AND "V"."UUID" = ANY((:vulnUuids)::TEXT[]) + """) + @RegisterBeanMapper(Analysis.class) + List getAnalyses(@BindMethods("component") final Component component, final Iterable vulnUuids); + + @SqlBatch(""" + WITH "CTE_VULN_POLICY" AS ( + SELECT + "ID" + FROM + "VULNERABILITY_POLICY" + WHERE + "NAME" = :vulnPolicyName + ) + INSERT INTO "ANALYSIS" + ("VULNERABILITY_ID", "COMPONENT_ID", "PROJECT_ID", "STATE", "JUSTIFICATION", "RESPONSE", "DETAILS", + "SUPPRESSED", "SEVERITY", "CVSSV2VECTOR", "CVSSV2SCORE", "CVSSV3VECTOR", "CVSSV3SCORE", 
"OWASPVECTOR", + "OWASPSCORE", "VULNERABILITY_POLICY_ID") + VALUES + (:vulnId, :componentId, :projectId, :state, :justification, :response, :details, :suppressed, + :severity, :cvssV2Vector, :cvssV2Score, :cvssV3Vector, :cvssV3Score, :owaspVector, :owaspScore, + (SELECT "ID" FROM "CTE_VULN_POLICY")) + ON CONFLICT ("VULNERABILITY_ID", "COMPONENT_ID", "PROJECT_ID") DO UPDATE + SET + "STATE" = :state, + "JUSTIFICATION" = :justification, + "RESPONSE" = :response, + "DETAILS" = :details, + "SUPPRESSED" = :suppressed, + "SEVERITY" = :severity, + "CVSSV2VECTOR" = :cvssV2Vector, + "CVSSV2SCORE" = :cvssV2Score, + "CVSSV3VECTOR" = :cvssV3Vector, + "CVSSV3SCORE" = :cvssV3Score, + "OWASPVECTOR" = :owaspVector, + "OWASPSCORE" = :owaspScore, + "VULNERABILITY_POLICY_ID" = (SELECT "ID" FROM "CTE_VULN_POLICY") + RETURNING "ID", "VULNERABILITY_ID" + """) + @GetGeneratedKeys({"ID", "VULNERABILITY_ID"}) + @RegisterConstructorMapper(CreatedAnalysis.class) + List createOrUpdateAnalyses(@BindBean final Iterable analysis); + + @SqlBatch(""" + INSERT INTO "ANALYSISCOMMENT" + ("ANALYSIS_ID", "TIMESTAMP", "COMMENT", "COMMENTER") + VALUES + (:analysisId, NOW(), :comment, :commenter) + """) + void createAnalysisComments(@BindMethods final Iterable comment); + + } + + public static class Analysis { + + private long id; + private long componentId; + private long projectId; + private long vulnId; + private UUID vulnUuid; + private String vulnPolicyName; + private AnalysisState state; + private AnalysisJustification justification; + private AnalysisResponse response; + private String details; + private Boolean suppressed; + private Severity severity; + private String cvssV2Vector; + private Double cvssV2Score; + private String cvssV3Vector; + private Double cvssV3Score; + private String owaspVector; + private Double owaspScore; + + private static Analysis fromPolicy(final VulnerabilityPolicy policy) { + final var analysis = new Analysis(); + if (policy == null) { + analysis.setState(AnalysisState.NOT_SET); + analysis.setSuppressed(false); + return analysis; + } + + analysis.setVulnPolicyName(policy.getName()); + if (policy.getAnalysis().getState() != null) { + analysis.setState(switch (policy.getAnalysis().getState()) { + case EXPLOITABLE -> AnalysisState.EXPLOITABLE; + case FALSE_POSITIVE -> AnalysisState.FALSE_POSITIVE; + case IN_TRIAGE -> AnalysisState.IN_TRIAGE; + case NOT_AFFECTED -> AnalysisState.NOT_AFFECTED; + case RESOLVED -> AnalysisState.RESOLVED; + }); + } else { + throw new IllegalArgumentException("Analysis of policy does not define a state"); + } + if (policy.getAnalysis().getJustification() != null) { + analysis.setJustification(switch (policy.getAnalysis().getJustification()) { + case CODE_NOT_PRESENT -> AnalysisJustification.CODE_NOT_PRESENT; + case CODE_NOT_REACHABLE -> AnalysisJustification.CODE_NOT_REACHABLE; + case PROTECTED_AT_PERIMETER -> AnalysisJustification.PROTECTED_AT_PERIMETER; + case PROTECTED_AT_RUNTIME -> AnalysisJustification.PROTECTED_AT_RUNTIME; + case PROTECTED_BY_COMPILER -> AnalysisJustification.PROTECTED_BY_COMPILER; + case PROTECTED_BY_MITIGATING_CONTROL -> AnalysisJustification.PROTECTED_BY_MITIGATING_CONTROL; + case REQUIRES_CONFIGURATION -> AnalysisJustification.REQUIRES_CONFIGURATION; + case REQUIRES_DEPENDENCY -> AnalysisJustification.REQUIRES_DEPENDENCY; + case REQUIRES_ENVIRONMENT -> AnalysisJustification.REQUIRES_ENVIRONMENT; + }); + } + if (policy.getAnalysis().getVendorResponse() != null) { + analysis.setResponse(switch (policy.getAnalysis().getVendorResponse()) { + case 
CAN_NOT_FIX -> AnalysisResponse.CAN_NOT_FIX; + case ROLLBACK -> AnalysisResponse.ROLLBACK; + case UPDATE -> AnalysisResponse.UPDATE; + case WILL_NOT_FIX -> AnalysisResponse.WILL_NOT_FIX; + case WORKAROUND_AVAILABLE -> AnalysisResponse.WORKAROUND_AVAILABLE; + }); + } + if (policy.getAnalysis().getDetails() != null) { + analysis.setDetails(policy.getAnalysis().getDetails()); + } + analysis.setSuppressed(policy.getAnalysis().isSuppress()); + + if (policy.getRatings() != null && !policy.getRatings().isEmpty()) { + if (policy.getRatings().size() > 3) { + throw new IllegalArgumentException("Policy defines more than three ratings"); + } + + final var methodsSeen = new HashSet(); + for (final VulnerabilityPolicyRating policyRating : policy.getRatings()) { + if (policyRating.getMethod() == null) { + throw new IllegalArgumentException("Rating #%d does not define a method" + .formatted(policy.getRatings().indexOf(policyRating))); + } + if (!methodsSeen.add(policyRating.getMethod())) { + throw new IllegalArgumentException("Rating method %s is defined more than once" + .formatted(policyRating.getMethod())); + } + if (policyRating.getSeverity() == null) { + throw new IllegalArgumentException("Rating #%d (%s) does not define a severity" + .formatted(policy.getRatings().indexOf(policyRating), policyRating.getMethod())); + } + + analysis.setSeverity(switch (policyRating.getSeverity()) { + case INFO -> Severity.INFO; + case LOW -> Severity.LOW; + case MEDIUM -> Severity.MEDIUM; + case HIGH -> Severity.HIGH; + case CRITICAL -> Severity.CRITICAL; + }); + switch (policyRating.getMethod()) { + case CVSSV2 -> { + analysis.setCvssV2Vector(policyRating.getVector()); + analysis.setCvssV2Score(policyRating.getScore()); + } + case CVSSV3 -> { + analysis.setCvssV3Vector(policyRating.getVector()); + analysis.setCvssV3Score(policyRating.getScore()); + } + case OWASP -> { + analysis.setOwaspVector(policyRating.getVector()); + analysis.setOwaspScore(policyRating.getScore()); + } + } + } + } + + return analysis; + } + + public long getId() { + return id; + } + + public void setId(final long id) { + this.id = id; + } + + public long getComponentId() { + return componentId; + } + + public void setComponentId(final long componentId) { + this.componentId = componentId; + } + + public long getProjectId() { + return projectId; + } + + public void setProjectId(final long projectId) { + this.projectId = projectId; + } + + public long getVulnId() { + return vulnId; + } + + public void setVulnId(final long vulnId) { + this.vulnId = vulnId; + } + + public UUID getVulnUuid() { + return vulnUuid; + } + + public void setVulnUuid(final UUID vulnUuid) { + this.vulnUuid = vulnUuid; + } + + public String getVulnPolicyName() { + return vulnPolicyName; + } + + public void setVulnPolicyName(final String vulnPolicyName) { + this.vulnPolicyName = vulnPolicyName; + } + + public AnalysisState getState() { + return state; + } + + public void setState(final AnalysisState state) { + this.state = state; + } + + public AnalysisJustification getJustification() { + return justification; + } + + public void setJustification(final AnalysisJustification justification) { + this.justification = justification; + } + + public AnalysisResponse getResponse() { + return response; + } + + public void setResponse(final AnalysisResponse response) { + this.response = response; + } + + public String getDetails() { + return details; + } + + public void setDetails(final String details) { + this.details = details; + } + + public Boolean getSuppressed() { + return 
suppressed; + } + + public void setSuppressed(final Boolean suppressed) { + this.suppressed = suppressed; + } + + public Severity getSeverity() { + return severity; + } + + public void setSeverity(final Severity severity) { + this.severity = severity; + } + + public String getCvssV2Vector() { + return cvssV2Vector; + } + + public void setCvssV2Vector(final String cvssV2Vector) { + this.cvssV2Vector = cvssV2Vector; + } + + public Double getCvssV2Score() { + return cvssV2Score; + } + + public void setCvssV2Score(final Double cvssV2Score) { + this.cvssV2Score = cvssV2Score; + } + + public String getCvssV3Vector() { + return cvssV3Vector; + } + + public void setCvssV3Vector(final String cvssV3Vector) { + this.cvssV3Vector = cvssV3Vector; + } + + public Double getCvssV3Score() { + return cvssV3Score; + } + + public void setCvssV3Score(final Double cvssV3Score) { + this.cvssV3Score = cvssV3Score; + } + + public String getOwaspVector() { + return owaspVector; + } + + public void setOwaspVector(final String owaspVector) { + this.owaspVector = owaspVector; + } + + public Double getOwaspScore() { + return owaspScore; + } + + public void setOwaspScore(final Double owaspScore) { + this.owaspScore = owaspScore; + } + + } + + public record CreatedAnalysis(long id, @ColumnName("VULNERABILITY_ID") long vulnId) { + } + + public record AnalysisComment(Long analysisId, String comment, String commenter) { + } + + private static final class AnalysisCommentFactory { + + private final Long analysisId; + private final VulnerabilityPolicy policy; + private final String commenter; + private final List comments; + + private AnalysisCommentFactory(final Long analysisId, VulnerabilityPolicy policy) { + this.analysisId = analysisId; + this.policy = policy; + this.commenter = createCommenter(policy); + this.comments = new ArrayList<>(); + } + + private void createComment(final String comment) { + comments.add(new AnalysisComment(this.analysisId, comment, this.commenter)); + } + + private List getComments() { + if (comments.isEmpty()) { + return comments; + } + + // If we have comments already, additionally include what the policy matched on. + // Include this as the very first comment, and do not modify the original list. 
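+            // Copying also keeps this factory idempotent: repeated calls to getComments()
+            // will not prepend the matched-conditions comment more than once.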
+ final var commentsCopy = new ArrayList(); + if (policy == null) { + commentsCopy.add(new AnalysisComment(this.analysisId, "No longer covered by any policy", this.commenter)); + } else { + commentsCopy.add(new AnalysisComment(this.analysisId, "Matched on condition(s):\n%s" + .formatted(policy.getConditions().stream().map("- %s"::formatted).collect(Collectors.joining("\n"))), this.commenter)); + } + commentsCopy.addAll(comments); + return commentsCopy; + } + + private static String createCommenter(final VulnerabilityPolicy policy) { + if (policy == null) { + return "[Policy{None}]"; + } + + if (isNotBlank(policy.getAuthor())) { + return "[Policy{Name=%s, Author=%s}]".formatted(policy.getName(), policy.getAuthor()); + } + + return "[Policy{Name=%s}]".formatted(policy.getName()); + } + + } + + public record Component(long id, UUID uuid, long projectId, UUID projectUuid) { + } + + public record FindingAttribution(long vulnId, long componentId, long projectId, String analyzer, UUID uuid) { + } + +} diff --git a/src/main/java/org/dependencytrack/model/WorkflowState.java b/src/main/java/org/dependencytrack/model/WorkflowState.java index 2b323f3e6..99a362934 100644 --- a/src/main/java/org/dependencytrack/model/WorkflowState.java +++ b/src/main/java/org/dependencytrack/model/WorkflowState.java @@ -48,6 +48,8 @@ public class WorkflowState implements Serializable { @Column(name = "PARENT_STEP_ID" , allowsNull = "true") private WorkflowState parent; + private transient long parentId; + @Persistent @Column(name = "TOKEN", jdbcType = "VARCHAR", length = 36, allowsNull = "false") @NotNull @@ -93,6 +95,14 @@ public void setParent(WorkflowState parent) { this.parent = parent; } + public long getParentId() { + return parentId; + } + + public void setParentId(long parentId) { + this.parentId = parentId; + } + public UUID getToken() { return token; } diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/VulnerabilityScanDao.java b/src/main/java/org/dependencytrack/persistence/jdbi/VulnerabilityScanDao.java new file mode 100644 index 000000000..a2b5d2004 --- /dev/null +++ b/src/main/java/org/dependencytrack/persistence/jdbi/VulnerabilityScanDao.java @@ -0,0 +1,61 @@ +/* + * This file is part of Dependency-Track. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * Copyright (c) OWASP Foundation. All Rights Reserved. 
+ */
+package org.dependencytrack.persistence.jdbi;
+
+import org.dependencytrack.model.VulnerabilityScan;
+import org.jdbi.v3.sqlobject.SqlObject;
+import org.jdbi.v3.sqlobject.config.RegisterBeanMapper;
+import org.jdbi.v3.sqlobject.customizer.Bind;
+import org.jdbi.v3.sqlobject.statement.GetGeneratedKeys;
+import org.jdbi.v3.sqlobject.statement.SqlBatch;
+
+import java.util.List;
+
+public interface VulnerabilityScanDao extends SqlObject {
+
+    @SqlBatch("""
+            UPDATE "VULNERABILITYSCAN"
+            SET "RECEIVED_RESULTS" = "RECEIVED_RESULTS" + :resultsTotal
+              , "SCAN_TOTAL" = "SCAN_TOTAL" + :scannerResultsTotal
+              , "SCAN_FAILED" = "SCAN_FAILED" + :scannerResultsFailed
+              , "STATUS" = CASE WHEN "EXPECTED_RESULTS" = ("RECEIVED_RESULTS" + :resultsTotal)
+                                THEN CASE WHEN (("SCAN_FAILED" + :scannerResultsFailed) / ("SCAN_TOTAL" + :scannerResultsTotal)) > "FAILURE_THRESHOLD"
+                                          THEN 'FAILED'
+                                          ELSE 'COMPLETED'
+                                     END
+                                ELSE 'IN_PROGRESS'
+                           END
+              , "UPDATED_AT" = NOW()
+            WHERE "TOKEN" = :token
+            RETURNING "TOKEN"
+                    , "STATUS"
+                    , "TARGET_TYPE"
+                    , "TARGET_IDENTIFIER"
+                    , CASE WHEN "STATUS" = 'FAILED'
+                           THEN ''
+                      END
+            """)
+    @RegisterBeanMapper(VulnerabilityScan.class)
+    @GetGeneratedKeys({"TOKEN", "STATUS", "TARGET_TYPE", "TARGET_IDENTIFIER"})
+    List<VulnerabilityScan> updateAll(@Bind("token") List<String> tokens,
+                                      @Bind List<Integer> resultsTotal,
+                                      @Bind List<Integer> scannerResultsTotal,
+                                      @Bind List<Integer> scannerResultsFailed);
+
+}
diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java b/src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java
new file mode 100644
index 000000000..05f01ca00
--- /dev/null
+++ b/src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java
@@ -0,0 +1,81 @@
+/*
+ * This file is part of Dependency-Track.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright (c) OWASP Foundation. All Rights Reserved.
+ */
+package org.dependencytrack.persistence.jdbi;
+
+import org.dependencytrack.model.WorkflowState;
+import org.dependencytrack.model.WorkflowStatus;
+import org.dependencytrack.model.WorkflowStep;
+import org.jdbi.v3.sqlobject.config.RegisterBeanMapper;
+import org.jdbi.v3.sqlobject.customizer.Bind;
+import org.jdbi.v3.sqlobject.statement.GetGeneratedKeys;
+import org.jdbi.v3.sqlobject.statement.SqlBatch;
+
+import java.util.List;
+
+public interface WorkflowDao {
+
+    @SqlBatch("""
+            UPDATE "WORKFLOW_STATE"
+            SET "STATUS" = :status
+              , "FAILURE_REASON" = :failureReason
+              , "UPDATED_AT" = NOW()
+            WHERE "TOKEN" = :token
+              AND "STEP" = :step
+            RETURNING "ID"
+                    , "PARENT_STEP_ID"
+                    , "TOKEN"
+                    , "STEP"
+                    , "STATUS"
+                    , "FAILURE_REASON"
+                    , "STARTED_AT"
+                    , "UPDATED_AT"
+            """)
+    @GetGeneratedKeys({"ID", "PARENT_STEP_ID", "TOKEN", "STEP", "STATUS", "FAILURE_REASON", "STARTED_AT", "UPDATED_AT"})
+    @RegisterBeanMapper(WorkflowState.class)
+    List<WorkflowState> updateAllSteps(@Bind WorkflowStep step,
+                                       @Bind("token") List<String> tokens,
+                                       @Bind("status") List<WorkflowStatus> statuses,
+                                       @Bind("failureReason") List<String> failureReasons);
+
+    @SqlBatch("""
+            WITH RECURSIVE
+            "CTE_PARENT" ("ID") AS (
+                SELECT "ID"
+                  FROM "WORKFLOW_STATE"
+                 WHERE "STEP" = :step
+                   AND "TOKEN" = :token
+            ),
+            "CTE_CHILDREN" ("ID") AS (
+                SELECT "ID"
+                  FROM "WORKFLOW_STATE"
+                 WHERE "PARENT_STEP_ID" = (SELECT "ID" FROM "CTE_PARENT")
+                 UNION ALL
+                SELECT "CHILD"."ID"
+                  FROM "WORKFLOW_STATE" AS "CHILD"
+                 INNER JOIN "CTE_CHILDREN" AS "PARENT"
+                    ON "PARENT"."ID" = "CHILD"."PARENT_STEP_ID"
+            )
+            UPDATE "WORKFLOW_STATE"
+            SET "STATUS" = 'CANCELLED'
+              , "UPDATED_AT" = NOW()
+            WHERE "ID" IN (SELECT "ID" FROM "CTE_CHILDREN")
+            """)
+    void cancelAllChildren(@Bind WorkflowStep step, @Bind("token") List<String> tokens);
+
+}
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index d9e2b1e70..0a815daf9 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -516,6 +516,29 @@ alpine.kafka.processor.repo.meta.analysis.result.retry.max.delay.ms=180000
 alpine.kafka.processor.repo.meta.analysis.result.consumer.group.id=dtrack-apiserver-processor
 alpine.kafka.processor.repo.meta.analysis.result.consumer.auto.offset.reset=earliest
 
+# Required
+alpine.kafka.processor.vuln.analysis.result.max.concurrency=-1
+alpine.kafka.processor.vuln.analysis.result.processing.order=key
+alpine.kafka.processor.vuln.analysis.result.retry.initial.delay.ms=1000
+alpine.kafka.processor.vuln.analysis.result.retry.multiplier=2
+alpine.kafka.processor.vuln.analysis.result.retry.randomization.factor=0.3
+alpine.kafka.processor.vuln.analysis.result.retry.max.delay.ms=180000
+alpine.kafka.processor.vuln.analysis.result.consumer.group.id=dtrack-apiserver-processor
+alpine.kafka.processor.vuln.analysis.result.consumer.auto.offset.reset=earliest
+
+# Required
+alpine.kafka.processor.processed.vuln.scan.result.max.batch.size=500
+alpine.kafka.processor.processed.vuln.scan.result.max.concurrency=1
+alpine.kafka.processor.processed.vuln.scan.result.processing.order=unordered
+alpine.kafka.processor.processed.vuln.scan.result.retry.initial.delay.ms=3000
+alpine.kafka.processor.processed.vuln.scan.result.retry.multiplier=2
+alpine.kafka.processor.processed.vuln.scan.result.retry.randomization.factor=0.3
+alpine.kafka.processor.processed.vuln.scan.result.retry.max.delay.ms=180000
+alpine.kafka.processor.processed.vuln.scan.result.consumer.group.id=dtrack-apiserver-processor
+alpine.kafka.processor.processed.vuln.scan.result.consumer.auto.offset.reset=earliest +alpine.kafka.processor.processed.vuln.scan.result.consumer.max.poll.records=1000 +alpine.kafka.processor.processed.vuln.scan.result.consumer.fetch.min.bytes=16384 + # Scheduling tasks after 3 minutes (3*60*1000) of starting application task.scheduler.initial.delay=180000 diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml index cc313f433..7a44b7606 100644 --- a/src/main/webapp/WEB-INF/web.xml +++ b/src/main/webapp/WEB-INF/web.xml @@ -50,9 +50,9 @@ org.dependencytrack.event.kafka.processor.ProcessorInitializer - + org.dependencytrack.event.PurlMigrator From 6a193e4b0f8e5008f1925ee1d7a1c3a8714fd1dc Mon Sep 17 00:00:00 2001 From: nscuro Date: Wed, 27 Mar 2024 14:14:49 +0100 Subject: [PATCH 02/24] Add dispatch `ProjectVulnAnalysisComplete` notifications Signed-off-by: nscuro --- ...essedVulnerabilityScanResultProcessor.java | 79 ++++++++-- .../jdbi/NotificationSubjectDao.java | 145 +++++++++++++++++- src/main/resources/application.properties | 6 +- 3 files changed, 213 insertions(+), 17 deletions(-) diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java index 2874be511..c8172dde8 100644 --- a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java +++ b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java @@ -20,19 +20,27 @@ import alpine.common.logging.Logger; import alpine.event.framework.Event; +import com.google.protobuf.Any; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.dependencytrack.event.ComponentMetricsUpdateEvent; import org.dependencytrack.event.ProjectMetricsUpdateEvent; import org.dependencytrack.event.ProjectPolicyEvaluationEvent; +import org.dependencytrack.event.kafka.KafkaEvent; +import org.dependencytrack.event.kafka.KafkaEventConverter; +import org.dependencytrack.event.kafka.KafkaEventDispatcher; import org.dependencytrack.event.kafka.processor.api.BatchProcessor; import org.dependencytrack.event.kafka.processor.exception.ProcessingException; import org.dependencytrack.model.VulnerabilityScan; import org.dependencytrack.model.WorkflowState; import org.dependencytrack.model.WorkflowStatus; import org.dependencytrack.model.WorkflowStep; +import org.dependencytrack.notification.NotificationConstants; import org.dependencytrack.persistence.QueryManager; +import org.dependencytrack.persistence.jdbi.NotificationSubjectDao; import org.dependencytrack.persistence.jdbi.VulnerabilityScanDao; import org.dependencytrack.persistence.jdbi.WorkflowDao; +import org.dependencytrack.proto.notification.v1.Notification; +import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisCompleteSubject; import org.dependencytrack.proto.vulnanalysis.v1.ScanResult; import org.dependencytrack.proto.vulnanalysis.v1.ScanStatus; import org.jdbi.v3.core.Handle; @@ -41,23 +49,43 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.UUID; import static java.lang.Math.toIntExact; import static org.dependencytrack.persistence.jdbi.JdbiFactory.jdbi; - +import static org.dependencytrack.proto.notification.v1.Group.GROUP_PROJECT_VULN_ANALYSIS_COMPLETE; +import static org.dependencytrack.proto.notification.v1.Level.LEVEL_INFORMATIONAL; +import static 
org.dependencytrack.proto.notification.v1.Scope.SCOPE_PORTFOLIO; + +/** + * A {@link BatchProcessor} that records successfully processed {@link ScanResult}s for their + * corresponding {@link VulnerabilityScan}, and triggers follow-up processes in case a scan + * is complete (either {@link VulnerabilityScan.Status#COMPLETED} or {@link VulnerabilityScan.Status#FAILED}). + */ public class ProcessedVulnerabilityScanResultProcessor implements BatchProcessor { static final String PROCESSOR_NAME = "processed.vuln.scan.result"; private static final Logger LOGGER = Logger.getLogger(ProcessedVulnerabilityScanResultProcessor.class); + private final KafkaEventDispatcher eventDispatcher = new KafkaEventDispatcher(); + @Override public void process(final List> records) throws ProcessingException { LOGGER.debug("Processing %d records".formatted(records.size())); - final List completedVulnScans = processScanResults(records); - LOGGER.debug("Detected completion of %d vulnerability scans".formatted(completedVulnScans.size())); + final var completedVulnScans = new ArrayList(); + final var notifications = new ArrayList>(); + try (final var qm = new QueryManager()) { + jdbi(qm).useTransaction(jdbiHandle -> { + completedVulnScans.addAll(processScanResults(jdbiHandle, records)); + notifications.addAll(createVulnAnalysisCompleteNotifications(jdbiHandle, completedVulnScans)); + }); + } + + eventDispatcher.dispatchAll(notifications); + LOGGER.debug("Dispatched %d notifications".formatted(notifications.size())); for (final VulnerabilityScan completedVulnScan : completedVulnScans) { switch (completedVulnScan.getTargetType()) { @@ -81,15 +109,14 @@ public void process(final List> records) thro } } - private static List processScanResults(final List> records) { - try (final var qm = new QueryManager()) { - return jdbi(qm).inTransaction(jdbiHandle -> { - final List completedVulnScans = recordScanResults(jdbiHandle, records); - updateWorkflowSteps(jdbiHandle, completedVulnScans); + private static List processScanResults(final Handle jdbiHandle, final List> records) { + final List completedVulnScans = recordScanResults(jdbiHandle, records); + LOGGER.debug("Detected completion of %d vulnerability scans".formatted(completedVulnScans.size())); - return completedVulnScans; - }); - } + final List updatedWorkflowSteps = updateWorkflowSteps(jdbiHandle, completedVulnScans); + LOGGER.debug("Updated %s workflow steps".formatted(updatedWorkflowSteps.size())); + + return completedVulnScans; } private static List recordScanResults(final Handle jdbiHandle, final List> records) { @@ -124,7 +151,7 @@ private static List recordScanResults(final Handle jdbiHandle .toList(); } - private static void updateWorkflowSteps(final Handle jdbiHandle, final List completedVulnScans) { + private static List updateWorkflowSteps(final Handle jdbiHandle, final List completedVulnScans) { final int numScans = completedVulnScans.size(); final var tokens = new ArrayList(numScans); final var statuses = new ArrayList(numScans); @@ -153,6 +180,34 @@ private static void updateWorkflowSteps(final Handle jdbiHandle, final List> createVulnAnalysisCompleteNotifications(final Handle jdbiHandle, final List completedVulnScans) { + final var notificationSubjectDao = jdbiHandle.attach(NotificationSubjectDao.class); + + final var notifications = new ArrayList>(completedVulnScans.size()); + for (final VulnerabilityScan completedVulnScan : completedVulnScans) { + final Optional optionalSubject = + notificationSubjectDao.getForProjectVulnAnalysisComplete(completedVulnScan); 
+ if (optionalSubject.isEmpty()) { + // Project (no longer) exists. + continue; + } + + final var notification = Notification.newBuilder() + .setScope(SCOPE_PORTFOLIO) + .setGroup(GROUP_PROJECT_VULN_ANALYSIS_COMPLETE) + .setLevel(LEVEL_INFORMATIONAL) + .setTitle(NotificationConstants.Title.PROJECT_VULN_ANALYSIS_COMPLETE) + .setSubject(Any.pack(optionalSubject.get())) + .build(); + + notifications.add(KafkaEventConverter.convert(notification)); + } + + return notifications; } private static class Aggregate { diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java b/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java index a3d84196b..5c825acfa 100644 --- a/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java +++ b/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java @@ -20,31 +20,45 @@ import org.dependencytrack.model.AnalysisState; import org.dependencytrack.model.VulnerabilityAnalysisLevel; +import org.dependencytrack.model.VulnerabilityScan; import org.dependencytrack.persistence.jdbi.mapping.NotificationComponentRowMapper; import org.dependencytrack.persistence.jdbi.mapping.NotificationProjectRowMapper; import org.dependencytrack.persistence.jdbi.mapping.NotificationSubjectNewVulnerabilityRowMapper; import org.dependencytrack.persistence.jdbi.mapping.NotificationSubjectNewVulnerableDependencyRowReducer; import org.dependencytrack.persistence.jdbi.mapping.NotificationSubjectProjectAuditChangeRowMapper; import org.dependencytrack.persistence.jdbi.mapping.NotificationVulnerabilityRowMapper; +import org.dependencytrack.proto.notification.v1.Component; +import org.dependencytrack.proto.notification.v1.ComponentVulnAnalysisCompleteSubject; import org.dependencytrack.proto.notification.v1.NewVulnerabilitySubject; import org.dependencytrack.proto.notification.v1.NewVulnerableDependencySubject; +import org.dependencytrack.proto.notification.v1.Project; +import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisCompleteSubject; +import org.dependencytrack.proto.notification.v1.Vulnerability; import org.dependencytrack.proto.notification.v1.VulnerabilityAnalysisDecisionChangeSubject; +import org.jdbi.v3.core.mapper.JoinRowMapper; +import org.jdbi.v3.sqlobject.SqlObject; import org.jdbi.v3.sqlobject.config.RegisterRowMapper; import org.jdbi.v3.sqlobject.config.RegisterRowMappers; import org.jdbi.v3.sqlobject.statement.SqlQuery; import org.jdbi.v3.sqlobject.statement.UseRowReducer; +import java.util.ArrayList; import java.util.Collection; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.UUID; +import java.util.stream.Collectors; + +import static org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisStatus.PROJECT_VULN_ANALYSIS_STATUS_COMPLETED; +import static org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisStatus.PROJECT_VULN_ANALYSIS_STATUS_FAILED; @RegisterRowMappers({ @RegisterRowMapper(NotificationComponentRowMapper.class), @RegisterRowMapper(NotificationProjectRowMapper.class), @RegisterRowMapper(NotificationVulnerabilityRowMapper.class) }) -public interface NotificationSubjectDao { +public interface NotificationSubjectDao extends SqlObject { @SqlQuery(""" SELECT @@ -360,6 +374,133 @@ LEFT JOIN LATERAL ( "C"."UUID" = (:componentUuid)::TEXT AND "V"."UUID" = (:vulnUuid)::TEXT """) @RegisterRowMapper(NotificationSubjectProjectAuditChangeRowMapper.class) - Optional getForProjectAuditChange(final UUID componentUuid, 
final UUID vulnUuid, AnalysisState analysisState, boolean isSuppressed); + + @SqlQuery(""" + SELECT "P"."UUID" AS "projectUuid" + , "P"."NAME" AS "projectName" + , "P"."VERSION" AS "projectVersion" + , "P"."DESCRIPTION" AS "projectDescription" + , "P"."PURL" AS "projectPurl" + , (SELECT ARRAY_AGG(DISTINCT "T"."NAME") + FROM "TAG" AS "T" + INNER JOIN "PROJECTS_TAGS" AS "PT" + ON "PT"."TAG_ID" = "T"."ID" + WHERE "PT"."PROJECT_ID" = "P"."ID" + ) AS "projectTags" + FROM "PROJECT" AS "P" + WHERE "P"."UUID" = (:projectUuid)::TEXT + """) + Optional getProject(UUID projectUuid); + + default Optional getForProjectVulnAnalysisComplete(VulnerabilityScan vulnScan) { + final Optional optionalProject = getProject(vulnScan.getTargetIdentifier()); + if (optionalProject.isEmpty()) { + return Optional.empty(); + } + + final Map> vulnsByComponent = getHandle().createQuery(""" + WITH "CTE_PROJECT" AS (SELECT "ID" FROM "PROJECT" WHERE "UUID" = :projectUuid) + SELECT "C"."UUID" AS "componentUuid" + , "C"."GROUP" AS "componentGroup" + , "C"."NAME" AS "componentName" + , "C"."VERSION" AS "componentVersion" + , "C"."PURL" AS "componentPurl" + , "C"."MD5" AS "componentMd5" + , "C"."SHA1" AS "componentSha1" + , "C"."SHA_256" AS "componentSha256" + , "C"."SHA_512" AS "componentSha512" + , "V"."UUID" AS "vulnUuid" + , "V"."VULNID" AS "vulnId" + , "V"."SOURCE" AS "vulnSource" + , "V"."TITLE" AS "vulnTitle" + , "V"."SUBTITLE" AS "vulnSubTitle" + , "V"."DESCRIPTION" AS "vulnDescription" + , "V"."RECOMMENDATION" AS "vulnRecommendation" + , CASE WHEN "A"."SEVERITY" IS NOT NULL + THEN "A"."CVSSV2SCORE" + ELSE "V"."CVSSV2BASESCORE" + END AS "vulnCvssV2BaseScore" + , CASE WHEN "A"."SEVERITY" IS NOT NULL + THEN "A"."CVSSV3SCORE" + ELSE "V"."CVSSV3BASESCORE" + END AS "vulnCvssV3BaseScore" + -- TODO: Analysis only has a single score, but OWASP RR defines multiple. + -- How to handle this? 
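+                        -- Until that is decided, the analysis' single OWASP score is reused for
+                        -- the business impact, likelihood, and technical impact factors below.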
+ , CASE WHEN "A"."SEVERITY" IS NOT NULL + THEN "A"."OWASPSCORE" + ELSE "V"."OWASPRRBUSINESSIMPACTSCORE" + END AS "vulnOwaspRrBusinessImpactScore" + , CASE WHEN "A"."SEVERITY" IS NOT NULL + THEN "A"."OWASPSCORE" + ELSE "V"."OWASPRRLIKELIHOODSCORE" + END AS "vulnOwaspRrLikelihoodScore" + , CASE WHEN "A"."SEVERITY" IS NOT NULL + THEN "A"."OWASPSCORE" + ELSE "V"."OWASPRRTECHNICALIMPACTSCORE" + END AS "vulnOwaspRrTechnicalImpactScore" + , "CALC_SEVERITY"("V"."SEVERITY", "A"."SEVERITY", "V"."CVSSV3BASESCORE", "V"."CVSSV2BASESCORE") AS "vulnSeverity" + , STRING_TO_ARRAY("V"."CWES", ',') AS "vulnCwes" + , "vulnAliasesJson" + FROM "COMPONENT" AS "C" + INNER JOIN "COMPONENTS_VULNERABILITIES" AS "CV" ON "CV"."COMPONENT_ID" = "C"."ID" + INNER JOIN "VULNERABILITY" AS "V" ON "V"."ID" = "CV"."VULNERABILITY_ID" + LEFT JOIN "ANALYSIS" AS "A" ON "A"."COMPONENT_ID" = "C"."ID" AND "A"."VULNERABILITY_ID" = "V"."ID" + LEFT JOIN LATERAL ( + SELECT CAST(JSONB_AGG(DISTINCT JSONB_STRIP_NULLS(JSONB_BUILD_OBJECT( + 'cveId', "VA"."CVE_ID", + 'ghsaId', "VA"."GHSA_ID", + 'gsdId', "VA"."GSD_ID", + 'internalId', "VA"."INTERNAL_ID", + 'osvId', "VA"."OSV_ID", + 'sonatypeId', "VA"."SONATYPE_ID", + 'snykId', "VA"."SNYK_ID", + 'vulnDbId', "VA"."VULNDB_ID" + ))) AS TEXT) AS "vulnAliasesJson" + FROM "VULNERABILITYALIAS" AS "VA" + WHERE ("V"."SOURCE" = 'NVD' AND "VA"."CVE_ID" = "V"."VULNID") + OR ("V"."SOURCE" = 'GITHUB' AND "VA"."GHSA_ID" = "V"."VULNID") + OR ("V"."SOURCE" = 'GSD' AND "VA"."GSD_ID" = "V"."VULNID") + OR ("V"."SOURCE" = 'INTERNAL' AND "VA"."INTERNAL_ID" = "V"."VULNID") + OR ("V"."SOURCE" = 'OSV' AND "VA"."OSV_ID" = "V"."VULNID") + OR ("V"."SOURCE" = 'SONATYPE' AND "VA"."SONATYPE_ID" = "V"."VULNID") + OR ("V"."SOURCE" = 'SNYK' AND "VA"."SNYK_ID" = "V"."VULNID") + OR ("V"."SOURCE" = 'VULNDB' AND "VA"."VULNDB_ID" = "V"."VULNID") + ) AS "vulnAliases" ON TRUE + WHERE "C"."PROJECT_ID" = (SELECT "ID" FROM "CTE_PROJECT") + AND ("A"."SUPPRESSED" IS NULL OR NOT "A"."SUPPRESSED") + """) + .bind("projectUuid", optionalProject.get().getUuid()) + .registerRowMapper(Component.class, new NotificationComponentRowMapper()) + .registerRowMapper(Vulnerability.class, new NotificationVulnerabilityRowMapper()) + .map(JoinRowMapper.forTypes(Component.class, Vulnerability.class)) + .stream() + .collect(Collectors.groupingBy( + joinRow -> joinRow.get(Component.class), + Collectors.mapping(joinRow -> joinRow.get(Vulnerability.class), Collectors.toList()) + )); + + final var findings = new ArrayList(vulnsByComponent.size()); + for (final Map.Entry> entry : vulnsByComponent.entrySet()) { + findings.add(ComponentVulnAnalysisCompleteSubject.newBuilder() + .setComponent(entry.getKey()) + .addAllVulnerabilities(entry.getValue()) + .build()); + } + + final var subject = ProjectVulnAnalysisCompleteSubject.newBuilder() + .setToken(vulnScan.getToken()) + .setStatus(switch (vulnScan.getStatus()) { + case COMPLETED -> PROJECT_VULN_ANALYSIS_STATUS_COMPLETED; + case FAILED -> PROJECT_VULN_ANALYSIS_STATUS_FAILED; + default -> throw new IllegalArgumentException(""" + Unexpected vulnerability scan status: %s""".formatted(vulnScan.getStatus())); + }) + .setProject(optionalProject.get()) + .addAllFindings(findings) + .build(); + + return Optional.of(subject); + } + } diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 0a815daf9..834cb62d0 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -527,7 +527,7 @@ 
alpine.kafka.processor.vuln.analysis.result.consumer.group.id=dtrack-apiserver-p alpine.kafka.processor.vuln.analysis.result.consumer.auto.offset.reset=earliest # Required -alpine.kafka.processor.processed.vuln.scan.result.max.batch.size=500 +alpine.kafka.processor.processed.vuln.scan.result.max.batch.size=1000 alpine.kafka.processor.processed.vuln.scan.result.max.concurrency=1 alpine.kafka.processor.processed.vuln.scan.result.processing.order=unordered alpine.kafka.processor.processed.vuln.scan.result.retry.initial.delay.ms=3000 @@ -536,8 +536,8 @@ alpine.kafka.processor.processed.vuln.scan.result.retry.randomization.factor=0.3 alpine.kafka.processor.processed.vuln.scan.result.retry.max.delay.ms=180000 alpine.kafka.processor.processed.vuln.scan.result.consumer.group.id=dtrack-apiserver-processor alpine.kafka.processor.processed.vuln.scan.result.consumer.auto.offset.reset=earliest -alpine.kafka.processor.processed.vuln.scan.result.consumer.max.poll.records=1000 -alpine.kafka.processor.processed.vuln.scan.result.consumer.fetch.min.bytes=16384 +alpine.kafka.processor.processed.vuln.scan.result.consumer.max.poll.records=10000 +alpine.kafka.processor.processed.vuln.scan.result.consumer.fetch.min.bytes=524288 # Scheduling tasks after 3 minutes (3*60*1000) of starting application task.scheduler.initial.delay=180000 From 7ace14a56e08af100539d005a01c548f35d87986 Mon Sep 17 00:00:00 2001 From: nscuro Date: Wed, 27 Mar 2024 14:15:02 +0100 Subject: [PATCH 03/24] Enable Kafka processor health check Signed-off-by: nscuro --- .../java/org/dependencytrack/health/HealthCheckInitializer.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/org/dependencytrack/health/HealthCheckInitializer.java b/src/main/java/org/dependencytrack/health/HealthCheckInitializer.java index 3ed349de6..e24561aef 100644 --- a/src/main/java/org/dependencytrack/health/HealthCheckInitializer.java +++ b/src/main/java/org/dependencytrack/health/HealthCheckInitializer.java @@ -24,6 +24,7 @@ import alpine.server.health.checks.DatabaseHealthCheck; import io.github.mweirauch.micrometer.jvm.extras.ProcessMemoryMetrics; import io.github.mweirauch.micrometer.jvm.extras.ProcessThreadMetrics; +import org.dependencytrack.event.kafka.processor.ProcessorsHealthCheck; import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; @@ -37,6 +38,7 @@ public void contextInitialized(final ServletContextEvent event) { LOGGER.info("Registering health checks"); HealthCheckRegistry.getInstance().register("database", new DatabaseHealthCheck()); HealthCheckRegistry.getInstance().register("kafka-streams", new KafkaStreamsHealthCheck()); + HealthCheckRegistry.getInstance().register("kafka-processors", new ProcessorsHealthCheck()); // TODO: Move this to its own initializer if it turns out to be useful LOGGER.info("Registering extra process metrics"); From b3a9d40fe2b3623d97940ac170a82ce127bb650e Mon Sep 17 00:00:00 2001 From: nscuro Date: Wed, 27 Mar 2024 14:15:33 +0100 Subject: [PATCH 04/24] Ensure JAR is accessible when `COPY`ing it into container Signed-off-by: nscuro --- src/main/docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile index ebb4d2a83..a55cbe209 100644 --- a/src/main/docker/Dockerfile +++ b/src/main/docker/Dockerfile @@ -69,7 +69,7 @@ RUN mkdir -p ${APP_DIR} ${DATA_DIR} \ COPY --from=jre-build /opt/java/openjdk $JAVA_HOME # Copy the compiled WAR to the application directory created above -COPY ./target/${WAR_FILENAME} 
./src/main/docker/logback-json.xml ${APP_DIR}
+COPY --chmod=755 ./target/${WAR_FILENAME} ./src/main/docker/logback-json.xml ${APP_DIR}
 
 # Specify the user to run as (in numeric format for compatibility with Kubernetes/OpenShift's SCC)
 USER ${UID}
 

From a52d5571aa2866288c0ad38ec3b1e3d77a131916 Mon Sep 17 00:00:00 2001
From: nscuro
Date: Thu, 28 Mar 2024 12:38:44 +0100
Subject: [PATCH 05/24] Fix division by 0 error for `ScanResult`s without
 `ScannerResult`s

Signed-off-by: nscuro
---
 ...essedVulnerabilityScanResultProcessor.java | 39 +++++----
 .../jdbi/VulnerabilityScanDao.java            |  2 +-
 .../persistence/jdbi/WorkflowDao.java         | 19 ++--
 ...dVulnerabilityScanResultProcessorTest.java | 87 +++++++++++++++++++
 4 files changed, 116 insertions(+), 31 deletions(-)
 create mode 100644 src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java

diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java
index c8172dde8..106fd48f5 100644
--- a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java
@@ -19,6 +19,7 @@
 package org.dependencytrack.event.kafka.processor;
 
 import alpine.common.logging.Logger;
+import alpine.event.framework.ChainableEvent;
 import alpine.event.framework.Event;
 import com.google.protobuf.Any;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
@@ -88,24 +89,27 @@ public void process(final List<ConsumerRecord<String, ScanResult>> records) thro
         LOGGER.debug("Dispatched %d notifications".formatted(notifications.size()));
 
         for (final VulnerabilityScan completedVulnScan : completedVulnScans) {
+            final ChainableEvent metricsUpdateEvent;
+            final ChainableEvent policyEvalEvent;
+
             switch (completedVulnScan.getTargetType()) {
                 case COMPONENT -> {
                     LOGGER.debug("Triggering policy evaluation for component %s".formatted(completedVulnScan.getTargetIdentifier()));
-                    final var metricsUpdateEvent = new ComponentMetricsUpdateEvent(completedVulnScan.getTargetIdentifier());
-                    final var policyEvalEvent = new ComponentMetricsUpdateEvent(completedVulnScan.getTargetIdentifier());
-                    policyEvalEvent.onFailure(metricsUpdateEvent);
-                    policyEvalEvent.onSuccess(metricsUpdateEvent);
-                    Event.dispatch(policyEvalEvent);
+                    metricsUpdateEvent = new ComponentMetricsUpdateEvent(completedVulnScan.getTargetIdentifier());
+                    policyEvalEvent = new ComponentPolicyEvaluationEvent(completedVulnScan.getTargetIdentifier());
                 }
                 case PROJECT -> {
                     LOGGER.debug("Triggering policy evaluation for project %s".formatted(completedVulnScan.getTargetIdentifier()));
-                    final var metricsUpdateEvent = new ProjectMetricsUpdateEvent(completedVulnScan.getTargetIdentifier());
-                    final var policyEvalEvent = new ProjectPolicyEvaluationEvent(completedVulnScan.getTargetIdentifier());
-                    policyEvalEvent.onFailure(metricsUpdateEvent);
-                    policyEvalEvent.onSuccess(metricsUpdateEvent);
-                    Event.dispatch(policyEvalEvent);
+                    metricsUpdateEvent = new ProjectMetricsUpdateEvent(completedVulnScan.getTargetIdentifier());
+                    policyEvalEvent = new ProjectPolicyEvaluationEvent(completedVulnScan.getTargetIdentifier());
                 }
+                default -> throw new IllegalStateException("""
+                        Unexpected vulnerability scan target type %s""".formatted(completedVulnScan.getTargetType()));
             }
+
+            policyEvalEvent.onFailure(metricsUpdateEvent);
+            policyEvalEvent.onSuccess(metricsUpdateEvent);
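+            // Chaining ensures the metrics update runs after policy evaluation,
+            // regardless of whether the evaluation succeeds or fails.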
+            Event.dispatch(policyEvalEvent);
         }
     }
 
@@ -113,7 +117,7 @@ private static List<VulnerabilityScan> processScanResults(final Handle jdbiHandl
         final List<VulnerabilityScan> completedVulnScans = recordScanResults(jdbiHandle, records);
         LOGGER.debug("Detected completion of %d vulnerability scans".formatted(completedVulnScans.size()));
 
-        final List<WorkflowState> updatedWorkflowSteps = updateWorkflowSteps(jdbiHandle, completedVulnScans);
+        final List<WorkflowState> updatedWorkflowSteps = updateWorkflowStates(jdbiHandle, completedVulnScans);
         LOGGER.debug("Updated %s workflow steps".formatted(updatedWorkflowSteps.size()));
 
         return completedVulnScans;
     }
 
@@ -147,11 +151,12 @@ private static List<VulnerabilityScan> recordScanResults(final Handle jdbiHandle
                 // but that didn't return any results at all.
                 // The good news is that the query typically modifies only a handful
                 // of scans, so we're wasting not too many resources here.
-                .filter(vulnScan -> vulnScan.getStatus() == VulnerabilityScan.Status.COMPLETED || vulnScan.getStatus() == VulnerabilityScan.Status.FAILED)
+                .filter(vulnScan -> vulnScan.getStatus() == VulnerabilityScan.Status.COMPLETED
+                        || vulnScan.getStatus() == VulnerabilityScan.Status.FAILED)
                 .toList();
     }
 
-    private static List<WorkflowState> updateWorkflowSteps(final Handle jdbiHandle, final List<VulnerabilityScan> completedVulnScans) {
+    private static List<WorkflowState> updateWorkflowStates(final Handle jdbiHandle, final List<VulnerabilityScan> completedVulnScans) {
         final int numScans = completedVulnScans.size();
         final var tokens = new ArrayList<String>(numScans);
         final var statuses = new ArrayList<WorkflowStatus>(numScans);
@@ -168,10 +173,10 @@ private static List<WorkflowState> updateWorkflowSteps(final Handle jdbiHandle,
         }
 
         final var workflowDao = jdbiHandle.attach(WorkflowDao.class);
-        final List<WorkflowState> updatedWorkflowSteps =
-                workflowDao.updateAllSteps(WorkflowStep.VULN_ANALYSIS, tokens, statuses, failureReasons);
+        final List<WorkflowState> updatedWorkflowStates =
+                workflowDao.updateAllStates(WorkflowStep.VULN_ANALYSIS, tokens, statuses, failureReasons);
 
-        final List<String> failedStepTokens = updatedWorkflowSteps.stream()
+        final List<String> failedStepTokens = updatedWorkflowStates.stream()
                 .filter(step -> step.getStatus() == WorkflowStatus.FAILED)
                 .map(WorkflowState::getToken)
                 .map(UUID::toString)
@@ -181,7 +186,7 @@ private static List<WorkflowState> updateWorkflowSteps(final Handle jdbiHandle,
             workflowDao.cancelAllChildren(WorkflowStep.VULN_ANALYSIS, failedStepTokens);
         }
 
-        return updatedWorkflowSteps;
+        return updatedWorkflowStates;
     }
 
     private static List<KafkaEvent<String, Notification>> createVulnAnalysisCompleteNotifications(final Handle jdbiHandle, final List<VulnerabilityScan> completedVulnScans) {
diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/VulnerabilityScanDao.java b/src/main/java/org/dependencytrack/persistence/jdbi/VulnerabilityScanDao.java
index a2b5d2004..bd79f31be 100644
--- a/src/main/java/org/dependencytrack/persistence/jdbi/VulnerabilityScanDao.java
+++ b/src/main/java/org/dependencytrack/persistence/jdbi/VulnerabilityScanDao.java
@@ -35,7 +35,7 @@ public interface VulnerabilityScanDao extends SqlObject {
              , "SCAN_TOTAL" = "SCAN_TOTAL" + :scannerResultsTotal
              , "SCAN_FAILED" = "SCAN_FAILED" + :scannerResultsFailed
              , "STATUS" = CASE WHEN "EXPECTED_RESULTS" = ("RECEIVED_RESULTS" + :resultsTotal)
-                               THEN CASE WHEN (("SCAN_FAILED" + :scannerResultsFailed) / ("SCAN_TOTAL" + :scannerResultsTotal)) > "FAILURE_THRESHOLD"
+                               THEN CASE WHEN (("SCAN_FAILED" + :scannerResultsFailed) / NULLIF(("SCAN_TOTAL" + :scannerResultsTotal), 0)) > "FAILURE_THRESHOLD"
                                          THEN 'FAILED'
                                          ELSE 'COMPLETED'
                                     END
diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java b/src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java
index 05f01ca00..20575a99c 100644
--- a/src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java
+++ b/src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java
@@ -37,21 +37,14 @@ public interface WorkflowDao {
             , "UPDATED_AT" = NOW()
             WHERE "TOKEN" = :token
               AND "STEP" = :step
-            RETURNING "ID"
-                    , "PARENT_STEP_ID"
-                    , "TOKEN"
-                    , "STEP"
-                    , "STATUS"
-                    , "FAILURE_REASON"
-                    , "STARTED_AT"
-                    , "UPDATED_AT"
+            RETURNING *
             """)
-    @GetGeneratedKeys({"ID", "PARENT_STEP_ID", "TOKEN", "STEP", "STATUS", "FAILURE_REASON", "STARTED_AT", "UPDATED_AT"})
+    @GetGeneratedKeys("*")
     @RegisterBeanMapper(WorkflowState.class)
-    List<WorkflowState> updateAllSteps(@Bind WorkflowStep step,
-                                       @Bind("token") List<String> tokens,
-                                       @Bind("status") List<WorkflowStatus> statuses,
-                                       @Bind("failureReason") List<String> failureReasons);
+    List<WorkflowState> updateAllStates(@Bind WorkflowStep step,
+                                        @Bind("token") List<String> tokens,
+                                        @Bind("status") List<WorkflowStatus> statuses,
+                                        @Bind("failureReason") List<String> failureReasons);
 
     @SqlBatch("""
             WITH RECURSIVE
diff --git a/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java b/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java
new file mode 100644
index 000000000..ed817e722
--- /dev/null
+++ b/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java
@@ -0,0 +1,87 @@
+/*
+ * This file is part of Dependency-Track.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright (c) OWASP Foundation. All Rights Reserved.
+ */ +package org.dependencytrack.event.kafka.processor; + +import org.dependencytrack.event.kafka.KafkaTopics; +import org.dependencytrack.model.Project; +import org.dependencytrack.model.VulnerabilityScan; +import org.dependencytrack.proto.notification.v1.Notification; +import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisCompleteSubject; +import org.dependencytrack.proto.vulnanalysis.v1.ScanResult; +import org.junit.Test; + +import java.util.Date; +import java.util.List; +import java.util.UUID; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.dependencytrack.proto.notification.v1.Group.GROUP_PROJECT_VULN_ANALYSIS_COMPLETE; +import static org.dependencytrack.proto.notification.v1.Level.LEVEL_INFORMATIONAL; +import static org.dependencytrack.proto.notification.v1.Scope.SCOPE_PORTFOLIO; +import static org.dependencytrack.util.KafkaTestUtil.deserializeKey; +import static org.dependencytrack.util.KafkaTestUtil.deserializeValue; + +public class ProcessedVulnerabilityScanResultProcessorTest extends AbstractProcessorTest { + + @Test + public void testProcessWithResultWithoutScannerResults() throws Exception { + final var project = new Project(); + project.setName("acme-app"); + qm.persist(project); + + final var vulnScan = new VulnerabilityScan(); + vulnScan.setToken(UUID.randomUUID().toString()); + vulnScan.setTargetType(VulnerabilityScan.TargetType.PROJECT); + vulnScan.setTargetIdentifier(project.getUuid()); + vulnScan.setStatus(VulnerabilityScan.Status.IN_PROGRESS); + vulnScan.setExpectedResults(1); + vulnScan.setStartedAt(new Date()); + vulnScan.setUpdatedAt(vulnScan.getStartedAt()); + qm.persist(vulnScan); + + // Create a ScanResult without any ScannerResults attached to it. + // This might happen when no scanner is capable of scanning a component, + // or when all scanners are disabled. 
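+        // The scan must still complete: EXPECTED_RESULTS counts ScanResults rather
+        // than ScannerResults, and the failure ratio must not divide by zero here.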
+ final var scanResult = ScanResult.newBuilder().build(); + + final var processor = new ProcessedVulnerabilityScanResultProcessor(); + processor.process(List.of(aConsumerRecord(vulnScan.getToken(), scanResult).build())); + + qm.getPersistenceManager().refresh(vulnScan); + assertThat(vulnScan.getStatus()).isEqualTo(VulnerabilityScan.Status.COMPLETED); + + assertThat(kafkaMockProducer.history()).satisfiesExactly(record -> { + assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.name()); + + final String recordKey = deserializeKey(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE, record); + assertThat(recordKey).isEqualTo(project.getUuid().toString()); + + final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE, record); + assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO); + assertThat(notification.getGroup()).isEqualTo(GROUP_PROJECT_VULN_ANALYSIS_COMPLETE); + assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL); + assertThat(notification.getSubject().is(ProjectVulnAnalysisCompleteSubject.class)).isTrue(); + + final var subject = notification.getSubject().unpack(ProjectVulnAnalysisCompleteSubject.class); + assertThat(subject.getProject().getUuid()).isEqualTo(project.getUuid().toString()); + assertThat(subject.getFindingsCount()).isZero(); + }); + } + +} \ No newline at end of file From 2b46d23cadaf35f2c0d70f31b8643a5fe98c07d9 Mon Sep 17 00:00:00 2001 From: nscuro Date: Thu, 28 Mar 2024 13:22:08 +0100 Subject: [PATCH 06/24] Correctly propagate failure reason from `VulnerabilityScan` to `WorkflowState` Signed-off-by: nscuro --- .../jdbi/VulnerabilityScanDao.java | 9 +- ...dVulnerabilityScanResultProcessorTest.java | 121 +++++++++++++++++- 2 files changed, 125 insertions(+), 5 deletions(-) diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/VulnerabilityScanDao.java b/src/main/java/org/dependencytrack/persistence/jdbi/VulnerabilityScanDao.java index bd79f31be..c10f4d36d 100644 --- a/src/main/java/org/dependencytrack/persistence/jdbi/VulnerabilityScanDao.java +++ b/src/main/java/org/dependencytrack/persistence/jdbi/VulnerabilityScanDao.java @@ -35,7 +35,7 @@ public interface VulnerabilityScanDao extends SqlObject { , "SCAN_TOTAL" = "SCAN_TOTAL" + :scannerResultsTotal , "SCAN_FAILED" = "SCAN_FAILED" + :scannerResultsFailed , "STATUS" = CASE WHEN "EXPECTED_RESULTS" = ("RECEIVED_RESULTS" + :resultsTotal) - THEN CASE WHEN (("SCAN_FAILED" + :scannerResultsFailed) / NULLIF(("SCAN_TOTAL" + :scannerResultsTotal), 0)) > "FAILURE_THRESHOLD" + THEN CASE WHEN (CAST("SCAN_FAILED" + :scannerResultsFailed AS DOUBLE PRECISION) / NULLIF("SCAN_TOTAL" + :scannerResultsTotal, 0)) > "FAILURE_THRESHOLD" THEN 'FAILED' ELSE 'COMPLETED' END @@ -48,11 +48,12 @@ THEN CASE WHEN (("SCAN_FAILED" + :scannerResultsFailed) / NULLIF(("SCAN_TOTAL" + , "TARGET_TYPE" , "TARGET_IDENTIFIER" , CASE WHEN "STATUS" = 'FAILED' - THEN '' - END + THEN 'Failure threshold of ' || ROUND("FAILURE_THRESHOLD"::NUMERIC, 2) * 100 || '% exceeded: ' + || "SCAN_FAILED" || '/' || "SCAN_TOTAL" || ' of scans failed' + END AS "FAILURE_REASON" """) @RegisterBeanMapper(VulnerabilityScan.class) - @GetGeneratedKeys({"TOKEN", "STATUS", "TARGET_TYPE", "TARGET_IDENTIFIER"}) + @GetGeneratedKeys({"TOKEN", "STATUS", "TARGET_TYPE", "TARGET_IDENTIFIER", "FAILURE_REASON"}) List updateAll(@Bind("token") List tokens, @Bind List resultsTotal, @Bind List scannerResultsTotal, diff --git 
a/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java b/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java index ed817e722..b0f05ba07 100644 --- a/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java +++ b/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java @@ -21,11 +21,15 @@ import org.dependencytrack.event.kafka.KafkaTopics; import org.dependencytrack.model.Project; import org.dependencytrack.model.VulnerabilityScan; +import org.dependencytrack.model.WorkflowStatus; +import org.dependencytrack.model.WorkflowStep; import org.dependencytrack.proto.notification.v1.Notification; import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisCompleteSubject; import org.dependencytrack.proto.vulnanalysis.v1.ScanResult; +import org.dependencytrack.proto.vulnanalysis.v1.ScannerResult; import org.junit.Test; +import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.UUID; @@ -33,20 +37,114 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.dependencytrack.proto.notification.v1.Group.GROUP_PROJECT_VULN_ANALYSIS_COMPLETE; import static org.dependencytrack.proto.notification.v1.Level.LEVEL_INFORMATIONAL; +import static org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisStatus.PROJECT_VULN_ANALYSIS_STATUS_COMPLETED; +import static org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisStatus.PROJECT_VULN_ANALYSIS_STATUS_FAILED; import static org.dependencytrack.proto.notification.v1.Scope.SCOPE_PORTFOLIO; +import static org.dependencytrack.proto.vulnanalysis.v1.ScanStatus.SCAN_STATUS_FAILED; +import static org.dependencytrack.proto.vulnanalysis.v1.ScanStatus.SCAN_STATUS_SUCCESSFUL; +import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_INTERNAL; +import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_OSSINDEX; +import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_SNYK; import static org.dependencytrack.util.KafkaTestUtil.deserializeKey; import static org.dependencytrack.util.KafkaTestUtil.deserializeValue; public class ProcessedVulnerabilityScanResultProcessorTest extends AbstractProcessorTest { + @Test + public void testProcessWithFailureThresholdExceeded() throws Exception { + final var project = new Project(); + project.setName("acme-app"); + qm.persist(project); + + final UUID workflowToken = UUID.randomUUID(); + qm.createWorkflowSteps(workflowToken); + + // Create a VulnerabilityScan, and configure it such that no more than 30% + // of scanners are allowed to fail in order for the scan to be considered successful. + final var vulnScan = new VulnerabilityScan(); + vulnScan.setToken(workflowToken.toString()); + vulnScan.setTargetType(VulnerabilityScan.TargetType.PROJECT); + vulnScan.setTargetIdentifier(project.getUuid()); + vulnScan.setStatus(VulnerabilityScan.Status.IN_PROGRESS); + vulnScan.setExpectedResults(3); + vulnScan.setFailureThreshold(0.3); + vulnScan.setStartedAt(new Date()); + vulnScan.setUpdatedAt(vulnScan.getStartedAt()); + qm.persist(vulnScan); + + // Create 3 ScanResults, each with 2 successful and 1 failed ScannerResult. 
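+        // 3 of the 9 scanner results (~33 %) fail, exceeding the 30 % threshold,
+        // so the scan as a whole is expected to be marked FAILED.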
+ final var scanResults = new ArrayList(); + for (int i = 0; i < 3; i++) { + scanResults.add(ScanResult.newBuilder() + .addScannerResults(ScannerResult.newBuilder() + .setScanner(SCANNER_INTERNAL) + .setStatus(SCAN_STATUS_SUCCESSFUL)) + .addScannerResults(ScannerResult.newBuilder() + .setScanner(SCANNER_OSSINDEX) + .setStatus(SCAN_STATUS_SUCCESSFUL)) + .addScannerResults(ScannerResult.newBuilder() + .setScanner(SCANNER_SNYK) + .setStatus(SCAN_STATUS_FAILED)) + .build()); + } + + final var processor = new ProcessedVulnerabilityScanResultProcessor(); + processor.process(scanResults.stream().map(result -> aConsumerRecord(vulnScan.getToken(), result).build()).toList()); + + qm.getPersistenceManager().refresh(vulnScan); + assertThat(vulnScan.getStatus()).isEqualTo(VulnerabilityScan.Status.FAILED); + + assertThat(qm.getAllWorkflowStatesForAToken(workflowToken)).satisfiesExactlyInAnyOrder( + workflowState -> assertThat(workflowState.getStep()).isEqualTo(WorkflowStep.BOM_CONSUMPTION), + workflowState -> assertThat(workflowState.getStep()).isEqualTo(WorkflowStep.BOM_PROCESSING), + workflowState -> { + assertThat(workflowState.getStep()).isEqualTo(WorkflowStep.VULN_ANALYSIS); + assertThat(workflowState.getStatus()).isEqualTo(WorkflowStatus.FAILED); + assertThat(workflowState.getFailureReason()).isEqualTo("Failure threshold of 30.00% exceeded: 3/9 of scans failed"); + }, + workflowState -> { + assertThat(workflowState.getStep()).isEqualTo(WorkflowStep.POLICY_EVALUATION); + assertThat(workflowState.getStatus()).isEqualTo(WorkflowStatus.CANCELLED); + assertThat(workflowState.getFailureReason()).isNull(); + }, + workflowState -> { + assertThat(workflowState.getStep()).isEqualTo(WorkflowStep.METRICS_UPDATE); + assertThat(workflowState.getStatus()).isEqualTo(WorkflowStatus.CANCELLED); + assertThat(workflowState.getFailureReason()).isNull(); + } + ); + + assertThat(kafkaMockProducer.history()).satisfiesExactly(record -> { + assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.name()); + + final String recordKey = deserializeKey(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE, record); + assertThat(recordKey).isEqualTo(project.getUuid().toString()); + + final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE, record); + assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO); + assertThat(notification.getGroup()).isEqualTo(GROUP_PROJECT_VULN_ANALYSIS_COMPLETE); + assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL); + assertThat(notification.getSubject().is(ProjectVulnAnalysisCompleteSubject.class)).isTrue(); + + final var subject = notification.getSubject().unpack(ProjectVulnAnalysisCompleteSubject.class); + assertThat(subject.getToken()).isEqualTo(workflowToken.toString()); + assertThat(subject.getStatus()).isEqualTo(PROJECT_VULN_ANALYSIS_STATUS_FAILED); + assertThat(subject.getProject().getUuid()).isEqualTo(project.getUuid().toString()); + assertThat(subject.getFindingsCount()).isZero(); + }); + } + @Test public void testProcessWithResultWithoutScannerResults() throws Exception { final var project = new Project(); project.setName("acme-app"); qm.persist(project); + final UUID workflowToken = UUID.randomUUID(); + qm.createWorkflowSteps(workflowToken); + final var vulnScan = new VulnerabilityScan(); - vulnScan.setToken(UUID.randomUUID().toString()); + vulnScan.setToken(workflowToken.toString()); vulnScan.setTargetType(VulnerabilityScan.TargetType.PROJECT); 
vulnScan.setTargetIdentifier(project.getUuid()); vulnScan.setStatus(VulnerabilityScan.Status.IN_PROGRESS); @@ -66,6 +164,26 @@ public void testProcessWithResultWithoutScannerResults() throws Exception { qm.getPersistenceManager().refresh(vulnScan); assertThat(vulnScan.getStatus()).isEqualTo(VulnerabilityScan.Status.COMPLETED); + assertThat(qm.getAllWorkflowStatesForAToken(workflowToken)).satisfiesExactlyInAnyOrder( + workflowState -> assertThat(workflowState.getStep()).isEqualTo(WorkflowStep.BOM_CONSUMPTION), + workflowState -> assertThat(workflowState.getStep()).isEqualTo(WorkflowStep.BOM_PROCESSING), + workflowState -> { + assertThat(workflowState.getStep()).isEqualTo(WorkflowStep.VULN_ANALYSIS); + assertThat(workflowState.getStatus()).isEqualTo(WorkflowStatus.COMPLETED); + assertThat(workflowState.getFailureReason()).isNull(); + }, + workflowState -> { + assertThat(workflowState.getStep()).isEqualTo(WorkflowStep.POLICY_EVALUATION); + assertThat(workflowState.getStatus()).isEqualTo(WorkflowStatus.PENDING); + assertThat(workflowState.getFailureReason()).isNull(); + }, + workflowState -> { + assertThat(workflowState.getStep()).isEqualTo(WorkflowStep.METRICS_UPDATE); + assertThat(workflowState.getStatus()).isEqualTo(WorkflowStatus.PENDING); + assertThat(workflowState.getFailureReason()).isNull(); + } + ); + assertThat(kafkaMockProducer.history()).satisfiesExactly(record -> { assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.name()); @@ -79,6 +197,7 @@ public void testProcessWithResultWithoutScannerResults() throws Exception { assertThat(notification.getSubject().is(ProjectVulnAnalysisCompleteSubject.class)).isTrue(); final var subject = notification.getSubject().unpack(ProjectVulnAnalysisCompleteSubject.class); + assertThat(subject.getStatus()).isEqualTo(PROJECT_VULN_ANALYSIS_STATUS_COMPLETED); assertThat(subject.getProject().getUuid()).isEqualTo(project.getUuid().toString()); assertThat(subject.getFindingsCount()).isZero(); }); From 507d97fc365e6e877a54289a2f44136c66e134f9 Mon Sep 17 00:00:00 2001 From: nscuro Date: Thu, 28 Mar 2024 13:42:09 +0100 Subject: [PATCH 07/24] Assert dispatch of in-memory events upon vulnerability scan completion Signed-off-by: nscuro --- ...essedVulnerabilityScanResultProcessor.java | 7 +++ ...dVulnerabilityScanResultProcessorTest.java | 56 +++++++++++++++++++ 2 files changed, 63 insertions(+) diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java index 106fd48f5..120ef7dd4 100644 --- a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java +++ b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java @@ -89,6 +89,10 @@ public void process(final List> records) thro LOGGER.debug("Dispatched %d notifications".formatted(notifications.size())); for (final VulnerabilityScan completedVulnScan : completedVulnScans) { + if (completedVulnScan.getStatus() != VulnerabilityScan.Status.COMPLETED) { + continue; + } + final ChainableEvent metricsUpdateEvent; final ChainableEvent policyEvalEvent; @@ -107,6 +111,9 @@ public void process(final List> records) thro Unexpected vulnerability scan status %s""".formatted(completedVulnScan.getStatus())); } + final UUID workflowToken = UUID.fromString(completedVulnScan.getToken()); + metricsUpdateEvent.setChainIdentifier(workflowToken); + 
             policyEvalEvent.setChainIdentifier(workflowToken);
             policyEvalEvent.onFailure(metricsUpdateEvent);
             policyEvalEvent.onSuccess(metricsUpdateEvent);
             Event.dispatch(policyEvalEvent);
diff --git a/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java b/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java
index b0f05ba07..390423827 100644
--- a/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java
+++ b/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java
@@ -18,6 +18,13 @@
  */
 package org.dependencytrack.event.kafka.processor;
 
+import alpine.event.framework.Event;
+import alpine.event.framework.EventService;
+import alpine.event.framework.Subscriber;
+import org.dependencytrack.event.ComponentMetricsUpdateEvent;
+import org.dependencytrack.event.ComponentPolicyEvaluationEvent;
+import org.dependencytrack.event.ProjectMetricsUpdateEvent;
+import org.dependencytrack.event.ProjectPolicyEvaluationEvent;
 import org.dependencytrack.event.kafka.KafkaTopics;
 import org.dependencytrack.model.Project;
 import org.dependencytrack.model.VulnerabilityScan;
@@ -27,12 +34,15 @@
 import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisCompleteSubject;
 import org.dependencytrack.proto.vulnanalysis.v1.ScanResult;
 import org.dependencytrack.proto.vulnanalysis.v1.ScannerResult;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
 import java.util.UUID;
+import java.util.concurrent.ConcurrentLinkedQueue;
 
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.dependencytrack.proto.notification.v1.Group.GROUP_PROJECT_VULN_ANALYSIS_COMPLETE;
@@ -50,6 +60,24 @@ public class ProcessedVulnerabilityScanResultProcessorTest extends AbstractProcessorTest {
 
+    @Before
+    public void before() throws Exception {
+        super.before();
+
+        EventService.getInstance().subscribe(ComponentMetricsUpdateEvent.class, EventSubscriber.class);
+        EventService.getInstance().subscribe(ProjectMetricsUpdateEvent.class, EventSubscriber.class);
+        EventService.getInstance().subscribe(ComponentPolicyEvaluationEvent.class, EventSubscriber.class);
+        EventService.getInstance().subscribe(ProjectPolicyEvaluationEvent.class, EventSubscriber.class);
+    }
+
+    @After
+    public void after() {
+        EventService.getInstance().unsubscribe(EventSubscriber.class);
+        EVENTS.clear();
+
+        super.after();
+    }
+
     @Test
     public void testProcessWithFailureThresholdExceeded() throws Exception {
         final var project = new Project();
@@ -132,6 +160,8 @@ public void testProcessWithFailureThresholdExceeded() throws Exception {
             assertThat(subject.getProject().getUuid()).isEqualTo(project.getUuid().toString());
             assertThat(subject.getFindingsCount()).isZero();
         });
+
+        assertThat(EVENTS).isEmpty();
     }
 
     @Test
@@ -201,6 +231,32 @@ public void testProcessWithResultWithoutScannerResults() throws Exception {
             assertThat(subject.getProject().getUuid()).isEqualTo(project.getUuid().toString());
             assertThat(subject.getFindingsCount()).isZero();
         });
+
+        assertThat(EVENTS).satisfiesExactly(
+                event -> {
+                    assertThat(event).isInstanceOf(ProjectPolicyEvaluationEvent.class);
+                    final var policyEvalEvent = (ProjectPolicyEvaluationEvent) event;
+                    assertThat(policyEvalEvent.getUuid()).isEqualTo(project.getUuid());
+                    assertThat(policyEvalEvent.getChainIdentifier()).isEqualTo(workflowToken);
+                },
+                event -> {
+                    assertThat(event).isInstanceOf(ProjectMetricsUpdateEvent.class);
+                    final var metricsUpdateEvent = (ProjectMetricsUpdateEvent) event;
+                    assertThat(metricsUpdateEvent.getUuid()).isEqualTo(project.getUuid());
+                    assertThat(metricsUpdateEvent.getChainIdentifier()).isEqualTo(workflowToken);
+                }
+        );
+    }
+
+    private static final ConcurrentLinkedQueue<Event> EVENTS = new ConcurrentLinkedQueue<>();
+
+    public static class EventSubscriber implements Subscriber {
+
+        @Override
+        public void inform(final Event event) {
+            EVENTS.add(event);
+        }
+    }
 }
\ No newline at end of file

From a4d21c91adce43771f0f5015bb9c0c1dd579c277 Mon Sep 17 00:00:00 2001
From: nscuro
Date: Thu, 28 Mar 2024 13:46:29 +0100
Subject: [PATCH 08/24] Remove unused transient `WorkflowState#parentId` field

Signed-off-by: nscuro
---
 .../java/org/dependencytrack/model/WorkflowState.java | 10 ----------
 1 file changed, 10 deletions(-)

diff --git a/src/main/java/org/dependencytrack/model/WorkflowState.java b/src/main/java/org/dependencytrack/model/WorkflowState.java
index 99a362934..2b323f3e6 100644
--- a/src/main/java/org/dependencytrack/model/WorkflowState.java
+++ b/src/main/java/org/dependencytrack/model/WorkflowState.java
@@ -48,8 +48,6 @@ public class WorkflowState implements Serializable {
     @Column(name = "PARENT_STEP_ID" , allowsNull = "true")
     private WorkflowState parent;
 
-    private transient long parentId;
-
     @Persistent
     @Column(name = "TOKEN", jdbcType = "VARCHAR", length = 36, allowsNull = "false")
     @NotNull
@@ -95,14 +93,6 @@ public void setParent(WorkflowState parent) {
         this.parent = parent;
     }
 
-    public long getParentId() {
-        return parentId;
-    }
-
-    public void setParentId(long parentId) {
-        this.parentId = parentId;
-    }
-
     public UUID getToken() {
         return token;
     }

From 98d3e4384339eec004c932b46b02e025a788bfc2 Mon Sep 17 00:00:00 2001
From: nscuro
Date: Thu, 28 Mar 2024 13:46:56 +0100
Subject: [PATCH 09/24] Use `chown` instead of `chmod` when copying WAR in
 `Dockerfile`

Signed-off-by: nscuro
---
 src/main/docker/Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile
index a55cbe209..863ef15db 100644
--- a/src/main/docker/Dockerfile
+++ b/src/main/docker/Dockerfile
@@ -69,7 +69,7 @@ RUN mkdir -p ${APP_DIR} ${DATA_DIR} \
 COPY --from=jre-build /opt/java/openjdk $JAVA_HOME
 
 # Copy the compiled WAR to the application directory created above
-COPY --chmod=755 ./target/${WAR_FILENAME} ./src/main/docker/logback-json.xml ${APP_DIR}
+COPY --chown=${UID}:${GID} ./target/${WAR_FILENAME} ./src/main/docker/logback-json.xml ${APP_DIR}
 
 # Specify the user to run as (in numeric format for compatibility with Kubernetes/OpenShift's SCC)
 USER ${UID}

From 6102a1bb67807df0ca8ee1a126b127ebd7f7d6b6 Mon Sep 17 00:00:00 2001
From: nscuro
Date: Thu, 28 Mar 2024 14:41:31 +0100
Subject: [PATCH 10/24] Handle delayed `BOM_PROCESSED` notifications

Signed-off-by: nscuro
---
 ...essedVulnerabilityScanResultProcessor.java | 79 ++++++++++++++++++-
 .../jdbi/NotificationSubjectDao.java          | 29 +++++++
 .../persistence/jdbi/WorkflowDao.java         | 17 ++++
 ...ubjectBomConsumedOrProcessedRowMapper.java | 50 ++++++++++++
 4 files changed, 174 insertions(+), 1 deletion(-)
 create mode 100644 src/main/java/org/dependencytrack/persistence/jdbi/mapping/NotificationSubjectBomConsumedOrProcessedRowMapper.java

diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java
index 120ef7dd4..2c98fae37 100644
--- a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java
@@ -18,10 +18,12 @@
  */
 package org.dependencytrack.event.kafka.processor;
 
+import alpine.Config;
 import alpine.common.logging.Logger;
 import alpine.event.framework.ChainableEvent;
 import alpine.event.framework.Event;
 import com.google.protobuf.Any;
+import com.google.protobuf.util.Timestamps;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.dependencytrack.event.ComponentMetricsUpdateEvent;
 import org.dependencytrack.event.ProjectMetricsUpdateEvent;
@@ -40,6 +42,7 @@
 import org.dependencytrack.persistence.jdbi.NotificationSubjectDao;
 import org.dependencytrack.persistence.jdbi.VulnerabilityScanDao;
 import org.dependencytrack.persistence.jdbi.WorkflowDao;
+import org.dependencytrack.proto.notification.v1.BomConsumedOrProcessedSubject;
 import org.dependencytrack.proto.notification.v1.Notification;
 import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisCompleteSubject;
 import org.dependencytrack.proto.vulnanalysis.v1.ScanResult;
@@ -47,14 +50,19 @@
 import org.jdbi.v3.core.Handle;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
+import java.util.Set;
 import java.util.UUID;
+import java.util.stream.Collectors;
 
 import static java.lang.Math.toIntExact;
+import static org.dependencytrack.common.ConfigKey.TMP_DELAY_BOM_PROCESSED_NOTIFICATION;
 import static org.dependencytrack.persistence.jdbi.JdbiFactory.jdbi;
+import static org.dependencytrack.proto.notification.v1.Group.GROUP_BOM_PROCESSED;
 import static org.dependencytrack.proto.notification.v1.Group.GROUP_PROJECT_VULN_ANALYSIS_COMPLETE;
 import static org.dependencytrack.proto.notification.v1.Level.LEVEL_INFORMATIONAL;
 import static org.dependencytrack.proto.notification.v1.Scope.SCOPE_PORTFOLIO;
@@ -71,6 +79,15 @@ public class ProcessedVulnerabilityScanResultProcessor implements BatchProcessor
 
     private static final Logger LOGGER = Logger.getLogger(ProcessedVulnerabilityScanResultProcessor.class);
     private final KafkaEventDispatcher eventDispatcher = new KafkaEventDispatcher();
+    private final boolean shouldDispatchBomProcessedNotification;
+
+    public ProcessedVulnerabilityScanResultProcessor() {
+        this(Config.getInstance().getPropertyAsBoolean(TMP_DELAY_BOM_PROCESSED_NOTIFICATION));
+    }
+
+    ProcessedVulnerabilityScanResultProcessor(final boolean shouldDispatchBomProcessedNotification) {
+        this.shouldDispatchBomProcessedNotification = shouldDispatchBomProcessedNotification;
+    }
 
     @Override
     public void process(final List<ConsumerRecord<String, ScanResult>> records) throws ProcessingException {
@@ -82,6 +99,10 @@ public void process(final List<ConsumerRecord<String, ScanResult>> records) thro
         jdbi(qm).useTransaction(jdbiHandle -> {
             completedVulnScans.addAll(processScanResults(jdbiHandle, records));
             notifications.addAll(createVulnAnalysisCompleteNotifications(jdbiHandle, completedVulnScans));
+
+            if (shouldDispatchBomProcessedNotification) {
+                notifications.addAll(createBomProcessedNotifications(jdbiHandle, completedVulnScans));
+            }
         });
     }
 
@@ -174,7 +195,8 @@ private static List<WorkflowState> updateWorkflowStates(final Handle jdbiHandle,
             statuses.add(switch (completedVulnScan.getStatus()) {
                 case COMPLETED -> WorkflowStatus.COMPLETED;
                 case FAILED -> WorkflowStatus.FAILED;
-                default -> throw new IllegalStateException("");
+                default -> throw new IllegalStateException("""
+                        Unexpected vulnerability scan status %s""".formatted(completedVulnScan.getStatus()));
             });
             failureReasons.add(completedVulnScan.getFailureReason());
         }
@@ -212,6 +234,7 @@ private static List<WorkflowState> updateWorkflowStates(final Handle jdbiHandle,
                     .setScope(SCOPE_PORTFOLIO)
                     .setGroup(GROUP_PROJECT_VULN_ANALYSIS_COMPLETE)
                     .setLevel(LEVEL_INFORMATIONAL)
+                    .setTimestamp(Timestamps.now())
                     .setTitle(NotificationConstants.Title.PROJECT_VULN_ANALYSIS_COMPLETE)
                     .setSubject(Any.pack(optionalSubject.get()))
                     .build();
@@ -222,6 +245,60 @@ private static List<WorkflowState> updateWorkflowStates(final Handle jdbiHandle,
         return notifications;
     }
 
+    private static List<KafkaEvent<String, Notification>> createBomProcessedNotifications(final Handle jdbiHandle, final List<VulnerabilityScan> completedVulnScans) {
+        final int numScans = completedVulnScans.size();
+        final var tokens = new ArrayList<String>(numScans);
+        final var statuses = new ArrayList<WorkflowStatus>(numScans);
+        final var failureReasons = new ArrayList<String>(numScans);
+
+        for (final VulnerabilityScan completedVulnScan : completedVulnScans) {
+            if (completedVulnScan.getTargetType() != VulnerabilityScan.TargetType.PROJECT) {
+                // BOM_PROCESSED notifications only make sense when the scan target is a project.
+                continue;
+            }
+
+            tokens.add(completedVulnScan.getToken());
+            statuses.add(WorkflowStatus.COMPLETED);
+            failureReasons.add(null);
+        }
+        if (tokens.isEmpty()) {
+            LOGGER.debug("None of the possible %d completed vulnerability scans target a project".formatted(numScans));
+            return Collections.emptyList();
+        }
+
+        final var workflowDao = jdbiHandle.attach(WorkflowDao.class);
+        final List<WorkflowState> updatedWorkflowStates =
+                workflowDao.updateAllStatesIfPending(WorkflowStep.BOM_PROCESSING, tokens, statuses, failureReasons);
+        if (updatedWorkflowStates.isEmpty()) {
+            LOGGER.debug("None of the possible %d workflow states for %s were transitioned to %s status"
+                    .formatted(tokens.size(), WorkflowStep.BOM_PROCESSING, WorkflowStatus.COMPLETED));
+            return Collections.emptyList();
+        }
+
+        final var notificationSubjectDao = jdbiHandle.attach(NotificationSubjectDao.class);
+
+        final Set<String> updatedWorkflowStateTokens = updatedWorkflowStates.stream()
+                .map(WorkflowState::getToken).map(UUID::toString).collect(Collectors.toSet());
+        final List<BomConsumedOrProcessedSubject> notificationSubjects =
+                notificationSubjectDao.getForDelayedBomProcessed(updatedWorkflowStateTokens);
+
+        final var notifications = new ArrayList<KafkaEvent<String, Notification>>(updatedWorkflowStates.size());
+        notificationSubjects.stream()
+                .map(subject -> Notification.newBuilder()
+                        .setScope(SCOPE_PORTFOLIO)
+                        .setGroup(GROUP_BOM_PROCESSED)
+                        .setLevel(LEVEL_INFORMATIONAL)
+                        .setTimestamp(Timestamps.now())
+                        .setTitle(NotificationConstants.Title.BOM_PROCESSED)
+                        .setContent("A %s BOM was processed".formatted(subject.getBom().getFormat()))
+                        .setSubject(Any.pack(subject))
+                        .build())
+                .map(KafkaEventConverter::convert)
+                .forEach(notifications::add);
+
+        return notifications;
+    }
+
     private static class Aggregate {
         private int resultsTotal;
         private int scannerResultsTotal;
diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java b/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java
index 5c825acfa..824147aea 100644
--- a/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java
+++ b/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java
@@ -23,10 +23,12 @@
 import org.dependencytrack.model.VulnerabilityScan;
 import org.dependencytrack.persistence.jdbi.mapping.NotificationComponentRowMapper;
 import org.dependencytrack.persistence.jdbi.mapping.NotificationProjectRowMapper;
+import org.dependencytrack.persistence.jdbi.mapping.NotificationSubjectBomConsumedOrProcessedRowMapper;
 import org.dependencytrack.persistence.jdbi.mapping.NotificationSubjectNewVulnerabilityRowMapper;
 import org.dependencytrack.persistence.jdbi.mapping.NotificationSubjectNewVulnerableDependencyRowReducer;
 import org.dependencytrack.persistence.jdbi.mapping.NotificationSubjectProjectAuditChangeRowMapper;
 import org.dependencytrack.persistence.jdbi.mapping.NotificationVulnerabilityRowMapper;
+import org.dependencytrack.proto.notification.v1.BomConsumedOrProcessedSubject;
 import org.dependencytrack.proto.notification.v1.Component;
 import org.dependencytrack.proto.notification.v1.ComponentVulnAnalysisCompleteSubject;
 import org.dependencytrack.proto.notification.v1.NewVulnerabilitySubject;
@@ -376,6 +378,33 @@ LEFT JOIN LATERAL (
     @RegisterRowMapper(NotificationSubjectProjectAuditChangeRowMapper.class)
     Optional<VulnerabilityAnalysisDecisionChangeSubject> getForProjectAuditChange(final UUID componentUuid, final UUID vulnUuid, AnalysisState analysisState, boolean isSuppressed);
 
+    @SqlQuery("""
+            SELECT "P"."UUID" AS "projectUuid"
+                 , "P"."NAME" AS "projectName"
+                 , "P"."VERSION" AS "projectVersion"
+                 , "P"."DESCRIPTION" AS "projectDescription"
+                 , "P"."PURL" AS "projectPurl"
+                 , (SELECT ARRAY_AGG(DISTINCT "T"."NAME")
+                      FROM "TAG" AS "T"
+                     INNER JOIN "PROJECTS_TAGS" AS "PT"
+                        ON "PT"."TAG_ID" = "T"."ID"
+                     WHERE "PT"."PROJECT_ID" = "P"."ID"
+                   ) AS "projectTags"
+                 , 'CycloneDX' AS "bomFormat"
+                 , '(Unknown)' AS "bomSpecVersion"
+                 , '(Omitted)' AS "bomContent"
+              FROM "VULNERABILITYSCAN" AS "VS"
+             INNER JOIN "PROJECT" AS "P"
+                ON "P"."UUID" = "VS"."TARGET_IDENTIFIER"
+             INNER JOIN "WORKFLOW_STATE" AS "WFS"
+                ON "WFS"."TOKEN" = "VS"."TOKEN"
+               AND "WFS"."STEP" = 'BOM_PROCESSING'
+               AND "WFS"."STATUS" = 'COMPLETED'
+             WHERE "VS"."TOKEN" = ANY(:tokens)
+            """)
+    @RegisterRowMapper(NotificationSubjectBomConsumedOrProcessedRowMapper.class)
+    List<BomConsumedOrProcessedSubject> getForDelayedBomProcessed(Collection<String> workflowTokens);
+
     @SqlQuery("""
             SELECT "P"."UUID" AS "projectUuid"
                  , "P"."NAME" AS "projectName"
diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java b/src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java
index 20575a99c..b0ebe76a6 100644
--- a/src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java
+++ b/src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java
@@ -46,6 +46,23 @@ List<WorkflowState> updateAllStates(@Bind WorkflowStep step,
                                  @Bind("status") List<WorkflowStatus> statuses,
                                  @Bind("failureReason") List<String> failureReasons);
 
+    @SqlBatch("""
+            UPDATE "WORKFLOW_STATE"
+               SET "STATUS" = :status
+                 , "FAILURE_REASON" = :failureReason
+                 , "UPDATED_AT" = NOW()
+             WHERE "TOKEN" = :token
+               AND "STEP" = :step
+               AND "STATUS" = 'PENDING'
+            RETURNING *
+            """)
+    @GetGeneratedKeys("*")
+    @RegisterBeanMapper(WorkflowState.class)
+    List<WorkflowState> updateAllStatesIfPending(@Bind WorkflowStep step,
+                                                 @Bind("token") List<String> tokens,
+                                                 @Bind("status") List<WorkflowStatus> statuses,
+                                                 @Bind("failureReason") List<String> failureReasons);
+
     @SqlBatch("""
             WITH RECURSIVE "CTE_PARENT" ("ID") AS (
diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/mapping/NotificationSubjectBomConsumedOrProcessedRowMapper.java b/src/main/java/org/dependencytrack/persistence/jdbi/mapping/NotificationSubjectBomConsumedOrProcessedRowMapper.java
new file mode 100644
index 000000000..d679eff0a
--- /dev/null
+++ b/src/main/java/org/dependencytrack/persistence/jdbi/mapping/NotificationSubjectBomConsumedOrProcessedRowMapper.java
@@ -0,0 +1,50 @@
+/*
+ * This file is part of Dependency-Track.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright (c) OWASP Foundation. All Rights Reserved.
+ */
+package org.dependencytrack.persistence.jdbi.mapping;
+
+import org.dependencytrack.proto.notification.v1.Bom;
+import org.dependencytrack.proto.notification.v1.BomConsumedOrProcessedSubject;
+import org.dependencytrack.proto.notification.v1.Project;
+import org.jdbi.v3.core.mapper.NoSuchMapperException;
+import org.jdbi.v3.core.mapper.RowMapper;
+import org.jdbi.v3.core.statement.StatementContext;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+import static org.dependencytrack.persistence.jdbi.mapping.RowMapperUtil.maybeSet;
+
+public class NotificationSubjectBomConsumedOrProcessedRowMapper implements RowMapper<BomConsumedOrProcessedSubject> {
+
+    @Override
+    public BomConsumedOrProcessedSubject map(final ResultSet rs, final StatementContext ctx) throws SQLException {
+        final RowMapper<Project> projectRowMapper = ctx.findRowMapperFor(Project.class)
+                .orElseThrow(() -> new NoSuchMapperException("No mapper registered for %s".formatted(Project.class)));
+        final RowMapper<Bom> bomRowMapper = ctx.findRowMapperFor(Bom.class)
+                .orElseThrow(() -> new NoSuchMapperException("No mapper registered for %s".formatted(Bom.class)));
+
+        final BomConsumedOrProcessedSubject.Builder builder = BomConsumedOrProcessedSubject.newBuilder()
+                .setProject(projectRowMapper.map(rs, ctx))
+                .setBom(bomRowMapper.map(rs, ctx));
+        maybeSet(rs, "token", ResultSet::getString, builder::setToken);
+
+        return builder.build();
+    }
+
+}

From 72a37b4982119304b57090a72f75d2e6abd24213 Mon Sep 17 00:00:00 2001
From: nscuro
Date: Thu, 28 Mar 2024 16:04:42 +0100
Subject: [PATCH 11/24] Fix `BOM_PROCESSED` notification construction

Signed-off-by: nscuro
---
 .../jdbi/NotificationSubjectDao.java          |   5 +-
 .../mapping/NotificationBomRowMapper.java     |  41 +++++++
 ...dVulnerabilityScanResultProcessorTest.java | 103 ++++++++++++++++++
 3 files changed, 148 insertions(+), 1 deletion(-)
 create mode 100644 src/main/java/org/dependencytrack/persistence/jdbi/mapping/NotificationBomRowMapper.java

diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java b/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java
index 824147aea..ba27bf26a 100644
--- a/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java
+++ b/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java
@@ -21,6 +21,7 @@
 import org.dependencytrack.model.AnalysisState;
 import org.dependencytrack.model.VulnerabilityAnalysisLevel;
 import org.dependencytrack.model.VulnerabilityScan;
+import org.dependencytrack.persistence.jdbi.mapping.NotificationBomRowMapper;
 import org.dependencytrack.persistence.jdbi.mapping.NotificationComponentRowMapper;
 import org.dependencytrack.persistence.jdbi.mapping.NotificationProjectRowMapper;
 import org.dependencytrack.persistence.jdbi.mapping.NotificationSubjectBomConsumedOrProcessedRowMapper;
@@ -56,6 +57,7 @@
 import static org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisStatus.PROJECT_VULN_ANALYSIS_STATUS_FAILED;
 
 @RegisterRowMappers({
+        @RegisterRowMapper(NotificationBomRowMapper.class),
         @RegisterRowMapper(NotificationComponentRowMapper.class),
         @RegisterRowMapper(NotificationProjectRowMapper.class),
         @RegisterRowMapper(NotificationVulnerabilityRowMapper.class)
@@ -393,6 +395,7 @@ LEFT JOIN LATERAL (
                  , 'CycloneDX' AS "bomFormat"
                  , '(Unknown)' AS "bomSpecVersion"
                  , '(Omitted)' AS "bomContent"
+                 , "WFS"."TOKEN" AS "token"
              FROM "VULNERABILITYSCAN" AS "VS"
             INNER JOIN "PROJECT" AS "P"
                ON "P"."UUID" = "VS"."TARGET_IDENTIFIER"
@@ -400,7 +403,7 @@ LEFT JOIN LATERAL (
                ON "WFS"."TOKEN" = "VS"."TOKEN"
               AND "WFS"."STEP" = 'BOM_PROCESSING'
               AND "WFS"."STATUS" = 'COMPLETED'
-             WHERE "VS"."TOKEN" = ANY(:tokens)
+             WHERE "VS"."TOKEN" = ANY(:workflowTokens)
             """)
     @RegisterRowMapper(NotificationSubjectBomConsumedOrProcessedRowMapper.class)
     List<BomConsumedOrProcessedSubject> getForDelayedBomProcessed(Collection<String> workflowTokens);
diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/mapping/NotificationBomRowMapper.java b/src/main/java/org/dependencytrack/persistence/jdbi/mapping/NotificationBomRowMapper.java
new file mode 100644
index 000000000..9762c8045
--- /dev/null
+++ b/src/main/java/org/dependencytrack/persistence/jdbi/mapping/NotificationBomRowMapper.java
@@ -0,0 +1,41 @@
+/*
+ * This file is part of Dependency-Track.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright (c) OWASP Foundation. All Rights Reserved.
+ */
+package org.dependencytrack.persistence.jdbi.mapping;
+
+import org.dependencytrack.proto.notification.v1.Bom;
+import org.jdbi.v3.core.mapper.RowMapper;
+import org.jdbi.v3.core.statement.StatementContext;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+import static org.dependencytrack.persistence.jdbi.mapping.RowMapperUtil.maybeSet;
+
+public class NotificationBomRowMapper implements RowMapper<Bom> {
+
+    @Override
+    public Bom map(final ResultSet rs, final StatementContext ctx) throws SQLException {
+        final Bom.Builder builder = Bom.newBuilder();
+        maybeSet(rs, "bomFormat", ResultSet::getString, builder::setFormat);
+        maybeSet(rs, "bomSpecVersion", ResultSet::getString, builder::setSpecVersion);
+        maybeSet(rs, "bomContent", ResultSet::getString, builder::setContent);
+        return builder.build();
+    }
+
+}
diff --git a/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java b/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java
index 390423827..5ff6f6a87 100644
--- a/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java
+++ b/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java
@@ -30,6 +30,7 @@
 import org.dependencytrack.model.VulnerabilityScan;
 import org.dependencytrack.model.WorkflowStatus;
 import org.dependencytrack.model.WorkflowStep;
+import org.dependencytrack.proto.notification.v1.BomConsumedOrProcessedSubject;
 import org.dependencytrack.proto.notification.v1.Notification;
 import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisCompleteSubject;
 import org.dependencytrack.proto.vulnanalysis.v1.ScanResult;
@@ -45,6 +46,7 @@
 import java.util.concurrent.ConcurrentLinkedQueue;
 
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.dependencytrack.proto.notification.v1.Group.GROUP_BOM_PROCESSED;
 import static org.dependencytrack.proto.notification.v1.Group.GROUP_PROJECT_VULN_ANALYSIS_COMPLETE;
 import static org.dependencytrack.proto.notification.v1.Level.LEVEL_INFORMATIONAL;
 import static org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisStatus.PROJECT_VULN_ANALYSIS_STATUS_COMPLETED;
@@ -248,6 +250,107 @@ public void testProcessWithResultWithoutScannerResults() throws Exception {
         );
     }
 
+    @Test
+    public void testProcessWithDelayedBomProcessedNotification() throws Exception {
+        final var project = new Project();
+        project.setName("acme-app");
+        qm.persist(project);
+
+        final UUID workflowToken = UUID.randomUUID();
+        qm.createWorkflowSteps(workflowToken);
+
+        // Create a VulnerabilityScan for the project, expecting a single result.
+        final var vulnScan = new VulnerabilityScan();
+        vulnScan.setToken(workflowToken.toString());
+        vulnScan.setTargetType(VulnerabilityScan.TargetType.PROJECT);
+        vulnScan.setTargetIdentifier(project.getUuid());
+        vulnScan.setStatus(VulnerabilityScan.Status.IN_PROGRESS);
+        vulnScan.setExpectedResults(1);
+        vulnScan.setStartedAt(new Date());
+        vulnScan.setUpdatedAt(vulnScan.getStartedAt());
+        qm.persist(vulnScan);
+
+        // Create a ScanResult without any ScannerResults attached to it.
+        // This might happen when no scanner is capable of scanning a component,
+        // or when all scanners are disabled.
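+        // No failure threshold is configured here; the empty result is expected to
+        // complete the scan and trigger the delayed BOM_PROCESSED notification.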
+        final var scanResult = ScanResult.newBuilder().build();
+
+        final var processor = new ProcessedVulnerabilityScanResultProcessor(/* shouldDispatchBomProcessedNotification */ true);
+        processor.process(List.of(aConsumerRecord(vulnScan.getToken(), scanResult).build()));
+
+        assertThat(kafkaMockProducer.history()).satisfiesExactly(
+                record -> assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.name()),
+                record -> {
+                    assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name());
+
+                    final String recordKey = deserializeKey(KafkaTopics.NOTIFICATION_BOM, record);
+                    assertThat(recordKey).isEqualTo(project.getUuid().toString());
+
+                    final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_BOM, record);
+                    assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO);
+                    assertThat(notification.getGroup()).isEqualTo(GROUP_BOM_PROCESSED);
+                    assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL);
+                    assertThat(notification.getSubject().is(BomConsumedOrProcessedSubject.class)).isTrue();
+
+                    final var subject = notification.getSubject().unpack(BomConsumedOrProcessedSubject.class);
+                    assertThat(subject.getToken()).isEqualTo(workflowToken.toString());
+                    assertThat(subject.getProject().getUuid()).isEqualTo(project.getUuid().toString());
+                }
+        );
+    }
+
+    @Test
+    public void testProcessWithDelayedBomProcessedNotificationWhenVulnerabilityScanFailed() throws Exception {
+        final var project = new Project();
+        project.setName("acme-app");
+        qm.persist(project);
+
+        final UUID workflowToken = UUID.randomUUID();
+        qm.createWorkflowSteps(workflowToken);
+
+        final var vulnScan = new VulnerabilityScan();
+        vulnScan.setToken(workflowToken.toString());
+        vulnScan.setTargetType(VulnerabilityScan.TargetType.PROJECT);
+        vulnScan.setTargetIdentifier(project.getUuid());
+        vulnScan.setStatus(VulnerabilityScan.Status.IN_PROGRESS);
+        vulnScan.setExpectedResults(1);
+        vulnScan.setFailureThreshold(0.1);
+        vulnScan.setStartedAt(new Date());
+        vulnScan.setUpdatedAt(vulnScan.getStartedAt());
+        qm.persist(vulnScan);
+
+        // Create a ScanResult, with only one failed ScannerResult (i.e. 100% failure rate).
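+        // With a failure threshold of 0.1, this causes the scan itself to be marked
+        // as FAILED; the delayed BOM_PROCESSED notification is nevertheless expected
+        // to be dispatched, as asserted below.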
+        final var scanResult = ScanResult.newBuilder()
+                .addScannerResults(ScannerResult.newBuilder()
+                        .setScanner(SCANNER_INTERNAL)
+                        .setStatus(SCAN_STATUS_FAILED))
+                .build();
+
+        final var processor = new ProcessedVulnerabilityScanResultProcessor(/* shouldDispatchBomProcessedNotification */ true);
+        processor.process(List.of(aConsumerRecord(vulnScan.getToken(), scanResult).build()));
+
+        assertThat(kafkaMockProducer.history()).satisfiesExactly(
+                record -> assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.name()),
+                record -> {
+                    assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_BOM.name());
+
+                    final String recordKey = deserializeKey(KafkaTopics.NOTIFICATION_BOM, record);
+                    assertThat(recordKey).isEqualTo(project.getUuid().toString());
+
+                    final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_BOM, record);
+                    assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO);
+                    assertThat(notification.getGroup()).isEqualTo(GROUP_BOM_PROCESSED);
+                    assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL);
+                    assertThat(notification.getSubject().is(BomConsumedOrProcessedSubject.class)).isTrue();
+
+                    final var subject = notification.getSubject().unpack(BomConsumedOrProcessedSubject.class);
+                    assertThat(subject.getToken()).isEqualTo(workflowToken.toString());
+                    assertThat(subject.getProject().getUuid()).isEqualTo(project.getUuid().toString());
+                }
+        );
+    }
+
     private static final ConcurrentLinkedQueue<Event> EVENTS = new ConcurrentLinkedQueue<>();
 
     public static class EventSubscriber implements Subscriber {

From 895ca66e379790657be80f56c4b9358ca5aefc33 Mon Sep 17 00:00:00 2001
From: nscuro
Date: Thu, 28 Mar 2024 16:35:22 +0100
Subject: [PATCH 12/24] Migrate `VulnerabilityScanResultProcessorTest`

Signed-off-by: nscuro
---
 .../VulnerabilityScanResultProcessorTest.java | 1382 +++++++++++++++++
 1 file changed, 1382 insertions(+)
 create mode 100644 src/test/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessorTest.java

diff --git a/src/test/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessorTest.java b/src/test/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessorTest.java
new file mode 100644
index 000000000..c12c8450b
--- /dev/null
+++ b/src/test/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessorTest.java
@@ -0,0 +1,1382 @@
+/*
+ * This file is part of Dependency-Track.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright (c) OWASP Foundation. All Rights Reserved.
+ */
+package org.dependencytrack.event.kafka.processor;
+
+import com.google.protobuf.Timestamp;
+import junitparams.JUnitParamsRunner;
+import junitparams.Parameters;
+import org.apache.kafka.common.header.Headers;
+import org.apache.kafka.common.header.internals.RecordHeaders;
+import org.cyclonedx.proto.v1_4.Advisory;
+import org.cyclonedx.proto.v1_4.Bom;
+import org.cyclonedx.proto.v1_4.Property;
+import org.cyclonedx.proto.v1_4.Source;
+import org.cyclonedx.proto.v1_4.VulnerabilityRating;
+import org.cyclonedx.proto.v1_4.VulnerabilityReference;
+import org.dependencytrack.TestCacheManager;
+import org.dependencytrack.event.kafka.KafkaEventHeaders;
+import org.dependencytrack.event.kafka.KafkaTopics;
+import org.dependencytrack.model.Analysis;
+import org.dependencytrack.model.AnalysisComment;
+import org.dependencytrack.model.AnalysisJustification;
+import org.dependencytrack.model.AnalysisResponse;
+import org.dependencytrack.model.AnalysisState;
+import org.dependencytrack.model.AnalyzerIdentity;
+import org.dependencytrack.model.Component;
+import org.dependencytrack.model.ConfigPropertyConstants;
+import org.dependencytrack.model.Finding;
+import org.dependencytrack.model.FindingAttribution;
+import org.dependencytrack.model.Project;
+import org.dependencytrack.model.Severity;
+import org.dependencytrack.model.Vulnerability;
+import org.dependencytrack.model.VulnerabilityAlias;
+import org.dependencytrack.model.VulnerabilityAnalysisLevel;
+import org.dependencytrack.notification.NotificationConstants;
+import org.dependencytrack.persistence.CweImporter;
+import org.dependencytrack.persistence.jdbi.VulnerabilityPolicyDao;
+import org.dependencytrack.policy.cel.CelPolicyScriptHost;
+import org.dependencytrack.policy.cel.CelPolicyType;
+import org.dependencytrack.policy.cel.CelVulnerabilityPolicyEvaluator;
+import org.dependencytrack.policy.vulnerability.DatabaseVulnerabilityPolicyProvider;
+import org.dependencytrack.policy.vulnerability.VulnerabilityPolicy;
+import org.dependencytrack.policy.vulnerability.VulnerabilityPolicyAnalysis;
+import org.dependencytrack.policy.vulnerability.VulnerabilityPolicyRating;
+import org.dependencytrack.proto.notification.v1.NewVulnerabilitySubject;
+import org.dependencytrack.proto.notification.v1.NewVulnerableDependencySubject;
+import org.dependencytrack.proto.notification.v1.Notification;
+import org.dependencytrack.proto.notification.v1.VulnerabilityAnalysisDecisionChangeSubject;
+import org.dependencytrack.proto.vulnanalysis.v1.ScanKey;
+import org.dependencytrack.proto.vulnanalysis.v1.ScanResult;
+import org.dependencytrack.proto.vulnanalysis.v1.Scanner;
+import org.dependencytrack.proto.vulnanalysis.v1.ScannerResult;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.util.List;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.cyclonedx.proto.v1_4.ScoreMethod.SCORE_METHOD_CVSSV2;
+import static org.cyclonedx.proto.v1_4.ScoreMethod.SCORE_METHOD_CVSSV3;
+import static org.cyclonedx.proto.v1_4.ScoreMethod.SCORE_METHOD_OWASP;
+import static org.dependencytrack.persistence.jdbi.JdbiFactory.jdbi;
+import static org.dependencytrack.proto.notification.v1.Group.GROUP_ANALYZER;
+import static org.dependencytrack.proto.notification.v1.Group.GROUP_NEW_VULNERABILITY;
+import static org.dependencytrack.proto.notification.v1.Group.GROUP_NEW_VULNERABLE_DEPENDENCY;
+import static org.dependencytrack.proto.notification.v1.Group.GROUP_PROJECT_AUDIT_CHANGE;
+import static org.dependencytrack.proto.notification.v1.Level.LEVEL_ERROR;
+import static org.dependencytrack.proto.notification.v1.Level.LEVEL_INFORMATIONAL;
+import static org.dependencytrack.proto.notification.v1.Scope.SCOPE_PORTFOLIO;
+import static org.dependencytrack.proto.notification.v1.Scope.SCOPE_SYSTEM;
+import static org.dependencytrack.proto.vulnanalysis.v1.ScanStatus.SCAN_STATUS_FAILED;
+import static org.dependencytrack.proto.vulnanalysis.v1.ScanStatus.SCAN_STATUS_PENDING;
+import static org.dependencytrack.proto.vulnanalysis.v1.ScanStatus.SCAN_STATUS_SUCCESSFUL;
+import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_INTERNAL;
+import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_OSSINDEX;
+import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_SNYK;
+import static org.dependencytrack.util.KafkaTestUtil.deserializeKey;
+import static org.dependencytrack.util.KafkaTestUtil.deserializeValue;
+
+@RunWith(JUnitParamsRunner.class)
+public class VulnerabilityScanResultProcessorTest extends AbstractProcessorTest {
+
+    private VulnerabilityScanResultProcessor processor;
+
+    @Before
+    public void before() throws Exception {
+        super.before();
+
+        final var cacheManager = new TestCacheManager(5, TimeUnit.MINUTES, 100);
+        final var scriptHost = new CelPolicyScriptHost(cacheManager, CelPolicyType.VULNERABILITY);
+        final var policyProvider = new DatabaseVulnerabilityPolicyProvider();
+        final var policyEvaluator = new CelVulnerabilityPolicyEvaluator(policyProvider, scriptHost, cacheManager);
+
+        processor = new VulnerabilityScanResultProcessor(policyEvaluator);
+
+        new CweImporter().processCweDefinitions(); // Required for CWE mapping
+    }
+
+    @Test
+    public void dropFailedScanResultTest() {
+        final var project = new Project();
+        project.setName("acme-app");
+        project.setVersion("1.0.0");
+        qm.persist(project);
+
+        final var component = new Component();
+        component.setName("acme-lib");
+        component.setVersion("1.1.0");
+        component.setProject(project);
+        qm.persist(component);
+
+        final var scanToken = UUID.randomUUID().toString();
+        final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(component.getUuid().toString()).build();
+        final var scanResult = ScanResult.newBuilder()
+                .setKey(scanKey)
+                .addScannerResults(ScannerResult.newBuilder()
+                        .setScanner(SCANNER_INTERNAL)
+                        .setStatus(SCAN_STATUS_FAILED)
+                        .setFailureReason("just because"))
+                .build();
+
+        processor.process(aConsumerRecord(scanKey, scanResult).build());
+
+        assertThat(kafkaMockProducer.history()).satisfiesExactly(
+                record -> {
+                    assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_ANALYZER.name());
+                    final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_ANALYZER, record);
+                    assertThat(notification.getScope()).isEqualTo(SCOPE_SYSTEM);
+                    assertThat(notification.getLevel()).isEqualTo(LEVEL_ERROR);
+                    assertThat(notification.getGroup()).isEqualTo(GROUP_ANALYZER);
+                    assertThat(notification.getTitle()).isEqualTo(NotificationConstants.Title.ANALYZER_ERROR);
+                    assertThat(notification.getContent()).isEqualTo(
+                            "Scan of component %s with %s failed (scanKey: %s): just because",
+                            component.getUuid(), SCANNER_INTERNAL, scanToken + "/" + component.getUuid());
+                },
+                record -> {
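+                    // Results are re-published to the "processed" topic, keyed by the
+                    // scan token, with scanner BOMs stripped (asserted below).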
+                    assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+                    final String recordKey = deserializeKey(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+                    assertThat(recordKey).isEqualTo(scanToken);
+                    final ScanResult recordValue = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+                    assertThat(recordValue.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+                }
+        );
+    }
+
+    @Test
+    public void dropPendingScanResultTest() {
+        final var project = new Project();
+        project.setName("acme-app");
+        project.setVersion("1.0.0");
+        qm.persist(project);
+
+        final var component = new Component();
+        component.setName("acme-lib");
+        component.setVersion("1.1.0");
+        component.setProject(project);
+        qm.persist(component);
+
+        final var scanToken = UUID.randomUUID().toString();
+        final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(component.getUuid().toString()).build();
+        final var scanResult = ScanResult.newBuilder()
+                .setKey(scanKey)
+                .addScannerResults(ScannerResult.newBuilder()
+                        .setScanner(SCANNER_INTERNAL)
+                        .setStatus(SCAN_STATUS_PENDING))
+                .build();
+
+        processor.process(aConsumerRecord(scanKey, scanResult).build());
+
+        assertThat(kafkaMockProducer.history()).satisfiesExactly(record -> {
+            assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+            final String recordKey = deserializeKey(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+            assertThat(recordKey).isEqualTo(scanToken);
+            final ScanResult recordValue = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+            assertThat(recordValue.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+        });
+    }
+
+    @Test
+    public void processSuccessfulScanResultWhenComponentDoesNotExistTest() {
+        final var componentUuid = UUID.randomUUID();
+        final var scanToken = UUID.randomUUID().toString();
+        final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build();
+        final var vuln = new Vulnerability();
+        vuln.setVulnId("INT-001");
+        vuln.setSource(Vulnerability.Source.INTERNAL);
+        final var scanResult = ScanResult.newBuilder()
+                .setKey(scanKey)
+                .addScannerResults(ScannerResult.newBuilder()
+                        .setScanner(SCANNER_INTERNAL)
+                        .setStatus(SCAN_STATUS_SUCCESSFUL)
+                        .setBom(Bom.newBuilder().addVulnerabilities(createVuln("INT-001", "INTERNAL"))))
+                .build();
+
+        processor.process(aConsumerRecord(scanKey, scanResult).build());
+
+        assertThat(kafkaMockProducer.history()).satisfiesExactly(record -> {
+            assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+            final String recordKey = deserializeKey(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+            assertThat(recordKey).isEqualTo(scanToken);
+            final ScanResult recordValue = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+            assertThat(recordValue.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+        });
+    }
+
+    @Test
+    public void processSuccessfulScanResult() {
+        final var project = new Project();
+        project.setName("acme-app");
+        project.setVersion("1.0.0");
+        qm.persist(project);
+
+        final var component = new Component();
+        component.setName("acme-lib");
+        component.setVersion("1.1.0");
+        component.setProject(project);
+        qm.persist(component);
+
+        final var componentUuid = component.getUuid();
+        final var scanToken = UUID.randomUUID().toString();
+        final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build();
+        final var vulnA = new Vulnerability();
+        vulnA.setVulnId("INT-001");
+        vulnA.setSource(Vulnerability.Source.INTERNAL);
+        qm.persist(vulnA);
+        final var vulnB = new Vulnerability();
+        vulnB.setVulnId("SONATYPE-002");
+        vulnB.setSource(Vulnerability.Source.OSSINDEX);
+        final var vulnC = new Vulnerability();
+        vulnC.setVulnId("INT-002");
+        vulnC.setSource(Vulnerability.Source.INTERNAL);
+        final var scanResult = ScanResult.newBuilder()
+                .setKey(scanKey)
+                .addScannerResults(ScannerResult.newBuilder()
+                        .setScanner(SCANNER_INTERNAL)
+                        .setStatus(SCAN_STATUS_SUCCESSFUL)
+                        .setBom(Bom.newBuilder().addAllVulnerabilities(List.of(
+                                createVuln(vulnA.getVulnId(), "INTERNAL"),
+                                createVuln(vulnB.getVulnId(), "OSSINDEX"),
+                                createVuln(vulnC.getVulnId(), "INTERNAL")
+                        ))))
+                .build();
+        final Headers headers = new RecordHeaders();
+        headers.add(KafkaEventHeaders.VULN_ANALYSIS_LEVEL, VulnerabilityAnalysisLevel.BOM_UPLOAD_ANALYSIS.name().getBytes());
+        headers.add(KafkaEventHeaders.IS_NEW_COMPONENT, "true".getBytes());
+
+        processor.process(aConsumerRecord(scanKey, scanResult).withHeaders(headers).build());
+
+        qm.getPersistenceManager().refresh(component);
+        assertThat(component.getVulnerabilities()).satisfiesExactlyInAnyOrder(
+                vuln -> {
+                    assertThat(vuln.getVulnId()).isEqualTo("INT-001");
+                    assertThat(vuln.getSource()).isEqualTo(Vulnerability.Source.INTERNAL.name());
+                },
+                vuln -> {
+                    assertThat(vuln.getVulnId()).isEqualTo("SONATYPE-002");
+                    assertThat(vuln.getSource()).isEqualTo(Vulnerability.Source.OSSINDEX.name());
+                }
+                // INT-002 is discarded because it is internal but doesn't exist in the database.
+        );
+
+        final List<Finding> findings = qm.getFindings(project, false);
+        assertThat(findings).satisfiesExactlyInAnyOrder(
+                finding -> {
+                    assertThat(finding.getVulnerability().get("vulnId")).isEqualTo("INT-001");
+                    assertThat(finding.getAttribution().get("analyzerIdentity")).isEqualTo(AnalyzerIdentity.INTERNAL_ANALYZER.name());
+                },
+                finding -> {
+                    assertThat(finding.getVulnerability().get("vulnId")).isEqualTo("SONATYPE-002");
+                    assertThat(finding.getAttribution().get("analyzerIdentity")).isEqualTo(AnalyzerIdentity.INTERNAL_ANALYZER.name());
+                }
+                // INT-002 is discarded because it is internal but doesn't exist in the database.
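+                // In other words, scanner results for INTERNAL vulnerabilities are only
+                // accepted when the vulnerability already exists in the database.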
+        );
+
+        assertThat(kafkaMockProducer.history()).satisfiesExactly(
+                record -> {
+                    assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+                    final String recordKey = deserializeKey(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+                    assertThat(recordKey).isEqualTo(scanToken);
+                    final ScanResult recordValue = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+                    assertThat(recordValue.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+                },
+                record -> {
+                    assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_NEW_VULNERABLE_DEPENDENCY.name());
+                    final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_NEW_VULNERABLE_DEPENDENCY, record);
+                    assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO);
+                    assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL);
+                    assertThat(notification.getGroup()).isEqualTo(GROUP_NEW_VULNERABLE_DEPENDENCY);
+                    assertThat(notification.getSubject().is(NewVulnerableDependencySubject.class)).isTrue();
+                    final var subject = notification.getSubject().unpack(NewVulnerableDependencySubject.class);
+                    assertThat(subject.getComponent().getName()).isEqualTo("acme-lib");
+                    assertThat(subject.getComponent().getVersion()).isEqualTo("1.1.0");
+                    assertThat(subject.getProject().getName()).isEqualTo("acme-app");
+                    assertThat(subject.getProject().getVersion()).isEqualTo("1.0.0");
+                    assertThat(subject.getVulnerabilitiesCount()).isEqualTo(2);
+                },
+                record -> {
+                    assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY.name());
+                    final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY, record);
+                    assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO);
+                    assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL);
+                    assertThat(notification.getGroup()).isEqualTo(GROUP_NEW_VULNERABILITY);
+                    assertThat(notification.getSubject().is(NewVulnerabilitySubject.class)).isTrue();
+                    final var subject = notification.getSubject().unpack(NewVulnerabilitySubject.class);
+                    assertThat(subject.getVulnerabilityAnalysisLevel()).isEqualTo("BOM_UPLOAD_ANALYSIS");
+                },
+                record -> {
+                    assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY.name());
+                    final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY, record);
+                    assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO);
+                    assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL);
+                    assertThat(notification.getGroup()).isEqualTo(GROUP_NEW_VULNERABILITY);
+                    assertThat(notification.getSubject().is(NewVulnerabilitySubject.class)).isTrue();
+                    final var subject = notification.getSubject().unpack(NewVulnerabilitySubject.class);
+                    assertThat(subject.getVulnerabilityAnalysisLevel()).isEqualTo("BOM_UPLOAD_ANALYSIS");
+                }
+                // INT-002 is discarded because it is internal but doesn't exist in the database.
+        );
+    }
+
+    @Test
+    public void processSuccessfulScanResultWithExistingFindingTest() {
+        final var project = new Project();
+        project.setName("acme-app");
+        project.setVersion("1.0.0");
+        qm.persist(project);
+
+        final var component = new Component();
+        component.setName("acme-lib");
+        component.setVersion("1.1.0");
+        component.setProject(project);
+        qm.persist(component);
+
+        final var vulnerability = new Vulnerability();
+        vulnerability.setVulnId("CVE-001");
+        vulnerability.setSource(Vulnerability.Source.NVD);
+        qm.persist(vulnerability);
+        qm.addVulnerability(vulnerability, component, AnalyzerIdentity.OSSINDEX_ANALYZER);
+
+        final var scanToken = UUID.randomUUID().toString();
+        final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(component.getUuid().toString()).build();
+        final var scanResult = ScanResult.newBuilder()
+                .setKey(scanKey)
+                .addScannerResults(ScannerResult.newBuilder()
+                        .setScanner(SCANNER_INTERNAL)
+                        .setStatus(SCAN_STATUS_SUCCESSFUL)
+                        .setBom(Bom.newBuilder().addVulnerabilities(createVuln("CVE-001", "NVD"))))
+                .build();
+
+        processor.process(aConsumerRecord(scanKey, scanResult).build());
+
+        qm.getPersistenceManager().refreshAll(component, vulnerability);
+        assertThat(component.getVulnerabilities()).satisfiesExactly(
+                vuln -> {
+                    assertThat(vuln.getVulnId()).isEqualTo("CVE-001");
+                    assertThat(vuln.getSource()).isEqualTo(Vulnerability.Source.NVD.name());
+                }
+        );
+
+        // Attribution should still refer to the first scanner that identified the vulnerability.
+        final FindingAttribution attribution = qm.getFindingAttribution(vulnerability, component);
+        assertThat(attribution).isNotNull();
+        assertThat(attribution.getAnalyzerIdentity()).isEqualTo(AnalyzerIdentity.OSSINDEX_ANALYZER);
+
+        // Because the vulnerability was already reported, no notification must be sent.
+        assertThat(kafkaMockProducer.history()).satisfiesExactly(record -> {
+            assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+            final String recordKey = deserializeKey(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+            assertThat(recordKey).isEqualTo(scanToken);
+            final ScanResult recordValue = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+            assertThat(recordValue.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+        });
+    }
+
+    private Object[] canUpdateExistingVulnerabilityTestParams() {
+        return new Object[]{
+                // Results from the internal scanner must never override any existing data.
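+                // Columns: vulnId, vulnSource, scanner, mirrorSourceConfigProperty,
+                // mirrorSourceConfigPropertyValue, expectModified.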
+ new Object[]{"INT-001", "INTERNAL", SCANNER_INTERNAL, null, null, false}, + new Object[]{"CVE-001", "NVD", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_NVD_ENABLED, "true", false}, + new Object[]{"CVE-001", "NVD", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_NVD_ENABLED, "false", false}, + new Object[]{"CVE-001", "NVD", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_NVD_ENABLED, null, false}, + new Object[]{"GHSA-001", "GITHUB", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_GITHUB_ADVISORIES_ENABLED, "true", false}, + new Object[]{"GHSA-001", "GITHUB", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_GITHUB_ADVISORIES_ENABLED, "false", false}, + new Object[]{"GHSA-001", "GITHUB", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_GITHUB_ADVISORIES_ENABLED, null, false}, + new Object[]{"OSV-001", "OSV", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_GOOGLE_OSV_ENABLED, "Go;Maven", false}, + new Object[]{"OSV-001", "OSV", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_GOOGLE_OSV_ENABLED, "", false}, + new Object[]{"OSV-001", "OSV", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_GOOGLE_OSV_ENABLED, null, false}, + new Object[]{"sonatype-001", "OSSINDEX", SCANNER_INTERNAL, null, null, false}, + new Object[]{"snyk-001", "SNYK", SCANNER_INTERNAL, null, null, false}, + new Object[]{"001", "VULNDB", SCANNER_INTERNAL, null, null, false}, + // Scanners must be allowed to override existing data if they themselves are the authoritative source, + // or mirroring of the actual authoritative source is not enabled. + new Object[]{"CVE-001", "NVD", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_NVD_ENABLED, "true", false}, + new Object[]{"CVE-001", "NVD", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_NVD_ENABLED, "false", true}, + new Object[]{"CVE-001", "NVD", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_NVD_ENABLED, null, true}, + new Object[]{"GHSA-001", "GITHUB", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_GITHUB_ADVISORIES_ENABLED, "true", false}, + new Object[]{"GHSA-001", "GITHUB", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_GITHUB_ADVISORIES_ENABLED, "false", true}, + new Object[]{"GHSA-001", "GITHUB", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_GITHUB_ADVISORIES_ENABLED, null, true}, + new Object[]{"OSV-001", "OSV", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_GOOGLE_OSV_ENABLED, "Go;Maven", false}, + new Object[]{"OSV-001", "OSV", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_GOOGLE_OSV_ENABLED, "", true}, + new Object[]{"OSV-001", "OSV", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_GOOGLE_OSV_ENABLED, null, true}, + new Object[]{"sonatype-001", "OSSINDEX", SCANNER_OSSINDEX, null, null, true}, + new Object[]{"SNYK-001", "SNYK", SCANNER_OSSINDEX, null, null, false}, + new Object[]{"sonatype-001", "OSSINDEX", SCANNER_SNYK, null, null, false}, + new Object[]{"SNYK-001", "SNYK", SCANNER_SNYK, null, null, true}, + // Updating of internal vulnerabilities must always be forbidden. 
+ new Object[]{"INT-001", "INTERNAL", SCANNER_OSSINDEX, null, null, false}, + new Object[]{"INT-001", "INTERNAL", SCANNER_SNYK, null, null, false}, + }; + } + + @Test + @Parameters(method = "canUpdateExistingVulnerabilityTestParams") + public void canUpdateExistingVulnerabilityTest(final String vulnId, final String vulnSource, final Scanner scanner, + final ConfigPropertyConstants mirrorSourceConfigProperty, + final String mirrorSourceConfigPropertyValue, final boolean expectModified) { + if (mirrorSourceConfigProperty != null && mirrorSourceConfigPropertyValue != null) { + qm.createConfigProperty( + mirrorSourceConfigProperty.getGroupName(), + mirrorSourceConfigProperty.getPropertyName(), + mirrorSourceConfigPropertyValue, + mirrorSourceConfigProperty.getPropertyType(), + null + ); + } + + final var project = new Project(); + project.setName("acme-app"); + project.setVersion("1.0.0"); + qm.persist(project); + + final var component = new Component(); + component.setName("acme-lib"); + component.setVersion("1.1.0"); + component.setProject(project); + qm.persist(component); + + final var vulnerability = new Vulnerability(); + vulnerability.setVulnId(vulnId); + vulnerability.setSource(vulnSource); + vulnerability.setDescription("original description"); + qm.persist(vulnerability); + + final var scanToken = UUID.randomUUID().toString(); + final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(component.getUuid().toString()).build(); + final var scanResult = ScanResult.newBuilder() + .setKey(scanKey) + .addScannerResults(ScannerResult.newBuilder() + .setScanner(scanner) + .setStatus(SCAN_STATUS_SUCCESSFUL) + .setBom(Bom.newBuilder().addVulnerabilities( + createVuln(vulnId, vulnSource).toBuilder().setDescription("modified description") + .build()))) + .build(); + + processor.process(aConsumerRecord(scanKey, scanResult).build()); + + qm.getPersistenceManager().refreshAll(component, vulnerability); + assertThat(component.getVulnerabilities()).satisfiesExactly( + vuln -> { + assertThat(vuln.getVulnId()).isEqualTo(vulnId); + assertThat(vuln.getSource()).isEqualTo(vulnSource); + if (expectModified) { + assertThat(vuln.getDescription()).isEqualTo("modified description"); + } else { + assertThat(vuln.getDescription()).isEqualTo("original description"); + } + } + ); + } + + @Test + public void updateExistingVulnerabilityTest() { + final var project = new Project(); + project.setName("acme-app"); + project.setVersion("1.0.0"); + qm.persist(project); + + final var component = new Component(); + component.setName("acme-lib"); + component.setVersion("1.1.0"); + component.setProject(project); + qm.persist(component); + + final var vulnerability = new Vulnerability(); + vulnerability.setVulnId("CVE-001"); + vulnerability.setSource(Vulnerability.Source.NVD); + vulnerability.setTitle("original title"); + vulnerability.setSubTitle("original subTitle"); + vulnerability.setDescription("original description"); + vulnerability.setDetail("original detail"); + vulnerability.setRecommendation("original recommendation"); + vulnerability.setReferences("original references"); + vulnerability.setCreated(Date.from(Instant.ofEpochSecond(1672527600))); // Sat Dec 31 2022 23:00:00 GMT+0000 + vulnerability.setPublished(Date.from(Instant.ofEpochSecond(1672614000))); // Sun Jan 01 2023 23:00:00 GMT+0000 + vulnerability.setUpdated(Date.from(Instant.ofEpochSecond(1672700400))); // Mon Jan 02 2023 23:00:00 GMT+0000 + vulnerability.setCwes(List.of(666, 777)); + vulnerability.setSeverity(Severity.LOW); + 
vulnerability.setCvssV2BaseScore(BigDecimal.valueOf(2.2)); + vulnerability.setCvssV2ExploitabilitySubScore(BigDecimal.valueOf(2.2)); + vulnerability.setCvssV2ImpactSubScore(BigDecimal.valueOf(2.3)); + vulnerability.setCvssV2Vector("original cvssV2Vector"); + vulnerability.setCvssV3BaseScore(BigDecimal.valueOf(3.1)); + vulnerability.setCvssV3ExploitabilitySubScore(BigDecimal.valueOf(3.2)); + vulnerability.setCvssV3ImpactSubScore(BigDecimal.valueOf(3.3)); + vulnerability.setCvssV3Vector("original cvssv3Vector"); + vulnerability.setOwaspRRLikelihoodScore(BigDecimal.valueOf(4.1)); + vulnerability.setOwaspRRTechnicalImpactScore(BigDecimal.valueOf(4.2)); + vulnerability.setOwaspRRBusinessImpactScore(BigDecimal.valueOf(4.3)); + vulnerability.setOwaspRRVector("original owaspRrVector"); + vulnerability.setVulnerableVersions("original vulnerableVersions"); + vulnerability.setPatchedVersions("original patchedVersions"); + vulnerability.setEpssScore(BigDecimal.valueOf(5.1)); + vulnerability.setEpssPercentile(BigDecimal.valueOf(5.2)); + qm.persist(vulnerability); + + final var scanToken = UUID.randomUUID().toString(); + final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(component.getUuid().toString()).build(); + final var scanResult = ScanResult.newBuilder() + .setKey(scanKey) + .addScannerResults(ScannerResult.newBuilder() + .setScanner(SCANNER_OSSINDEX) + .setStatus(SCAN_STATUS_SUCCESSFUL) + .setBom(Bom.newBuilder().addVulnerabilities( + createVuln("CVE-001", "NVD").toBuilder() + .setDescription("modified description") + .setDetail("modified detail") + .setRecommendation("modified recommendation") + .setCreated(Timestamp.newBuilder() + .setSeconds(1673305200)) // Mon Jan 09 2023 23:00:00 GMT+0000 + .setPublished(Timestamp.newBuilder() + .setSeconds(1673391600)) // Tue Jan 10 2023 23:00:00 GMT+0000 + .setUpdated(Timestamp.newBuilder() + .setSeconds(1673478000)) // Wed Jan 11 2023 23:00:00 GMT+0000 + .addCwes(999) + .addAdvisories(Advisory.newBuilder().setUrl("modified reference").build()) + .addRatings(VulnerabilityRating.newBuilder() + .setSource(Source.newBuilder().setName("NVD").build()) + .setMethod(SCORE_METHOD_CVSSV2) + .setScore(9.3) + .setVector("(AV:N/AC:M/Au:N/C:C/I:C/A:C)")) + .addRatings(VulnerabilityRating.newBuilder() + .setSource(Source.newBuilder().setName("NVD").build()) + .setMethod(SCORE_METHOD_CVSSV3) + .setScore(10.0) + .setVector("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H")) + .addRatings(VulnerabilityRating.newBuilder() + .setSource(Source.newBuilder().setName("OSSINDEX").build()) + .setMethod(SCORE_METHOD_OWASP) + .setScore(6.6) + .setVector("SL:1/M:4/O:4/S:9/ED:7/EE:3/A:4/ID:3/LC:9/LI:1/LAV:5/LAC:1/FD:3/RD:4/NC:7/PV:9")) + .addProperties(Property.newBuilder() + .setName("dependency-track:vuln:title") + .setValue("modified title").build()) + .build()))) + .build(); + + processor.process(aConsumerRecord(scanKey, scanResult).build()); + + qm.getPersistenceManager().refreshAll(component, vulnerability); + assertThat(component.getVulnerabilities()).hasSize(1); + + assertThat(vulnerability.getVulnId()).isEqualTo("CVE-001"); + assertThat(vulnerability.getSource()).isEqualTo("NVD"); + assertThat(vulnerability.getDescription()).isEqualTo("modified description"); + assertThat(vulnerability.getDetail()).isEqualTo("modified detail"); + assertThat(vulnerability.getTitle()).isEqualTo("modified title"); + assertThat(vulnerability.getSubTitle()).isNull(); + assertThat(vulnerability.getRecommendation()).isEqualTo("modified recommendation"); + 
assertThat(vulnerability.getReferences()).isEqualTo("* [modified reference](modified reference)\n"); + assertThat(vulnerability.getCreated()).isEqualTo(Instant.ofEpochSecond(1673305200)); + assertThat(vulnerability.getPublished()).isEqualTo(Instant.ofEpochSecond(1673391600)); + assertThat(vulnerability.getUpdated()).isEqualTo(Instant.ofEpochSecond(1673478000)); + assertThat(vulnerability.getCwes()).containsOnly(999); + assertThat(vulnerability.getSeverity()).isEqualTo(Severity.CRITICAL); + assertThat(vulnerability.getCvssV2BaseScore()).isEqualTo("9.3"); + assertThat(vulnerability.getCvssV2ExploitabilitySubScore()).isEqualTo("8.6"); + assertThat(vulnerability.getCvssV2ImpactSubScore()).isEqualTo("10.0"); + assertThat(vulnerability.getCvssV2Vector()).isEqualTo("(AV:N/AC:M/Au:N/C:C/I:C/A:C)"); + assertThat(vulnerability.getCvssV3BaseScore()).isEqualTo("10.0"); + assertThat(vulnerability.getCvssV3ExploitabilitySubScore()).isEqualTo("3.9"); + assertThat(vulnerability.getCvssV3ImpactSubScore()).isEqualTo("6.0"); + assertThat(vulnerability.getCvssV3Vector()).isEqualTo("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H"); + assertThat(vulnerability.getOwaspRRLikelihoodScore()).isEqualTo("4.375"); + assertThat(vulnerability.getOwaspRRTechnicalImpactScore()).isEqualTo("4.0"); + assertThat(vulnerability.getOwaspRRBusinessImpactScore()).isEqualTo("5.75"); + assertThat(vulnerability.getOwaspRRVector()).isEqualTo("SL:1/M:4/O:4/S:9/ED:7/EE:3/A:4/ID:3/LC:9/LI:1/LAV:5/LAC:1/FD:3/RD:4/NC:7/PV:9"); + assertThat(vulnerability.getVulnerableVersions()).isNull(); + assertThat(vulnerability.getPatchedVersions()).isNull(); + assertThat(vulnerability.getEpssScore()).isEqualByComparingTo("5.1"); + assertThat(vulnerability.getEpssPercentile()).isEqualByComparingTo("5.2"); + } + + @Test + public void analysisThroughPolicyNewAnalysisTest() { + final var project = new Project(); + project.setName("acme-app"); + project.setVersion("1.0.0"); + qm.persist(project); + + final var component = new Component(); + component.setName("acme-lib"); + component.setVersion("1.1.0"); + component.setProject(project); + qm.persist(component); + + // Create a vulnerability that was not previously reported for the component. + final var newVuln = new Vulnerability(); + newVuln.setVulnId("CVE-100"); + newVuln.setSource(Vulnerability.Source.NVD); + newVuln.setSeverity(Severity.CRITICAL); + qm.persist(newVuln); + + // Create a policy that marks any finding as NOT_AFFECTED, and downgrades the severity to LOW. 
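+ // The policy conditions below are CEL expressions evaluated against the finding's
+ // context; `has(component.name)` matches any component that has a name, so this
+ // policy applies to every finding raised in this test.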
+ final var policyAnalysis = new VulnerabilityPolicyAnalysis(); + policyAnalysis.setState(VulnerabilityPolicyAnalysis.State.NOT_AFFECTED); + policyAnalysis.setJustification(VulnerabilityPolicyAnalysis.Justification.CODE_NOT_REACHABLE); + policyAnalysis.setVendorResponse(VulnerabilityPolicyAnalysis.Response.WILL_NOT_FIX); + policyAnalysis.setDetails("Because I say so."); + final var policyRating = new VulnerabilityPolicyRating(); + policyRating.setMethod(VulnerabilityPolicyRating.Method.CVSSV3); + policyRating.setSeverity(VulnerabilityPolicyRating.Severity.LOW); + policyRating.setVector("CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L"); + policyRating.setScore(1.6); + final var policy = new VulnerabilityPolicy(); + policy.setName("Foo"); + policy.setAuthor("Jane Doe"); + policy.setConditions(List.of("has(component.name)", "project.version != \"\"")); + policy.setAnalysis(policyAnalysis); + policy.setRatings(List.of(policyRating)); + jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(policy)); + + final var componentUuid = component.getUuid(); + final var scanToken = UUID.randomUUID().toString(); + final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); + final var scanResult = ScanResult.newBuilder() + .setKey(scanKey) + .addScannerResults(ScannerResult.newBuilder() + .setScanner(SCANNER_INTERNAL) + .setStatus(SCAN_STATUS_SUCCESSFUL) + .setBom(Bom.newBuilder().addAllVulnerabilities(List.of( + createVuln(newVuln.getVulnId(), newVuln.getSource()) + )))) + .build(); + processor.process(aConsumerRecord(scanKey, scanResult).build()); + + qm.getPersistenceManager().evictAll(); + assertThat(component.getVulnerabilities()).satisfiesExactly( + v -> { + assertThat(v.getVulnId()).isEqualTo("CVE-100"); + assertThat(qm.getAnalysis(component, v)).satisfies(analysis -> { + assertThat(analysis.getAnalysisState()).isEqualTo(AnalysisState.NOT_AFFECTED); + assertThat(analysis.getAnalysisJustification()).isEqualTo(AnalysisJustification.CODE_NOT_REACHABLE); + assertThat(analysis.getAnalysisResponse()).isEqualTo(AnalysisResponse.WILL_NOT_FIX); + assertThat(analysis.getAnalysisDetails()).isEqualTo("Because I say so."); + assertThat(analysis.isSuppressed()).isFalse(); + assertThat(analysis.getSeverity()).isEqualTo(Severity.LOW); + assertThat(analysis.getCvssV2Vector()).isNull(); + assertThat(analysis.getCvssV2Score()).isNull(); + assertThat(analysis.getCvssV3Vector()).isEqualTo("CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L"); + assertThat(analysis.getCvssV3Score()).isEqualByComparingTo("1.6"); + assertThat(analysis.getOwaspVector()).isNull(); + assertThat(analysis.getOwaspScore()).isNull(); + + assertThat(analysis.getAnalysisComments()).extracting(AnalysisComment::getCommenter).containsOnly("[Policy{Name=Foo, Author=Jane Doe}]"); + assertThat(analysis.getAnalysisComments()).extracting(AnalysisComment::getComment).containsExactly( + "Matched on condition(s):\n- has(component.name)\n- project.version != \"\"", + "Analysis: NOT_SET → NOT_AFFECTED", + "Justification: NOT_SET → CODE_NOT_REACHABLE", + "Vendor Response: NOT_SET → WILL_NOT_FIX", + "Details: Because I say so.", + "Severity: CRITICAL → LOW", + "CVSSv3 Vector: (None) → CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L", + "CVSSv3 Score: (None) → 1.6" + ); + }); + }); + + // TODO: There should be PROJECT_AUDIT_CHANGE notifications. 
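+ // Two records are expected on the producer: the scan result re-published to the
+ // "processed" topic with all scanner BOMs stripped, and a NEW_VULNERABILITY
+ // notification that carries the policy-applied severity (LOW) and CVSSv3 score
+ // (1.6) instead of the vulnerability's database severity (CRITICAL).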
+ assertThat(kafkaMockProducer.history()).satisfiesExactly( + record -> { + assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name()); + final String recordKey = deserializeKey(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record); + assertThat(recordKey).isEqualTo(scanToken); + final ScanResult recordValue = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record); + assertThat(recordValue.getScannerResultsList()).noneMatch(ScannerResult::hasBom); + }, + record -> { + assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY.name()); + final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY, record); + assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO); + assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL); + assertThat(notification.getGroup()).isEqualTo(GROUP_NEW_VULNERABILITY); + assertThat(notification.getSubject().is(NewVulnerabilitySubject.class)).isTrue(); + final var subject = notification.getSubject().unpack(NewVulnerabilitySubject.class); + assertThat(subject.getVulnerability().getVulnId()).isEqualTo("CVE-100"); + assertThat(subject.getVulnerability().getSource()).isEqualTo("NVD"); + assertThat(subject.getVulnerability().getSeverity()).isEqualTo("LOW"); + assertThat(subject.getVulnerability().getCvssV3()).isEqualTo(1.6); + } + ); + } + + @Test + public void analysisThroughPolicyNewAnalysisSuppressionTest() { + final var project = new Project(); + project.setName("acme-app"); + project.setVersion("1.0.0"); + qm.persist(project); + + final var component = new Component(); + component.setName("acme-lib"); + component.setVersion("1.1.0"); + component.setProject(project); + qm.persist(component); + + // Create a vulnerability that was not previously reported for the component. + final var newVuln = new Vulnerability(); + newVuln.setVulnId("CVE-100"); + newVuln.setSource(Vulnerability.Source.NVD); + newVuln.setSeverity(Severity.CRITICAL); + qm.persist(newVuln); + + // Create a policy that marks any finding as FALSE_POSITIVE, and suppresses it. 
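+ // This policy sets no justification, vendor response, details, or ratings, so the
+ // corresponding analysis fields are expected to remain null after it is applied.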
+ final var policyAnalysis = new VulnerabilityPolicyAnalysis();
+ policyAnalysis.setState(VulnerabilityPolicyAnalysis.State.FALSE_POSITIVE);
+ policyAnalysis.setSuppress(true);
+ final var policy = new VulnerabilityPolicy();
+ policy.setName("Foo");
+ policy.setAuthor("Jane Doe");
+ policy.setConditions(List.of("has(component.name)", "project.version != \"\""));
+ policy.setAnalysis(policyAnalysis);
+ jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(policy));
+
+ final var componentUuid = component.getUuid();
+ final var scanToken = UUID.randomUUID().toString();
+ final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build();
+ final var scanResult = ScanResult.newBuilder()
+ .setKey(scanKey)
+ .addScannerResults(ScannerResult.newBuilder()
+ .setScanner(SCANNER_INTERNAL)
+ .setStatus(SCAN_STATUS_SUCCESSFUL)
+ .setBom(Bom.newBuilder().addAllVulnerabilities(List.of(
+ createVuln(newVuln.getVulnId(), newVuln.getSource())
+ ))))
+ .build();
+ processor.process(aConsumerRecord(scanKey, scanResult).build());
+
+ qm.getPersistenceManager().evictAll();
+ assertThat(component.getVulnerabilities()).satisfiesExactly(
+ v -> {
+ assertThat(v.getVulnId()).isEqualTo("CVE-100");
+ assertThat(qm.getAnalysis(component, v)).satisfies(analysis -> {
+ assertThat(analysis.getAnalysisState()).isEqualTo(AnalysisState.FALSE_POSITIVE);
+ assertThat(analysis.getAnalysisJustification()).isNull();
+ assertThat(analysis.getAnalysisResponse()).isNull();
+ assertThat(analysis.getAnalysisDetails()).isNull();
+ assertThat(analysis.isSuppressed()).isTrue();
+ assertThat(analysis.getSeverity()).isNull();
+ assertThat(analysis.getCvssV2Vector()).isNull();
+ assertThat(analysis.getCvssV2Score()).isNull();
+ assertThat(analysis.getCvssV3Vector()).isNull();
+ assertThat(analysis.getCvssV3Score()).isNull();
+ assertThat(analysis.getOwaspVector()).isNull();
+ assertThat(analysis.getOwaspScore()).isNull();
+
+ assertThat(analysis.getAnalysisComments()).extracting(AnalysisComment::getCommenter).containsOnly("[Policy{Name=Foo, Author=Jane Doe}]");
+ assertThat(analysis.getAnalysisComments()).extracting(AnalysisComment::getComment).containsExactly(
+ "Matched on condition(s):\n- has(component.name)\n- project.version != \"\"",
+ "Analysis: NOT_SET → FALSE_POSITIVE",
+ "Suppressed"
+ );
+ });
+ });
+
+ // The vulnerability was suppressed, so no notifications to be expected.
+ // TODO: There should be PROJECT_AUDIT_CHANGE notifications.
+ assertThat(kafkaMockProducer.history()).satisfiesExactly(record -> {
+ assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+ final String recordKey = deserializeKey(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(recordKey).isEqualTo(scanToken);
+ final ScanResult recordValue = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(recordValue.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+ });
+ }
+
+ @Test
+ public void analysisThroughPolicyExistingDifferentAnalysisTest() {
+ final var project = new Project();
+ project.setName("acme-app");
+ project.setVersion("1.0.0");
+ qm.persist(project);
+
+ final var component = new Component();
+ component.setName("acme-lib");
+ component.setVersion("1.1.0");
+ component.setProject(project);
+ qm.persist(component);
+
+ // Create an existing vulnerability, for which the analysis is entirely different
+ // from what's defined in the policy.
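+ // Every analysis field the policy can set is seeded with a conflicting value here,
+ // so each individual change must later show up as a separate analysis comment.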
+ final var vuln = new Vulnerability(); + vuln.setVulnId("CVE-100"); + vuln.setSource(Vulnerability.Source.NVD); + vuln.setSeverity(Severity.CRITICAL); + qm.persist(vuln); + qm.addVulnerability(vuln, component, AnalyzerIdentity.INTERNAL_ANALYZER); + final Analysis vulnAnalysis = qm.makeAnalysis(component, vuln, AnalysisState.FALSE_POSITIVE, + AnalysisJustification.NOT_SET, AnalysisResponse.CAN_NOT_FIX, "oldDetails", true); + vulnAnalysis.setSeverity(Severity.INFO); + vulnAnalysis.setCvssV2Vector("oldCvssV2Vector"); + vulnAnalysis.setCvssV2Score(BigDecimal.ZERO); + vulnAnalysis.setCvssV3Vector("oldCvssV3Vector"); + vulnAnalysis.setCvssV3Score(BigDecimal.ZERO); + vulnAnalysis.setOwaspVector("oldOwaspVector"); + vulnAnalysis.setOwaspScore(BigDecimal.ZERO); + qm.persist(vulnAnalysis); + + // Create a policy that marks any finding as NOT_AFFECTED, and downgrades the severity to LOW. + final var policyAnalysis = new VulnerabilityPolicyAnalysis(); + policyAnalysis.setState(VulnerabilityPolicyAnalysis.State.NOT_AFFECTED); + policyAnalysis.setJustification(VulnerabilityPolicyAnalysis.Justification.CODE_NOT_REACHABLE); + policyAnalysis.setVendorResponse(VulnerabilityPolicyAnalysis.Response.WILL_NOT_FIX); + policyAnalysis.setDetails("Because I say so."); + final var policyRating = new VulnerabilityPolicyRating(); + policyRating.setMethod(VulnerabilityPolicyRating.Method.CVSSV3); + policyRating.setSeverity(VulnerabilityPolicyRating.Severity.LOW); + policyRating.setVector("CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L"); + policyRating.setScore(1.6); + final var policy = new VulnerabilityPolicy(); + policy.setName("Foo"); + policy.setAuthor("Jane Doe"); + policy.setConditions(List.of("has(component.name)", "project.version != \"\"")); + policy.setAnalysis(policyAnalysis); + policy.setRatings(List.of(policyRating)); + jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(policy)); + + final var componentUuid = component.getUuid(); + final var scanToken = UUID.randomUUID().toString(); + final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); + final var scanResult = ScanResult.newBuilder() + .setKey(scanKey) + .addScannerResults(ScannerResult.newBuilder() + .setScanner(SCANNER_INTERNAL) + .setStatus(SCAN_STATUS_SUCCESSFUL) + .setBom(Bom.newBuilder().addAllVulnerabilities(List.of( + createVuln(vuln.getVulnId(), vuln.getSource()) + )))) + .build(); + processor.process(aConsumerRecord(scanKey, scanResult).build()); + + qm.getPersistenceManager().evictAll(); + assertThat(component.getVulnerabilities()).satisfiesExactly( + v -> { + assertThat(v.getVulnId()).isEqualTo("CVE-100"); + assertThat(qm.getAnalysis(component, v)).satisfies(analysis -> { + assertThat(analysis.getAnalysisState()).isEqualTo(AnalysisState.NOT_AFFECTED); + assertThat(analysis.getAnalysisJustification()).isEqualTo(AnalysisJustification.CODE_NOT_REACHABLE); + assertThat(analysis.getAnalysisResponse()).isEqualTo(AnalysisResponse.WILL_NOT_FIX); + assertThat(analysis.getAnalysisDetails()).isEqualTo("Because I say so."); + assertThat(analysis.isSuppressed()).isFalse(); + assertThat(analysis.getSeverity()).isEqualTo(Severity.LOW); + assertThat(analysis.getCvssV2Vector()).isNull(); + assertThat(analysis.getCvssV2Score()).isNull(); + assertThat(analysis.getCvssV3Vector()).isEqualTo("CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L"); + assertThat(analysis.getCvssV3Score()).isEqualByComparingTo("1.6"); + assertThat(analysis.getOwaspVector()).isNull(); + 
assertThat(analysis.getOwaspScore()).isNull();
+
+ assertThat(analysis.getAnalysisComments()).extracting(AnalysisComment::getCommenter).containsOnly("[Policy{Name=Foo, Author=Jane Doe}]");
+ assertThat(analysis.getAnalysisComments()).extracting(AnalysisComment::getComment).containsExactly(
+ "Matched on condition(s):\n- has(component.name)\n- project.version != \"\"",
+ "Analysis: FALSE_POSITIVE → NOT_AFFECTED",
+ "Justification: NOT_SET → CODE_NOT_REACHABLE",
+ "Vendor Response: CAN_NOT_FIX → WILL_NOT_FIX",
+ "Details: Because I say so.",
+ "Unsuppressed",
+ "Severity: INFO → LOW",
+ "CVSSv2 Vector: oldCvssV2Vector → (None)",
+ "CVSSv2 Score: 0.0 → (None)",
+ "CVSSv3 Vector: oldCvssV3Vector → CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L",
+ "CVSSv3 Score: 0.0 → 1.6",
+ "OWASP Vector: oldOwaspVector → (None)",
+ "OWASP Score: 0.0 → (None)"
+ );
+ });
+ });
+
+ // The vulnerability already existed, so no NEW_VULNERABILITY notification is expected.
+ // There should be a PROJECT_AUDIT_CHANGE notification instead.
+ assertThat(kafkaMockProducer.history()).satisfiesExactly(
+ record -> {
+ assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+ final String recordKey = deserializeKey(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(recordKey).isEqualTo(scanToken);
+ final ScanResult recordValue = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(recordValue.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+ },
+ record -> {
+ assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_AUDIT_CHANGE.name());
+ final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_PROJECT_AUDIT_CHANGE, record);
+ assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO);
+ assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL);
+ assertThat(notification.getGroup()).isEqualTo(GROUP_PROJECT_AUDIT_CHANGE);
+ assertThat(notification.getSubject().is(VulnerabilityAnalysisDecisionChangeSubject.class)).isTrue();
+ final var subject = notification.getSubject().unpack(VulnerabilityAnalysisDecisionChangeSubject.class);
+ assertThat(subject.getAnalysis().getState()).isEqualTo("NOT_AFFECTED");
+ }
+ );
+ }
+
+ @Test
+ public void analysisThroughPolicyExistingEqualAnalysisTest() {
+ final var project = new Project();
+ project.setName("acme-app");
+ project.setVersion("1.0.0");
+ qm.persist(project);
+
+ final var component = new Component();
+ component.setName("acme-lib");
+ component.setVersion("1.1.0");
+ component.setProject(project);
+ qm.persist(component);
+
+ // Create an existing vulnerability, for which the analysis is identical
+ // to what's defined in the policy.
+ final var vuln = new Vulnerability();
+ vuln.setVulnId("CVE-100");
+ vuln.setSource(Vulnerability.Source.NVD);
+ vuln.setSeverity(Severity.CRITICAL);
+ qm.persist(vuln);
+ qm.addVulnerability(vuln, component, AnalyzerIdentity.INTERNAL_ANALYZER);
+ final Analysis vulnAnalysis = qm.makeAnalysis(component, vuln, AnalysisState.NOT_AFFECTED,
+ AnalysisJustification.CODE_NOT_REACHABLE, AnalysisResponse.WILL_NOT_FIX, "Because I say so.", false);
+ vulnAnalysis.setSeverity(Severity.LOW);
+ vulnAnalysis.setCvssV3Vector("CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L");
+ vulnAnalysis.setCvssV3Score(BigDecimal.valueOf(1.6));
+ qm.persist(vulnAnalysis);
+
+ // Create a policy that marks any finding as NOT_AFFECTED, and downgrades the severity to LOW.
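+ // The policy defined below mirrors the existing analysis exactly, so applying it
+ // should effectively be a no-op.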
+ final var policyAnalysis = new VulnerabilityPolicyAnalysis(); + policyAnalysis.setState(VulnerabilityPolicyAnalysis.State.NOT_AFFECTED); + policyAnalysis.setJustification(VulnerabilityPolicyAnalysis.Justification.CODE_NOT_REACHABLE); + policyAnalysis.setVendorResponse(VulnerabilityPolicyAnalysis.Response.WILL_NOT_FIX); + policyAnalysis.setDetails("Because I say so."); + final var policyRating = new VulnerabilityPolicyRating(); + policyRating.setMethod(VulnerabilityPolicyRating.Method.CVSSV3); + policyRating.setSeverity(VulnerabilityPolicyRating.Severity.LOW); + policyRating.setVector("CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L"); + policyRating.setScore(1.6); + final var policy = new VulnerabilityPolicy(); + policy.setName("Foo"); + policy.setConditions(List.of("has(component.name)", "project.version != \"\"")); + policy.setAnalysis(policyAnalysis); + policy.setRatings(List.of(policyRating)); + jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(policy)); + + final var componentUuid = component.getUuid(); + final var scanToken = UUID.randomUUID().toString(); + final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); + final var scanResult = ScanResult.newBuilder() + .setKey(scanKey) + .addScannerResults(ScannerResult.newBuilder() + .setScanner(SCANNER_INTERNAL) + .setStatus(SCAN_STATUS_SUCCESSFUL) + .setBom(Bom.newBuilder().addAllVulnerabilities(List.of( + createVuln(vuln.getVulnId(), vuln.getSource()) + )))) + .build(); + processor.process(aConsumerRecord(scanKey, scanResult).build()); + + qm.getPersistenceManager().evictAll(); + assertThat(component.getVulnerabilities()).satisfiesExactly( + v -> { + assertThat(v.getVulnId()).isEqualTo("CVE-100"); + assertThat(qm.getAnalysis(component, v)).satisfies(analysis -> { + assertThat(analysis.getAnalysisState()).isEqualTo(AnalysisState.NOT_AFFECTED); + assertThat(analysis.getAnalysisJustification()).isEqualTo(AnalysisJustification.CODE_NOT_REACHABLE); + assertThat(analysis.getAnalysisResponse()).isEqualTo(AnalysisResponse.WILL_NOT_FIX); + assertThat(analysis.getAnalysisDetails()).isEqualTo("Because I say so."); + assertThat(analysis.isSuppressed()).isFalse(); + assertThat(analysis.getSeverity()).isEqualTo(Severity.LOW); + assertThat(analysis.getCvssV2Vector()).isNull(); + assertThat(analysis.getCvssV2Score()).isNull(); + assertThat(analysis.getCvssV3Vector()).isEqualTo("CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L"); + assertThat(analysis.getCvssV3Score()).isEqualByComparingTo("1.6"); + assertThat(analysis.getOwaspVector()).isNull(); + assertThat(analysis.getOwaspScore()).isNull(); + + // As no changes were made, no analysis comments should've been created. + assertThat(analysis.getAnalysisComments()).isEmpty(); + }); + }); + + // The vulnerability already existed, so no notifications to be expected. 
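+ // Since the policy analysis matches the existing analysis, the only record
+ // produced is the stripped scan result on the "processed" topic; no audit-change
+ // notification is emitted.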
+ assertThat(kafkaMockProducer.history()).satisfiesExactly(record -> {
+ assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+ final String recordKey = deserializeKey(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(recordKey).isEqualTo(scanToken);
+ final ScanResult recordValue = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(recordValue.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+ });
+ }
+
+ @Test
+ public void analysisThroughPolicyWithAliasesTest() {
+ final var project = new Project();
+ project.setName("acme-app");
+ project.setVersion("1.0.0");
+ qm.persist(project);
+
+ final var component = new Component();
+ component.setName("acme-lib");
+ component.setVersion("1.1.0");
+ component.setProject(project);
+ qm.persist(component);
+
+ // Create a vulnerability for which no aliases are currently known.
+ // Aliases will be reported by the ScanResult.
+ final var vulnA = new Vulnerability();
+ vulnA.setVulnId("CVE-100");
+ vulnA.setSource(Vulnerability.Source.NVD);
+ qm.persist(vulnA);
+
+ // Create a vulnerability for which an alias is already known.
+ // The same alias will be reported by the ScanResult.
+ final var vulnB = new Vulnerability();
+ vulnB.setVulnId("CVE-200");
+ vulnB.setSource(Vulnerability.Source.NVD);
+ qm.persist(vulnB);
+ final var vulnAliasB = new VulnerabilityAlias();
+ vulnAliasB.setCveId("CVE-200");
+ vulnAliasB.setGhsaId("GHSA-200");
+ qm.synchronizeVulnerabilityAlias(vulnAliasB);
+
+ // Create a policy that suppresses any finding with the alias GHSA-100 or GHSA-200.
+ final var policyAnalysis = new VulnerabilityPolicyAnalysis();
+ policyAnalysis.setState(VulnerabilityPolicyAnalysis.State.FALSE_POSITIVE);
+ policyAnalysis.setSuppress(true);
+ final var policy = new VulnerabilityPolicy();
+ policy.setName("Foo");
+ policy.setConditions(List.of("vuln.aliases.exists(alias, alias.id == \"GHSA-100\" || alias.id == \"GHSA-200\")"));
+ policy.setAnalysis(policyAnalysis);
+ jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(policy));
+
+ // Report three vulnerabilities for the component:
+ // - CVE-100 with alias GHSA-100 (vuln already in DB, alias is new)
+ // - CVE-200 with alias GHSA-200 (vuln and alias already in DB)
+ // - CVE-300 without alias (vuln not yet in DB)
+ final var componentUuid = component.getUuid();
+ final var scanToken = UUID.randomUUID().toString();
+ final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build();
+ final var scanResult = ScanResult.newBuilder()
+ .setKey(scanKey)
+ .addScannerResults(ScannerResult.newBuilder()
+ .setScanner(SCANNER_INTERNAL)
+ .setStatus(SCAN_STATUS_SUCCESSFUL)
+ .setBom(Bom.newBuilder().addAllVulnerabilities(List.of(
+ org.cyclonedx.proto.v1_4.Vulnerability.newBuilder()
+ .setId("CVE-100")
+ .setSource(Source.newBuilder().setName("NVD"))
+ .addReferences(VulnerabilityReference.newBuilder()
+ .setId("GHSA-100")
+ .setSource(Source.newBuilder().setName("GITHUB")))
+ .build(),
+ org.cyclonedx.proto.v1_4.Vulnerability.newBuilder()
+ .setId("CVE-200")
+ .setSource(Source.newBuilder().setName("NVD"))
+ .addReferences(VulnerabilityReference.newBuilder()
+ .setId("GHSA-200")
+ .setSource(Source.newBuilder().setName("GITHUB")))
+ .build(),
+ org.cyclonedx.proto.v1_4.Vulnerability.newBuilder()
+ .setId("CVE-300")
+ .setSource(Source.newBuilder().setName("NVD"))
+ .build()
+ ))))
+ .build();
+ processor.process(aConsumerRecord(scanKey,
scanResult).build()); + + qm.getPersistenceManager().evictAll(); + assertThat(component.getVulnerabilities()).satisfiesExactlyInAnyOrder( + v -> { + assertThat(v.getVulnId()).isEqualTo("CVE-100"); + assertThat(qm.getAnalysis(component, v)).satisfies(analysis -> { + assertThat(analysis.getAnalysisState()).isEqualTo(AnalysisState.FALSE_POSITIVE); + assertThat(analysis.isSuppressed()).isTrue(); + }); + }, + v -> { + assertThat(v.getVulnId()).isEqualTo("CVE-200"); + assertThat(qm.getAnalysis(component, v)).satisfies(analysis -> { + assertThat(analysis.getAnalysisState()).isEqualTo(AnalysisState.FALSE_POSITIVE); + assertThat(analysis.isSuppressed()).isTrue(); + }); + }, + v -> { + assertThat(v.getVulnId()).isEqualTo("CVE-300"); + assertThat(qm.getAnalysis(component, v)).isNull(); + } + ); + } + + @Test + public void analysisThroughPolicyResetOnNoMatchTest() { + final var project = new Project(); + project.setName("acme-app"); + project.setVersion("1.0.0"); + qm.persist(project); + + final var component = new Component(); + component.setName("acme-lib"); + component.setVersion("1.1.0"); + component.setProject(project); + qm.persist(component); + + final var policyAnalysis = new VulnerabilityPolicyAnalysis(); + policyAnalysis.setState(VulnerabilityPolicyAnalysis.State.FALSE_POSITIVE); + policyAnalysis.setJustification(VulnerabilityPolicyAnalysis.Justification.CODE_NOT_REACHABLE); + policyAnalysis.setVendorResponse(VulnerabilityPolicyAnalysis.Response.WILL_NOT_FIX); + policyAnalysis.setSuppress(true); + final var policy = new VulnerabilityPolicy(); + policy.setName("Foo"); + policy.setConditions(List.of("component.name == \"some-other-name\"")); + policy.setAnalysis(policyAnalysis); + jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(policy)); + + // Create vulnerability with existing analysis that was previously applied by the above policy, + // but is no longer current. + final var vulnA = new Vulnerability(); + vulnA.setVulnId("CVE-100"); + vulnA.setSource(Vulnerability.Source.NVD); + vulnA.setSeverity(Severity.CRITICAL); + qm.persist(vulnA); + qm.addVulnerability(vulnA, component, AnalyzerIdentity.INTERNAL_ANALYZER); + final var analysisA = new Analysis(); + analysisA.setComponent(component); + analysisA.setVulnerability(vulnA); + analysisA.setAnalysisState(AnalysisState.NOT_AFFECTED); + analysisA.setAnalysisJustification(AnalysisJustification.CODE_NOT_REACHABLE); + analysisA.setAnalysisResponse(AnalysisResponse.WILL_NOT_FIX); + analysisA.setAnalysisDetails("Because I say so."); + analysisA.setSeverity(Severity.MEDIUM); + analysisA.setCvssV2Vector("oldCvssV2Vector"); + analysisA.setCvssV2Score(BigDecimal.valueOf(1.1)); + analysisA.setCvssV3Vector("oldCvssV3Vector"); + analysisA.setCvssV3Score(BigDecimal.valueOf(2.2)); + analysisA.setOwaspVector("oldOwaspVector"); + analysisA.setOwaspScore(BigDecimal.valueOf(3.3)); + analysisA.setSuppressed(true); + qm.getPersistenceManager().makePersistent(analysisA); + jdbi(qm).useHandle(jdbiHandle -> jdbiHandle.createUpdate(""" + UPDATE + "ANALYSIS" + SET + "VULNERABILITY_POLICY_ID" = (SELECT "ID" FROM "VULNERABILITY_POLICY" WHERE "NAME" = :vulnPolicyName) + WHERE + "ID" = :analysisId + """) + .bind("vulnPolicyName", policy.getName()) + .bind("analysisId", analysisA.getId()) + .execute()); + + // Create another vulnerability with existing analysis that was manually applied. 
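+ // Unlike analysisA above, this analysis is not linked to any vulnerability policy
+ // (no VULNERABILITY_POLICY_ID is set), so the policy reset must leave it untouched.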
+ final var vulnB = new Vulnerability(); + vulnB.setVulnId("CVE-200"); + vulnB.setSource(Vulnerability.Source.NVD); + vulnB.setSeverity(Severity.HIGH); + qm.persist(vulnB); + qm.addVulnerability(vulnB, component, AnalyzerIdentity.INTERNAL_ANALYZER); + final var analysisB = new Analysis(); + analysisB.setComponent(component); + analysisB.setVulnerability(vulnB); + analysisB.setAnalysisState(AnalysisState.NOT_AFFECTED); + qm.getPersistenceManager().makePersistent(analysisB); + + // Report both CVE-100 and CVE-200 again. + final var componentUuid = component.getUuid(); + final var scanToken = UUID.randomUUID().toString(); + final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); + final var scanResult = ScanResult.newBuilder() + .setKey(scanKey) + .addScannerResults(ScannerResult.newBuilder() + .setScanner(SCANNER_INTERNAL) + .setStatus(SCAN_STATUS_SUCCESSFUL) + .setBom(Bom.newBuilder().addAllVulnerabilities(List.of( + createVuln(vulnA.getVulnId(), vulnA.getSource()), + createVuln(vulnB.getVulnId(), vulnB.getSource()) + )))) + .build(); + processor.process(aConsumerRecord(scanKey, scanResult).build()); + + qm.getPersistenceManager().evictAll(); + assertThat(component.getVulnerabilities()).satisfiesExactly( + v -> { + // The analysis that was previously applied via policy must have been reverted. + assertThat(v.getVulnId()).isEqualTo("CVE-100"); + assertThat(qm.getAnalysis(component, v)).satisfies(a -> { + assertThat(a.getAnalysisState()).isEqualTo(AnalysisState.NOT_SET); + assertThat(a.getVulnerabilityPolicy()).isNull(); + assertThat(a.getAnalysisComments()).extracting(AnalysisComment::getCommenter).containsOnly("[Policy{None}]"); + assertThat(a.getAnalysisComments()).extracting(AnalysisComment::getComment).containsExactlyInAnyOrder( + "No longer covered by any policy", + "Analysis: NOT_AFFECTED → NOT_SET", + "Justification: CODE_NOT_REACHABLE → NOT_SET", + "Vendor Response: WILL_NOT_FIX → NOT_SET", + "Details: (None)", + "Severity: MEDIUM → UNASSIGNED", + "CVSSv2 Vector: oldCvssV2Vector → (None)", + "CVSSv2 Score: 1.1 → (None)", + "CVSSv3 Vector: oldCvssV3Vector → (None)", + "CVSSv3 Score: 2.2 → (None)", + "OWASP Vector: oldOwaspVector → (None)", + "OWASP Score: 3.3 → (None)", + "Unsuppressed" + ); + }); + }, + v -> { + // The manually applied analysis must not be touched! 
+ assertThat(v.getVulnId()).isEqualTo("CVE-200"); + assertThat(qm.getAnalysis(component, v)).satisfies(a -> { + assertThat(a.getAnalysisState()).isEqualTo(AnalysisState.NOT_AFFECTED); + assertThat(a.getAnalysisJustification()).isNull(); + assertThat(a.getAnalysisResponse()).isNull(); + assertThat(a.getAnalysisDetails()).isNull(); + assertThat(a.getSeverity()).isNull(); + assertThat(a.getCvssV2Vector()).isNull(); + assertThat(a.getCvssV2Score()).isNull(); + assertThat(a.getCvssV3Vector()).isNull(); + assertThat(a.getCvssV3Score()).isNull(); + assertThat(a.getVulnerabilityPolicy()).isNull(); + assertThat(a.getAnalysisComments()).isEmpty(); + }); + }); + } + + @Test + public void analysisThroughPolicyWithPoliciesNotYetValidOrNotValidAnymoreTest() { + final var project = new Project(); + project.setName("acme-app"); + project.setVersion("1.0.0"); + qm.persist(project); + + final var component = new Component(); + component.setName("acme-lib"); + component.setVersion("1.1.0"); + component.setProject(project); + qm.persist(component); + + final var notYetValidPolicyAnalysis = new VulnerabilityPolicyAnalysis(); + notYetValidPolicyAnalysis.setState(VulnerabilityPolicyAnalysis.State.FALSE_POSITIVE); + notYetValidPolicyAnalysis.setSuppress(true); + final var notYetValidPolicy = new VulnerabilityPolicy(); + notYetValidPolicy.setName("NotYetValid"); + notYetValidPolicy.setValidFrom(ZonedDateTime.ofInstant(Instant.now().plusSeconds(180), ZoneOffset.UTC)); + notYetValidPolicy.setConditions(List.of("true")); + notYetValidPolicy.setAnalysis(notYetValidPolicyAnalysis); + jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(notYetValidPolicy)); + + final var notValidAnymorePolicyAnalysis = new VulnerabilityPolicyAnalysis(); + notValidAnymorePolicyAnalysis.setState(VulnerabilityPolicyAnalysis.State.FALSE_POSITIVE); + notValidAnymorePolicyAnalysis.setSuppress(true); + final var notValidAnymorePolicy = new VulnerabilityPolicy(); + notValidAnymorePolicy.setName("NotValidAnymore"); + notValidAnymorePolicy.setValidUntil(ZonedDateTime.ofInstant(Instant.now().minusSeconds(180), ZoneOffset.UTC)); + notValidAnymorePolicy.setConditions(List.of("true")); + notValidAnymorePolicy.setAnalysis(notValidAnymorePolicyAnalysis); + jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(notValidAnymorePolicy)); + + final var vuln = new Vulnerability(); + vuln.setVulnId("CVE-100"); + vuln.setSource(Vulnerability.Source.NVD); + vuln.setSeverity(Severity.CRITICAL); + qm.persist(vuln); + + final var componentUuid = component.getUuid(); + final var scanToken = UUID.randomUUID().toString(); + final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); + final var scanResult = ScanResult.newBuilder() + .setKey(scanKey) + .addScannerResults(ScannerResult.newBuilder() + .setScanner(SCANNER_INTERNAL) + .setStatus(SCAN_STATUS_SUCCESSFUL) + .setBom(Bom.newBuilder().addAllVulnerabilities(List.of( + createVuln(vuln.getVulnId(), vuln.getSource()) + )))) + .build(); + processor.process(aConsumerRecord(scanKey, scanResult).build()); + + qm.getPersistenceManager().evictAll(); + assertThat(qm.getAnalysis(component, vuln)).isNull(); + } + + @Test + public void analysisThroughPolicyWithAnalysisUpdateNotOnStateOrSuppressionTest() { + final var project = new Project(); + project.setName("acme-app"); + project.setVersion("1.0.0"); + qm.persist(project); + + final var component = new Component(); + component.setName("acme-lib"); + component.setVersion("1.1.0"); + 
component.setProject(project); + qm.persist(component); + + final var vuln = new Vulnerability(); + vuln.setVulnId("CVE-100"); + vuln.setSource(Vulnerability.Source.NVD); + vuln.setSeverity(Severity.CRITICAL); + qm.persist(vuln); + + qm.addVulnerability(vuln, component, AnalyzerIdentity.INTERNAL_ANALYZER); + + final var analysis = new Analysis(); + analysis.setComponent(component); + analysis.setVulnerability(vuln); + analysis.setAnalysisState(AnalysisState.FALSE_POSITIVE); + analysis.setSuppressed(true); + qm.persist(analysis); + + final var policyAnalysis = new VulnerabilityPolicyAnalysis(); + policyAnalysis.setState(VulnerabilityPolicyAnalysis.State.FALSE_POSITIVE); + policyAnalysis.setDetails("newDetails"); + policyAnalysis.setSuppress(true); + final var policy = new VulnerabilityPolicy(); + policy.setName("Foo"); + policy.setConditions(List.of("true")); + policy.setAnalysis(policyAnalysis); + jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(policy)); + + final var componentUuid = component.getUuid(); + final var scanToken = UUID.randomUUID().toString(); + final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); + final var scanResult = ScanResult.newBuilder() + .setKey(scanKey) + .addScannerResults(ScannerResult.newBuilder() + .setScanner(SCANNER_INTERNAL) + .setStatus(SCAN_STATUS_SUCCESSFUL) + .setBom(Bom.newBuilder().addAllVulnerabilities(List.of( + createVuln(vuln.getVulnId(), vuln.getSource()) + )))) + .build(); + processor.process(aConsumerRecord(scanKey, scanResult).build()); + + qm.getPersistenceManager().refresh(analysis); + assertThat(analysis.getAnalysisDetails()).isEqualTo("newDetails"); + + assertThat(kafkaMockProducer.history()).noneSatisfy( + record -> assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_AUDIT_CHANGE.name())); + } + + private org.cyclonedx.proto.v1_4.Vulnerability createVuln(final String id, final String source) { + return org.cyclonedx.proto.v1_4.Vulnerability.newBuilder() + .setId(id) + .setSource(Source.newBuilder().setName(source).build()).build(); + } +} \ No newline at end of file From 0128429aeb07e5121f3dfc916d2efd4e561d1a3b Mon Sep 17 00:00:00 2001 From: nscuro Date: Thu, 28 Mar 2024 16:40:57 +0100 Subject: [PATCH 13/24] =?UTF-8?q?Nuke=20Kafka=20Streams=20=F0=9F=92=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: nscuro --- pom.xml | 11 +- .../org/dependencytrack/common/ConfigKey.java | 9 - .../streams/KafkaStreamsInitializer.java | 153 -- .../streams/KafkaStreamsTopologyFactory.java | 240 --- ...bstractThresholdBasedExceptionHandler.java | 73 - ...treamsDeserializationExceptionHandler.java | 90 -- ...afkaStreamsProductionExceptionHandler.java | 97 -- .../KafkaStreamsUncaughtExceptionHandler.java | 128 -- ...ayedBomProcessedNotificationProcessor.java | 106 -- .../VulnerabilityScanResultProcessor.java | 1191 -------------- .../health/HealthCheckInitializer.java | 1 - .../health/KafkaStreamsHealthCheck.java | 58 - src/main/resources/application.properties | 33 - src/main/webapp/WEB-INF/web.xml | 3 - ...msDelayedBomProcessedNotificationTest.java | 316 ---- .../event/kafka/streams/KafkaStreamsTest.java | 127 -- .../streams/KafkaStreamsTopologyTest.java | 610 -------- ...msDeserializationExceptionHandlerTest.java | 67 - ...StreamsProductionExceptionHandlerTest.java | 72 - ...kaStreamsUncaughtExceptionHandlerTest.java | 73 - .../VulnerabilityScanResultProcessorTest.java | 1374 ----------------- 
21 files changed, 1 insertion(+), 4831 deletions(-)
 delete mode 100644 src/main/java/org/dependencytrack/event/kafka/streams/KafkaStreamsInitializer.java
 delete mode 100644 src/main/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTopologyFactory.java
 delete mode 100644 src/main/java/org/dependencytrack/event/kafka/streams/exception/AbstractThresholdBasedExceptionHandler.java
 delete mode 100644 src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsDeserializationExceptionHandler.java
 delete mode 100644 src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsProductionExceptionHandler.java
 delete mode 100644 src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsUncaughtExceptionHandler.java
 delete mode 100644 src/main/java/org/dependencytrack/event/kafka/streams/processor/DelayedBomProcessedNotificationProcessor.java
 delete mode 100644 src/main/java/org/dependencytrack/event/kafka/streams/processor/VulnerabilityScanResultProcessor.java
 delete mode 100644 src/main/java/org/dependencytrack/health/KafkaStreamsHealthCheck.java
 delete mode 100644 src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsDelayedBomProcessedNotificationTest.java
 delete mode 100644 src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTest.java
 delete mode 100644 src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTopologyTest.java
 delete mode 100644 src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsDeserializationExceptionHandlerTest.java
 delete mode 100644 src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsProductionExceptionHandlerTest.java
 delete mode 100644 src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsUncaughtExceptionHandlerTest.java
 delete mode 100644 src/test/java/org/dependencytrack/event/kafka/streams/processor/VulnerabilityScanResultProcessorTest.java

diff --git a/pom.xml b/pom.xml
index cbda27c1b..49b8a8cf6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -288,16 +288,7 @@
 <artifactId>kafka-clients</artifactId>
 <version>${lib.kafka.version}</version>
 </dependency>
- <dependency>
- <groupId>org.apache.kafka</groupId>
- <artifactId>kafka-streams</artifactId>
- <version>${lib.kafka.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.kafka</groupId>
- <artifactId>kafka-streams-test-utils</artifactId>
- <version>${lib.kafka.version}</version>
- </dependency>
+

 <dependency>
 <groupId>net.javacrumbs.shedlock</groupId>
diff --git a/src/main/java/org/dependencytrack/common/ConfigKey.java b/src/main/java/org/dependencytrack/common/ConfigKey.java
index 2f2fa8922..dffc6e0e4 100644
--- a/src/main/java/org/dependencytrack/common/ConfigKey.java
+++ b/src/main/java/org/dependencytrack/common/ConfigKey.java
@@ -40,17 +40,8 @@ public enum ConfigKey implements Config.Key {
 KEY_STORE_PATH("kafka.keystore.path", ""),
 KEY_STORE_PASSWORD("kafka.keystore.password", ""),

- KAFKA_NUM_STREAM_THREADS("kafka.num.stream.threads", 1),
 KAFKA_TOPIC_PREFIX("kafka.topic.prefix", ""),
 KAFKA_PRODUCER_DRAIN_TIMEOUT_DURATION("kafka.producer.drain.timeout.duration", "PT30S"),
- KAFKA_STREAMS_DESERIALIZATION_EXCEPTION_THRESHOLD_COUNT("kafka.streams.deserialization.exception.threshold.count", "5"),
- KAFKA_STREAMS_DESERIALIZATION_EXCEPTION_THRESHOLD_INTERVAL("kafka.streams.deserialization.exception.threshold.interval", "PT30M"),
- KAFKA_STREAMS_METRICS_RECORDING_LEVEL("kafka.streams.metrics.recording.level", "INFO"),
- KAFKA_STREAMS_PRODUCTION_EXCEPTION_THRESHOLD_COUNT("kafka.streams.production.exception.threshold.count", "5"),
- KAFKA_STREAMS_PRODUCTION_EXCEPTION_THRESHOLD_INTERVAL("kafka.streams.production.exception.threshold.interval", "PT30H"),
- 
KAFKA_STREAMS_TRANSIENT_PROCESSING_EXCEPTION_THRESHOLD_COUNT("kafka.streams.transient.processing.exception.threshold.count", "50"), - KAFKA_STREAMS_TRANSIENT_PROCESSING_EXCEPTION_THRESHOLD_INTERVAL("kafka.streams.transient.processing.exception.threshold.interval", "PT30M"), - KAFKA_STREAMS_DRAIN_TIMEOUT_DURATION("kafka.streams.drain.timeout.duration", "PT30S"), CRON_EXPRESSION_FOR_PORTFOLIO_METRICS_TASK("task.cron.metrics.portfolio", "10 * * * *"), CRON_EXPRESSION_FOR_VULNERABILITY_METRICS_TASK("task.cron.metrics.vulnerability", "40 * * * *"), diff --git a/src/main/java/org/dependencytrack/event/kafka/streams/KafkaStreamsInitializer.java b/src/main/java/org/dependencytrack/event/kafka/streams/KafkaStreamsInitializer.java deleted file mode 100644 index 607066551..000000000 --- a/src/main/java/org/dependencytrack/event/kafka/streams/KafkaStreamsInitializer.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. - */ -package org.dependencytrack.event.kafka.streams; - -import alpine.Config; -import alpine.common.logging.Logger; -import alpine.common.metrics.Metrics; -import io.micrometer.core.instrument.binder.kafka.KafkaStreamsMetrics; -import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.common.config.SslConfigs; -import org.apache.kafka.common.record.CompressionType; -import org.apache.kafka.streams.KafkaStreams; -import org.apache.kafka.streams.StreamsConfig; -import org.dependencytrack.common.ConfigKey; -import org.dependencytrack.event.kafka.streams.exception.KafkaStreamsDeserializationExceptionHandler; -import org.dependencytrack.event.kafka.streams.exception.KafkaStreamsProductionExceptionHandler; -import org.dependencytrack.event.kafka.streams.exception.KafkaStreamsUncaughtExceptionHandler; - -import javax.servlet.ServletContextEvent; -import javax.servlet.ServletContextListener; -import java.nio.file.Paths; -import java.time.Duration; -import java.util.Map; -import java.util.Properties; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class KafkaStreamsInitializer implements ServletContextListener { - - private static final Logger LOGGER = Logger.getLogger(KafkaStreamsInitializer.class); - private static final Duration DRAIN_TIMEOUT_DURATION; - private static final Pattern CONSUMER_PREFIX_PATTERN; - private static final Pattern PRODUCER_PREFIX_PATTERN; - - static { - DRAIN_TIMEOUT_DURATION = Duration.parse(Config.getInstance().getProperty(ConfigKey.KAFKA_STREAMS_DRAIN_TIMEOUT_DURATION)); - - CONSUMER_PREFIX_PATTERN = Pattern.compile("^(%s|%s|%s)".formatted( - Pattern.quote(StreamsConfig.CONSUMER_PREFIX), - Pattern.quote(StreamsConfig.GLOBAL_CONSUMER_PREFIX), - Pattern.quote(StreamsConfig.MAIN_CONSUMER_PREFIX) - )); - - PRODUCER_PREFIX_PATTERN = Pattern.compile("^" + 
Pattern.quote(StreamsConfig.PRODUCER_PREFIX)); - } - - private static KafkaStreams STREAMS; - private static KafkaStreamsMetrics STREAMS_METRICS; - - @Override - public void contextInitialized(final ServletContextEvent event) { - LOGGER.info("Initializing Kafka streams"); - - STREAMS = new KafkaStreams(new KafkaStreamsTopologyFactory().createTopology(), new StreamsConfig(getDefaultProperties())); - STREAMS.setUncaughtExceptionHandler(new KafkaStreamsUncaughtExceptionHandler()); - - if (Config.getInstance().getPropertyAsBoolean(Config.AlpineKey.METRICS_ENABLED)) { - LOGGER.info("Registering Kafka streams metrics"); - STREAMS_METRICS = new KafkaStreamsMetrics(STREAMS); - STREAMS_METRICS.bindTo(Metrics.getRegistry()); - } - - STREAMS.start(); - } - - @Override - public void contextDestroyed(final ServletContextEvent event) { - if (STREAMS != null) { - LOGGER.info("Closing Kafka streams"); - - // Close streams, but wait for a configurable amount of time - // for it to process any polled events. - STREAMS.close(DRAIN_TIMEOUT_DURATION); - - if (STREAMS_METRICS != null) { - STREAMS_METRICS.close(); - } - } - } - - public static KafkaStreams getKafkaStreams() { - return STREAMS; - } - - static Properties getDefaultProperties() { - final var properties = new Properties(); - properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, Config.getInstance().getProperty(ConfigKey.KAFKA_BOOTSTRAP_SERVERS)); - StringBuilder applicationName = new StringBuilder(Config.getInstance().getProperty(ConfigKey.KAFKA_TOPIC_PREFIX)).append(Config.getInstance().getProperty(ConfigKey.APPLICATION_ID)); - properties.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationName.toString()); - - properties.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG, KafkaStreamsDeserializationExceptionHandler.class); - properties.put(StreamsConfig.DEFAULT_PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG, KafkaStreamsProductionExceptionHandler.class); - - properties.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, Config.getInstance().getProperty(ConfigKey.KAFKA_NUM_STREAM_THREADS)); - properties.put(StreamsConfig.STATE_DIR_CONFIG, Paths.get(Config.getInstance().getDataDirectorty().getAbsolutePath(), "kafka-streams").toString()); - properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, Config.getInstance().getProperty(ConfigKey.KAFKA_AUTO_OFFSET_RESET)); - if (Config.getInstance().getPropertyAsBoolean(ConfigKey.KAFKA_TLS_ENABLED)) { - properties.put(StreamsConfig.SECURITY_PROTOCOL_CONFIG, Config.getInstance().getProperty(ConfigKey.KAFKA_TLS_PROTOCOL)); - properties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, Config.getInstance().getProperty(ConfigKey.TRUST_STORE_PATH)); - properties.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, Config.getInstance().getProperty(ConfigKey.TRUST_STORE_PASSWORD)); - if (Config.getInstance().getPropertyAsBoolean(ConfigKey.KAFKA_MTLS_ENABLED)) { - properties.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, Config.getInstance().getProperty(ConfigKey.KEY_STORE_PATH)); - properties.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, Config.getInstance().getProperty(ConfigKey.KEY_STORE_PASSWORD)); - } - } - properties.put(StreamsConfig.METRICS_RECORDING_LEVEL_CONFIG, Config.getInstance().getProperty(ConfigKey.KAFKA_STREAMS_METRICS_RECORDING_LEVEL)); - properties.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, "1000"); - properties.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, CompressionType.SNAPPY.name); - properties.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true"); - properties.put(ProducerConfig.ACKS_CONFIG, 
"all"); - - final Map passThroughProperties = Config.getInstance().getPassThroughProperties("kafka.streams"); - for (final Map.Entry passThroughProperty : passThroughProperties.entrySet()) { - final String key = passThroughProperty.getKey().replaceFirst("^kafka\\.streams\\.", ""); - if (StreamsConfig.configDef().names().contains(key)) { - properties.put(key, passThroughProperty.getValue()); - } else { - final Matcher consumerPrefixMatcher = CONSUMER_PREFIX_PATTERN.matcher(key); - final Matcher producerPrefixMatcher = PRODUCER_PREFIX_PATTERN.matcher(key); - - final boolean isValidConsumerProperty = ConsumerConfig.configNames().contains(key) - || (consumerPrefixMatcher.find() && ConsumerConfig.configNames().contains(consumerPrefixMatcher.replaceFirst(""))); - final boolean isValidProducerProperty = ProducerConfig.configNames().contains(key) - || (producerPrefixMatcher.find() && ProducerConfig.configNames().contains(producerPrefixMatcher.replaceFirst(""))); - if (isValidConsumerProperty || isValidProducerProperty) { - properties.put(key, passThroughProperty.getValue()); - } else { - LOGGER.warn("%s is not a known Streams, Consumer, or Producer property; Ignoring".formatted(key)); - } - } - } - - return properties; - } - -} diff --git a/src/main/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTopologyFactory.java b/src/main/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTopologyFactory.java deleted file mode 100644 index 015044d74..000000000 --- a/src/main/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTopologyFactory.java +++ /dev/null @@ -1,240 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. 
- */
-package org.dependencytrack.event.kafka.streams;
-
-import alpine.Config;
-import alpine.common.logging.Logger;
-import alpine.event.framework.ChainableEvent;
-import alpine.event.framework.Event;
-import org.apache.kafka.common.serialization.Serdes;
-import org.apache.kafka.streams.KeyValue;
-import org.apache.kafka.streams.StreamsBuilder;
-import org.apache.kafka.streams.StreamsConfig;
-import org.apache.kafka.streams.Topology;
-import org.apache.kafka.streams.kstream.Consumed;
-import org.apache.kafka.streams.kstream.KStream;
-import org.apache.kafka.streams.kstream.Named;
-import org.apache.kafka.streams.kstream.Produced;
-import org.apache.kafka.streams.kstream.Repartitioned;
-import org.datanucleus.PropertyNames;
-import org.dependencytrack.common.ConfigKey;
-import org.dependencytrack.event.ComponentMetricsUpdateEvent;
-import org.dependencytrack.event.ComponentPolicyEvaluationEvent;
-import org.dependencytrack.event.PortfolioVulnerabilityAnalysisEvent;
-import org.dependencytrack.event.ProjectMetricsUpdateEvent;
-import org.dependencytrack.event.ProjectPolicyEvaluationEvent;
-import org.dependencytrack.event.kafka.KafkaTopics;
-import org.dependencytrack.event.kafka.streams.processor.DelayedBomProcessedNotificationProcessor;
-import org.dependencytrack.event.kafka.streams.processor.VulnerabilityScanResultProcessor;
-import org.dependencytrack.model.VulnerabilityScan;
-import org.dependencytrack.model.WorkflowState;
-import org.dependencytrack.model.WorkflowStatus;
-import org.dependencytrack.model.WorkflowStep;
-import org.dependencytrack.notification.NotificationGroup;
-import org.dependencytrack.persistence.QueryManager;
-import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisStatus;
-import org.dependencytrack.proto.vulnanalysis.v1.ScanKey;
-import org.dependencytrack.proto.vulnanalysis.v1.ScanResult;
-
-import java.time.Instant;
-import java.util.Date;
-import java.util.Properties;
-import java.util.UUID;
-
-import static org.dependencytrack.parser.dependencytrack.NotificationModelConverter.convert;
-import static org.dependencytrack.util.NotificationUtil.createProjectVulnerabilityAnalysisCompleteNotification;
-
-class KafkaStreamsTopologyFactory {
-
- private static final Logger LOGGER = Logger.getLogger(KafkaStreamsTopologyFactory.class);
-
- private final boolean delayBomProcessedNotification;
-
- public KafkaStreamsTopologyFactory() {
- this(Config.getInstance().getPropertyAsBoolean(ConfigKey.TMP_DELAY_BOM_PROCESSED_NOTIFICATION));
- }
-
- KafkaStreamsTopologyFactory(final boolean delayBomProcessedNotification) {
- this.delayBomProcessedNotification = delayBomProcessedNotification;
- }
-
- Topology createTopology() {
- final var streamsBuilder = new StreamsBuilder();
-
- final var streamsProperties = new Properties();
- streamsProperties.put(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG, StreamsConfig.OPTIMIZE);
-
- final KStream<ScanKey, ScanResult> vulnScanResultStream = streamsBuilder
- .stream(KafkaTopics.VULN_ANALYSIS_RESULT.name(), Consumed
- .with(KafkaTopics.VULN_ANALYSIS_RESULT.keySerde(), KafkaTopics.VULN_ANALYSIS_RESULT.valueSerde())
- .withName("consume_from_%s_topic".formatted(KafkaTopics.VULN_ANALYSIS_RESULT.name())));
-
- // Process the vulnerabilities reported by the scanners.
- final KStream<ScanKey, ScanResult> processedVulnScanResultStream = vulnScanResultStream
- .processValues(VulnerabilityScanResultProcessor::new, Named.as("process_vuln_scan_result"));
-
- // Re-key processed results to their respective scan token, and record their arrival.
- final KStream completedVulnScanStream = processedVulnScanResultStream - // Vulnerability scans targeting the entire portfolio are currently not tracked. - // There's no point in including results in the following repartition, and querying - // the database for their scan token, given the queries will never return anything anyway. - // Filtering results of portfolio analyses here also reduces the chance of hot partitions. - .filter((scanKey, scanResult) -> !scanKey.getScanToken().equals(PortfolioVulnerabilityAnalysisEvent.CHAIN_IDENTIFIER.toString()), - Named.as("filter_out_portfolio_vuln_scan_results")) - .map((scanKey, scanResult) -> { - // Drop vulnerabilities from scanner results, as they can be rather large, and we don't need them anymore. - // Dropping them will save us some compression and network overhead during the repartition. - // We can remove this step should we ever need access to the vulnerabilities again. - final var strippedScanResult = scanResult.toBuilder() - .clearScannerResults() - .addAllScannerResults(scanResult.getScannerResultsList().stream() - .map(scannerResult -> scannerResult.toBuilder() - .clearBom() - .build()) - .toList()) - .build(); - return KeyValue.pair(scanKey.getScanToken(), strippedScanResult); - }, Named.as("re-key_scan-result_to_scan-token")) - .repartition(Repartitioned - .with(Serdes.String(), KafkaTopics.VULN_ANALYSIS_RESULT.valueSerde()) - .withName("processed-vuln-scan-result-by-scan-token")) - .mapValues((scanToken, scanResult) -> { - try (final var qm = new QueryManager()) { - return qm.recordVulnerabilityScanResult(scanToken, scanResult); - } - }, Named.as("record_processed_vuln_scan_result")) - .filter((scanToken, vulnScan) -> vulnScan != null, - Named.as("filter_completed_vuln_scans")) - .mapValues((scanToken, vulnScan) -> { - final double failureRate = (double) vulnScan.getScanFailed() / vulnScan.getScanTotal(); - - if (failureRate > vulnScan.getFailureThreshold()) { - try (var qm = new QueryManager()) { - // Detach VulnerabilityScan objects when committing changes. Without this, - // all fields except the ID field will be unloaded on commit (the object will become HOLLOW). - qm.getPersistenceManager().setProperty(PropertyNames.PROPERTY_DETACH_ALL_ON_COMMIT, "true"); - vulnScan = qm.updateVulnerabilityScanStatus(vulnScan.getToken(), VulnerabilityScan.Status.FAILED); - vulnScan.setFailureReason("Failure threshold of " + vulnScan.getFailureThreshold() + "% exceeded: " + failureRate + "% of scans failed"); - LOGGER.warn("Detected failure of vulnerability scan (token=%s, targetType=%s, targetIdentifier=%s): %s" - .formatted(vulnScan.getToken(), vulnScan.getTargetType(), vulnScan.getTargetIdentifier(), vulnScan.getFailureReason())); - } - } - - return vulnScan; - }, Named.as("evaluate_vuln_scan_failure_rate")); - - completedVulnScanStream - .foreach((scanToken, vulnScan) -> { - try (var qm = new QueryManager()) { - final WorkflowState vulnAnalysisState = qm.getWorkflowStateByTokenAndStep(UUID.fromString(scanToken), WorkflowStep.VULN_ANALYSIS); - if (vulnAnalysisState == null) { - // No workflow exists for this scan; Nothing to update. 
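A worked example of the failure-rate check in evaluate_vuln_scan_failure_rate above, assuming getFailureThreshold() is expressed as a fraction in [0, 1] like the computed rate; note that the log message labels both values with a percent sign even though no scaling by 100 happens in the comparison:

class FailureRateDemo {
    public static void main(String[] args) {
        // Hypothetical numbers: 7 of 100 component scans failed; threshold 0.05 (i.e. 5 %).
        int scanFailed = 7;
        int scanTotal = 100;
        double failureThreshold = 0.05;

        // Mirrors the computation above.
        double failureRate = (double) scanFailed / scanTotal; // 0.07
        if (failureRate > failureThreshold) {                 // strict comparison
            System.out.println("Vulnerability scan would be marked FAILED");
        }
    }
}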
- return; - } - - if (vulnScan.getStatus() == VulnerabilityScan.Status.FAILED) { - vulnAnalysisState.setStatus(WorkflowStatus.FAILED); - vulnAnalysisState.setUpdatedAt(new Date()); - vulnAnalysisState.setFailureReason(vulnScan.getFailureReason()); - final WorkflowState updatedVulnAnalysisState = qm.updateWorkflowState(vulnAnalysisState); - qm.updateAllDescendantStatesOfParent(updatedVulnAnalysisState, WorkflowStatus.CANCELLED, Date.from(Instant.now())); - return; - } - - vulnAnalysisState.setStatus(WorkflowStatus.COMPLETED); - vulnAnalysisState.setUpdatedAt(Date.from(Instant.now())); - qm.updateWorkflowState(vulnAnalysisState); - } - }, Named.as("update_vuln_analysis_workflow_status")); - - final KStream completedVulnScanWithProjectTargetStream = completedVulnScanStream - .filter((scanToken, vulnScan) -> vulnScan.getTargetType() == VulnerabilityScan.TargetType.PROJECT, - Named.as("filter_vuln_scans_with_project_target")); - - // For each completed vulnerability scan that targeted a project (opposed to individual components), - // determine its overall status, gather all findings, and emit a PROJECT_VULN_ANALYSIS_COMPLETE notification. - completedVulnScanWithProjectTargetStream - .map((scanToken, vulnScan) -> { - final alpine.notification.Notification alpineNotification; - try { - alpineNotification = vulnScan.getStatus() == VulnerabilityScan.Status.FAILED - ? createProjectVulnerabilityAnalysisCompleteNotification(vulnScan, - UUID.fromString(scanToken), - ProjectVulnAnalysisStatus.PROJECT_VULN_ANALYSIS_STATUS_FAILED) - : createProjectVulnerabilityAnalysisCompleteNotification( - vulnScan, - UUID.fromString(scanToken), - ProjectVulnAnalysisStatus.PROJECT_VULN_ANALYSIS_STATUS_COMPLETED); - } catch (RuntimeException e) { - LOGGER.warn("Failed to generate a %s notification (project: %s; token: %s)" - .formatted(NotificationGroup.PROJECT_VULN_ANALYSIS_COMPLETE, - vulnScan.getTargetIdentifier(), vulnScan.getToken()), e); - return KeyValue.pair(vulnScan.getTargetIdentifier().toString(), null); - } - - return KeyValue.pair(vulnScan.getTargetIdentifier().toString(), convert(alpineNotification)); - }, Named.as("map_vuln_scan_to_vuln_analysis_complete_notification")) - .filter((projectUuid, notification) -> notification != null, - Named.as("filter_valid_project-vuln-analysis-complete_notification")) - .to(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.name(), Produced - .with(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.keySerde(), - KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.valueSerde()) - .withName("produce_to_%s_topic".formatted(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.name()))); - - // When delaying of BOM_PROCESSED notifications is enabled, emit a BOM_PROCESSED notification - // for each completed vulnerability scan that targeted a project. But only do so when the scan is - // part of a workflow that includes a BOM_PROCESSING step with status COMPLETED. 
- if (delayBomProcessedNotification) { - completedVulnScanStream - .process(DelayedBomProcessedNotificationProcessor::new, - Named.as("tmp_delay_bom_processed_notification_process_completed_vuln_scan")) - .to(KafkaTopics.NOTIFICATION_BOM.name(), Produced - .with(KafkaTopics.NOTIFICATION_BOM.keySerde(), KafkaTopics.NOTIFICATION_BOM.valueSerde()) - .withName("tmp_delay_bom_processed_notification_produce_to_%s_topic".formatted(KafkaTopics.NOTIFICATION_BOM.name()))); - } - - // For each successfully completed vulnerability scan, trigger a policy evaluation and metrics update - // for the targeted entity (project or individual component). - completedVulnScanStream - .filter((scanToken, vulnScan) -> vulnScan.getStatus() != VulnerabilityScan.Status.FAILED, - Named.as("filter_failed_vuln_scans")) - .foreach((scanToken, vulnScan) -> { - final ChainableEvent policyEvaluationEvent = switch (vulnScan.getTargetType()) { - case COMPONENT -> new ComponentPolicyEvaluationEvent(vulnScan.getTargetIdentifier()); - case PROJECT -> new ProjectPolicyEvaluationEvent(vulnScan.getTargetIdentifier()); - }; - policyEvaluationEvent.setChainIdentifier(UUID.fromString(vulnScan.getToken())); - - // Trigger a metrics update no matter if the policy evaluation succeeded or not. - final ChainableEvent metricsUpdateEvent = switch (vulnScan.getTargetType()) { - case COMPONENT -> new ComponentMetricsUpdateEvent(vulnScan.getTargetIdentifier()); - case PROJECT -> new ProjectMetricsUpdateEvent(vulnScan.getTargetIdentifier()); - }; - metricsUpdateEvent.setChainIdentifier(UUID.fromString(vulnScan.getToken())); - - policyEvaluationEvent.onFailure(metricsUpdateEvent); - policyEvaluationEvent.onSuccess(metricsUpdateEvent); - - Event.dispatch(policyEvaluationEvent); - }, Named.as("trigger_policy_evaluation")); - - return streamsBuilder.build(streamsProperties); - } - -} diff --git a/src/main/java/org/dependencytrack/event/kafka/streams/exception/AbstractThresholdBasedExceptionHandler.java b/src/main/java/org/dependencytrack/event/kafka/streams/exception/AbstractThresholdBasedExceptionHandler.java deleted file mode 100644 index 8c0a3a16e..000000000 --- a/src/main/java/org/dependencytrack/event/kafka/streams/exception/AbstractThresholdBasedExceptionHandler.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. 
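The event chaining at the end of the deleted topology deserves a note: registering the same metrics-update event via both onSuccess and onFailure makes it an unconditional follow-up to the policy evaluation. A minimal sketch of that pattern for a project-level scan, using only the event API already visible above (the helper method is hypothetical):

import alpine.event.framework.ChainableEvent;
import alpine.event.framework.Event;
import org.dependencytrack.event.ProjectMetricsUpdateEvent;
import org.dependencytrack.event.ProjectPolicyEvaluationEvent;

import java.util.UUID;

class EventChainingSketch {
    static void dispatchForProject(UUID projectUuid, UUID scanToken) {
        final ChainableEvent policyEvaluation = new ProjectPolicyEvaluationEvent(projectUuid);
        policyEvaluation.setChainIdentifier(scanToken);

        final ChainableEvent metricsUpdate = new ProjectMetricsUpdateEvent(projectUuid);
        metricsUpdate.setChainIdentifier(scanToken);

        // Registering the same event for both outcomes makes it an unconditional follow-up.
        policyEvaluation.onSuccess(metricsUpdate);
        policyEvaluation.onFailure(metricsUpdate);

        Event.dispatch(policyEvaluation);
    }
}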
- */ -package org.dependencytrack.event.kafka.streams.exception; - -import java.time.Clock; -import java.time.Duration; -import java.time.Instant; - -abstract class AbstractThresholdBasedExceptionHandler { - - private final Clock clock; - private final Duration exceptionThresholdInterval; - private final int exceptionThresholdCount; - private Instant firstExceptionOccurredAt; - private int exceptionOccurrences; - - AbstractThresholdBasedExceptionHandler(final Clock clock, final Duration exceptionThresholdInterval, final int exceptionThresholdCount) { - this.clock = clock; - this.exceptionThresholdInterval = exceptionThresholdInterval; - this.exceptionThresholdCount = exceptionThresholdCount; - } - - boolean exceedsThreshold() { - final Instant now = Instant.now(clock); - if (firstExceptionOccurredAt == null) { - firstExceptionOccurredAt = now; - exceptionOccurrences = 1; - } else { - exceptionOccurrences++; - } - - final Instant cutoff = firstExceptionOccurredAt.plus(exceptionThresholdInterval); - if (now.isAfter(cutoff)) { - firstExceptionOccurredAt = now; - exceptionOccurrences = 1; - } - - return exceptionOccurrences >= exceptionThresholdCount; - } - - public Duration exceptionThresholdInterval() { - return exceptionThresholdInterval; - } - - public int exceptionThresholdCount() { - return exceptionThresholdCount; - } - - public Instant firstExceptionOccurredAt() { - return firstExceptionOccurredAt; - } - - public int exceptionOccurrences() { - return exceptionOccurrences; - } - -} diff --git a/src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsDeserializationExceptionHandler.java b/src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsDeserializationExceptionHandler.java deleted file mode 100644 index e2dcbf42c..000000000 --- a/src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsDeserializationExceptionHandler.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. 
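The reset semantics of exceedsThreshold() above are easiest to see with a fixed clock: occurrences accumulate while they fall inside the interval, and the first occurrence after the interval elapses starts a fresh window with a count of one. A demonstration-only sketch; the subclass is hypothetical and must live in the same package, since the class and its constructor are package-private:

package org.dependencytrack.event.kafka.streams.exception;

import java.time.Clock;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneOffset;

class ThresholdDemoHandler extends AbstractThresholdBasedExceptionHandler {

    ThresholdDemoHandler(Clock clock) {
        super(clock, Duration.ofMinutes(30), 3); // threshold: 3 occurrences per 30 minutes
    }

    public static void main(String[] args) {
        // A fixed clock keeps all occurrences inside the same interval.
        final var handler = new ThresholdDemoHandler(Clock.fixed(Instant.EPOCH, ZoneOffset.UTC));

        System.out.println(handler.exceedsThreshold()); // false (1 of 3)
        System.out.println(handler.exceedsThreshold()); // false (2 of 3)
        System.out.println(handler.exceedsThreshold()); // true  (3 of 3)
        // With a clock advanced past the 30-minute cutoff, the counter would reset to 1.
    }
}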
- */ -package org.dependencytrack.event.kafka.streams.exception; - -import alpine.Config; -import alpine.common.logging.Logger; -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.streams.errors.DeserializationExceptionHandler; -import org.apache.kafka.streams.processor.ProcessorContext; -import org.dependencytrack.common.ConfigKey; - -import java.time.Clock; -import java.time.Duration; -import java.util.Map; - -public class KafkaStreamsDeserializationExceptionHandler extends AbstractThresholdBasedExceptionHandler implements DeserializationExceptionHandler { - - private static final Logger LOGGER = Logger.getLogger(KafkaStreamsDeserializationExceptionHandler.class); - - - @SuppressWarnings("unused") // Called by Kafka Streams via reflection - public KafkaStreamsDeserializationExceptionHandler() { - this( - Clock.systemUTC(), - Duration.parse(Config.getInstance().getProperty(ConfigKey.KAFKA_STREAMS_DESERIALIZATION_EXCEPTION_THRESHOLD_INTERVAL)), - Config.getInstance().getPropertyAsInt(ConfigKey.KAFKA_STREAMS_DESERIALIZATION_EXCEPTION_THRESHOLD_COUNT) - ); - } - - KafkaStreamsDeserializationExceptionHandler(final Clock clock, - final Duration exceptionThresholdInterval, - final int exceptionThresholdCount) { - super(clock, exceptionThresholdInterval, exceptionThresholdCount); - } - - /** - * {@inheritDoc} - */ - @Override - public void configure(final Map configs) { - // Configuration is done via Alpine config. - } - - /** - * {@inheritDoc} - */ - @Override - public synchronized DeserializationHandlerResponse handle(final ProcessorContext context, - final ConsumerRecord record, - final Exception exception) { - // TODO: Use KafkaEventDispatcher to send the record to a dead letter topic? - if (exceedsThreshold()) { - LOGGER.error(""" - Failed to deserialize record from topic %s (partition: %d, offset %d); \ - Stopping to consume records, as the error was encountered %d times since %s, \ - exceeding the configured threshold of %d occurrences in an interval of %s\ - """ - .formatted(record.topic(), record.partition(), record.offset(), - exceptionOccurrences(), firstExceptionOccurredAt(), - exceptionThresholdCount(), exceptionThresholdInterval()), exception); - return DeserializationHandlerResponse.FAIL; - } - - LOGGER.warn(""" - Failed to deserialize record from topic %s (partition: %d, offset: %d); \ - Skipping and continuing to consume records, as the configured threshold of \ - %d occurrences in an interval of %s has not been exceeded yet\ - """ - .formatted(record.topic(), record.partition(), record.offset(), - exceptionThresholdCount(), exceptionThresholdInterval()), exception); - return DeserializationHandlerResponse.CONTINUE; - } - -} diff --git a/src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsProductionExceptionHandler.java b/src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsProductionExceptionHandler.java deleted file mode 100644 index 344aeca8b..000000000 --- a/src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsProductionExceptionHandler.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
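How the application registered this handler is not part of this hunk; the snippet below shows only the standard Kafka Streams mechanism for installing a DeserializationExceptionHandler, not a claim about Dependency-Track's actual wiring:

import org.apache.kafka.streams.StreamsConfig;
import org.dependencytrack.event.kafka.streams.exception.KafkaStreamsDeserializationExceptionHandler;

import java.util.Properties;

class DeserializationHandlerRegistrationSketch {
    static Properties streamsProperties() {
        final var properties = new Properties();
        // Standard Kafka Streams setting for installing a DeserializationExceptionHandler.
        properties.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
                KafkaStreamsDeserializationExceptionHandler.class);
        return properties;
    }
}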
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * Copyright (c) OWASP Foundation. All Rights Reserved.
- */
-package org.dependencytrack.event.kafka.streams.exception;
-
-import alpine.Config;
-import alpine.common.logging.Logger;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.errors.RecordTooLargeException;
-import org.apache.kafka.streams.errors.ProductionExceptionHandler;
-import org.dependencytrack.common.ConfigKey;
-
-import java.time.Clock;
-import java.time.Duration;
-import java.util.Map;
-
-public class KafkaStreamsProductionExceptionHandler extends AbstractThresholdBasedExceptionHandler implements ProductionExceptionHandler {
-
-    private static final Logger LOGGER = Logger.getLogger(KafkaStreamsProductionExceptionHandler.class);
-
-
-    @SuppressWarnings("unused") // Called by Kafka Streams via reflection
-    public KafkaStreamsProductionExceptionHandler() {
-        this(
-                Clock.systemUTC(),
-                Duration.parse(Config.getInstance().getProperty(ConfigKey.KAFKA_STREAMS_PRODUCTION_EXCEPTION_THRESHOLD_INTERVAL)),
-                Config.getInstance().getPropertyAsInt(ConfigKey.KAFKA_STREAMS_PRODUCTION_EXCEPTION_THRESHOLD_COUNT)
-        );
-    }
-
-    KafkaStreamsProductionExceptionHandler(final Clock clock,
-                                           final Duration exceptionThresholdInterval,
-                                           final int exceptionThresholdCount) {
-        super(clock, exceptionThresholdInterval, exceptionThresholdCount);
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public void configure(final Map<String, ?> configs) {
-        // Configuration is done via Alpine config.
- } - - /** - * {@inheritDoc} - */ - @Override - public synchronized ProductionExceptionHandlerResponse handle(final ProducerRecord record, - final Exception exception) { - if (!(exception instanceof RecordTooLargeException)) { - LOGGER.error(""" - Failed to produce record to topic %s; \ - Stopping to produce records, as the error is of an unexpected type, \ - and we're not sure if it can safely be ignored\ - """ - .formatted(record.topic()), exception); - return ProductionExceptionHandlerResponse.FAIL; - } - - if (exceedsThreshold()) { - LOGGER.error(""" - Failed to produce record to topic %s; \ - Stopping to produce records, as the error was encountered %d times since %s, \ - exceeding the configured threshold of %d occurrences in an interval of %s\ - """ - .formatted(record.topic(), - exceptionOccurrences(), firstExceptionOccurredAt(), - exceptionThresholdCount(), exceptionThresholdInterval()), exception); - return ProductionExceptionHandlerResponse.FAIL; - } - - LOGGER.warn(""" - Failed to produce record to topic %s; \ - Skipping and continuing to produce records, as the configured threshold of \ - %d occurrences in an interval of %s has not been exceeded yet\ - """ - .formatted(record.topic(), exceptionThresholdCount(), exceptionThresholdInterval()), exception); - return ProductionExceptionHandlerResponse.CONTINUE; - } - -} diff --git a/src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsUncaughtExceptionHandler.java b/src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsUncaughtExceptionHandler.java deleted file mode 100644 index faddce783..000000000 --- a/src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsUncaughtExceptionHandler.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. 
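Registration of the production handler follows the same pattern. Again, only the standard mechanism is shown here; the max.request.size override illustrates the usual remediation for the RecordTooLargeException case special-cased above, with a purely illustrative 4 MiB value:

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.streams.StreamsConfig;
import org.dependencytrack.event.kafka.streams.exception.KafkaStreamsProductionExceptionHandler;

import java.util.Properties;

class ProductionHandlerRegistrationSketch {
    static Properties streamsProperties() {
        final var properties = new Properties();
        // Standard Kafka Streams setting for installing a ProductionExceptionHandler.
        properties.put(StreamsConfig.DEFAULT_PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG,
                KafkaStreamsProductionExceptionHandler.class);
        // RecordTooLargeException is typically addressed by raising the producer's
        // max.request.size (in concert with the topic/broker-side message.max.bytes).
        properties.put(StreamsConfig.producerPrefix(ProducerConfig.MAX_REQUEST_SIZE_CONFIG),
                4 * 1024 * 1024); // illustrative value
        return properties;
    }
}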
- */ -package org.dependencytrack.event.kafka.streams.exception; - -import alpine.Config; -import alpine.common.logging.Logger; -import org.apache.commons.lang3.exception.ExceptionUtils; -import org.apache.http.conn.ConnectTimeoutException; -import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler; -import org.datanucleus.api.jdo.exceptions.ConnectionInUseException; -import org.datanucleus.store.query.QueryInterruptedException; -import org.dependencytrack.common.ConfigKey; - -import javax.jdo.JDOOptimisticVerificationException; -import java.net.SocketTimeoutException; -import java.sql.SQLTransientException; -import java.time.Clock; -import java.time.Duration; -import java.time.Instant; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.TimeoutException; - -import static org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.REPLACE_THREAD; -import static org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT; - -public class KafkaStreamsUncaughtExceptionHandler implements StreamsUncaughtExceptionHandler { - - private record ExceptionOccurrence(Instant occurredFirstAt, int count) { - } - - private static final Logger LOGGER = Logger.getLogger(KafkaStreamsUncaughtExceptionHandler.class); - - private final Clock clock; - private final Map, ExceptionOccurrence> transientExceptionOccurrences; - private final Duration transientExceptionThresholdInterval; - private final int transientExceptionThresholdCount; - - public KafkaStreamsUncaughtExceptionHandler() { - this( - Clock.systemUTC(), - Duration.parse(Config.getInstance().getProperty(ConfigKey.KAFKA_STREAMS_TRANSIENT_PROCESSING_EXCEPTION_THRESHOLD_INTERVAL)), - Config.getInstance().getPropertyAsInt(ConfigKey.KAFKA_STREAMS_TRANSIENT_PROCESSING_EXCEPTION_THRESHOLD_COUNT) - ); - } - - KafkaStreamsUncaughtExceptionHandler(final Clock clock, - final Duration transientExceptionThresholdInterval, - final int transientExceptionThresholdCount) { - this.clock = clock; - this.transientExceptionOccurrences = new ConcurrentHashMap<>(); - this.transientExceptionThresholdInterval = transientExceptionThresholdInterval; - this.transientExceptionThresholdCount = transientExceptionThresholdCount; - } - - /** - * {@inheritDoc} - */ - @Override - public StreamThreadExceptionResponse handle(final Throwable exception) { - final Throwable rootCause = ExceptionUtils.getRootCause(exception); - - if (rootCause instanceof TimeoutException - || rootCause instanceof ConnectTimeoutException - || rootCause instanceof SocketTimeoutException - || rootCause instanceof ConnectionInUseException - || rootCause instanceof QueryInterruptedException - || rootCause instanceof JDOOptimisticVerificationException - || rootCause instanceof SQLTransientException) { - final ExceptionOccurrence occurrence = transientExceptionOccurrences - .compute(rootCause.getClass(), (key, oldValue) -> { - final Instant now = Instant.now(clock); - if (oldValue == null) { - return new ExceptionOccurrence(now, 1); - } - - final Instant cutoff = oldValue.occurredFirstAt().plus(transientExceptionThresholdInterval); - if (now.isAfter(cutoff)) { - return new ExceptionOccurrence(now, 1); - } - - return new ExceptionOccurrence(oldValue.occurredFirstAt(), oldValue.count() + 1); - }); - - if (occurrence.count() >= transientExceptionThresholdCount) { - LOGGER.error(""" - Encountered an unhandled exception during record processing; \ - Shutting down the failed 
stream thread as the error was encountered \ - %d times since %s, exceeding the configured threshold of %d occurrences \ - in an interval of %s\ - """ - // Actual exception stack trace will be logged by Kafka Streams - .formatted(occurrence.count(), occurrence.occurredFirstAt(), - transientExceptionThresholdCount, transientExceptionThresholdInterval)); - return SHUTDOWN_CLIENT; - } - - LOGGER.warn(""" - Encountered an unhandled exception during record processing; \ - Replacing the failed stream thread as the error appears to be transient\ - """); // Actual exception stack trace will be logged by Kafka Streams - return REPLACE_THREAD; - } - - LOGGER.error(""" - Encountered an unhandled exception during record processing; \ - Shutting down the failed stream thread as the error does not appear to be transient\ - """); // Actual exception stack trace will be logged by Kafka Streams - return SHUTDOWN_CLIENT; - } - -} diff --git a/src/main/java/org/dependencytrack/event/kafka/streams/processor/DelayedBomProcessedNotificationProcessor.java b/src/main/java/org/dependencytrack/event/kafka/streams/processor/DelayedBomProcessedNotificationProcessor.java deleted file mode 100644 index 857a93123..000000000 --- a/src/main/java/org/dependencytrack/event/kafka/streams/processor/DelayedBomProcessedNotificationProcessor.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. - */ -package org.dependencytrack.event.kafka.streams.processor; - -import alpine.common.logging.Logger; -import alpine.notification.NotificationLevel; -import org.apache.kafka.streams.processor.api.ContextualProcessor; -import org.apache.kafka.streams.processor.api.Processor; -import org.apache.kafka.streams.processor.api.Record; -import org.dependencytrack.model.Bom; -import org.dependencytrack.model.Project; -import org.dependencytrack.model.VulnerabilityScan; -import org.dependencytrack.model.WorkflowStatus; -import org.dependencytrack.model.WorkflowStep; -import org.dependencytrack.notification.NotificationConstants; -import org.dependencytrack.notification.NotificationGroup; -import org.dependencytrack.notification.NotificationScope; -import org.dependencytrack.notification.vo.BomConsumedOrProcessed; -import org.dependencytrack.persistence.QueryManager; -import org.dependencytrack.proto.notification.v1.Notification; - -import javax.jdo.Query; -import java.util.UUID; - -import static org.dependencytrack.parser.dependencytrack.NotificationModelConverter.convert; - -/** - * A {@link Processor} responsible for dispatching {@link NotificationGroup#BOM_PROCESSED} notifications - * upon detection of a completed {@link VulnerabilityScan}. 
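Unlike the two handlers above, an uncaught-exception handler is attached to the KafkaStreams client directly rather than through properties. A minimal sketch of that wiring; the factory method is hypothetical, and the actual call site is outside this hunk:

import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.Topology;
import org.dependencytrack.event.kafka.streams.exception.KafkaStreamsUncaughtExceptionHandler;

import java.util.Properties;

class UncaughtHandlerWiringSketch {
    static KafkaStreams createStreams(Topology topology, Properties properties) {
        final var streams = new KafkaStreams(topology, properties);
        // Set on the client itself; REPLACE_THREAD re-reads from the last committed
        // offsets, while SHUTDOWN_CLIENT stops all stream threads.
        streams.setUncaughtExceptionHandler(new KafkaStreamsUncaughtExceptionHandler());
        return streams;
    }
}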
- */ -public class DelayedBomProcessedNotificationProcessor extends ContextualProcessor { - - private static final Logger LOGGER = Logger.getLogger(DelayedBomProcessedNotificationProcessor.class); - - @Override - public void process(final Record record) { - final VulnerabilityScan vulnScan = record.value(); - - if (vulnScan.getStatus() != VulnerabilityScan.Status.COMPLETED - && vulnScan.getStatus() != VulnerabilityScan.Status.FAILED) { - LOGGER.warn("Received vulnerability scan with non-terminal status %s; Dropping (token=%s, project=%s)" - .formatted(vulnScan.getStatus(), vulnScan.getToken(), vulnScan.getTargetIdentifier())); - return; - } - - final Project project; - try (final var qm = new QueryManager()) { - if (!qm.hasWorkflowStepWithStatus(UUID.fromString(vulnScan.getToken()), WorkflowStep.BOM_PROCESSING, WorkflowStatus.COMPLETED)) { - LOGGER.debug("Received completed vulnerability scan, but no %s step exists in this workflow; Dropping (token=%s, project=%s)" - .formatted(WorkflowStep.BOM_PROCESSING, vulnScan.getToken(), vulnScan.getTargetIdentifier())); - return; - } - - project = getProject(qm, vulnScan.getTargetIdentifier()); - if (project == null) { - LOGGER.warn("Received completed vulnerability scan, but the target project does not exist; Dropping (token=%s, project=%s)" - .formatted(vulnScan.getToken(), vulnScan.getTargetIdentifier())); - return; - } - } - - final var alpineNotification = new alpine.notification.Notification() - .scope(NotificationScope.PORTFOLIO) - .group(NotificationGroup.BOM_PROCESSED) - .level(NotificationLevel.INFORMATIONAL) - .title(NotificationConstants.Title.BOM_PROCESSED) - // BOM format and spec version are hardcoded because we don't have this information at this point. - // DT currently only accepts CycloneDX anyway. - .content("A %s BOM was processed".formatted(Bom.Format.CYCLONEDX.getFormatShortName())) - .subject(new BomConsumedOrProcessed(UUID.fromString(vulnScan.getToken()), project, /* bom */ "(Omitted)", Bom.Format.CYCLONEDX, "Unknown")); - - context().forward(record.withKey(project.getUuid().toString()).withValue(convert(alpineNotification))); - LOGGER.info("Dispatched delayed %s notification (token=%s, project=%s)" - .formatted(NotificationGroup.BOM_PROCESSED, vulnScan.getToken(), vulnScan.getTargetIdentifier())); - } - - private static Project getProject(final QueryManager qm, final UUID uuid) { - final Query projectQuery = qm.getPersistenceManager().newQuery(Project.class); - projectQuery.setFilter("uuid == :uuid"); - projectQuery.setParameters(uuid); - projectQuery.getFetchPlan().clearGroups(); // Ensure we're not loading too much bloat. - projectQuery.getFetchPlan().setGroup(Project.FetchGroup.NOTIFICATION.name()); - try { - return qm.getPersistenceManager().detachCopy(projectQuery.executeResultUnique(Project.class)); - } finally { - projectQuery.closeAll(); - } - } - -} diff --git a/src/main/java/org/dependencytrack/event/kafka/streams/processor/VulnerabilityScanResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/streams/processor/VulnerabilityScanResultProcessor.java deleted file mode 100644 index 8910c7ebd..000000000 --- a/src/main/java/org/dependencytrack/event/kafka/streams/processor/VulnerabilityScanResultProcessor.java +++ /dev/null @@ -1,1191 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. - */ -package org.dependencytrack.event.kafka.streams.processor; - -import alpine.Config; -import alpine.common.logging.Logger; -import alpine.common.metrics.Metrics; -import alpine.notification.Notification; -import alpine.notification.NotificationLevel; -import com.google.protobuf.Any; -import com.google.protobuf.Timestamp; -import com.google.protobuf.util.Timestamps; -import io.micrometer.core.instrument.Timer; -import org.apache.kafka.streams.processor.api.ContextualFixedKeyProcessor; -import org.apache.kafka.streams.processor.api.ContextualProcessor; -import org.apache.kafka.streams.processor.api.FixedKeyRecord; -import org.dependencytrack.event.kafka.KafkaEventDispatcher; -import org.dependencytrack.event.kafka.KafkaEventHeaders; -import org.dependencytrack.event.kafka.KafkaUtil; -import org.dependencytrack.model.AnalysisJustification; -import org.dependencytrack.model.AnalysisResponse; -import org.dependencytrack.model.AnalysisState; -import org.dependencytrack.model.AnalyzerIdentity; -import org.dependencytrack.model.Severity; -import org.dependencytrack.model.Vulnerability; -import org.dependencytrack.model.VulnerabilityAlias; -import org.dependencytrack.model.VulnerabilityAnalysisLevel; -import org.dependencytrack.model.mapping.PolicyProtoMapper; -import org.dependencytrack.notification.NotificationConstants; -import org.dependencytrack.notification.NotificationGroup; -import org.dependencytrack.notification.NotificationScope; -import org.dependencytrack.parser.dependencytrack.ModelConverterCdxToVuln; -import org.dependencytrack.persistence.QueryManager; -import org.dependencytrack.persistence.jdbi.NotificationSubjectDao; -import org.dependencytrack.policy.vulnerability.VulnerabilityPolicy; -import org.dependencytrack.policy.vulnerability.VulnerabilityPolicyEvaluator; -import org.dependencytrack.policy.vulnerability.VulnerabilityPolicyRating; -import org.dependencytrack.proto.notification.v1.Group; -import org.dependencytrack.proto.vulnanalysis.v1.ScanKey; -import org.dependencytrack.proto.vulnanalysis.v1.ScanResult; -import org.dependencytrack.proto.vulnanalysis.v1.ScanStatus; -import org.dependencytrack.proto.vulnanalysis.v1.Scanner; -import org.dependencytrack.proto.vulnanalysis.v1.ScannerResult; -import org.dependencytrack.util.AnalysisCommentFormatter.AnalysisCommentField; -import org.dependencytrack.util.PersistenceUtil; -import org.dependencytrack.util.PersistenceUtil.Differ; -import org.jdbi.v3.core.mapper.reflect.ColumnName; -import org.jdbi.v3.sqlobject.config.RegisterBeanMapper; -import org.jdbi.v3.sqlobject.config.RegisterConstructorMapper; -import org.jdbi.v3.sqlobject.customizer.BindBean; -import org.jdbi.v3.sqlobject.customizer.BindMethods; -import org.jdbi.v3.sqlobject.statement.GetGeneratedKeys; -import org.jdbi.v3.sqlobject.statement.SqlBatch; -import org.jdbi.v3.sqlobject.statement.SqlQuery; - -import javax.jdo.Query; -import javax.ws.rs.core.MultivaluedHashMap; -import java.util.ArrayList; -import java.util.Collection; -import 
java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Objects; -import java.util.ServiceLoader; -import java.util.Set; -import java.util.UUID; -import java.util.function.Function; -import java.util.stream.Collectors; - -import static org.apache.commons.lang3.StringUtils.isNotBlank; -import static org.datanucleus.PropertyNames.PROPERTY_PERSISTENCE_BY_REACHABILITY_AT_COMMIT; -import static org.datanucleus.PropertyNames.PROPERTY_RETAIN_VALUES; -import static org.dependencytrack.common.ConfigKey.VULNERABILITY_POLICY_ANALYSIS_ENABLED; -import static org.dependencytrack.parser.dependencytrack.ModelConverterCdxToVuln.convert; -import static org.dependencytrack.persistence.jdbi.JdbiFactory.jdbi; -import static org.dependencytrack.proto.notification.v1.Group.GROUP_NEW_VULNERABILITY; -import static org.dependencytrack.proto.notification.v1.Group.GROUP_NEW_VULNERABLE_DEPENDENCY; -import static org.dependencytrack.proto.notification.v1.Group.GROUP_PROJECT_AUDIT_CHANGE; -import static org.dependencytrack.proto.notification.v1.Level.LEVEL_INFORMATIONAL; -import static org.dependencytrack.proto.notification.v1.Scope.SCOPE_PORTFOLIO; -import static org.dependencytrack.proto.vulnanalysis.v1.ScanStatus.SCAN_STATUS_FAILED; -import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_INTERNAL; -import static org.dependencytrack.util.AnalysisCommentFormatter.formatComment; -import static org.dependencytrack.util.NotificationUtil.generateNotificationContent; -import static org.dependencytrack.util.NotificationUtil.generateNotificationTitle; -import static org.dependencytrack.util.NotificationUtil.generateTitle; -import static org.dependencytrack.util.VulnerabilityUtil.canBeMirrored; -import static org.dependencytrack.util.VulnerabilityUtil.isAuthoritativeSource; -import static org.dependencytrack.util.VulnerabilityUtil.isMirroringEnabled; - -/** - * A {@link ContextualProcessor} responsible for processing {@link ScanResult}s. - */ -public class VulnerabilityScanResultProcessor extends ContextualFixedKeyProcessor { - - private static final Logger LOGGER = Logger.getLogger(VulnerabilityScanResultProcessor.class); - private static final Timer TIMER = Timer.builder("vuln_scan_result_processing") - .description("Time taken to process vulnerability scan results") - .register(Metrics.getRegistry()); - - private final KafkaEventDispatcher eventDispatcher = new KafkaEventDispatcher(); - private final VulnerabilityPolicyEvaluator vulnPolicyEvaluator; - - public VulnerabilityScanResultProcessor() { - this(Config.getInstance().getPropertyAsBoolean(VULNERABILITY_POLICY_ANALYSIS_ENABLED) - ? ServiceLoader.load(VulnerabilityPolicyEvaluator.class).findFirst().orElseThrow() - : null); - } - - VulnerabilityScanResultProcessor(final VulnerabilityPolicyEvaluator vulnPolicyEvaluator) { - this.vulnPolicyEvaluator = vulnPolicyEvaluator; - } - - @Override - public void process(final FixedKeyRecord record) { - final ScanKey scanKey = record.key(); - final ScanResult result = record.value(); - final UUID componentUuid = UUID.fromString(scanKey.getComponentUuid()); - final VulnerabilityAnalysisLevel analysisLevel = determineAnalysisLevel(record); - final boolean isNewComponent = determineIsComponentNew(record); - - final Timer.Sample timerSample = Timer.start(); - try (final var qm = new QueryManager()) { - // Do not unload fields upon commit (why is this even the default WTF). 
- qm.getPersistenceManager().setProperty(PROPERTY_RETAIN_VALUES, "true"); - qm.getPersistenceManager().setProperty(PROPERTY_PERSISTENCE_BY_REACHABILITY_AT_COMMIT, "false"); - - final Component component = jdbi(qm).withExtension(Dao.class, dao -> dao.getComponentByUuid(componentUuid)); - if (component == null) { - LOGGER.warn("Received result for component %s, but it does not exist (scanKey: %s)" - .formatted(componentUuid, prettyPrint(scanKey))); - return; - } - - for (final ScannerResult scannerResult : result.getScannerResultsList()) { - processScannerResult(qm, component, scanKey, scannerResult, analysisLevel, isNewComponent); - } - } catch (Exception e) { - LOGGER.error("Failed to process scan result for component %s (scanKey: %s)" - .formatted(componentUuid, prettyPrint(scanKey)), e); - } finally { - timerSample.stop(TIMER); - context().forward(record); - } - } - - private void processScannerResult(final QueryManager qm, final Component component, - final ScanKey scanKey, final ScannerResult scannerResult, - final VulnerabilityAnalysisLevel analysisLevel, - final boolean isNewComponent) { - if (scannerResult.getStatus() == SCAN_STATUS_FAILED) { - final var message = "Scan of component %s with %s failed (scanKey: %s): %s" - .formatted(component.uuid(), scannerResult.getScanner(), prettyPrint(scanKey), scannerResult.getFailureReason()); - eventDispatcher.dispatchNotification(new Notification() - .scope(NotificationScope.SYSTEM) - .group(NotificationGroup.ANALYZER) - .level(NotificationLevel.ERROR) - .title(NotificationConstants.Title.ANALYZER_ERROR) - .content(message)); - LOGGER.warn(message); - return; - } else if (scannerResult.getStatus() != ScanStatus.SCAN_STATUS_SUCCESSFUL) { - LOGGER.warn("Unable to process results from %s with status %s; Dropping record (scanKey: %s)" - .formatted(scannerResult.getScanner(), scannerResult.getStatus(), prettyPrint(scanKey))); - return; - } - - final Set syncedVulns = syncVulnerabilities(qm, scanKey, scannerResult); - LOGGER.debug("Synchronized %d vulnerabilities reported by %s for %s (scanKey: %s)" - .formatted(syncedVulns.size(), scannerResult.getScanner(), scanKey.getComponentUuid(), prettyPrint(scanKey))); - - final Map matchedPoliciesByVulnUuid = maybeEvaluateVulnPolicies(component, syncedVulns); - LOGGER.debug("Identified policy matches for %d/%d vulnerabilities (scanKey: %s)" - .formatted(matchedPoliciesByVulnUuid.size(), syncedVulns.size(), prettyPrint(scanKey))); - - final List newVulnUuids = synchronizeFindingsAndAnalyses(qm, component, syncedVulns, - scannerResult.getScanner(), matchedPoliciesByVulnUuid); - LOGGER.debug("Identified %d new vulnerabilities for %s with %s (scanKey: %s)" - .formatted(newVulnUuids.size(), scanKey.getComponentUuid(), scannerResult.getScanner(), prettyPrint(scanKey))); - - maybeSendNotifications(qm, component, isNewComponent, analysisLevel, newVulnUuids); - } - - /** - * Synchronize vulnerabilities reported in a given {@link ScannerResult} with the datastore. 
- * - * @param qm The {@link QueryManager} to use - * @param scanKey The {@link ScanKey} associated with the {@link ScannerResult} - * @param scannerResult The {@link ScannerResult} to synchronize vulnerabilities from - * @return A {@link Set} of synchronized {@link Vulnerability}s - */ - private Set syncVulnerabilities(final QueryManager qm, final ScanKey scanKey, final ScannerResult scannerResult) { - final var syncedVulns = new HashSet(); - - for (final org.cyclonedx.proto.v1_4.Vulnerability reportedVuln : scannerResult.getBom().getVulnerabilitiesList()) { - final Vulnerability vuln; - try { - vuln = ModelConverterCdxToVuln.convert(qm, scannerResult.getBom(), reportedVuln, true); - } catch (RuntimeException e) { - LOGGER.error("Failed to convert vulnerability %s/%s (reported by %s for component %s) to internal model (scanKey: %s)" - .formatted(reportedVuln.getSource(), reportedVuln.getId(), scannerResult.getScanner(), scanKey.getComponentUuid(), prettyPrint(scanKey)), e); - continue; - } - - try { - final Vulnerability syncedVuln = syncVulnerability(qm, vuln, scannerResult.getScanner()); - - // Detach vulnerabilities from JDO persistence context. - // We do not want to trigger any DB interactions by accessing their fields later. - // Note that even PersistenceManager#detachCopy will load / unload fields based - // on the current FetchPlan. But we just want to keep the data we already have, - // and #makeTransientAll does exactly that. - qm.getPersistenceManager().makeTransient(syncedVuln); - - if (vuln.getAliases() != null && !vuln.getAliases().isEmpty()) { - final var syncedAliases = new ArrayList(); - for (VulnerabilityAlias alias : vuln.getAliases()) { - final VulnerabilityAlias syncedAlias = qm.synchronizeVulnerabilityAlias(alias); - qm.getPersistenceManager().makeTransient(syncedAlias); - syncedAliases.add(syncedAlias); - } - syncedVuln.setAliases(syncedAliases); - } - - syncedVulns.add(syncedVuln); - } catch (RuntimeException e) { - // Use a broad catch here, so we can still try to process other - // vulnerabilities, even though processing one of them failed. - - LOGGER.warn("Failed to synchronize vulnerability %s/%s (reported by %s for component %s; scanKey: %s)" - .formatted(vuln.getSource(), vuln.getVulnId(), scannerResult.getScanner(), scanKey.getComponentUuid(), prettyPrint(scanKey)), e); - } - } - - return syncedVulns; - } - - /** - * Synchronize a given {@link Vulnerability} as reported by a given {@link Scanner} with the datastore. - *

- * This method differs from {@link QueryManager#synchronizeVulnerability(Vulnerability, boolean)} in that it expects - * an active {@link javax.jdo.Transaction}, and only calls setters of existing vulnerabilities when the respective - * value actually changed, saving network round-trips. - * - * @param qm The {@link QueryManager} to use - * @param vuln The {@link Vulnerability} to synchronize - * @param scanner The {@link AnalyzerIdentity} that reported the vulnerability - * @return The synchronized {@link Vulnerability} - * @throws IllegalStateException When no {@link javax.jdo.Transaction} is active - * @throws NoSuchElementException When the reported vulnerability is internal, but does not exist in the datastore - */ - private Vulnerability syncVulnerability(final QueryManager qm, final Vulnerability vuln, final Scanner scanner) { - // TODO: Refactor this to use JDBI instead. - // It is possible that the same vulnerability is reported for multiple components in parallel, - // causing unique constraint violations when attempting to INSERT into the VULNERABILITY table. - // In such cases, we can get away with simply retrying to SELECT or INSERT again. - return qm.runInRetryableTransaction(() -> { - final Vulnerability existingVuln; - final Query query = qm.getPersistenceManager().newQuery(Vulnerability.class); - try { - query.setFilter("vulnId == :vulnId && source == :source"); - query.setParameters(vuln.getVulnId(), vuln.getSource()); - existingVuln = query.executeUnique(); - } finally { - query.closeAll(); - } - - if (existingVuln == null) { - if (Vulnerability.Source.INTERNAL.name().equals(vuln.getSource())) { - throw new NoSuchElementException("An internal vulnerability with ID %s does not exist".formatted(vuln.getVulnId())); - } - - return qm.getPersistenceManager().makePersistent(vuln); - } - - if (canUpdateVulnerability(existingVuln, scanner)) { - final var differ = new Differ<>(existingVuln, vuln); - - // TODO: Consider using something like javers to get a rich diff of WHAT changed; https://github.com/javers/javers - differ.applyIfChanged("title", Vulnerability::getTitle, existingVuln::setTitle); - differ.applyIfChanged("subTitle", Vulnerability::getSubTitle, existingVuln::setSubTitle); - differ.applyIfChanged("description", Vulnerability::getDescription, existingVuln::setDescription); - differ.applyIfChanged("detail", Vulnerability::getDetail, existingVuln::setDetail); - differ.applyIfChanged("recommendation", Vulnerability::getRecommendation, existingVuln::setRecommendation); - differ.applyIfChanged("references", Vulnerability::getReferences, existingVuln::setReferences); - differ.applyIfChanged("credits", Vulnerability::getCredits, existingVuln::setCredits); - differ.applyIfChanged("created", Vulnerability::getCreated, existingVuln::setCreated); - differ.applyIfChanged("published", Vulnerability::getPublished, existingVuln::setPublished); - differ.applyIfChanged("updated", Vulnerability::getUpdated, existingVuln::setUpdated); - differ.applyIfChanged("cwes", Vulnerability::getCwes, existingVuln::setCwes); - // Calling setSeverity nulls all CVSS and OWASP RR fields. getSeverity calculates the severity on-the-fly, - // and will return UNASSIGNED even when no severity is set explicitly. Thus, calling setSeverity - // must happen before CVSS and OWASP RR fields are set, to avoid null-ing them again. 
- differ.applyIfChanged("severity", Vulnerability::getSeverity, existingVuln::setSeverity); - differ.applyIfChanged("cvssV2BaseScore", Vulnerability::getCvssV2BaseScore, existingVuln::setCvssV2BaseScore); - differ.applyIfChanged("cvssV2ImpactSubScore", Vulnerability::getCvssV2ImpactSubScore, existingVuln::setCvssV2ImpactSubScore); - differ.applyIfChanged("cvssV2ExploitabilitySubScore", Vulnerability::getCvssV2ExploitabilitySubScore, existingVuln::setCvssV2ExploitabilitySubScore); - differ.applyIfChanged("cvssV2Vector", Vulnerability::getCvssV2Vector, existingVuln::setCvssV2Vector); - differ.applyIfChanged("cvssv3BaseScore", Vulnerability::getCvssV3BaseScore, existingVuln::setCvssV3BaseScore); - differ.applyIfChanged("cvssV3ImpactSubScore", Vulnerability::getCvssV3ImpactSubScore, existingVuln::setCvssV3ImpactSubScore); - differ.applyIfChanged("cvssV3ExploitabilitySubScore", Vulnerability::getCvssV3ExploitabilitySubScore, existingVuln::setCvssV3ExploitabilitySubScore); - differ.applyIfChanged("cvssV3Vector", Vulnerability::getCvssV3Vector, existingVuln::setCvssV3Vector); - differ.applyIfChanged("owaspRRLikelihoodScore", Vulnerability::getOwaspRRLikelihoodScore, existingVuln::setOwaspRRLikelihoodScore); - differ.applyIfChanged("owaspRRTechnicalImpactScore", Vulnerability::getOwaspRRTechnicalImpactScore, existingVuln::setOwaspRRTechnicalImpactScore); - differ.applyIfChanged("owaspRRBusinessImpactScore", Vulnerability::getOwaspRRBusinessImpactScore, existingVuln::setOwaspRRBusinessImpactScore); - differ.applyIfChanged("owaspRRVector", Vulnerability::getOwaspRRVector, existingVuln::setOwaspRRVector); - // Aliases of existingVuln will always be null, as they'd have to be fetched separately. - // Synchronization of aliases is performed after synchronizing the vulnerability. - // updated |= applyIfChanged(existingVuln, vuln, Vulnerability::getAliases, existingVuln::setAliases); - - differ.applyIfChanged("vulnerableVersions", Vulnerability::getVulnerableVersions, existingVuln::setVulnerableVersions); - differ.applyIfChanged("patchedVersions", Vulnerability::getPatchedVersions, existingVuln::setPatchedVersions); - // EPSS is an additional enrichment that no scanner currently provides. - // We don't want EPSS scores of CVEs to be purged just because the CVE information came from e.g. OSS Index. - differ.applyIfNonNullAndChanged("epssScore", Vulnerability::getEpssScore, existingVuln::setEpssScore); - differ.applyIfNonNullAndChanged("epssPercentile", Vulnerability::getEpssPercentile, existingVuln::setEpssPercentile); - - if (!differ.getDiffs().isEmpty()) { - // TODO: Send a notification? 
- // (But notifications should only be sent if the transaction was committed) - // TODO: Reduce to DEBUG; It's set to INFO for testing - LOGGER.info("Vulnerability %s/%s was updated by %s: %s".formatted(vuln.getSource(), vuln.getVulnId(), scanner, differ.getDiffs())); - } - } - - return existingVuln; - }, PersistenceUtil::isUniqueConstraintViolation); - } - - private Map maybeEvaluateVulnPolicies(final Component component, final Collection vulns) { - if (vulnPolicyEvaluator == null) { - return Collections.emptyMap(); - } - - final var policyProject = org.dependencytrack.proto.policy.v1.Project.newBuilder() - .setUuid(component.projectUuid().toString()) - .build(); - final var policyComponent = org.dependencytrack.proto.policy.v1.Component.newBuilder() - .setUuid(component.uuid().toString()) - .build(); - final List policyVulns = vulns.stream() - .map(PolicyProtoMapper::mapToProto) - .toList(); - - return vulnPolicyEvaluator.evaluate(policyVulns, policyComponent, policyProject); - } - - /** - * Associate a given {@link Collection} of {@link Vulnerability}s with a given {@link Component}, - * evaluate applicable {@link VulnerabilityPolicy}s, and apply the resulting analyses. - *

- * If a {@link Vulnerability} was not previously associated with the {@link Component}, - * a {@link FindingAttribution} will be created for the {@link Scanner}. - * - * @param qm The {@link QueryManager} to use - * @param component The {@link Component} to associate with - * @param vulns The {@link Vulnerability}s to associate with - * @param scanner The {@link Scanner} that identified the association - * @param policiesByVulnUuid Matched {@link VulnerabilityPolicy}s grouped by {@link Vulnerability#getUuid()} - * @return A {@link List} of {@link Vulnerability}s, that were not previously associated with the {@link Component}, - * and which have not been suppressed via {@link VulnerabilityPolicy}. - */ - private List synchronizeFindingsAndAnalyses(final QueryManager qm, final Component component, - final Collection vulns, final Scanner scanner, - final Map policiesByVulnUuid) { - return jdbi(qm).inTransaction(jdbiHandle -> { - final var dao = jdbiHandle.attach(Dao.class); - - // Bulk-create new findings and corresponding scanner attributions. - final List newFindingVulnIds = dao.createFindings(component, vulns); - final List findingAttributions = newFindingVulnIds.stream() - .map(vulnId -> new FindingAttribution(vulnId, component.id(), component.projectId(), - convert(scanner).name(), UUID.randomUUID())) - .toList(); - dao.createFindingAttributions(findingAttributions); - - return maybeApplyPolicyAnalyses(qm, dao, component, vulns, newFindingVulnIds, policiesByVulnUuid); - }); - } - - /** - * Apply analyses of matched {@link VulnerabilityPolicy}s. Do nothing when no policies matched. - * - * @param qm - * @param dao The {@link Dao} to use for persistence operations - * @param component The {@link Component} to apply analyses for - * @param vulns The {@link Vulnerability}s identified for the {@link Component} - * @param newFindingVulnIds IDs of {@link Vulnerability}s that newly affect the {@link Component} - * @param policiesByVulnUuid Matched {@link VulnerabilityPolicy}s grouped by {@link Vulnerability#getUuid()} - * @return A {@link List} of {@link Vulnerability}s, that were not previously associated with the {@link Component}, - * and which have not been suppressed via {@link VulnerabilityPolicy}. - */ - private List maybeApplyPolicyAnalyses(QueryManager qm, final Dao dao, final Component component, final Collection vulns, - final List newFindingVulnIds, Map policiesByVulnUuid) { - if (vulns.isEmpty()) { - return Collections.emptyList(); - } - - // Mark vulnerability UUIDs without policy match with an explicit "null" policy. - policiesByVulnUuid = new HashMap<>(policiesByVulnUuid); - for (final Vulnerability vuln : vulns) { - if (!policiesByVulnUuid.containsKey(vuln.getUuid())) { - policiesByVulnUuid.put(vuln.getUuid(), null); - } - } - - // Index vulnerabilities by ID and UUID for more efficient lookups. - final var vulnById = new HashMap(); - final var vulnByUuid = new HashMap(); - for (final Vulnerability vuln : vulns) { - vulnById.put(vuln.getId(), vuln); - vulnByUuid.put(vuln.getUuid(), vuln); - } - - // For all vulnerabilities with matching policies, bulk-fetch existing analyses. - // Index them by vulnerability UUID for more efficient access. 
- final Map existingAnalyses = dao.getAnalyses(component, policiesByVulnUuid.keySet()).stream() - .collect(Collectors.toMap(Analysis::getVulnUuid, Function.identity())); - - final var analysesToCreateOrUpdate = new ArrayList(); - final var projectAuditChangeNotifications = new ArrayList(); - final var analysisCommentsByVulnId = new MultivaluedHashMap(); - - for (final Map.Entry vulnUuidAndPolicy : policiesByVulnUuid.entrySet()) { - final Vulnerability vuln = vulnByUuid.get(vulnUuidAndPolicy.getKey()); - final VulnerabilityPolicy policy = vulnUuidAndPolicy.getValue(); - final Analysis policyAnalysis; - try { - policyAnalysis = Analysis.fromPolicy(policy); - } catch (IllegalArgumentException e) { - LOGGER.warn("Unable to apply policy %s as it was found to be invalid".formatted(policy.getName()), e); - continue; - } - final Analysis existingAnalysis = existingAnalyses.get(vuln.getUuid()); - if (policy != null && existingAnalysis == null) { - policyAnalysis.setComponentId(component.id()); - policyAnalysis.setProjectId(component.projectId()); - policyAnalysis.setVulnId(vuln.getId()); - policyAnalysis.setVulnUuid(vuln.getUuid()); - - // We'll create comments for analysisId=null for now, as the Analysis we're referring - // to hasn't been created yet. The analysisId is populated later, after bulk upserting - // all analyses. - final var commentFactory = new AnalysisCommentFactory(null, policy); - if (policyAnalysis.getState() != null) { - commentFactory.createComment(formatComment(AnalysisCommentField.STATE, null, policyAnalysis.getState())); - } - if (policyAnalysis.getJustification() != null) { - commentFactory.createComment(formatComment(AnalysisCommentField.JUSTIFICATION, null, policyAnalysis.getJustification())); - } - if (policyAnalysis.getResponse() != null) { - commentFactory.createComment(formatComment(AnalysisCommentField.RESPONSE, null, policyAnalysis.getResponse())); - } - if (policyAnalysis.getDetails() != null) { - commentFactory.createComment(formatComment(AnalysisCommentField.DETAILS, null, policyAnalysis.getDetails())); - } - if (policyAnalysis.getSuppressed()) { - commentFactory.createComment(formatComment(AnalysisCommentField.SUPPRESSED, null, policyAnalysis.getSuppressed())); - } - if (policyAnalysis.getSeverity() != null) { - commentFactory.createComment(formatComment(AnalysisCommentField.SEVERITY, vuln.getSeverity(), policyAnalysis.getSeverity())); - } - if (policyAnalysis.getCvssV2Vector() != null) { - commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV2_VECTOR, null, policyAnalysis.getCvssV2Vector())); - } - if (policyAnalysis.getCvssV2Score() != null) { - commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV2_SCORE, null, policyAnalysis.getCvssV2Score())); - } - if (policyAnalysis.getCvssV3Vector() != null) { - commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV3_VECTOR, null, policyAnalysis.getCvssV3Vector())); - } - if (policyAnalysis.getCvssV3Score() != null) { - commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV3_SCORE, null, policyAnalysis.getCvssV3Score())); - } - if (policyAnalysis.getOwaspVector() != null) { - commentFactory.createComment(formatComment(AnalysisCommentField.OWASP_VECTOR, null, policyAnalysis.getOwaspVector())); - } - if (policyAnalysis.getOwaspScore() != null) { - commentFactory.createComment(formatComment(AnalysisCommentField.OWASP_SCORE, null, policyAnalysis.getOwaspScore())); - } - analysesToCreateOrUpdate.add(policyAnalysis); - 
analysisCommentsByVulnId.addAll(policyAnalysis.getVulnId(), commentFactory.getComments());
-            } else if (existingAnalysis != null && (policy != null || existingAnalysis.getVulnPolicyName() != null)) {
-                boolean shouldUpdate = false;
-                boolean analysisStateChange = false;
-                boolean suppressionChange = false;
-                final var commentFactory = new AnalysisCommentFactory(existingAnalysis.getId(), policy);
-                if (!Objects.equals(existingAnalysis.getState(), policyAnalysis.getState())) {
-                    commentFactory.createComment(formatComment(AnalysisCommentField.STATE,
-                            existingAnalysis.getState(), policyAnalysis.getState()));
-
-                    existingAnalysis.setState(policyAnalysis.getState());
-                    shouldUpdate = true;
-                    analysisStateChange = true;
-                }
-                if (!Objects.equals(existingAnalysis.getJustification(), policyAnalysis.getJustification())) {
-                    commentFactory.createComment(formatComment(AnalysisCommentField.JUSTIFICATION,
-                            existingAnalysis.getJustification(), policyAnalysis.getJustification()));
-
-                    existingAnalysis.setJustification(policyAnalysis.getJustification());
-                    shouldUpdate = true;
-                }
-                if (!Objects.equals(existingAnalysis.getResponse(), policyAnalysis.getResponse())) {
-                    commentFactory.createComment(formatComment(AnalysisCommentField.RESPONSE,
-                            existingAnalysis.getResponse(), policyAnalysis.getResponse()));
-
-                    existingAnalysis.setResponse(policyAnalysis.getResponse());
-                    shouldUpdate = true;
-                }
-                if (!Objects.equals(existingAnalysis.getDetails(), policyAnalysis.getDetails())) {
-                    commentFactory.createComment(formatComment(AnalysisCommentField.DETAILS,
-                            existingAnalysis.getDetails(), policyAnalysis.getDetails()));
-
-                    existingAnalysis.setDetails(policyAnalysis.getDetails());
-                    shouldUpdate = true;
-                }
-                if (existingAnalysis.getSuppressed() == null || (existingAnalysis.getSuppressed() != policyAnalysis.getSuppressed())) {
-                    commentFactory.createComment(formatComment(AnalysisCommentField.SUPPRESSED,
-                            existingAnalysis.getSuppressed(), policyAnalysis.getSuppressed()));
-
-                    existingAnalysis.setSuppressed(policyAnalysis.getSuppressed());
-                    shouldUpdate = true;
-                    suppressionChange = true;
-                }
-                if (!Objects.equals(existingAnalysis.getSeverity(), policyAnalysis.getSeverity())) {
-                    commentFactory.createComment(formatComment(AnalysisCommentField.SEVERITY,
-                            existingAnalysis.getSeverity(), policyAnalysis.getSeverity()));
-
-                    existingAnalysis.setSeverity(policyAnalysis.getSeverity());
-                    shouldUpdate = true;
-                }
-                if (!Objects.equals(existingAnalysis.getCvssV2Vector(), policyAnalysis.getCvssV2Vector())) {
-                    commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV2_VECTOR,
-                            existingAnalysis.getCvssV2Vector(), policyAnalysis.getCvssV2Vector()));
-
-                    existingAnalysis.setCvssV2Vector(policyAnalysis.getCvssV2Vector());
-                    shouldUpdate = true;
-                }
-                if (!Objects.equals(existingAnalysis.getCvssV2Score(), policyAnalysis.getCvssV2Score())) {
-                    commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV2_SCORE,
-                            existingAnalysis.getCvssV2Score(), policyAnalysis.getCvssV2Score()));
-
-                    existingAnalysis.setCvssV2Score(policyAnalysis.getCvssV2Score());
-                    shouldUpdate = true;
-                }
-                if (!Objects.equals(existingAnalysis.getCvssV3Vector(), policyAnalysis.getCvssV3Vector())) {
-                    commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV3_VECTOR,
-                            existingAnalysis.getCvssV3Vector(), policyAnalysis.getCvssV3Vector()));
-
-                    existingAnalysis.setCvssV3Vector(policyAnalysis.getCvssV3Vector());
-                    shouldUpdate = true;
-                }
-                if (!Objects.equals(existingAnalysis.getCvssV3Score(), policyAnalysis.getCvssV3Score())) {
-                    commentFactory.createComment(formatComment(AnalysisCommentField.CVSSV3_SCORE,
-                            existingAnalysis.getCvssV3Score(), policyAnalysis.getCvssV3Score()));
-
-                    existingAnalysis.setCvssV3Score(policyAnalysis.getCvssV3Score());
-                    shouldUpdate = true;
-                }
-                if (!Objects.equals(existingAnalysis.getOwaspVector(), policyAnalysis.getOwaspVector())) {
-                    commentFactory.createComment(formatComment(AnalysisCommentField.OWASP_VECTOR,
-                            existingAnalysis.getOwaspVector(), policyAnalysis.getOwaspVector()));
-
-                    existingAnalysis.setOwaspVector(policyAnalysis.getOwaspVector());
-                    shouldUpdate = true;
-                }
-                if (!Objects.equals(existingAnalysis.getOwaspScore(), policyAnalysis.getOwaspScore())) {
-                    commentFactory.createComment(formatComment(AnalysisCommentField.OWASP_SCORE,
-                            existingAnalysis.getOwaspScore(), policyAnalysis.getOwaspScore()));
-
-                    existingAnalysis.setOwaspScore(policyAnalysis.getOwaspScore());
-                    shouldUpdate = true;
-                }
-                if (shouldUpdate) {
-                    existingAnalysis.setVulnPolicyName(policy != null ? policy.getName() : null);
-                    analysesToCreateOrUpdate.add(existingAnalysis);
-                    analysisCommentsByVulnId.addAll(existingAnalysis.getVulnId(), commentFactory.getComments());
-                    var projectAuditChangeNotification = createProjectAuditChangeNotification(qm, component, vuln, policyAnalysis, analysisStateChange, suppressionChange);
-                    if (projectAuditChangeNotification != null) {
-                        projectAuditChangeNotifications.add(projectAuditChangeNotification);
-                    }
-                }
-            }
-
-            // If the finding was suppressed, do not report it as new.
-            if (Boolean.TRUE.equals(policyAnalysis.getSuppressed())) {
-                newFindingVulnIds.remove(vuln.getId());
-            }
-        }
-
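The per-field blocks above all repeat one compare-comment-apply shape. A minimal sketch of how that shape could be factored into a helper; the names (FieldUpdateSketch, applyIfChanged) are illustrative and not part of this patch:

import java.util.Objects;
import java.util.function.Consumer;

final class FieldUpdateSketch {

    // Runs the comment side effect and applies newValue via the setter,
    // but only when the old and new values actually differ. Returns
    // whether anything changed, mirroring the shouldUpdate flag above.
    static <T> boolean applyIfChanged(final T oldValue, final T newValue,
                                      final Consumer<T> setter,
                                      final Runnable commentAction) {
        if (Objects.equals(oldValue, newValue)) {
            return false;
        }
        commentAction.run();
        setter.accept(newValue);
        return true;
    }
}

Each block above would then reduce to a single shouldUpdate |= applyIfChanged(...) call.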
-        if (!analysesToCreateOrUpdate.isEmpty()) {
-            final List<CreatedAnalysis> createdAnalyses = dao.createOrUpdateAnalyses(analysesToCreateOrUpdate);
-            // Comments for new analyses do not have an analysis ID set yet, as that ID was not known prior
-            // to inserting the respective analysis record. Enrich comments with analysis IDs now that we know them.
-            for (final CreatedAnalysis createdAnalysis : createdAnalyses) {
-                analysisCommentsByVulnId.computeIfPresent(createdAnalysis.vulnId(),
-                        (vulnId, comments) -> comments.stream()
-                                .map(comment -> new AnalysisComment(createdAnalysis.id(), comment.comment(), comment.commenter()))
-                                .toList());
-            }
-            dao.createAnalysisComments(analysisCommentsByVulnId.values().stream().flatMap(Collection::stream).toList());
-        }
-
-        // Dispatch PROJECT_AUDIT_CHANGE notifications.
-        eventDispatcher.dispatchAllNotificationProtos(projectAuditChangeNotifications);
-
-        return vulnById.entrySet().stream()
-                .filter(entry -> newFindingVulnIds.contains(entry.getKey()))
-                .map(Map.Entry::getValue)
-                .toList();
-    }
-
-    private org.dependencytrack.proto.notification.v1.Notification createProjectAuditChangeNotification(final QueryManager qm, final Component component, final Vulnerability vuln,
-                                                                                                        final Analysis policyAnalysis, final boolean analysisStateChange, final boolean suppressionChange) {
-        if (!analysisStateChange && !suppressionChange) {
-            return null;
-        }
-
-        return jdbi(qm)
-                .withExtension(NotificationSubjectDao.class,
-                        dao -> dao.getForProjectAuditChange(component.uuid(), vuln.getUuid(), policyAnalysis.getState(), policyAnalysis.getSuppressed()))
-                .map(subject -> org.dependencytrack.proto.notification.v1.Notification.newBuilder()
-                        .setScope(SCOPE_PORTFOLIO)
-                        .setGroup(GROUP_PROJECT_AUDIT_CHANGE)
-                        .setLevel(LEVEL_INFORMATIONAL)
-                        .setTimestamp(Timestamps.now())
-                        .setTitle(generateTitle(policyAnalysis.getState(), policyAnalysis.getSuppressed(), analysisStateChange, suppressionChange))
-                        .setContent("An analysis decision was made to a finding affecting a project")
-                        .setSubject(Any.pack(subject))
-                        .build())
-                .orElse(null);
-    }
-
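The batch write above depends on collecting database-generated analysis IDs before the buffered comments can be inserted. A minimal, self-contained Jdbi 3 sketch of that insert-then-collect-generated-keys pattern, using a placeholder table and columns that are not part of this patch:

import java.util.List;

import org.jdbi.v3.core.Handle;
import org.jdbi.v3.core.Jdbi;
import org.jdbi.v3.core.statement.PreparedBatch;

final class GeneratedKeysSketch {

    // Placeholder table and column names, for illustration only.
    static List<Long> insertNames(final Jdbi jdbi, final List<String> names) {
        try (Handle handle = jdbi.open()) {
            final PreparedBatch batch = handle.prepareBatch(
                    "INSERT INTO \"EXAMPLE\" (\"NAME\") VALUES (:name)");
            for (final String name : names) {
                batch.bind("name", name).add();
            }
            // Generated keys only exist after the batch executes, which is
            // why dependent records (here: comments) are enriched afterwards.
            return batch.executeAndReturnGeneratedKeys("ID")
                    .mapTo(Long.class)
                    .list();
        }
    }
}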
-    /**
-     * Send {@link Group#GROUP_NEW_VULNERABLE_DEPENDENCY} and {@link Group#GROUP_NEW_VULNERABILITY} notifications
-     * for a given {@link Component}, if it was found to have at least one non-suppressed vulnerability.
-     *
-     * @param qm             The {@link QueryManager} to use
-     * @param component      The {@link Component} to send notifications for
-     * @param isNewComponent Whether {@code component} is new
-     * @param analysisLevel  The {@link VulnerabilityAnalysisLevel}
-     * @param newVulns       Newly identified {@link Vulnerability}s
-     */
-    private void maybeSendNotifications(final QueryManager qm, final Component component, final boolean isNewComponent,
-                                        final VulnerabilityAnalysisLevel analysisLevel, final List<Vulnerability> newVulns) {
-        if (newVulns.isEmpty()) {
-            return;
-        }
-
-        final Timestamp notificationTimestamp = Timestamps.now();
-        final var notifications = new ArrayList<org.dependencytrack.proto.notification.v1.Notification>();
-        jdbi(qm).useExtension(NotificationSubjectDao.class, dao -> {
-            if (isNewComponent) {
-                dao.getForNewVulnerableDependency(component.uuid())
-                        .map(subject -> org.dependencytrack.proto.notification.v1.Notification.newBuilder()
-                                .setScope(SCOPE_PORTFOLIO)
-                                .setGroup(GROUP_NEW_VULNERABLE_DEPENDENCY)
-                                .setLevel(LEVEL_INFORMATIONAL)
-                                .setTimestamp(notificationTimestamp)
-                                .setTitle(generateNotificationTitle(NotificationConstants.Title.NEW_VULNERABLE_DEPENDENCY, subject.getProject()))
-                                .setContent(generateNotificationContent(subject.getComponent(), subject.getVulnerabilitiesList()))
-                                .setSubject(Any.pack(subject))
-                                .build())
-                        .ifPresent(notifications::add);
-            }
-
-            dao.getForNewVulnerabilities(component.uuid(), newVulns.stream().map(Vulnerability::getUuid).toList(), analysisLevel).stream()
-                    .map(subject -> org.dependencytrack.proto.notification.v1.Notification.newBuilder()
-                            .setScope(SCOPE_PORTFOLIO)
-                            .setGroup(GROUP_NEW_VULNERABILITY)
-                            .setLevel(LEVEL_INFORMATIONAL)
-                            .setTimestamp(notificationTimestamp)
-                            .setTitle(generateNotificationTitle(NotificationConstants.Title.NEW_VULNERABILITY, subject.getProject()))
-                            .setContent(generateNotificationContent(subject.getVulnerability()))
-                            .setSubject(Any.pack(subject))
-                            .build())
-                    .forEach(notifications::add);
-        });
-
-        eventDispatcher.dispatchAllNotificationProtos(notifications);
-    }
-
-    private boolean canUpdateVulnerability(final Vulnerability vuln, final Scanner scanner) {
-        var canUpdate = true;
-
-        // Results from the internal scanner only contain vulnId and source, nothing else.
-        // As they only refer to existing vulnerabilities in the database, no update may be performed.
-        canUpdate &= scanner != SCANNER_INTERNAL;
-
-        // Internal vulnerabilities can only be updated via REST API.
-        canUpdate &= !Vulnerability.Source.INTERNAL.name().equals(vuln.getSource());
-
-        // If the scanner is also the authoritative source of the given vulnerability,
-        // it should be able to update it. This will be the case for the OSS Index scanner
-        // and sonatype-XXX vulnerabilities, for example.
-        canUpdate &= isAuthoritativeSource(vuln, convert(scanner))
-                // Alternatively, if the vulnerability could be mirrored, but mirroring
-                // is disabled, it is OK to override any existing data.
-                //
-                // Ideally, we'd track the data from all sources instead of just overriding
-                // it, but for now this will have to do.
-                || (canBeMirrored(vuln) && !isMirroringEnabled(vuln));
-
-        return canUpdate;
-    }
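canUpdateVulnerability accumulates its veto conditions with the non-short-circuiting &= operator. A tiny sketch (hypothetical values, not part of this patch) of how that accumulation behaves:

final class VetoAccumulationSketch {

    static boolean demo() {
        // Once any condition evaluates to false, canUpdate stays false;
        // later conditions can never re-enable the update.
        var canUpdate = true;
        canUpdate &= true;   // e.g. scanner is authoritative for this vulnerability
        canUpdate &= false;  // e.g. result came from the internal scanner
        canUpdate &= true;   // no effect anymore: canUpdate stays false
        return canUpdate;    // false
    }
}

Unlike &&, every right-hand side is still evaluated, so the accumulated expressions must be side-effect free.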
-
-    private static VulnerabilityAnalysisLevel determineAnalysisLevel(final FixedKeyRecord<ScanKey, ScanResult> record) {
-        return KafkaUtil.getEventHeader(record.headers(), KafkaEventHeaders.VULN_ANALYSIS_LEVEL)
-                .map(value -> {
-                    try {
-                        return VulnerabilityAnalysisLevel.valueOf(value);
-                    } catch (IllegalArgumentException e) {
-                        LOGGER.warn("The reported analysis level %s is invalid, assuming %s"
-                                .formatted(value, VulnerabilityAnalysisLevel.PERIODIC_ANALYSIS));
-                        return VulnerabilityAnalysisLevel.PERIODIC_ANALYSIS;
-                    }
-                })
-                .orElse(VulnerabilityAnalysisLevel.PERIODIC_ANALYSIS);
-    }
-
-    private static boolean determineIsComponentNew(final FixedKeyRecord<ScanKey, ScanResult> record) {
-        return KafkaUtil.getEventHeader(record.headers(), KafkaEventHeaders.IS_NEW_COMPONENT)
-                .map(Boolean::parseBoolean)
-                .orElse(false);
-    }
-
-    private static String prettyPrint(final ScanKey scanKey) {
-        return "%s/%s".formatted(scanKey.getScanToken(), scanKey.getComponentUuid());
-    }
-
-    public interface Dao {
-
-        @SqlQuery("""
-                SELECT
-                  "C"."ID" AS "id",
-                  "C"."UUID" AS "uuid",
-                  "P"."ID" AS "projectId",
-                  "P"."UUID" AS "projectUuid"
-                FROM
-                  "COMPONENT" AS "C"
-                INNER JOIN
-                  "PROJECT" AS "P" ON "P"."ID" = "C"."PROJECT_ID"
-                WHERE
-                  "C"."UUID" = (:uuid)::TEXT
-                """)
-        @RegisterConstructorMapper(Component.class)
-        Component getComponentByUuid(final UUID uuid);
-
-        @SqlBatch("""
-                INSERT INTO "COMPONENTS_VULNERABILITIES"
-                  ("COMPONENT_ID", "VULNERABILITY_ID")
-                VALUES
-                  (:component.id, :vuln.id)
-                ON CONFLICT DO NOTHING
-                RETURNING "VULNERABILITY_ID"
-                """)
-        @GetGeneratedKeys("VULNERABILITY_ID")
-        List<Long> createFindings(@BindMethods("component") final Component component, @BindBean("vuln") final Iterable<Vulnerability> vuln);
-
-        @SqlBatch("""
-                INSERT INTO "FINDINGATTRIBUTION"
-                  ("VULNERABILITY_ID", "COMPONENT_ID", "PROJECT_ID", "ANALYZERIDENTITY", "ATTRIBUTED_ON", "UUID")
-                VALUES
-                  (:vulnId, :componentId, :projectId, :analyzer, NOW(), (:uuid)::TEXT)
-                ON CONFLICT ("VULNERABILITY_ID", "COMPONENT_ID") DO NOTHING
-                """)
-        void createFindingAttributions(@BindMethods final Iterable<FindingAttribution> attribution);
-
-        @SqlQuery("""
-                SELECT
-                  "V"."ID" AS "vulnId",
-                  "V"."UUID" AS "vulnUuid",
-                  "A"."ID" AS "id",
-                  "A"."COMPONENT_ID" AS "componentId",
-                  "A"."PROJECT_ID" AS "projectId",
-                  "A"."STATE" AS "state",
-                  "A"."JUSTIFICATION" AS "justification",
-                  "A"."RESPONSE" AS "response",
-                  "A"."DETAILS" AS "details",
-                  "A"."SUPPRESSED" AS "suppressed",
-                  "A"."SEVERITY" AS "severity",
-                  "A"."CVSSV2VECTOR" AS "cvssV2Vector",
-                  "A"."CVSSV2SCORE" AS "cvssV2Score",
-                  "A"."CVSSV3VECTOR" AS "cvssV3Vector",
-                  "A"."CVSSV3SCORE" AS "cvssV3Score",
-                  "A"."OWASPVECTOR" AS "owaspVector",
-                  "A"."OWASPSCORE" AS "owaspScore",
-                  "VP"."NAME" AS "vulnPolicyName"
-                FROM
-                  "VULNERABILITY" AS "V"
-                INNER JOIN
-                  "ANALYSIS" AS "A" ON "A"."VULNERABILITY_ID" = "V"."ID"
-                LEFT JOIN
-                  "VULNERABILITY_POLICY" AS "VP" ON "VP"."ID" = "A"."VULNERABILITY_POLICY_ID"
-                WHERE
-                  "A"."COMPONENT_ID" = :component.id
-                  AND "V"."UUID" = ANY((:vulnUuids)::TEXT[])
-                """)
-        @RegisterBeanMapper(Analysis.class)
-        List<Analysis> getAnalyses(@BindMethods("component") final Component component, final Iterable<UUID> vulnUuids);
-
-        @SqlBatch("""
-                WITH "CTE_VULN_POLICY" AS (
-                  SELECT
-                    "ID"
-                  FROM
-                    "VULNERABILITY_POLICY"
-                  WHERE
-                    "NAME" = :vulnPolicyName
-                )
-                INSERT INTO "ANALYSIS"
-                  ("VULNERABILITY_ID", "COMPONENT_ID", "PROJECT_ID", "STATE", "JUSTIFICATION", "RESPONSE", "DETAILS",
-                   "SUPPRESSED", "SEVERITY", "CVSSV2VECTOR", "CVSSV2SCORE", "CVSSV3VECTOR", "CVSSV3SCORE",
"OWASPVECTOR", - "OWASPSCORE", "VULNERABILITY_POLICY_ID") - VALUES - (:vulnId, :componentId, :projectId, :state, :justification, :response, :details, :suppressed, - :severity, :cvssV2Vector, :cvssV2Score, :cvssV3Vector, :cvssV3Score, :owaspVector, :owaspScore, - (SELECT "ID" FROM "CTE_VULN_POLICY")) - ON CONFLICT ("VULNERABILITY_ID", "COMPONENT_ID", "PROJECT_ID") DO UPDATE - SET - "STATE" = :state, - "JUSTIFICATION" = :justification, - "RESPONSE" = :response, - "DETAILS" = :details, - "SUPPRESSED" = :suppressed, - "SEVERITY" = :severity, - "CVSSV2VECTOR" = :cvssV2Vector, - "CVSSV2SCORE" = :cvssV2Score, - "CVSSV3VECTOR" = :cvssV3Vector, - "CVSSV3SCORE" = :cvssV3Score, - "OWASPVECTOR" = :owaspVector, - "OWASPSCORE" = :owaspScore, - "VULNERABILITY_POLICY_ID" = (SELECT "ID" FROM "CTE_VULN_POLICY") - RETURNING "ID", "VULNERABILITY_ID" - """) - @GetGeneratedKeys({"ID", "VULNERABILITY_ID"}) - @RegisterConstructorMapper(CreatedAnalysis.class) - List createOrUpdateAnalyses(@BindBean final Iterable analysis); - - @SqlBatch(""" - INSERT INTO "ANALYSISCOMMENT" - ("ANALYSIS_ID", "TIMESTAMP", "COMMENT", "COMMENTER") - VALUES - (:analysisId, NOW(), :comment, :commenter) - """) - void createAnalysisComments(@BindMethods final Iterable comment); - - } - - public static class Analysis { - - private long id; - private long componentId; - private long projectId; - private long vulnId; - private UUID vulnUuid; - private String vulnPolicyName; - private AnalysisState state; - private AnalysisJustification justification; - private AnalysisResponse response; - private String details; - private Boolean suppressed; - private Severity severity; - private String cvssV2Vector; - private Double cvssV2Score; - private String cvssV3Vector; - private Double cvssV3Score; - private String owaspVector; - private Double owaspScore; - - private static Analysis fromPolicy(final VulnerabilityPolicy policy) { - final var analysis = new Analysis(); - if (policy == null) { - analysis.setState(AnalysisState.NOT_SET); - analysis.setSuppressed(false); - return analysis; - } - - analysis.setVulnPolicyName(policy.getName()); - if (policy.getAnalysis().getState() != null) { - analysis.setState(switch (policy.getAnalysis().getState()) { - case EXPLOITABLE -> AnalysisState.EXPLOITABLE; - case FALSE_POSITIVE -> AnalysisState.FALSE_POSITIVE; - case IN_TRIAGE -> AnalysisState.IN_TRIAGE; - case NOT_AFFECTED -> AnalysisState.NOT_AFFECTED; - case RESOLVED -> AnalysisState.RESOLVED; - }); - } else { - throw new IllegalArgumentException("Analysis of policy does not define a state"); - } - if (policy.getAnalysis().getJustification() != null) { - analysis.setJustification(switch (policy.getAnalysis().getJustification()) { - case CODE_NOT_PRESENT -> AnalysisJustification.CODE_NOT_PRESENT; - case CODE_NOT_REACHABLE -> AnalysisJustification.CODE_NOT_REACHABLE; - case PROTECTED_AT_PERIMETER -> AnalysisJustification.PROTECTED_AT_PERIMETER; - case PROTECTED_AT_RUNTIME -> AnalysisJustification.PROTECTED_AT_RUNTIME; - case PROTECTED_BY_COMPILER -> AnalysisJustification.PROTECTED_BY_COMPILER; - case PROTECTED_BY_MITIGATING_CONTROL -> AnalysisJustification.PROTECTED_BY_MITIGATING_CONTROL; - case REQUIRES_CONFIGURATION -> AnalysisJustification.REQUIRES_CONFIGURATION; - case REQUIRES_DEPENDENCY -> AnalysisJustification.REQUIRES_DEPENDENCY; - case REQUIRES_ENVIRONMENT -> AnalysisJustification.REQUIRES_ENVIRONMENT; - }); - } - if (policy.getAnalysis().getVendorResponse() != null) { - analysis.setResponse(switch (policy.getAnalysis().getVendorResponse()) { - case 
CAN_NOT_FIX -> AnalysisResponse.CAN_NOT_FIX; - case ROLLBACK -> AnalysisResponse.ROLLBACK; - case UPDATE -> AnalysisResponse.UPDATE; - case WILL_NOT_FIX -> AnalysisResponse.WILL_NOT_FIX; - case WORKAROUND_AVAILABLE -> AnalysisResponse.WORKAROUND_AVAILABLE; - }); - } - if (policy.getAnalysis().getDetails() != null) { - analysis.setDetails(policy.getAnalysis().getDetails()); - } - analysis.setSuppressed(policy.getAnalysis().isSuppress()); - - if (policy.getRatings() != null && !policy.getRatings().isEmpty()) { - if (policy.getRatings().size() > 3) { - throw new IllegalArgumentException("Policy defines more than three ratings"); - } - - final var methodsSeen = new HashSet(); - for (final VulnerabilityPolicyRating policyRating : policy.getRatings()) { - if (policyRating.getMethod() == null) { - throw new IllegalArgumentException("Rating #%d does not define a method" - .formatted(policy.getRatings().indexOf(policyRating))); - } - if (!methodsSeen.add(policyRating.getMethod())) { - throw new IllegalArgumentException("Rating method %s is defined more than once" - .formatted(policyRating.getMethod())); - } - if (policyRating.getSeverity() == null) { - throw new IllegalArgumentException("Rating #%d (%s) does not define a severity" - .formatted(policy.getRatings().indexOf(policyRating), policyRating.getMethod())); - } - - analysis.setSeverity(switch (policyRating.getSeverity()) { - case INFO -> Severity.INFO; - case LOW -> Severity.LOW; - case MEDIUM -> Severity.MEDIUM; - case HIGH -> Severity.HIGH; - case CRITICAL -> Severity.CRITICAL; - }); - switch (policyRating.getMethod()) { - case CVSSV2 -> { - analysis.setCvssV2Vector(policyRating.getVector()); - analysis.setCvssV2Score(policyRating.getScore()); - } - case CVSSV3 -> { - analysis.setCvssV3Vector(policyRating.getVector()); - analysis.setCvssV3Score(policyRating.getScore()); - } - case OWASP -> { - analysis.setOwaspVector(policyRating.getVector()); - analysis.setOwaspScore(policyRating.getScore()); - } - } - } - } - - return analysis; - } - - public long getId() { - return id; - } - - public void setId(final long id) { - this.id = id; - } - - public long getComponentId() { - return componentId; - } - - public void setComponentId(final long componentId) { - this.componentId = componentId; - } - - public long getProjectId() { - return projectId; - } - - public void setProjectId(final long projectId) { - this.projectId = projectId; - } - - public long getVulnId() { - return vulnId; - } - - public void setVulnId(final long vulnId) { - this.vulnId = vulnId; - } - - public UUID getVulnUuid() { - return vulnUuid; - } - - public void setVulnUuid(final UUID vulnUuid) { - this.vulnUuid = vulnUuid; - } - - public String getVulnPolicyName() { - return vulnPolicyName; - } - - public void setVulnPolicyName(final String vulnPolicyName) { - this.vulnPolicyName = vulnPolicyName; - } - - public AnalysisState getState() { - return state; - } - - public void setState(final AnalysisState state) { - this.state = state; - } - - public AnalysisJustification getJustification() { - return justification; - } - - public void setJustification(final AnalysisJustification justification) { - this.justification = justification; - } - - public AnalysisResponse getResponse() { - return response; - } - - public void setResponse(final AnalysisResponse response) { - this.response = response; - } - - public String getDetails() { - return details; - } - - public void setDetails(final String details) { - this.details = details; - } - - public Boolean getSuppressed() { - return 
suppressed; - } - - public void setSuppressed(final Boolean suppressed) { - this.suppressed = suppressed; - } - - public Severity getSeverity() { - return severity; - } - - public void setSeverity(final Severity severity) { - this.severity = severity; - } - - public String getCvssV2Vector() { - return cvssV2Vector; - } - - public void setCvssV2Vector(final String cvssV2Vector) { - this.cvssV2Vector = cvssV2Vector; - } - - public Double getCvssV2Score() { - return cvssV2Score; - } - - public void setCvssV2Score(final Double cvssV2Score) { - this.cvssV2Score = cvssV2Score; - } - - public String getCvssV3Vector() { - return cvssV3Vector; - } - - public void setCvssV3Vector(final String cvssV3Vector) { - this.cvssV3Vector = cvssV3Vector; - } - - public Double getCvssV3Score() { - return cvssV3Score; - } - - public void setCvssV3Score(final Double cvssV3Score) { - this.cvssV3Score = cvssV3Score; - } - - public String getOwaspVector() { - return owaspVector; - } - - public void setOwaspVector(final String owaspVector) { - this.owaspVector = owaspVector; - } - - public Double getOwaspScore() { - return owaspScore; - } - - public void setOwaspScore(final Double owaspScore) { - this.owaspScore = owaspScore; - } - - } - - public record CreatedAnalysis(long id, @ColumnName("VULNERABILITY_ID") long vulnId) { - } - - public record AnalysisComment(Long analysisId, String comment, String commenter) { - } - - private static final class AnalysisCommentFactory { - - private final Long analysisId; - private final VulnerabilityPolicy policy; - private final String commenter; - private final List comments; - - private AnalysisCommentFactory(final Long analysisId, VulnerabilityPolicy policy) { - this.analysisId = analysisId; - this.policy = policy; - this.commenter = createCommenter(policy); - this.comments = new ArrayList<>(); - } - - private void createComment(final String comment) { - comments.add(new AnalysisComment(this.analysisId, comment, this.commenter)); - } - - private List getComments() { - if (comments.isEmpty()) { - return comments; - } - - // If we have comments already, additionally include what the policy matched on. - // Include this as the very first comment, and do not modify the original list. 
-            final var commentsCopy = new ArrayList<AnalysisComment>();
-            if (policy == null) {
-                commentsCopy.add(new AnalysisComment(this.analysisId, "No longer covered by any policy", this.commenter));
-            } else {
-                commentsCopy.add(new AnalysisComment(this.analysisId, "Matched on condition(s):\n%s"
-                        .formatted(policy.getConditions().stream().map("- %s"::formatted).collect(Collectors.joining("\n"))), this.commenter));
-            }
-            commentsCopy.addAll(comments);
-            return commentsCopy;
-        }
-
-        private static String createCommenter(final VulnerabilityPolicy policy) {
-            if (policy == null) {
-                return "[Policy{None}]";
-            }
-
-            if (isNotBlank(policy.getAuthor())) {
-                return "[Policy{Name=%s, Author=%s}]".formatted(policy.getName(), policy.getAuthor());
-            }
-
-            return "[Policy{Name=%s}]".formatted(policy.getName());
-        }
-
-    }
-
-    public record Component(long id, UUID uuid, long projectId, UUID projectUuid) {
-    }
-
-    public record FindingAttribution(long vulnId, long componentId, long projectId, String analyzer, UUID uuid) {
-    }
-
-}
diff --git a/src/main/java/org/dependencytrack/health/HealthCheckInitializer.java b/src/main/java/org/dependencytrack/health/HealthCheckInitializer.java
index e24561aef..dac0b51b0 100644
--- a/src/main/java/org/dependencytrack/health/HealthCheckInitializer.java
+++ b/src/main/java/org/dependencytrack/health/HealthCheckInitializer.java
@@ -37,7 +37,6 @@ public class HealthCheckInitializer implements ServletContextListener {
     public void contextInitialized(final ServletContextEvent event) {
         LOGGER.info("Registering health checks");
         HealthCheckRegistry.getInstance().register("database", new DatabaseHealthCheck());
-        HealthCheckRegistry.getInstance().register("kafka-streams", new KafkaStreamsHealthCheck());
         HealthCheckRegistry.getInstance().register("kafka-processors", new ProcessorsHealthCheck());
 
         // TODO: Move this to its own initializer if it turns out to be useful
diff --git a/src/main/java/org/dependencytrack/health/KafkaStreamsHealthCheck.java b/src/main/java/org/dependencytrack/health/KafkaStreamsHealthCheck.java
deleted file mode 100644
index d5c5f8e6d..000000000
--- a/src/main/java/org/dependencytrack/health/KafkaStreamsHealthCheck.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * This file is part of Dependency-Track.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * Copyright (c) OWASP Foundation. All Rights Reserved.
- */
-package org.dependencytrack.health;
-
-import org.apache.kafka.streams.KafkaStreams;
-import org.dependencytrack.event.kafka.streams.KafkaStreamsInitializer;
-import org.eclipse.microprofile.health.HealthCheck;
-import org.eclipse.microprofile.health.HealthCheckResponse;
-import org.eclipse.microprofile.health.Liveness;
-
-/**
- * A {@link HealthCheck} for Kafka Streams.
- * <p>
- * This code has been copied and slightly modified from Quarkus' Kafka Streams extension. - * - * @see Quarkus Kafka Streams Health Check - */ -@Liveness -class KafkaStreamsHealthCheck implements HealthCheck { - - @Override - public HealthCheckResponse call() { - final var responseBuilder = HealthCheckResponse.named("kafka-streams"); - - final KafkaStreams kafkaStreams = KafkaStreamsInitializer.getKafkaStreams(); - if (kafkaStreams == null) { - return responseBuilder.down().build(); - } - - try { - final KafkaStreams.State state = kafkaStreams.state(); - responseBuilder.status(state.isRunningOrRebalancing()) - .withData("state", state.name()); - } catch (Exception e) { - responseBuilder.down() - .withData("exception_message", e.getMessage()); - } - - return responseBuilder.build(); - } - -} diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 834cb62d0..a1ddddcb3 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -395,9 +395,6 @@ kafka.bootstrap.servers=localhost:9092 # Optional kafka.auto.offset.reset=earliest -# Optional -kafka.num.stream.threads=3 - #Optional kafka.tls.enabled=false @@ -425,36 +422,6 @@ kafka.topic.prefix= # Required application.id=dtrack-apiserver -# Optional -# Defines the number of deserialization errors deemed to be acceptable in a given time frame. -# Until the threshold is reached, records failing deserialization will be logged and skipped. -# When the threshold is exceeded, further consumption is stopped. -# The interval must be specified in ISO8601 duration notation (https://en.wikipedia.org/wiki/ISO_8601#Durations). -# The default threshold is 5 errors per 30min. -kafka.streams.deserialization.exception.threshold.count=5 -kafka.streams.deserialization.exception.threshold.interval=PT30M - -# Optional -# Defines the number of production errors deemed to be acceptable in a given time frame. -# Until the threshold is reached, records failing to be produced will be logged and skipped. -# When the threshold is exceeded, further production is stopped. -# Only certain types of errors will be treated this way; Unexpected errors will cause a -# stop of production immediately. -# The interval must be specified in ISO8601 duration notation (https://en.wikipedia.org/wiki/ISO_8601#Durations). -# The default threshold is 5 errors per 30min. -kafka.streams.production.exception.threshold.count=5 -kafka.streams.production.exception.threshold.interval=PT30M - -# Optional -# Defines the number of times record processing will be retried in case of unhandled, yet transient errors. -# Until the threshold is reached, records fetched since the last successful offset commit will be attempted to be re-processed. -# When the threshold is exceeded, further processing is stopped. -# Only transient errors will be treated this way; Unexpected or non-transient errors will cause a stop of processing immediately. -# The interval must be specified in ISO8601 duration notation (https://en.wikipedia.org/wiki/ISO_8601#Durations). -# The default threshold is 50 errors per 30min. -kafka.streams.transient.processing.exception.threshold.count=50 -kafka.streams.transient.processing.exception.threshold.interval=PT30M - # Optional # Defines the order in which records are being processed. 
# Valid options are: diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml index 7a44b7606..9ffe68339 100644 --- a/src/main/webapp/WEB-INF/web.xml +++ b/src/main/webapp/WEB-INF/web.xml @@ -50,9 +50,6 @@ org.dependencytrack.event.kafka.processor.ProcessorInitializer - org.dependencytrack.event.PurlMigrator diff --git a/src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsDelayedBomProcessedNotificationTest.java b/src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsDelayedBomProcessedNotificationTest.java deleted file mode 100644 index 0329919e1..000000000 --- a/src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsDelayedBomProcessedNotificationTest.java +++ /dev/null @@ -1,316 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. - */ -package org.dependencytrack.event.kafka.streams; - -import net.mguenther.kafka.junit.KeyValue; -import net.mguenther.kafka.junit.ReadKeyValues; -import net.mguenther.kafka.junit.SendKeyValues; -import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.common.serialization.StringDeserializer; -import org.dependencytrack.event.kafka.KafkaTopics; -import org.dependencytrack.event.kafka.serialization.KafkaProtobufSerializer; -import org.dependencytrack.model.Project; -import org.dependencytrack.model.VulnerabilityScan; -import org.dependencytrack.model.WorkflowState; -import org.dependencytrack.model.WorkflowStatus; -import org.dependencytrack.model.WorkflowStep; -import org.dependencytrack.proto.notification.v1.BomConsumedOrProcessedSubject; -import org.dependencytrack.proto.notification.v1.Notification; -import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisCompleteSubject; -import org.dependencytrack.proto.vulnanalysis.v1.ScanKey; -import org.dependencytrack.proto.vulnanalysis.v1.ScanResult; -import org.dependencytrack.proto.vulnanalysis.v1.ScannerResult; -import org.junit.Test; - -import java.time.Duration; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; -import java.util.concurrent.TimeUnit; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.awaitility.Awaitility.await; -import static org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisStatus.PROJECT_VULN_ANALYSIS_STATUS_COMPLETED; -import static org.dependencytrack.proto.vulnanalysis.v1.ScanStatus.SCAN_STATUS_SUCCESSFUL; -import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_INTERNAL; - -public class KafkaStreamsDelayedBomProcessedNotificationTest extends KafkaStreamsTest { - - public KafkaStreamsDelayedBomProcessedNotificationTest() { - super(new KafkaStreamsTopologyFactory(true)::createTopology); - } - - @Test - public void shouldSendBomProcessedNotification() throws Exception { - final var project = new 
Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - project.setDescription("Some Description"); - project.setPurl("pkg:maven/com.acme/acme-app"); - qm.persist(project); - qm.bind(project, List.of( - qm.createTag("tag-a"), - qm.createTag("tag-b") - )); - - final var scanToken = UUID.randomUUID().toString(); - - // Initialize a vulnerability scan for 5 components, and create a workflow for it accordingly. - final VulnerabilityScan scan = qm.createVulnerabilityScan(VulnerabilityScan.TargetType.PROJECT, project.getUuid(), scanToken, 5); - qm.createWorkflowSteps(UUID.fromString(scanToken)); - - // Transition the BOM_PROCESSING step of the workflow to COMPLETED. A delayed BOM_PROCESSED notification - // will only be sent, when there's a successful BOM_PROCESSING step in the workflow. - final WorkflowState state = qm.getWorkflowStateByTokenAndStep(UUID.fromString(scanToken), WorkflowStep.BOM_PROCESSING); - state.setStatus(WorkflowStatus.COMPLETED); - qm.updateWorkflowState(state); - - // Emulate arrival of 5 vulnerability scan results, one for each component in the project. - final var componentUuids = new ArrayList(); - for (int i = 0; i < 5; i++) { - componentUuids.add(UUID.randomUUID()); - } - for (final UUID uuid : componentUuids) { - final ScanKey scanKey = ScanKey.newBuilder() - .setScanToken(scanToken) - .setComponentUuid(uuid.toString()) - .build(); - - kafka.send(SendKeyValues.to(KafkaTopics.VULN_ANALYSIS_RESULT.name(), List.of( - new KeyValue<>( - scanKey, - ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_SUCCESSFUL)) - .build())) - ) - .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class) - .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class)); - } - - // Wait for vulnerability scan to transition to COMPLETED status. 
- await("Result processing") - .atMost(Duration.ofSeconds(15)) - .pollInterval(Duration.ofMillis(250)) - .untilAsserted(() -> { - qm.getPersistenceManager().refresh(scan); - assertThat(scan).isNotNull(); - assertThat(scan.getStatus()).isEqualTo(VulnerabilityScan.Status.COMPLETED); - }); - - await("BOM processed notification") - .atMost(Duration.ofSeconds(15)) - .pollInterval(Duration.ofMillis(250)) - .untilAsserted(() -> { - assertThat(kafka.readValues(ReadKeyValues - .from(KafkaTopics.NOTIFICATION_BOM.name(), String.class, Notification.class) - .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) - .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, NotificationDeserializer.class) - .withMaxTotalPollTime(5, TimeUnit.SECONDS)) - ).satisfiesExactly( - notification -> { - final BomConsumedOrProcessedSubject subject = - notification.getSubject().unpack(BomConsumedOrProcessedSubject.class); - assertThat(subject.getBom().getContent()).isEqualTo("(Omitted)"); - assertThat(subject.getBom().getFormat()).isEqualTo("CycloneDX"); - assertThat(subject.getBom().getSpecVersion()).isEqualTo("Unknown"); - assertThat(subject.getProject().getUuid()).isEqualTo(project.getUuid().toString()); - assertThat(subject.getProject().getName()).isEqualTo(project.getName()); - assertThat(subject.getProject().getVersion()).isEqualTo(project.getVersion()); - assertThat(subject.getProject().getDescription()).isEqualTo(project.getDescription()); - assertThat(subject.getProject().getPurl()).isEqualTo(project.getPurl().toString()); - assertThat(subject.getProject().getTagsList()).containsExactlyInAnyOrder("tag-a", "tag-b"); - } - ); - }); - - // ... we still want to get a PROJECT_VULN_ANALYSIS_COMPLETE notification though. - // In this case, no vulnerabilities were found, so no findings are expected. - await("Analysis complete notification") - .atMost(Duration.ofSeconds(15)) - .pollInterval(Duration.ofMillis(250)) - .untilAsserted(() -> { - assertThat(kafka.readValues(ReadKeyValues - .from(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.name(), String.class, Notification.class) - .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) - .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, NotificationDeserializer.class) - .withMaxTotalPollTime(5, TimeUnit.SECONDS)) - ).satisfiesExactly( - notification -> { - final ProjectVulnAnalysisCompleteSubject subject = - notification.getSubject().unpack(ProjectVulnAnalysisCompleteSubject.class); - assertThat(subject.getStatus()).isEqualTo(PROJECT_VULN_ANALYSIS_STATUS_COMPLETED); - assertThat(subject.getProject().getUuid()).isEqualTo(project.getUuid().toString()); - assertThat(subject.getFindingsList()).isEmpty(); - } - ); - }); - } - - @Test - public void shouldNotSendBomProcessedNotificationWhenWorkflowHasNoCompletedBomProcessingStep() throws Exception { - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - qm.persist(project); - - final var scanToken = UUID.randomUUID().toString(); - - // Initialize a vulnerability scan for 5 components, and create a workflow for a manual re-analysis. - // This workflow does not include a BOM_PROCESSING step. Without it, no BOM_PROCESSED notification should be sent. - final VulnerabilityScan scan = qm.createVulnerabilityScan(VulnerabilityScan.TargetType.PROJECT, project.getUuid(), scanToken, 5); - qm.createReanalyzeSteps(UUID.fromString(scanToken)); - - // Emulate arrival of 5 vulnerability scan results, one for each component in the project. 
- final var componentUuids = new ArrayList(); - for (int i = 0; i < 5; i++) { - componentUuids.add(UUID.randomUUID()); - } - for (final UUID uuid : componentUuids) { - final ScanKey scanKey = ScanKey.newBuilder() - .setScanToken(scanToken) - .setComponentUuid(uuid.toString()) - .build(); - - kafka.send(SendKeyValues.to(KafkaTopics.VULN_ANALYSIS_RESULT.name(), List.of( - new KeyValue<>( - scanKey, - ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_SUCCESSFUL)) - .build())) - ) - .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class) - .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class)); - } - - // Wait for vulnerability scan to transition to COMPLETED status. - await("Result processing") - .atMost(Duration.ofSeconds(15)) - .pollInterval(Duration.ofMillis(250)) - .untilAsserted(() -> { - qm.getPersistenceManager().refresh(scan); - assertThat(scan).isNotNull(); - assertThat(scan.getStatus()).isEqualTo(VulnerabilityScan.Status.COMPLETED); - }); - - // We still want to get a PROJECT_VULN_ANALYSIS_COMPLETE notification though. - // In this case, no vulnerabilities were found, so no findings are expected. - await("Analysis complete notification") - .atMost(Duration.ofSeconds(15)) - .pollInterval(Duration.ofMillis(250)) - .untilAsserted(() -> { - assertThat(kafka.readValues(ReadKeyValues - .from(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.name(), String.class, Notification.class) - .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) - .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, NotificationDeserializer.class) - .withMaxTotalPollTime(5, TimeUnit.SECONDS)) - ).satisfiesExactly( - notification -> { - final ProjectVulnAnalysisCompleteSubject subject = - notification.getSubject().unpack(ProjectVulnAnalysisCompleteSubject.class); - assertThat(subject.getStatus()).isEqualTo(PROJECT_VULN_ANALYSIS_STATUS_COMPLETED); - assertThat(subject.getProject().getUuid()).isEqualTo(project.getUuid().toString()); - assertThat(subject.getFindingsList()).isEmpty(); - } - ); - }); - - // No BOM_PROCESSED notification should've been sent. - assertThat(kafka.readValues(ReadKeyValues - .from(KafkaTopics.NOTIFICATION_BOM.name(), String.class, Notification.class) - .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) - .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, NotificationDeserializer.class)) - ).isEmpty(); - } - - @Test - public void shouldNotSendBomProcessedNotificationWhenProjectDoesNotExistAnymore() throws Exception { - // Instead of creating a project, just generate a random project UUID. - // Internally, vulnerability analysis should still complete, but no notification should be sent. - final var projectUuid = UUID.randomUUID(); - - final var scanToken = UUID.randomUUID().toString(); - - // Initialize a vulnerability scan for 5 components, and create a workflow for it accordingly. - final VulnerabilityScan scan = qm.createVulnerabilityScan(VulnerabilityScan.TargetType.PROJECT, projectUuid, scanToken, 5); - qm.createWorkflowSteps(UUID.fromString(scanToken)); - - // Transition the BOM_PROCESSING step of the workflow to COMPLETED. A delayed BOM_PROCESSED notification - // will only be sent, when there's a successful BOM_PROCESSING step in the workflow. 
- final WorkflowState state = qm.getWorkflowStateByTokenAndStep(UUID.fromString(scanToken), WorkflowStep.BOM_PROCESSING); - state.setStatus(WorkflowStatus.COMPLETED); - qm.updateWorkflowState(state); - - // Emulate arrival of 5 vulnerability scan results, one for each component in the project. - final var componentUuids = new ArrayList(); - for (int i = 0; i < 5; i++) { - componentUuids.add(UUID.randomUUID()); - } - for (final UUID uuid : componentUuids) { - final ScanKey scanKey = ScanKey.newBuilder() - .setScanToken(scanToken) - .setComponentUuid(uuid.toString()) - .build(); - - kafka.send(SendKeyValues.to(KafkaTopics.VULN_ANALYSIS_RESULT.name(), List.of( - new KeyValue<>( - scanKey, - ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_SUCCESSFUL)) - .build())) - ) - .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class) - .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class)); - } - - // Wait for vulnerability scan to transition to COMPLETED status. - await("Result processing") - .atMost(Duration.ofSeconds(15)) - .pollInterval(Duration.ofMillis(250)) - .untilAsserted(() -> { - qm.getPersistenceManager().refresh(scan); - assertThat(scan).isNotNull(); - assertThat(scan.getStatus()).isEqualTo(VulnerabilityScan.Status.COMPLETED); - }); - - // No PROJECT_VULN_ANALYSIS_COMPLETE notification should've been sent. - assertThat(kafka.readValues(ReadKeyValues - .from(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.name(), String.class, Notification.class) - .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) - .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, NotificationDeserializer.class)) - ).isEmpty(); - - // No BOM_PROCESSED notification should've been sent. - assertThat(kafka.readValues(ReadKeyValues - .from(KafkaTopics.NOTIFICATION_BOM.name(), String.class, Notification.class) - .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) - .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, NotificationDeserializer.class)) - ).isEmpty(); - } - -} diff --git a/src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTest.java b/src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTest.java deleted file mode 100644 index 9cfdbd66b..000000000 --- a/src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTest.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. 
- */ -package org.dependencytrack.event.kafka.streams; - -import net.mguenther.kafka.junit.ExternalKafkaCluster; -import net.mguenther.kafka.junit.TopicConfig; -import org.apache.kafka.streams.KafkaStreams; -import org.apache.kafka.streams.StreamsConfig; -import org.apache.kafka.streams.Topology; -import org.dependencytrack.PersistenceCapableTest; -import org.dependencytrack.event.kafka.KafkaTopics; -import org.dependencytrack.event.kafka.serialization.KafkaProtobufDeserializer; -import org.dependencytrack.proto.notification.v1.Notification; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.testcontainers.redpanda.RedpandaContainer; -import org.testcontainers.utility.DockerImageName; - -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Duration; -import java.util.function.Supplier; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.awaitility.Awaitility.await; - -abstract class KafkaStreamsTest extends PersistenceCapableTest { - - @Rule - public RedpandaContainer container = new RedpandaContainer(DockerImageName - .parse("docker.redpanda.com/vectorized/redpanda:v23.3.11")); - - KafkaStreams kafkaStreams; - ExternalKafkaCluster kafka; - private final Supplier topologySupplier; - private Path kafkaStreamsStateDirectory; - - protected KafkaStreamsTest() { - this(new KafkaStreamsTopologyFactory()::createTopology); - } - - protected KafkaStreamsTest(final Supplier topologySupplier) { - this.topologySupplier = topologySupplier; - } - - @Before - public void before() throws Exception { - super.before(); - - kafka = ExternalKafkaCluster.at(container.getBootstrapServers()); - - kafka.createTopic(TopicConfig - .withName(KafkaTopics.VULN_ANALYSIS_COMMAND.name()) - .withNumberOfPartitions(3) - .withNumberOfReplicas(1)); - kafka.createTopic(TopicConfig - .withName(KafkaTopics.VULN_ANALYSIS_RESULT.name()) - .withNumberOfPartitions(3) - .withNumberOfReplicas(1)); - kafka.createTopic(TopicConfig - .withName(KafkaTopics.REPO_META_ANALYSIS_RESULT.name()) - .withNumberOfPartitions(3) - .withNumberOfReplicas(1)); - kafka.createTopic(TopicConfig - .withName(KafkaTopics.NEW_VULNERABILITY.name()) - .withNumberOfPartitions(3) - .withNumberOfReplicas(1)); - - kafkaStreamsStateDirectory = Files.createTempDirectory(getClass().getSimpleName()); - - final var streamsConfig = KafkaStreamsInitializer.getDefaultProperties(); - streamsConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, container.getBootstrapServers()); - streamsConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, getClass().getSimpleName()); - streamsConfig.put(StreamsConfig.STATE_DIR_CONFIG, kafkaStreamsStateDirectory.toString()); - streamsConfig.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, "3"); - - kafkaStreams = new KafkaStreams(topologySupplier.get(), streamsConfig); - kafkaStreams.start(); - - await("Kafka Streams Readiness") - .atMost(Duration.ofSeconds(15)) - .failFast(() -> assertThat(kafkaStreams.state()).isNotIn( - KafkaStreams.State.ERROR, - KafkaStreams.State.PENDING_ERROR, - KafkaStreams.State.PENDING_SHUTDOWN - )) - .untilAsserted(() -> assertThat(kafkaStreams.state()).isEqualTo(KafkaStreams.State.RUNNING)); - } - - @After - public void after() { - if (kafkaStreams != null) { - kafkaStreams.close(); - } - if (kafkaStreamsStateDirectory != null) { - kafkaStreamsStateDirectory.toFile().delete(); - } - - super.after(); - } - - public static class NotificationDeserializer extends KafkaProtobufDeserializer { - - public NotificationDeserializer() { - 
super(Notification.parser()); - } - - } - -} diff --git a/src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTopologyTest.java b/src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTopologyTest.java deleted file mode 100644 index 9cf4ac726..000000000 --- a/src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTopologyTest.java +++ /dev/null @@ -1,610 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. - */ -package org.dependencytrack.event.kafka.streams; - -import alpine.event.framework.Event; -import alpine.event.framework.EventService; -import alpine.event.framework.Subscriber; -import net.mguenther.kafka.junit.KeyValue; -import net.mguenther.kafka.junit.ReadKeyValues; -import net.mguenther.kafka.junit.SendKeyValues; -import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.common.serialization.StringDeserializer; -import org.apache.kafka.streams.KafkaStreams; -import org.apache.kafka.streams.TopologyDescription; -import org.assertj.core.api.SoftAssertions; -import org.cyclonedx.proto.v1_4.Bom; -import org.cyclonedx.proto.v1_4.Source; -import org.cyclonedx.proto.v1_4.VulnerabilityRating; -import org.dependencytrack.event.PortfolioVulnerabilityAnalysisEvent; -import org.dependencytrack.event.ProjectMetricsUpdateEvent; -import org.dependencytrack.event.ProjectPolicyEvaluationEvent; -import org.dependencytrack.event.kafka.KafkaTopics; -import org.dependencytrack.event.kafka.serialization.KafkaProtobufSerializer; -import org.dependencytrack.model.Policy; -import org.dependencytrack.model.PolicyCondition; -import org.dependencytrack.model.Project; -import org.dependencytrack.model.VulnerabilityScan; -import org.dependencytrack.model.VulnerabilityScan.TargetType; -import org.dependencytrack.model.WorkflowStatus; -import org.dependencytrack.model.WorkflowStep; -import org.dependencytrack.proto.notification.v1.Notification; -import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisCompleteSubject; -import org.dependencytrack.proto.vulnanalysis.v1.ScanKey; -import org.dependencytrack.proto.vulnanalysis.v1.ScanResult; -import org.dependencytrack.proto.vulnanalysis.v1.ScannerResult; -import org.dependencytrack.tasks.PolicyEvaluationTask; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Ignore; -import org.junit.Test; - -import java.time.Duration; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; -import java.util.concurrent.ConcurrentLinkedQueue; -import java.util.concurrent.TimeUnit; - -import static java.util.stream.Collectors.joining; -import static org.assertj.core.api.Assertions.assertThat; -import static org.awaitility.Awaitility.await; -import static org.cyclonedx.proto.v1_4.ScoreMethod.SCORE_METHOD_CVSSV3; -import 
static org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisStatus.PROJECT_VULN_ANALYSIS_STATUS_COMPLETED; -import static org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisStatus.PROJECT_VULN_ANALYSIS_STATUS_FAILED; -import static org.dependencytrack.proto.vulnanalysis.v1.ScanStatus.SCAN_STATUS_FAILED; -import static org.dependencytrack.proto.vulnanalysis.v1.ScanStatus.SCAN_STATUS_SUCCESSFUL; -import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_INTERNAL; -import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_OSSINDEX; -import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_SNYK; - -public class KafkaStreamsTopologyTest extends KafkaStreamsTest { - - public static class EventSubscriber implements Subscriber { - - @Override - public void inform(final Event event) { - EVENTS.add(event); - } - - } - - private static final ConcurrentLinkedQueue EVENTS = new ConcurrentLinkedQueue<>(); - - @BeforeClass - public static void setUpClass() { - EventService.getInstance().subscribe(ProjectPolicyEvaluationEvent.class, PolicyEvaluationTask.class); - EventService.getInstance().subscribe(ProjectMetricsUpdateEvent.class, EventSubscriber.class); - } - - @After - public void after() { - super.after(); - EVENTS.clear(); - } - - @AfterClass - public static void tearDownClass() { - EventService.getInstance().unsubscribe(PolicyEvaluationTask.class); - EventService.getInstance().unsubscribe(EventSubscriber.class); - } - - @Test - @Ignore - // Un-ignore and run this test manually to get the topology description. - // The description can be visualized using https://zz85.github.io/kafka-streams-viz/ - public void topologyDescriptionTest() { - System.out.println(new KafkaStreamsTopologyFactory().createTopology().describe().toString()); - } - - @Test - public void processorNodeNamingTest() { - final TopologyDescription topologyDescription = new KafkaStreamsTopologyFactory().createTopology().describe(); - - final var softAsserts = new SoftAssertions(); - for (final TopologyDescription.Subtopology subtopology : topologyDescription.subtopologies()) { - for (final TopologyDescription.Node node : subtopology.nodes()) { - softAsserts.assertThat(node.name()) - .as("Processor node has an invalid name (subTopology %d; parents: %s; children: %s)", subtopology.id(), - node.predecessors().stream().map(TopologyDescription.Node::name).collect(joining(", ")), - node.successors().stream().map(TopologyDescription.Node::name).collect(joining(", ")) - ) - .matches("^[a-z-_.]+$"); - } - } - - softAsserts.assertAll(); - } - - @Test - public void vulnScanResultProcessingTest() throws Exception { - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - qm.persist(project); - - final var componentA = new org.dependencytrack.model.Component(); - componentA.setName("acme-lib-a"); - componentA.setVersion("1.1.0"); - componentA.setProject(project); - qm.persist(componentA); - - final var componentB = new org.dependencytrack.model.Component(); - componentB.setName("acme-lib-b"); - componentB.setVersion("1.2.0"); - componentB.setProject(project); - qm.persist(componentB); - - final var scanToken = UUID.randomUUID(); - final var scanKeyComponentA = ScanKey.newBuilder() - .setScanToken(scanToken.toString()) - .setComponentUuid(componentA.getUuid().toString()) - .build(); - final var scanKeyComponentB = ScanKey.newBuilder() - .setScanToken(scanToken.toString()) - .setComponentUuid(componentB.getUuid().toString()) - .build(); - final 
var vulnComponentA = org.cyclonedx.proto.v1_4.Vulnerability.newBuilder() - .setId("SNYK-001") - .setSource(Source.newBuilder().setName("SNYK").build()) - .addRatings(VulnerabilityRating.newBuilder() - .setSource(Source.newBuilder().setName("SNYK").build()) - .setMethod(SCORE_METHOD_CVSSV3) - .setScore(10.0) - .setVector("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H")) - .build(); - final var vulnComponentB = org.cyclonedx.proto.v1_4.Vulnerability.newBuilder() - .setId("SONATYPE-001") - .setSource(Source.newBuilder().setName("OSSINDEX").build()) - .build(); - - qm.createVulnerabilityScan(TargetType.PROJECT, project.getUuid(), scanToken.toString(), 2); - qm.createWorkflowSteps(scanToken); - kafka.send(SendKeyValues.to(KafkaTopics.VULN_ANALYSIS_RESULT.name(), List.of( - new KeyValue<>(scanKeyComponentA, - ScanResult.newBuilder() - .setKey(scanKeyComponentA) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_SNYK) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().addVulnerabilities(vulnComponentA)).build()) - .build()), - new KeyValue<>(scanKeyComponentB, - ScanResult.newBuilder() - .setKey(scanKeyComponentB) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_OSSINDEX) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().addVulnerabilities(vulnComponentB)).build()) - .build()))) - .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class) - .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class)); - - await("Result processing") - .atMost(Duration.ofSeconds(15)) - .pollInterval(Duration.ofMillis(250)) - .untilAsserted(() -> { - assertThat(qm.getAllVulnerabilities(componentA)).hasSize(1); - assertThat(qm.getAllVulnerabilities(componentB)).hasSize(1); - }); - - await("Workflow completion") - .atMost(Duration.ofSeconds(30)) - .pollInterval(Duration.ofMillis(250)) - .untilAsserted(() -> { - var workflowStatus = qm.getWorkflowStateByTokenAndStep(scanToken, WorkflowStep.VULN_ANALYSIS); - qm.getPersistenceManager().refresh(workflowStatus); // Ensure we're not getting stale values from L1 cache - assertThat(workflowStatus.getStatus()).isEqualTo(WorkflowStatus.COMPLETED); - }); - - await("Analysis complete notification") - .atMost(Duration.ofSeconds(15)) - .pollInterval(Duration.ofMillis(250)) - .untilAsserted(() -> { - assertThat(kafka.readValues(ReadKeyValues - .from(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.name(), String.class, Notification.class) - .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) - .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, NotificationDeserializer.class) - .withMaxTotalPollTime(5, TimeUnit.SECONDS)) - ).satisfiesExactly( - notification -> { - final ProjectVulnAnalysisCompleteSubject subject = - notification.getSubject().unpack(ProjectVulnAnalysisCompleteSubject.class); - assertThat(subject.getStatus()).isEqualTo(PROJECT_VULN_ANALYSIS_STATUS_COMPLETED); - assertThat(subject.getProject().getUuid()).isEqualTo(project.getUuid().toString()); - assertThat(subject.getFindingsList()).satisfiesExactlyInAnyOrder( - finding -> { - assertThat(finding.getComponent().getUuid()).isEqualTo(componentA.getUuid().toString()); - assertThat(finding.getVulnerabilitiesCount()).isEqualTo(1); - assertThat(finding.getVulnerabilities(0).getVulnId()).isEqualTo("SNYK-001"); - assertThat(finding.getVulnerabilities(0).getSource()).isEqualTo("SNYK"); - assertThat(finding.getVulnerabilities(0).getSeverity()).isEqualTo("CRITICAL"); - 
assertThat(finding.getVulnerabilities(0).getCvssV3()).isEqualTo(10.0); - }, - finding -> { - assertThat(finding.getComponent().getUuid()).isEqualTo(componentB.getUuid().toString()); - assertThat(finding.getVulnerabilitiesCount()).isEqualTo(1); - assertThat(finding.getVulnerabilities(0).getVulnId()).isEqualTo("SONATYPE-001"); - assertThat(finding.getVulnerabilities(0).getSource()).isEqualTo("OSSINDEX"); - } - ); - } - ); - }); - } - - @Test - public void vulnScanCompletionTest() throws Exception { - final var projectUuid = UUID.randomUUID(); - final var scanToken = UUID.randomUUID().toString(); - - final VulnerabilityScan scan = qm.createVulnerabilityScan(TargetType.PROJECT, projectUuid, scanToken, 500); - qm.createWorkflowSteps(UUID.fromString(scanToken)); - - final var componentUuids = new ArrayList(); - for (int i = 0; i < 500; i++) { - componentUuids.add(UUID.randomUUID()); - } - - for (final UUID uuid : componentUuids) { - final ScanKey scanKey = ScanKey.newBuilder() - .setScanToken(scanToken) - .setComponentUuid(uuid.toString()) - .build(); - - kafka.send(SendKeyValues.to(KafkaTopics.VULN_ANALYSIS_RESULT.name(), List.of( - new KeyValue<>( - scanKey, - ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_SUCCESSFUL)) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_OSSINDEX) - .setStatus(SCAN_STATUS_SUCCESSFUL)) - .build())) - ) - .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class) - .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class)); - } - - await("Result processing") - .atMost(Duration.ofSeconds(15)) - .pollInterval(Duration.ofMillis(250)) - .untilAsserted(() -> { - qm.getPersistenceManager().refresh(scan); - assertThat(scan).isNotNull(); - assertThat(scan.getReceivedResults()).isEqualTo(500); - }); - - assertThat(scan.getToken()).isEqualTo(scanToken); - assertThat(scan.getTargetType()).isEqualTo(TargetType.PROJECT); - assertThat(scan.getTargetIdentifier()).isEqualTo(projectUuid); - assertThat(scan.getExpectedResults()).isEqualTo(500); - assertThat(scan.getReceivedResults()).isEqualTo(500); - assertThat(scan.getStatus()).isEqualTo(VulnerabilityScan.Status.COMPLETED); - assertThat(scan.getUpdatedAt()).isAfter(scan.getStartedAt()); - - var workflowStatus = qm.getWorkflowStateByTokenAndStep(UUID.fromString(scanToken), WorkflowStep.VULN_ANALYSIS); - assertThat(workflowStatus.getStatus()).isEqualTo(WorkflowStatus.COMPLETED); - } - - @Test - public void vulnScanFailureTest() throws Exception { - final var project = new Project(); - project.setName("foo"); - qm.persist(project); - - final var projectUuid = project.getUuid(); - final var scanToken = UUID.randomUUID().toString(); - - final VulnerabilityScan scan = qm.createVulnerabilityScan(TargetType.PROJECT, projectUuid, scanToken, 100); - qm.createWorkflowSteps(UUID.fromString(scanToken)); - - final var componentUuids = new ArrayList(); - for (int i = 0; i < 100; i++) { - componentUuids.add(UUID.randomUUID()); - } - - for (int i = 0; i < 100; i++) { - var scanStatus = i < 6 ? 
SCAN_STATUS_FAILED : SCAN_STATUS_SUCCESSFUL; - final ScanKey scanKey = ScanKey.newBuilder() - .setScanToken(scanToken) - .setComponentUuid(componentUuids.get(i).toString()) - .build(); - kafka.send(SendKeyValues.to(KafkaTopics.VULN_ANALYSIS_RESULT.name(), List.of( - new KeyValue<>( - scanKey, - ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(scanStatus)) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_OSSINDEX) - .setStatus(scanStatus)) - .build())) - ) - .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class) - .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class)); - } - - await("Result processing") - .atMost(Duration.ofSeconds(15)) - .pollInterval(Duration.ofMillis(250)) - .untilAsserted(() -> { - qm.getPersistenceManager().refresh(scan); - assertThat(scan).isNotNull(); - assertThat(scan.getReceivedResults()).isEqualTo(100); - }); - - assertThat(scan.getToken()).isEqualTo(scanToken); - assertThat(scan.getTargetType()).isEqualTo(TargetType.PROJECT); - assertThat(scan.getTargetIdentifier()).isEqualTo(projectUuid); - assertThat(scan.getExpectedResults()).isEqualTo(100); - assertThat(scan.getReceivedResults()).isEqualTo(100); - assertThat(scan.getStatus()).isEqualTo(VulnerabilityScan.Status.FAILED); - assertThat(scan.getUpdatedAt()).isAfter(scan.getStartedAt()); - - await("Workflow completion") - .atMost(Duration.ofSeconds(30)) - .pollInterval(Duration.ofMillis(250)) - .untilAsserted(() -> { - var workflowStatus = qm.getWorkflowStateByTokenAndStep(UUID.fromString(scanToken), WorkflowStep.VULN_ANALYSIS); - assertThat(workflowStatus.getStatus()).isEqualTo(WorkflowStatus.FAILED); - assertThat(workflowStatus.getFailureReason()).isEqualTo("Failure threshold of 0.05% exceeded: 0.06% of scans failed"); - - workflowStatus = qm.getWorkflowStateByTokenAndStep(UUID.fromString(scanToken), WorkflowStep.POLICY_EVALUATION); - assertThat(workflowStatus.getStatus()).isEqualTo(WorkflowStatus.CANCELLED); - - workflowStatus = qm.getWorkflowStateByTokenAndStep(UUID.fromString(scanToken), WorkflowStep.METRICS_UPDATE); - assertThat(workflowStatus.getStatus()).isEqualTo(WorkflowStatus.CANCELLED); - }); - - await("Analysis complete notification") - .atMost(Duration.ofSeconds(15)) - .pollInterval(Duration.ofMillis(50)) - .untilAsserted(() -> { - assertThat(kafka.readValues(ReadKeyValues - .from(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.name(), String.class, Notification.class) - .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) - .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, NotificationDeserializer.class) - .withMaxTotalPollTime(5, TimeUnit.SECONDS)) - ).satisfiesExactly( - notification -> { - final ProjectVulnAnalysisCompleteSubject subject = - notification.getSubject().unpack(ProjectVulnAnalysisCompleteSubject.class); - assertThat(subject.getStatus()).isEqualTo(PROJECT_VULN_ANALYSIS_STATUS_FAILED); - assertThat(subject.getProject().getUuid()).isEqualTo(projectUuid.toString()); - assertThat(subject.getFindingsCount()).isZero(); - } - ); - }); - - // Policy evaluation and metrics were cancelled, - // so no such events should've been emitted. 
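The FAILED scan status and the failureReason asserted above follow from a simple ratio check: 6 of 100 results failed, which exceeds the configured threshold of 0.05 (a fraction, i.e. 5%, even though the message renders both values with a "%" suffix). A sketch of that check, under assumed names:

    // Hypothetical helper reproducing the failure reason asserted above.
    static String checkFailureThreshold(final int expectedResults, final int failedResults) {
        final double threshold = 0.05;
        final double failureRatio = (double) failedResults / expectedResults; // 6 / 100 = 0.06
        if (failureRatio > threshold) {
            // Yields "Failure threshold of 0.05% exceeded: 0.06% of scans failed".
            return "Failure threshold of %s%% exceeded: %s%% of scans failed"
                    .formatted(threshold, failureRatio);
        }
        return null; // Below the threshold, the VULN_ANALYSIS step completes normally.
    }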
- assertThat(EVENTS).isEmpty(); - } - - @Test - public void projectPolicyEvaluationAfterCompletedVulnScanTest() throws Exception { - var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - project = qm.createProject(project, null, false); - - final var componentA = new org.dependencytrack.model.Component(); - componentA.setName("acme-lib-a"); - componentA.setVersion("1.1.0"); - componentA.setProject(project); - componentA.setPurl("pkg:maven/org.acme/acme-lib-a@1.1.0"); - qm.persist(componentA); - - final var componentB = new org.dependencytrack.model.Component(); - componentB.setName("acme-lib-b"); - componentB.setVersion("1.2.0"); - componentB.setProject(project); - qm.persist(componentB); - - final var scanToken = UUID.randomUUID().toString(); - - qm.createWorkflowSteps(UUID.fromString(scanToken)); - - final VulnerabilityScan scan = qm.createVulnerabilityScan(TargetType.PROJECT, project.getUuid(), scanToken, 2); - final var scanKeyA = ScanKey.newBuilder() - .setScanToken(scanToken) - .setComponentUuid(componentA.getUuid().toString()) - .build(); - final var scanKeyB = ScanKey.newBuilder() - .setScanToken(scanToken) - .setComponentUuid(componentB.getUuid().toString()) - .build(); - - final Policy policy = qm.createPolicy("Test Policy", Policy.Operator.ANY, Policy.ViolationState.FAIL); - qm.createPolicyCondition(policy, - PolicyCondition.Subject.PACKAGE_URL, - PolicyCondition.Operator.MATCHES, - "pkg:maven/org.acme/acme-lib-a@1.1.0" - ); - - kafka.send(SendKeyValues.to(KafkaTopics.VULN_ANALYSIS_RESULT.name(), List.of( - new KeyValue<>( - scanKeyA, - ScanResult.newBuilder() - .setKey(scanKeyA) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_OSSINDEX) - .setStatus(SCAN_STATUS_SUCCESSFUL)) - .build())) - ) - .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class) - .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class)); - - await("First scan result processing") - .atMost(Duration.ofSeconds(10)) - .untilAsserted(() -> { - qm.getPersistenceManager().refresh(scan); - assertThat(scan).isNotNull(); - assertThat(scan.getReceivedResults()).isEqualTo(1); - }); - - // Evaluation of componentA should raise a policy violation. But because the vulnerability - // scan was targeting a project, evaluation of individual components should not be performed. - assertThat(qm.getAllPolicyViolations(project)).isEmpty(); - - kafka.send(SendKeyValues.to(KafkaTopics.VULN_ANALYSIS_RESULT.name(), List.of( - new KeyValue<>( - scanKeyB, - ScanResult.newBuilder() - .setKey(scanKeyB) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_OSSINDEX) - .setStatus(SCAN_STATUS_SUCCESSFUL)) - .build())) - ) - .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class) - .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class)); - - await("Scan completion") - .atMost(Duration.ofSeconds(10)) - .untilAsserted(() -> { - qm.getPersistenceManager().refresh(scan); - assertThat(scan).isNotNull(); - assertThat(scan.getReceivedResults()).isEqualTo(2); - }); - // Vulnerability scan of the project completed. Policy evaluation of all components should - // have been performed, so we expect the violation for componentA to appear. 
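Both awaits above hinge on the same completion gate: individual results only increment the received counter, and project-wide policy evaluation (followed by the metrics update that consumes its violations) fires once the counter reaches the expected total. A condensed sketch, with hypothetical names:

    // Sketch only; getReceivedResults/getExpectedResults/getTargetIdentifier
    // are the VulnerabilityScan accessors exercised by these tests.
    void onScanResultProcessed(final VulnerabilityScan scan) {
        if (scan.getReceivedResults() < scan.getExpectedResults()) {
            // Scan still in progress: for project-targeted scans, no
            // component-level policy evaluation happens in the meantime.
            return;
        }
        // All expected results arrived: evaluate policies across the whole
        // project, then chain the project metrics update behind it.
        triggerProjectPolicyEvaluation(scan.getTargetIdentifier()); // hypothetical
    }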
- final var finalProject = project; - await("Policy evaluation") - .atMost(Duration.ofSeconds(30)) - .untilAsserted(() -> assertThat(qm.getAllPolicyViolations(finalProject)).hasSize(1)); - - // A project metrics update should have been executed AFTER policy evaluation. - // It thus should include the newly discovered policy violation. - await("Project metrics update") - .atMost(Duration.ofSeconds(5)) - .untilAsserted(() -> assertThat(EVENTS).hasSize(1)); - } - - @Test - public void projectVulnAnalysisCompleteNotificationFailureTest() throws Exception { - // Initiate a vulnerability scan, but do not create a corresponding project. - // Scan and workflow completion should work just fine, but assembling a notification - // for PROJECT_VULN_ANALYSIS_COMPLETE will fail. - final var scanToken = UUID.randomUUID(); - final var scanKey = ScanKey.newBuilder() - .setScanToken(scanToken.toString()) - .setComponentUuid(UUID.randomUUID().toString()) - .build(); - qm.createVulnerabilityScan(TargetType.PROJECT, UUID.randomUUID(), scanToken.toString(), 1); - qm.createWorkflowSteps(scanToken); - - kafka.send(SendKeyValues.to(KafkaTopics.VULN_ANALYSIS_RESULT.name(), List.of( - new KeyValue<>(scanKey, - ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_SNYK) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().build()) - .build()) - .build()))) - .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class) - .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class)); - - await("Workflow completion") - .atMost(Duration.ofSeconds(30)) - .pollInterval(Duration.ofMillis(250)) - .untilAsserted(() -> { - var workflowStatus = qm.getWorkflowStateByTokenAndStep(scanToken, WorkflowStep.VULN_ANALYSIS); - qm.getPersistenceManager().refresh(workflowStatus); // Ensure we're not getting stale values from L1 cache - assertThat(workflowStatus.getStatus()).isEqualTo(WorkflowStatus.COMPLETED); - }); - - // Verify that no notification was sent. - final List notifications = kafka.readValues(ReadKeyValues - .from(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.name(), String.class, Notification.class) - .with(ConsumerConfig.GROUP_ID_CONFIG, "foo") - .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class) - .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, NotificationDeserializer.class) - .withMaxTotalPollTime(5, TimeUnit.SECONDS)); - assertThat(notifications).isEmpty(); - - // Ensure that Kafka Streams did not terminate due to project not existing. - assertThat(kafkaStreams.state()).isEqualTo(KafkaStreams.State.RUNNING); - } - - @Test - public void portfolioVulnAnalysisNotTrackedTest() throws Exception { - var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - project = qm.createProject(project, null, false); - - final var component = new org.dependencytrack.model.Component(); - component.setName("acme-lib-a"); - component.setVersion("1.1.0"); - component.setProject(project); - component.setPurl("pkg:maven/org.acme/acme-lib-a@1.1.0"); - qm.persist(component); - - final UUID scanToken = PortfolioVulnerabilityAnalysisEvent.CHAIN_IDENTIFIER; - final var scanKey = ScanKey.newBuilder() - .setScanToken(scanToken.toString()) - .setComponentUuid(component.getUuid().toString()) - .build(); - - // Create a VulnerabilityScan targeting a project, using the scan token dedicated to - // portfolio analysis. 
This will never actually happen, but we do it here to be able to verify - // that portfolio analysis results are indeed filtered out. - final VulnerabilityScan scan = qm.createVulnerabilityScan(TargetType.PROJECT, project.getUuid(), scanToken.toString(), 1); - - kafka.send(SendKeyValues.to(KafkaTopics.VULN_ANALYSIS_RESULT.name(), List.of( - new KeyValue<>(scanKey, - ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_SNYK) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder() - .addVulnerabilities(org.cyclonedx.proto.v1_4.Vulnerability.newBuilder() - .setId("SNYK-001") - .setSource(Source.newBuilder().setName("SNYK").build())) - .build()) - .build()) - .build()))) - .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class) - .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class)); - - // Vulnerability results must still be processed... - await("Result processing") - .atMost(Duration.ofSeconds(15)) - .pollInterval(Duration.ofMillis(250)) - .untilAsserted(() -> assertThat(qm.getAllVulnerabilities(component)).hasSize(1)); - - // ... but scan completion must not be. - qm.getPersistenceManager().refresh(scan); - assertThat(scan.getReceivedResults()).isZero(); - assertThat(scan.getStatus()).isEqualTo(VulnerabilityScan.Status.IN_PROGRESS); - } - -} diff --git a/src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsDeserializationExceptionHandlerTest.java b/src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsDeserializationExceptionHandlerTest.java deleted file mode 100644 index 4607aca18..000000000 --- a/src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsDeserializationExceptionHandlerTest.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. 
- */ -package org.dependencytrack.event.kafka.streams.exception; - -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.common.errors.SerializationException; -import org.apache.kafka.streams.errors.DeserializationExceptionHandler.DeserializationHandlerResponse; -import org.apache.kafka.streams.processor.ProcessorContext; -import org.junit.Test; - -import java.time.Clock; -import java.time.Duration; -import java.time.Instant; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class KafkaStreamsDeserializationExceptionHandlerTest { - - @Test - public void testHandle() { - final var record = new ConsumerRecord<>("topic", 6, 3, "key".getBytes(), "value".getBytes()); - final var processorContext = mock(ProcessorContext.class); - final var handler = new KafkaStreamsDeserializationExceptionHandler(Clock.systemUTC(), Duration.ofMinutes(5), 10); - - for (int i = 0; i < 9; i++) { - assertThat(handler.handle(processorContext, record, new SerializationException())).isEqualTo(DeserializationHandlerResponse.CONTINUE); - } - - assertThat(handler.handle(processorContext, record, new SerializationException())).isEqualTo(DeserializationHandlerResponse.FAIL); - } - - @Test - public void testHandleWithThresholdReset() { - final var clockMock = mock(Clock.class); - when(clockMock.instant()) - .thenReturn(Instant.EPOCH) - .thenReturn(Instant.EPOCH.plusMillis(250)) - .thenReturn(Instant.EPOCH.plusSeconds(1).plusMillis(251)); - - final var record = new ConsumerRecord<>("topic", 6, 3, "key".getBytes(), "value".getBytes()); - final var processorContext = mock(ProcessorContext.class); - final var handler = new KafkaStreamsDeserializationExceptionHandler(clockMock, Duration.ofSeconds(1), 2); - - assertThat(handler.handle(processorContext, record, new SerializationException())).isEqualTo(DeserializationHandlerResponse.CONTINUE); - assertThat(handler.handle(processorContext, record, new SerializationException())).isEqualTo(DeserializationHandlerResponse.FAIL); - assertThat(handler.handle(processorContext, record, new SerializationException())).isEqualTo(DeserializationHandlerResponse.CONTINUE); - } - -} \ No newline at end of file diff --git a/src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsProductionExceptionHandlerTest.java b/src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsProductionExceptionHandlerTest.java deleted file mode 100644 index 9265bcba6..000000000 --- a/src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsProductionExceptionHandlerTest.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. 
- */ -package org.dependencytrack.event.kafka.streams.exception; - -import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.common.errors.RecordTooLargeException; -import org.apache.kafka.streams.errors.ProductionExceptionHandler.ProductionExceptionHandlerResponse; -import org.junit.Test; - -import java.time.Clock; -import java.time.Duration; -import java.time.Instant; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class KafkaStreamsProductionExceptionHandlerTest { - - @Test - public void testHandle() { - final var record = new ProducerRecord<>("topic", 6, "key".getBytes(), "value".getBytes()); - final var handler = new KafkaStreamsProductionExceptionHandler(Clock.systemUTC(), Duration.ofMinutes(5), 10); - - for (int i = 0; i < 9; i++) { - assertThat(handler.handle(record, new RecordTooLargeException())).isEqualTo(ProductionExceptionHandlerResponse.CONTINUE); - } - - assertThat(handler.handle(record, new RecordTooLargeException())).isEqualTo(ProductionExceptionHandlerResponse.FAIL); - } - - @Test - public void testHandleWithThresholdReset() { - final var clockMock = mock(Clock.class); - when(clockMock.instant()) - .thenReturn(Instant.EPOCH) - .thenReturn(Instant.EPOCH.plusMillis(250)) - .thenReturn(Instant.EPOCH.plusSeconds(1).plusMillis(251)); - - final var record = new ProducerRecord<>("topic", 6, "key".getBytes(), "value".getBytes()); - final var handler = new KafkaStreamsProductionExceptionHandler(clockMock, Duration.ofSeconds(1), 2); - - assertThat(handler.handle(record, new RecordTooLargeException())).isEqualTo(ProductionExceptionHandlerResponse.CONTINUE); - assertThat(handler.handle(record, new RecordTooLargeException())).isEqualTo(ProductionExceptionHandlerResponse.FAIL); - assertThat(handler.handle(record, new RecordTooLargeException())).isEqualTo(ProductionExceptionHandlerResponse.CONTINUE); - } - - @Test - public void testHandleWithUnexpectedException() { - final var record = new ProducerRecord<>("topic", 6, "key".getBytes(), "value".getBytes()); - final var handler = new KafkaStreamsProductionExceptionHandler(Clock.systemUTC(), Duration.ofMinutes(5), 10); - - assertThat(handler.handle(record, new IllegalStateException())).isEqualTo(ProductionExceptionHandlerResponse.FAIL); - } - -} \ No newline at end of file diff --git a/src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsUncaughtExceptionHandlerTest.java b/src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsUncaughtExceptionHandlerTest.java deleted file mode 100644 index d469d0537..000000000 --- a/src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsUncaughtExceptionHandlerTest.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. 
- */ -package org.dependencytrack.event.kafka.streams.exception; - -import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse; -import org.junit.Test; - -import java.time.Clock; -import java.time.Duration; -import java.time.Instant; -import java.util.concurrent.TimeoutException; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class KafkaStreamsUncaughtExceptionHandlerTest { - - @Test - public void testHandleWithTransientError() { - final var handler = new KafkaStreamsUncaughtExceptionHandler(); - assertThat(handler.handle(new TimeoutException())).isEqualTo(StreamThreadExceptionResponse.REPLACE_THREAD); - } - - @Test - public void testHandleWithNonTransientError() { - final var handler = new KafkaStreamsUncaughtExceptionHandler(); - assertThat(handler.handle(new IllegalStateException())).isEqualTo(StreamThreadExceptionResponse.SHUTDOWN_CLIENT); - } - - @Test - public void testHandleWithTransientErrorExceedingThreshold() { - final var handler = new KafkaStreamsUncaughtExceptionHandler(Clock.systemUTC(), Duration.ofMinutes(5), 10); - - for (int i = 0; i < 9; i++) { - assertThat(handler.handle(new TimeoutException())).isEqualTo(StreamThreadExceptionResponse.REPLACE_THREAD); - } - - assertThat(handler.handle(new TimeoutException())).isEqualTo(StreamThreadExceptionResponse.SHUTDOWN_CLIENT); - } - - @Test - public void testHandleWithTransientErrorThresholdReset() { - final var clockMock = mock(Clock.class); - when(clockMock.instant()) - .thenReturn(Instant.EPOCH) - .thenReturn(Instant.EPOCH.plusMillis(250)) - .thenReturn(Instant.EPOCH.plusSeconds(1).plusMillis(251)); - - final var handler = new KafkaStreamsUncaughtExceptionHandler(clockMock, Duration.ofSeconds(1), 2); - - assertThat(handler.handle(new TimeoutException())).isEqualTo(StreamThreadExceptionResponse.REPLACE_THREAD); - assertThat(handler.handle(new TimeoutException())).isEqualTo(StreamThreadExceptionResponse.SHUTDOWN_CLIENT); - assertThat(handler.handle(new TimeoutException())).isEqualTo(StreamThreadExceptionResponse.REPLACE_THREAD); - } - -} \ No newline at end of file diff --git a/src/test/java/org/dependencytrack/event/kafka/streams/processor/VulnerabilityScanResultProcessorTest.java b/src/test/java/org/dependencytrack/event/kafka/streams/processor/VulnerabilityScanResultProcessorTest.java deleted file mode 100644 index e49cb9ae3..000000000 --- a/src/test/java/org/dependencytrack/event/kafka/streams/processor/VulnerabilityScanResultProcessorTest.java +++ /dev/null @@ -1,1374 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. 
- */ -package org.dependencytrack.event.kafka.streams.processor; - -import com.google.protobuf.Timestamp; -import junitparams.JUnitParamsRunner; -import junitparams.Parameters; -import org.apache.kafka.common.header.Headers; -import org.apache.kafka.common.header.internals.RecordHeaders; -import org.apache.kafka.streams.StreamsBuilder; -import org.apache.kafka.streams.TestInputTopic; -import org.apache.kafka.streams.TestOutputTopic; -import org.apache.kafka.streams.TopologyTestDriver; -import org.apache.kafka.streams.kstream.Consumed; -import org.apache.kafka.streams.kstream.Produced; -import org.apache.kafka.streams.test.TestRecord; -import org.cyclonedx.proto.v1_4.Advisory; -import org.cyclonedx.proto.v1_4.Bom; -import org.cyclonedx.proto.v1_4.Property; -import org.cyclonedx.proto.v1_4.Source; -import org.cyclonedx.proto.v1_4.VulnerabilityRating; -import org.cyclonedx.proto.v1_4.VulnerabilityReference; -import org.dependencytrack.PersistenceCapableTest; -import org.dependencytrack.TestCacheManager; -import org.dependencytrack.event.kafka.KafkaEventHeaders; -import org.dependencytrack.event.kafka.KafkaTopics; -import org.dependencytrack.event.kafka.serialization.KafkaProtobufDeserializer; -import org.dependencytrack.event.kafka.serialization.KafkaProtobufSerde; -import org.dependencytrack.event.kafka.serialization.KafkaProtobufSerializer; -import org.dependencytrack.model.Analysis; -import org.dependencytrack.model.AnalysisComment; -import org.dependencytrack.model.AnalysisJustification; -import org.dependencytrack.model.AnalysisResponse; -import org.dependencytrack.model.AnalysisState; -import org.dependencytrack.model.AnalyzerIdentity; -import org.dependencytrack.model.Component; -import org.dependencytrack.model.ConfigPropertyConstants; -import org.dependencytrack.model.Finding; -import org.dependencytrack.model.FindingAttribution; -import org.dependencytrack.model.Project; -import org.dependencytrack.model.Severity; -import org.dependencytrack.model.Vulnerability; -import org.dependencytrack.model.VulnerabilityAlias; -import org.dependencytrack.model.VulnerabilityAnalysisLevel; -import org.dependencytrack.notification.NotificationConstants; -import org.dependencytrack.persistence.CweImporter; -import org.dependencytrack.persistence.jdbi.VulnerabilityPolicyDao; -import org.dependencytrack.policy.cel.CelPolicyScriptHost; -import org.dependencytrack.policy.cel.CelPolicyType; -import org.dependencytrack.policy.cel.CelVulnerabilityPolicyEvaluator; -import org.dependencytrack.policy.vulnerability.DatabaseVulnerabilityPolicyProvider; -import org.dependencytrack.policy.vulnerability.VulnerabilityPolicy; -import org.dependencytrack.policy.vulnerability.VulnerabilityPolicyAnalysis; -import org.dependencytrack.policy.vulnerability.VulnerabilityPolicyRating; -import org.dependencytrack.proto.notification.v1.NewVulnerabilitySubject; -import org.dependencytrack.proto.notification.v1.NewVulnerableDependencySubject; -import org.dependencytrack.proto.notification.v1.Notification; -import org.dependencytrack.proto.notification.v1.VulnerabilityAnalysisDecisionChangeSubject; -import org.dependencytrack.proto.vulnanalysis.v1.ScanKey; -import org.dependencytrack.proto.vulnanalysis.v1.ScanResult; -import org.dependencytrack.proto.vulnanalysis.v1.Scanner; -import org.dependencytrack.proto.vulnanalysis.v1.ScannerResult; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; - -import java.math.BigDecimal; -import java.sql.Date; -import java.time.Instant; 
-import java.time.ZoneOffset; -import java.time.ZonedDateTime; -import java.util.List; -import java.util.UUID; -import java.util.concurrent.TimeUnit; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.cyclonedx.proto.v1_4.ScoreMethod.SCORE_METHOD_CVSSV2; -import static org.cyclonedx.proto.v1_4.ScoreMethod.SCORE_METHOD_CVSSV3; -import static org.cyclonedx.proto.v1_4.ScoreMethod.SCORE_METHOD_OWASP; -import static org.dependencytrack.persistence.jdbi.JdbiFactory.jdbi; -import static org.dependencytrack.proto.notification.v1.Group.GROUP_ANALYZER; -import static org.dependencytrack.proto.notification.v1.Group.GROUP_NEW_VULNERABILITY; -import static org.dependencytrack.proto.notification.v1.Group.GROUP_NEW_VULNERABLE_DEPENDENCY; -import static org.dependencytrack.proto.notification.v1.Group.GROUP_PROJECT_AUDIT_CHANGE; -import static org.dependencytrack.proto.notification.v1.Level.LEVEL_ERROR; -import static org.dependencytrack.proto.notification.v1.Level.LEVEL_INFORMATIONAL; -import static org.dependencytrack.proto.notification.v1.Scope.SCOPE_PORTFOLIO; -import static org.dependencytrack.proto.notification.v1.Scope.SCOPE_SYSTEM; -import static org.dependencytrack.proto.vulnanalysis.v1.ScanStatus.SCAN_STATUS_FAILED; -import static org.dependencytrack.proto.vulnanalysis.v1.ScanStatus.SCAN_STATUS_PENDING; -import static org.dependencytrack.proto.vulnanalysis.v1.ScanStatus.SCAN_STATUS_SUCCESSFUL; -import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_INTERNAL; -import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_OSSINDEX; -import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_SNYK; -import static org.dependencytrack.util.KafkaTestUtil.deserializeValue; - -@RunWith(JUnitParamsRunner.class) -public class VulnerabilityScanResultProcessorTest extends PersistenceCapableTest { - - private TopologyTestDriver testDriver; - private TestInputTopic inputTopic; - private TestOutputTopic outputTopic; - - @Before - public void before() throws Exception { - super.before(); - - final var cacheManager = new TestCacheManager(5, TimeUnit.MINUTES, 100); - final var scriptHost = new CelPolicyScriptHost(cacheManager, CelPolicyType.VULNERABILITY); - final var policyProvider = new DatabaseVulnerabilityPolicyProvider(); - final var policyEvaluator = new CelVulnerabilityPolicyEvaluator(policyProvider, scriptHost, cacheManager); - - final var streamsBuilder = new StreamsBuilder(); - streamsBuilder - .stream("input-topic", Consumed - .with(new KafkaProtobufSerde<>(ScanKey.parser()), new KafkaProtobufSerde<>(ScanResult.parser()))) - .processValues(() -> new VulnerabilityScanResultProcessor(policyEvaluator)) - .to("output-topic", Produced - .with(new KafkaProtobufSerde<>(ScanKey.parser()), new KafkaProtobufSerde<>(ScanResult.parser()))); - - testDriver = new TopologyTestDriver(streamsBuilder.build()); - inputTopic = testDriver.createInputTopic("input-topic", - new KafkaProtobufSerializer<>(), new KafkaProtobufSerializer<>()); - outputTopic = testDriver.createOutputTopic("output-topic", - new KafkaProtobufDeserializer<>(ScanKey.parser()), new KafkaProtobufDeserializer<>(ScanResult.parser())); - - new CweImporter().processCweDefinitions(); // Required for CWE mapping - } - - @After - public void after() { - if (testDriver != null) { - testDriver.close(); - } - - super.after(); - } - - @Test - public void dropFailedScanResultTest() { - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - 
qm.persist(project); - - final var component = new Component(); - component.setName("acme-lib"); - component.setVersion("1.1.0"); - component.setProject(project); - qm.persist(component); - - final var scanToken = UUID.randomUUID().toString(); - final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(component.getUuid().toString()).build(); - final var scanResult = ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_FAILED) - .setFailureReason("just because")) - .build(); - - inputTopic.pipeInput(scanKey, scanResult); - - assertThat(outputTopic.readValuesToList()).containsOnly(scanResult); - - assertThat(kafkaMockProducer.history()).satisfiesExactly( - record -> { - assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_ANALYZER.name()); - final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_ANALYZER, record); - assertThat(notification.getScope()).isEqualTo(SCOPE_SYSTEM); - assertThat(notification.getLevel()).isEqualTo(LEVEL_ERROR); - assertThat(notification.getGroup()).isEqualTo(GROUP_ANALYZER); - assertThat(notification.getTitle()).isEqualTo(NotificationConstants.Title.ANALYZER_ERROR); - assertThat(notification.getContent()).isEqualTo( - "Scan of component %s with %s failed (scanKey: %s): just because", - component.getUuid(), SCANNER_INTERNAL, scanToken + "/" + component.getUuid()); - } - ); - } - - @Test - public void dropPendingScanResultTest() { - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - qm.persist(project); - - final var component = new Component(); - component.setName("acme-lib"); - component.setVersion("1.1.0"); - component.setProject(project); - qm.persist(component); - - final var scanToken = UUID.randomUUID().toString(); - final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(component.getUuid().toString()).build(); - final var scanResult = ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_PENDING)) - .build(); - - inputTopic.pipeInput(scanKey, scanResult); - - assertThat(outputTopic.readValuesToList()).containsOnly(scanResult); - - assertThat(kafkaMockProducer.history()).isEmpty(); - } - - @Test - public void processSuccessfulScanResultWhenComponentDoesNotExistTest() { - final var componentUuid = UUID.randomUUID(); - final var scanToken = UUID.randomUUID().toString(); - final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); - final var vuln = new Vulnerability(); - vuln.setVulnId("INT-001"); - vuln.setSource(Vulnerability.Source.INTERNAL); - final var scanResult = ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().addVulnerabilities(createVuln("INT-001", "INTERNAL")))) - .build(); - - inputTopic.pipeInput(scanKey, scanResult); - - assertThat(outputTopic.readValuesToList()).containsOnly(scanResult); - - assertThat(kafkaMockProducer.history()).isEmpty(); - } - - @Test - public void processSuccessfulScanResult() { - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - qm.persist(project); - - final var component = new Component(); - component.setName("acme-lib"); - component.setVersion("1.1.0"); - 
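A ScannerResult with SCAN_STATUS_FAILED is dropped from vulnerability processing, but first surfaces as the ANALYZER error notification asserted in dropFailedScanResultTest above. A sketch of that mapping, assuming a helper of this shape (the content format mirrors the assertion, and the scan key is rendered as token/componentUuid):

    // Hypothetical helper; builder setters assumed from the notification
    // proto fields read by the assertions (scope, level, group, title, content).
    static Notification buildAnalyzerErrorNotification(final ScanKey scanKey, final ScannerResult result) {
        return Notification.newBuilder()
                .setScope(SCOPE_SYSTEM)
                .setLevel(LEVEL_ERROR)
                .setGroup(GROUP_ANALYZER)
                .setTitle(NotificationConstants.Title.ANALYZER_ERROR)
                .setContent("Scan of component %s with %s failed (scanKey: %s): %s".formatted(
                        scanKey.getComponentUuid(),
                        result.getScanner(),
                        scanKey.getScanToken() + "/" + scanKey.getComponentUuid(),
                        result.getFailureReason()))
                .build();
    }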
component.setProject(project); - qm.persist(component); - - final var componentUuid = component.getUuid(); - final var scanToken = UUID.randomUUID().toString(); - final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); - final var vulnA = new Vulnerability(); - vulnA.setVulnId("INT-001"); - vulnA.setSource(Vulnerability.Source.INTERNAL); - qm.persist(vulnA); - final var vulnB = new Vulnerability(); - vulnB.setVulnId("SONATYPE-002"); - vulnB.setSource(Vulnerability.Source.OSSINDEX); - final var vulnC = new Vulnerability(); - vulnC.setVulnId("INT-002"); - vulnC.setSource(Vulnerability.Source.INTERNAL); - final var scanResult = ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().addAllVulnerabilities(List.of( - createVuln(vulnA.getVulnId(), "INTERNAL"), - createVuln(vulnB.getVulnId(), "OSSINDEX"), - createVuln(vulnC.getVulnId(), "INTERNAL") - )))) - .build(); - final Headers headers = new RecordHeaders(); - headers.add(KafkaEventHeaders.VULN_ANALYSIS_LEVEL, VulnerabilityAnalysisLevel.BOM_UPLOAD_ANALYSIS.name().getBytes()); - headers.add(KafkaEventHeaders.IS_NEW_COMPONENT, "true".getBytes()); - - inputTopic.pipeInput(new TestRecord<>(scanKey, scanResult, headers)); - - assertThat(outputTopic.readValuesToList()).containsOnly(scanResult); - - qm.getPersistenceManager().refresh(component); - assertThat(component.getVulnerabilities()).satisfiesExactlyInAnyOrder( - vuln -> { - assertThat(vuln.getVulnId()).isEqualTo("INT-001"); - assertThat(vuln.getSource()).isEqualTo(Vulnerability.Source.INTERNAL.name()); - }, - vuln -> { - assertThat(vuln.getVulnId()).isEqualTo("SONATYPE-002"); - assertThat(vuln.getSource()).isEqualTo(Vulnerability.Source.OSSINDEX.name()); - } - // INT-002 is discarded because it is internal but doesn't exist in the database. - ); - - final List findings = qm.getFindings(project, false); - assertThat(findings).satisfiesExactlyInAnyOrder( - finding -> { - assertThat(finding.getVulnerability().get("vulnId")).isEqualTo("INT-001"); - assertThat(finding.getAttribution().get("analyzerIdentity")).isEqualTo(AnalyzerIdentity.INTERNAL_ANALYZER.name()); - }, - finding -> { - assertThat(finding.getVulnerability().get("vulnId")).isEqualTo("SONATYPE-002"); - assertThat(finding.getAttribution().get("analyzerIdentity")).isEqualTo(AnalyzerIdentity.INTERNAL_ANALYZER.name()); - } - // INT-002 is discarded because it is internal but doesn't exist in the database. 
- ); - - assertThat(kafkaMockProducer.history()).satisfiesExactly( - record -> { - assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_NEW_VULNERABLE_DEPENDENCY.name()); - final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_NEW_VULNERABLE_DEPENDENCY, record); - assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO); - assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL); - assertThat(notification.getGroup()).isEqualTo(GROUP_NEW_VULNERABLE_DEPENDENCY); - assertThat(notification.getSubject().is(NewVulnerableDependencySubject.class)).isTrue(); - final var subject = notification.getSubject().unpack(NewVulnerableDependencySubject.class); - assertThat(subject.getComponent().getName()).isEqualTo("acme-lib"); - assertThat(subject.getComponent().getVersion()).isEqualTo("1.1.0"); - assertThat(subject.getProject().getName()).isEqualTo("acme-app"); - assertThat(subject.getProject().getVersion()).isEqualTo("1.0.0"); - assertThat(subject.getVulnerabilitiesCount()).isEqualTo(2); - }, - record -> { - assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY.name()); - final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY, record); - assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO); - assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL); - assertThat(notification.getGroup()).isEqualTo(GROUP_NEW_VULNERABILITY); - assertThat(notification.getSubject().is(NewVulnerabilitySubject.class)).isTrue(); - final var subject = notification.getSubject().unpack(NewVulnerabilitySubject.class); - assertThat(subject.getVulnerabilityAnalysisLevel()).isEqualTo("BOM_UPLOAD_ANALYSIS"); - }, - record -> { - assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY.name()); - final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY, record); - assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO); - assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL); - assertThat(notification.getGroup()).isEqualTo(GROUP_NEW_VULNERABILITY); - assertThat(notification.getSubject().is(NewVulnerabilitySubject.class)).isTrue(); - final var subject = notification.getSubject().unpack(NewVulnerabilitySubject.class); - assertThat(subject.getVulnerabilityAnalysisLevel()).isEqualTo("BOM_UPLOAD_ANALYSIS"); - } - // INT-002 is discarded because it is internal but doesn't exist in the database. 
- ); - } - - @Test - public void processSuccessfulScanResultWithExistingFindingTest() { - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - qm.persist(project); - - final var component = new Component(); - component.setName("acme-lib"); - component.setVersion("1.1.0"); - component.setProject(project); - qm.persist(component); - - final var vulnerability = new Vulnerability(); - vulnerability.setVulnId("CVE-001"); - vulnerability.setSource(Vulnerability.Source.NVD); - qm.persist(vulnerability); - qm.addVulnerability(vulnerability, component, AnalyzerIdentity.OSSINDEX_ANALYZER); - - final var scanToken = UUID.randomUUID().toString(); - final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(component.getUuid().toString()).build(); - final var scanResult = ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().addVulnerabilities(createVuln("CVE-001", "NVD")))) - .build(); - - inputTopic.pipeInput(scanKey, scanResult); - - qm.getPersistenceManager().refreshAll(component, vulnerability); - assertThat(component.getVulnerabilities()).satisfiesExactly( - vuln -> { - assertThat(vuln.getVulnId()).isEqualTo("CVE-001"); - assertThat(vuln.getSource()).isEqualTo(Vulnerability.Source.NVD.name()); - } - ); - - // Attribution should still refer to the first scanner that identified the vulnerability. - final FindingAttribution attribution = qm.getFindingAttribution(vulnerability, component); - assertThat(attribution).isNotNull(); - assertThat(attribution.getAnalyzerIdentity()).isEqualTo(AnalyzerIdentity.OSSINDEX_ANALYZER); - - // Because the vulnerability was reported already, no notification must be sent. - assertThat(kafkaMockProducer.history()).isEmpty(); - } - - private Object[] canUpdateExistingVulnerabilityTestParams() { - return new Object[]{ - // Results from the internal scanner must never override any existing data. 
- new Object[]{"INT-001", "INTERNAL", SCANNER_INTERNAL, null, null, false}, - new Object[]{"CVE-001", "NVD", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_NVD_ENABLED, "true", false}, - new Object[]{"CVE-001", "NVD", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_NVD_ENABLED, "false", false}, - new Object[]{"CVE-001", "NVD", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_NVD_ENABLED, null, false}, - new Object[]{"GHSA-001", "GITHUB", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_GITHUB_ADVISORIES_ENABLED, "true", false}, - new Object[]{"GHSA-001", "GITHUB", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_GITHUB_ADVISORIES_ENABLED, "false", false}, - new Object[]{"GHSA-001", "GITHUB", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_GITHUB_ADVISORIES_ENABLED, null, false}, - new Object[]{"OSV-001", "OSV", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_GOOGLE_OSV_ENABLED, "Go;Maven", false}, - new Object[]{"OSV-001", "OSV", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_GOOGLE_OSV_ENABLED, "", false}, - new Object[]{"OSV-001", "OSV", SCANNER_INTERNAL, ConfigPropertyConstants.VULNERABILITY_SOURCE_GOOGLE_OSV_ENABLED, null, false}, - new Object[]{"sonatype-001", "OSSINDEX", SCANNER_INTERNAL, null, null, false}, - new Object[]{"snyk-001", "SNYK", SCANNER_INTERNAL, null, null, false}, - new Object[]{"001", "VULNDB", SCANNER_INTERNAL, null, null, false}, - // Scanners must be allowed to override existing data if they themselves are the authoritative source, - // or mirroring of the actual authoritative source is not enabled. - new Object[]{"CVE-001", "NVD", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_NVD_ENABLED, "true", false}, - new Object[]{"CVE-001", "NVD", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_NVD_ENABLED, "false", true}, - new Object[]{"CVE-001", "NVD", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_NVD_ENABLED, null, true}, - new Object[]{"GHSA-001", "GITHUB", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_GITHUB_ADVISORIES_ENABLED, "true", false}, - new Object[]{"GHSA-001", "GITHUB", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_GITHUB_ADVISORIES_ENABLED, "false", true}, - new Object[]{"GHSA-001", "GITHUB", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_GITHUB_ADVISORIES_ENABLED, null, true}, - new Object[]{"OSV-001", "OSV", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_GOOGLE_OSV_ENABLED, "Go;Maven", false}, - new Object[]{"OSV-001", "OSV", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_GOOGLE_OSV_ENABLED, "", true}, - new Object[]{"OSV-001", "OSV", SCANNER_OSSINDEX, ConfigPropertyConstants.VULNERABILITY_SOURCE_GOOGLE_OSV_ENABLED, null, true}, - new Object[]{"sonatype-001", "OSSINDEX", SCANNER_OSSINDEX, null, null, true}, - new Object[]{"SNYK-001", "SNYK", SCANNER_OSSINDEX, null, null, false}, - new Object[]{"sonatype-001", "OSSINDEX", SCANNER_SNYK, null, null, false}, - new Object[]{"SNYK-001", "SNYK", SCANNER_SNYK, null, null, true}, - // Updating of internal vulnerabilities must always be forbidden. 
- new Object[]{"INT-001", "INTERNAL", SCANNER_OSSINDEX, null, null, false}, - new Object[]{"INT-001", "INTERNAL", SCANNER_SNYK, null, null, false}, - }; - } - - @Test - @Parameters(method = "canUpdateExistingVulnerabilityTestParams") - public void canUpdateExistingVulnerabilityTest(final String vulnId, final String vulnSource, final Scanner scanner, - final ConfigPropertyConstants mirrorSourceConfigProperty, - final String mirrorSourceConfigPropertyValue, final boolean expectModified) { - if (mirrorSourceConfigProperty != null && mirrorSourceConfigPropertyValue != null) { - qm.createConfigProperty( - mirrorSourceConfigProperty.getGroupName(), - mirrorSourceConfigProperty.getPropertyName(), - mirrorSourceConfigPropertyValue, - mirrorSourceConfigProperty.getPropertyType(), - null - ); - } - - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - qm.persist(project); - - final var component = new Component(); - component.setName("acme-lib"); - component.setVersion("1.1.0"); - component.setProject(project); - qm.persist(component); - - final var vulnerability = new Vulnerability(); - vulnerability.setVulnId(vulnId); - vulnerability.setSource(vulnSource); - vulnerability.setDescription("original description"); - qm.persist(vulnerability); - - final var scanToken = UUID.randomUUID().toString(); - final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(component.getUuid().toString()).build(); - final var scanResult = ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(scanner) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().addVulnerabilities( - createVuln(vulnId, vulnSource).toBuilder().setDescription("modified description") - .build()))) - .build(); - - inputTopic.pipeInput(scanKey, scanResult); - - qm.getPersistenceManager().refreshAll(component, vulnerability); - assertThat(component.getVulnerabilities()).satisfiesExactly( - vuln -> { - assertThat(vuln.getVulnId()).isEqualTo(vulnId); - assertThat(vuln.getSource()).isEqualTo(vulnSource); - if (expectModified) { - assertThat(vuln.getDescription()).isEqualTo("modified description"); - } else { - assertThat(vuln.getDescription()).isEqualTo("original description"); - } - } - ); - } - - @Test - public void updateExistingVulnerabilityTest() { - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - qm.persist(project); - - final var component = new Component(); - component.setName("acme-lib"); - component.setVersion("1.1.0"); - component.setProject(project); - qm.persist(component); - - final var vulnerability = new Vulnerability(); - vulnerability.setVulnId("CVE-001"); - vulnerability.setSource(Vulnerability.Source.NVD); - vulnerability.setTitle("original title"); - vulnerability.setSubTitle("original subTitle"); - vulnerability.setDescription("original description"); - vulnerability.setDetail("original detail"); - vulnerability.setRecommendation("original recommendation"); - vulnerability.setReferences("original references"); - vulnerability.setCreated(Date.from(Instant.ofEpochSecond(1672527600))); // Sat Dec 31 2022 23:00:00 GMT+0000 - vulnerability.setPublished(Date.from(Instant.ofEpochSecond(1672614000))); // Sun Jan 01 2023 23:00:00 GMT+0000 - vulnerability.setUpdated(Date.from(Instant.ofEpochSecond(1672700400))); // Mon Jan 02 2023 23:00:00 GMT+0000 - vulnerability.setCwes(List.of(666, 777)); - vulnerability.setSeverity(Severity.LOW); - 
vulnerability.setCvssV2BaseScore(BigDecimal.valueOf(2.2)); - vulnerability.setCvssV2ExploitabilitySubScore(BigDecimal.valueOf(2.2)); - vulnerability.setCvssV2ImpactSubScore(BigDecimal.valueOf(2.3)); - vulnerability.setCvssV2Vector("original cvssV2Vector"); - vulnerability.setCvssV3BaseScore(BigDecimal.valueOf(3.1)); - vulnerability.setCvssV3ExploitabilitySubScore(BigDecimal.valueOf(3.2)); - vulnerability.setCvssV3ImpactSubScore(BigDecimal.valueOf(3.3)); - vulnerability.setCvssV3Vector("original cvssv3Vector"); - vulnerability.setOwaspRRLikelihoodScore(BigDecimal.valueOf(4.1)); - vulnerability.setOwaspRRTechnicalImpactScore(BigDecimal.valueOf(4.2)); - vulnerability.setOwaspRRBusinessImpactScore(BigDecimal.valueOf(4.3)); - vulnerability.setOwaspRRVector("original owaspRrVector"); - vulnerability.setVulnerableVersions("original vulnerableVersions"); - vulnerability.setPatchedVersions("original patchedVersions"); - vulnerability.setEpssScore(BigDecimal.valueOf(5.1)); - vulnerability.setEpssPercentile(BigDecimal.valueOf(5.2)); - qm.persist(vulnerability); - - final var scanToken = UUID.randomUUID().toString(); - final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(component.getUuid().toString()).build(); - final var scanResult = ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_OSSINDEX) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().addVulnerabilities( - createVuln("CVE-001", "NVD").toBuilder() - .setDescription("modified description") - .setDetail("modified detail") - .setRecommendation("modified recommendation") - .setCreated(Timestamp.newBuilder() - .setSeconds(1673305200)) // Mon Jan 09 2023 23:00:00 GMT+0000 - .setPublished(Timestamp.newBuilder() - .setSeconds(1673391600)) // Tue Jan 10 2023 23:00:00 GMT+0000 - .setUpdated(Timestamp.newBuilder() - .setSeconds(1673478000)) // Wed Jan 11 2023 23:00:00 GMT+0000 - .addCwes(999) - .addAdvisories(Advisory.newBuilder().setUrl("modified reference").build()) - .addRatings(VulnerabilityRating.newBuilder() - .setSource(Source.newBuilder().setName("NVD").build()) - .setMethod(SCORE_METHOD_CVSSV2) - .setScore(9.3) - .setVector("(AV:N/AC:M/Au:N/C:C/I:C/A:C)")) - .addRatings(VulnerabilityRating.newBuilder() - .setSource(Source.newBuilder().setName("NVD").build()) - .setMethod(SCORE_METHOD_CVSSV3) - .setScore(10.0) - .setVector("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H")) - .addRatings(VulnerabilityRating.newBuilder() - .setSource(Source.newBuilder().setName("OSSINDEX").build()) - .setMethod(SCORE_METHOD_OWASP) - .setScore(6.6) - .setVector("SL:1/M:4/O:4/S:9/ED:7/EE:3/A:4/ID:3/LC:9/LI:1/LAV:5/LAC:1/FD:3/RD:4/NC:7/PV:9")) - .addProperties(Property.newBuilder() - .setName("dependency-track:vuln:title") - .setValue("modified title").build()) - .build()))) - .build(); - - inputTopic.pipeInput(scanKey, scanResult); - - qm.getPersistenceManager().refreshAll(component, vulnerability); - assertThat(component.getVulnerabilities()).hasSize(1); - - assertThat(vulnerability.getVulnId()).isEqualTo("CVE-001"); - assertThat(vulnerability.getSource()).isEqualTo("NVD"); - assertThat(vulnerability.getDescription()).isEqualTo("modified description"); - assertThat(vulnerability.getDetail()).isEqualTo("modified detail"); - assertThat(vulnerability.getTitle()).isEqualTo("modified title"); - assertThat(vulnerability.getSubTitle()).isNull(); - assertThat(vulnerability.getRecommendation()).isEqualTo("modified recommendation"); - 
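The CVSS and OWASP RR figures asserted below (base 9.3 with sub-scores 8.6/10.0, base 10.0 with 3.9/6.0, and 4.375/4.0/5.75) are not present in the scanner payload; they follow from recalculating the submitted vectors. Worked arithmetic for the CVSSv2 vector (AV:N/AC:M/Au:N/C:C/I:C/A:C), using the standard v2 base equations:

    // Scores are rounded to one decimal, matching the assertions below.
    static double cvssV2BaseScoreExample() {
        final double av = 1.0, ac = 0.61, au = 0.704;  // AV:N, AC:M, Au:N
        final double c = 0.660, i = 0.660, a = 0.660;  // C:C, I:C, A:C
        final double impact = 10.41 * (1 - (1 - c) * (1 - i) * (1 - a)); // 10.0008 -> 10.0
        final double exploitability = 20 * av * ac * au;                 // 8.5888  -> 8.6
        final double fImpact = impact == 0 ? 0.0 : 1.176;
        return ((0.6 * impact) + (0.4 * exploitability) - 1.5) * fImpact; // 9.3326 -> 9.3
    }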
assertThat(vulnerability.getReferences()).isEqualTo("* [modified reference](modified reference)\n"); - assertThat(vulnerability.getCreated()).isEqualTo(Instant.ofEpochSecond(1673305200)); - assertThat(vulnerability.getPublished()).isEqualTo(Instant.ofEpochSecond(1673391600)); - assertThat(vulnerability.getUpdated()).isEqualTo(Instant.ofEpochSecond(1673478000)); - assertThat(vulnerability.getCwes()).containsOnly(999); - assertThat(vulnerability.getSeverity()).isEqualTo(Severity.CRITICAL); - assertThat(vulnerability.getCvssV2BaseScore()).isEqualTo("9.3"); - assertThat(vulnerability.getCvssV2ExploitabilitySubScore()).isEqualTo("8.6"); - assertThat(vulnerability.getCvssV2ImpactSubScore()).isEqualTo("10.0"); - assertThat(vulnerability.getCvssV2Vector()).isEqualTo("(AV:N/AC:M/Au:N/C:C/I:C/A:C)"); - assertThat(vulnerability.getCvssV3BaseScore()).isEqualTo("10.0"); - assertThat(vulnerability.getCvssV3ExploitabilitySubScore()).isEqualTo("3.9"); - assertThat(vulnerability.getCvssV3ImpactSubScore()).isEqualTo("6.0"); - assertThat(vulnerability.getCvssV3Vector()).isEqualTo("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H"); - assertThat(vulnerability.getOwaspRRLikelihoodScore()).isEqualTo("4.375"); - assertThat(vulnerability.getOwaspRRTechnicalImpactScore()).isEqualTo("4.0"); - assertThat(vulnerability.getOwaspRRBusinessImpactScore()).isEqualTo("5.75"); - assertThat(vulnerability.getOwaspRRVector()).isEqualTo("SL:1/M:4/O:4/S:9/ED:7/EE:3/A:4/ID:3/LC:9/LI:1/LAV:5/LAC:1/FD:3/RD:4/NC:7/PV:9"); - assertThat(vulnerability.getVulnerableVersions()).isNull(); - assertThat(vulnerability.getPatchedVersions()).isNull(); - assertThat(vulnerability.getEpssScore()).isEqualByComparingTo("5.1"); - assertThat(vulnerability.getEpssPercentile()).isEqualByComparingTo("5.2"); - } - - @Test - public void analysisThroughPolicyNewAnalysisTest() { - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - qm.persist(project); - - final var component = new Component(); - component.setName("acme-lib"); - component.setVersion("1.1.0"); - component.setProject(project); - qm.persist(component); - - // Create a vulnerability that was not previously reported for the component. - final var newVuln = new Vulnerability(); - newVuln.setVulnId("CVE-100"); - newVuln.setSource(Vulnerability.Source.NVD); - newVuln.setSeverity(Severity.CRITICAL); - qm.persist(newVuln); - - // Create a policy that marks any finding as NOT_AFFECTED, and downgrades the severity to LOW. 
- final var policyAnalysis = new VulnerabilityPolicyAnalysis(); - policyAnalysis.setState(VulnerabilityPolicyAnalysis.State.NOT_AFFECTED); - policyAnalysis.setJustification(VulnerabilityPolicyAnalysis.Justification.CODE_NOT_REACHABLE); - policyAnalysis.setVendorResponse(VulnerabilityPolicyAnalysis.Response.WILL_NOT_FIX); - policyAnalysis.setDetails("Because I say so."); - final var policyRating = new VulnerabilityPolicyRating(); - policyRating.setMethod(VulnerabilityPolicyRating.Method.CVSSV3); - policyRating.setSeverity(VulnerabilityPolicyRating.Severity.LOW); - policyRating.setVector("CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L"); - policyRating.setScore(1.6); - final var policy = new VulnerabilityPolicy(); - policy.setName("Foo"); - policy.setAuthor("Jane Doe"); - policy.setConditions(List.of("has(component.name)", "project.version != \"\"")); - policy.setAnalysis(policyAnalysis); - policy.setRatings(List.of(policyRating)); - jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(policy)); - - final var componentUuid = component.getUuid(); - final var scanToken = UUID.randomUUID().toString(); - final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); - final var scanResult = ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().addAllVulnerabilities(List.of( - createVuln(newVuln.getVulnId(), newVuln.getSource()) - )))) - .build(); - inputTopic.pipeInput(new TestRecord<>(scanKey, scanResult)); - assertThat(outputTopic.readValuesToList()).containsOnly(scanResult); - - qm.getPersistenceManager().evictAll(); - assertThat(component.getVulnerabilities()).satisfiesExactly( - v -> { - assertThat(v.getVulnId()).isEqualTo("CVE-100"); - assertThat(qm.getAnalysis(component, v)).satisfies(analysis -> { - assertThat(analysis.getAnalysisState()).isEqualTo(AnalysisState.NOT_AFFECTED); - assertThat(analysis.getAnalysisJustification()).isEqualTo(AnalysisJustification.CODE_NOT_REACHABLE); - assertThat(analysis.getAnalysisResponse()).isEqualTo(AnalysisResponse.WILL_NOT_FIX); - assertThat(analysis.getAnalysisDetails()).isEqualTo("Because I say so."); - assertThat(analysis.isSuppressed()).isFalse(); - assertThat(analysis.getSeverity()).isEqualTo(Severity.LOW); - assertThat(analysis.getCvssV2Vector()).isNull(); - assertThat(analysis.getCvssV2Score()).isNull(); - assertThat(analysis.getCvssV3Vector()).isEqualTo("CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L"); - assertThat(analysis.getCvssV3Score()).isEqualByComparingTo("1.6"); - assertThat(analysis.getOwaspVector()).isNull(); - assertThat(analysis.getOwaspScore()).isNull(); - - assertThat(analysis.getAnalysisComments()).extracting(AnalysisComment::getCommenter).containsOnly("[Policy{Name=Foo, Author=Jane Doe}]"); - assertThat(analysis.getAnalysisComments()).extracting(AnalysisComment::getComment).containsExactly( - "Matched on condition(s):\n- has(component.name)\n- project.version != \"\"", - "Analysis: NOT_SET → NOT_AFFECTED", - "Justification: NOT_SET → CODE_NOT_REACHABLE", - "Vendor Response: NOT_SET → WILL_NOT_FIX", - "Details: Because I say so.", - "Severity: CRITICAL → LOW", - "CVSSv3 Vector: (None) → CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L", - "CVSSv3 Score: (None) → 1.6" - ); - }); - }); - - // TODO: There should be PROJECT_AUDIT_CHANGE notifications. 
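Each field the policy changes is recorded as one transition comment on the analysis, attributed to the policy, which is what the containsExactly assertion above pins down. A sketch of how that trail could be assembled (helper and accessors hypothetical; the arrow format matches the asserted comments):

    import java.util.ArrayList;
    import java.util.List;

    // Sketch only: the real processor also records justification, vendor
    // response, details, severity, and rating transitions, as exercised above.
    static List<String> buildPolicyAuditTrail(final VulnerabilityPolicy policy,
                                              final AnalysisState previousState,
                                              final AnalysisState newState) {
        final var comments = new ArrayList<String>();
        comments.add("Matched on condition(s):\n- " + String.join("\n- ", policy.getConditions()));
        if (previousState != newState) {
            comments.add("Analysis: %s → %s".formatted(previousState, newState));
        }
        return comments;
    }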
- assertThat(kafkaMockProducer.history()).satisfiesExactly( - record -> { - assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY.name()); - final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY, record); - assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO); - assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL); - assertThat(notification.getGroup()).isEqualTo(GROUP_NEW_VULNERABILITY); - assertThat(notification.getSubject().is(NewVulnerabilitySubject.class)).isTrue(); - final var subject = notification.getSubject().unpack(NewVulnerabilitySubject.class); - assertThat(subject.getVulnerability().getVulnId()).isEqualTo("CVE-100"); - assertThat(subject.getVulnerability().getSource()).isEqualTo("NVD"); - assertThat(subject.getVulnerability().getSeverity()).isEqualTo("LOW"); - assertThat(subject.getVulnerability().getCvssV3()).isEqualTo(1.6); - } - ); - } - - @Test - public void analysisThroughPolicyNewAnalysisSuppressionTest() { - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - qm.persist(project); - - final var component = new Component(); - component.setName("acme-lib"); - component.setVersion("1.1.0"); - component.setProject(project); - qm.persist(component); - - // Create a vulnerability that was not previously reported for the component. - final var newVuln = new Vulnerability(); - newVuln.setVulnId("CVE-100"); - newVuln.setSource(Vulnerability.Source.NVD); - newVuln.setSeverity(Severity.CRITICAL); - qm.persist(newVuln); - - // Create a policy that marks any finding as FALSE_POSITIVE, and suppresses it. - final var policyAnalysis = new VulnerabilityPolicyAnalysis(); - policyAnalysis.setState(VulnerabilityPolicyAnalysis.State.FALSE_POSITIVE); - policyAnalysis.setSuppress(true); - final var policy = new VulnerabilityPolicy(); - policy.setName("Foo"); - policy.setAuthor("Jane Doe"); - policy.setConditions(List.of("has(component.name)", "project.version != \"\"")); - policy.setAnalysis(policyAnalysis); - jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(policy)); - - final var componentUuid = component.getUuid(); - final var scanToken = UUID.randomUUID().toString(); - final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); - final var scanResult = ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().addAllVulnerabilities(List.of( - createVuln(newVuln.getVulnId(), newVuln.getSource()) - )))) - .build(); - inputTopic.pipeInput(new TestRecord<>(scanKey, scanResult)); - assertThat(outputTopic.readValuesToList()).containsOnly(scanResult); - - qm.getPersistenceManager().evictAll(); - assertThat(component.getVulnerabilities()).satisfiesExactly( - v -> { - assertThat(v.getVulnId()).isEqualTo("CVE-100"); - assertThat(qm.getAnalysis(component, v)).satisfies(analysis -> { - assertThat(analysis.getAnalysisState()).isEqualTo(AnalysisState.FALSE_POSITIVE); - assertThat(analysis.getAnalysisJustification()).isNull(); - assertThat(analysis.getAnalysisResponse()).isNull(); - assertThat(analysis.getAnalysisDetails()).isNull(); - assertThat(analysis.isSuppressed()).isTrue(); - assertThat(analysis.getSeverity()).isNull(); - assertThat(analysis.getCvssV2Vector()).isNull(); - assertThat(analysis.getCvssV2Score()).isNull(); - 
assertThat(analysis.getCvssV3Vector()).isNull(); - assertThat(analysis.getCvssV3Score()).isNull(); - assertThat(analysis.getOwaspVector()).isNull(); - assertThat(analysis.getOwaspScore()).isNull(); - - assertThat(analysis.getAnalysisComments()).extracting(AnalysisComment::getCommenter).containsOnly("[Policy{Name=Foo, Author=Jane Doe}]"); - assertThat(analysis.getAnalysisComments()).extracting(AnalysisComment::getComment).containsExactly( - "Matched on condition(s):\n- has(component.name)\n- project.version != \"\"", - "Analysis: NOT_SET → FALSE_POSITIVE", - "Suppressed" - ); - }); - }); - - // The vulnerability was suppressed, so no notifications to be expected. - // TODO: There should be PROJECT_AUDIT_CHANGE notifications. - assertThat(kafkaMockProducer.history()).isEmpty(); - } - - @Test - public void analysisThroughPolicyExistingDifferentAnalysisTest() { - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - qm.persist(project); - - final var component = new Component(); - component.setName("acme-lib"); - component.setVersion("1.1.0"); - component.setProject(project); - qm.persist(component); - - // Create an existing vulnerability, for which the analysis is entirely different - // over what's defined in the policy. - final var vuln = new Vulnerability(); - vuln.setVulnId("CVE-100"); - vuln.setSource(Vulnerability.Source.NVD); - vuln.setSeverity(Severity.CRITICAL); - qm.persist(vuln); - qm.addVulnerability(vuln, component, AnalyzerIdentity.INTERNAL_ANALYZER); - final Analysis vulnAnalysis = qm.makeAnalysis(component, vuln, AnalysisState.FALSE_POSITIVE, - AnalysisJustification.NOT_SET, AnalysisResponse.CAN_NOT_FIX, "oldDetails", true); - vulnAnalysis.setSeverity(Severity.INFO); - vulnAnalysis.setCvssV2Vector("oldCvssV2Vector"); - vulnAnalysis.setCvssV2Score(BigDecimal.ZERO); - vulnAnalysis.setCvssV3Vector("oldCvssV3Vector"); - vulnAnalysis.setCvssV3Score(BigDecimal.ZERO); - vulnAnalysis.setOwaspVector("oldOwaspVector"); - vulnAnalysis.setOwaspScore(BigDecimal.ZERO); - qm.persist(vulnAnalysis); - - // Create a policy that marks any finding as NOT_AFFECTED, and downgrades the severity to LOW. 
- final var policyAnalysis = new VulnerabilityPolicyAnalysis(); - policyAnalysis.setState(VulnerabilityPolicyAnalysis.State.NOT_AFFECTED); - policyAnalysis.setJustification(VulnerabilityPolicyAnalysis.Justification.CODE_NOT_REACHABLE); - policyAnalysis.setVendorResponse(VulnerabilityPolicyAnalysis.Response.WILL_NOT_FIX); - policyAnalysis.setDetails("Because I say so."); - final var policyRating = new VulnerabilityPolicyRating(); - policyRating.setMethod(VulnerabilityPolicyRating.Method.CVSSV3); - policyRating.setSeverity(VulnerabilityPolicyRating.Severity.LOW); - policyRating.setVector("CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L"); - policyRating.setScore(1.6); - final var policy = new VulnerabilityPolicy(); - policy.setName("Foo"); - policy.setAuthor("Jane Doe"); - policy.setConditions(List.of("has(component.name)", "project.version != \"\"")); - policy.setAnalysis(policyAnalysis); - policy.setRatings(List.of(policyRating)); - jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(policy)); - - final var componentUuid = component.getUuid(); - final var scanToken = UUID.randomUUID().toString(); - final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); - final var scanResult = ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().addAllVulnerabilities(List.of( - createVuln(vuln.getVulnId(), vuln.getSource()) - )))) - .build(); - inputTopic.pipeInput(new TestRecord<>(scanKey, scanResult)); - assertThat(outputTopic.readValuesToList()).containsOnly(scanResult); - - qm.getPersistenceManager().evictAll(); - assertThat(component.getVulnerabilities()).satisfiesExactly( - v -> { - assertThat(v.getVulnId()).isEqualTo("CVE-100"); - assertThat(qm.getAnalysis(component, v)).satisfies(analysis -> { - assertThat(analysis.getAnalysisState()).isEqualTo(AnalysisState.NOT_AFFECTED); - assertThat(analysis.getAnalysisJustification()).isEqualTo(AnalysisJustification.CODE_NOT_REACHABLE); - assertThat(analysis.getAnalysisResponse()).isEqualTo(AnalysisResponse.WILL_NOT_FIX); - assertThat(analysis.getAnalysisDetails()).isEqualTo("Because I say so."); - assertThat(analysis.isSuppressed()).isFalse(); - assertThat(analysis.getSeverity()).isEqualTo(Severity.LOW); - assertThat(analysis.getCvssV2Vector()).isNull(); - assertThat(analysis.getCvssV2Score()).isNull(); - assertThat(analysis.getCvssV3Vector()).isEqualTo("CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L"); - assertThat(analysis.getCvssV3Score()).isEqualByComparingTo("1.6"); - assertThat(analysis.getOwaspVector()).isNull(); - assertThat(analysis.getOwaspScore()).isNull(); - - assertThat(analysis.getAnalysisComments()).extracting(AnalysisComment::getCommenter).containsOnly("[Policy{Name=Foo, Author=Jane Doe}]"); - assertThat(analysis.getAnalysisComments()).extracting(AnalysisComment::getComment).containsExactly( - "Matched on condition(s):\n- has(component.name)\n- project.version != \"\"", - "Analysis: FALSE_POSITIVE → NOT_AFFECTED", - "Justification: NOT_SET → CODE_NOT_REACHABLE", - "Vendor Response: CAN_NOT_FIX → WILL_NOT_FIX", - "Details: Because I say so.", - "Unsuppressed", - "Severity: INFO → LOW", - "CVSSv2 Vector: oldCvssV2Vector → (None)", - "CVSSv2 Score: 0.0 → (None)", - "CVSSv3 Vector: oldCvssV3Vector → CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L", - "CVSSv3 Score: 0.0 → 1.6", - "OWASP Vector: oldOwaspVector → (None)", - "OWASP Score: 0.0 → 
(None)" - ); - }); - }); - - // The vulnerability already existed, so no notifications to be expected. - // There should be PROJECT_AUDIT_CHANGE notification. - assertThat(kafkaMockProducer.history()).satisfiesExactly( - record -> { - assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_AUDIT_CHANGE.name()); - final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_PROJECT_AUDIT_CHANGE, record); - assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO); - assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL); - assertThat(notification.getGroup()).isEqualTo(GROUP_PROJECT_AUDIT_CHANGE); - assertThat(notification.getSubject().is(VulnerabilityAnalysisDecisionChangeSubject.class)).isTrue(); - final var subject = notification.getSubject().unpack(VulnerabilityAnalysisDecisionChangeSubject.class); - assertThat(subject.getAnalysis().getState()).isEqualTo("NOT_AFFECTED"); - } - ); - } - - @Test - public void analysisThroughPolicyExistingEqualAnalysisTest() { - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - qm.persist(project); - - final var component = new Component(); - component.setName("acme-lib"); - component.setVersion("1.1.0"); - component.setProject(project); - qm.persist(component); - - // Create an existing vulnerability, for which the analysis is completely - // identical to what's defined in the policy. - final var vuln = new Vulnerability(); - vuln.setVulnId("CVE-100"); - vuln.setSource(Vulnerability.Source.NVD); - vuln.setSeverity(Severity.CRITICAL); - qm.persist(vuln); - qm.addVulnerability(vuln, component, AnalyzerIdentity.INTERNAL_ANALYZER); - final Analysis vulnAnalysis = qm.makeAnalysis(component, vuln, AnalysisState.NOT_AFFECTED, - AnalysisJustification.CODE_NOT_REACHABLE, AnalysisResponse.WILL_NOT_FIX, "Because I say so.", false); - vulnAnalysis.setSeverity(Severity.LOW); - vulnAnalysis.setCvssV3Vector("CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L"); - vulnAnalysis.setCvssV3Score(BigDecimal.valueOf(1.6)); - qm.persist(vulnAnalysis); - - // Create a policy that marks any finding as NOT_AFFECTED, and downgrades the severity to LOW. 
- final var policyAnalysis = new VulnerabilityPolicyAnalysis(); - policyAnalysis.setState(VulnerabilityPolicyAnalysis.State.NOT_AFFECTED); - policyAnalysis.setJustification(VulnerabilityPolicyAnalysis.Justification.CODE_NOT_REACHABLE); - policyAnalysis.setVendorResponse(VulnerabilityPolicyAnalysis.Response.WILL_NOT_FIX); - policyAnalysis.setDetails("Because I say so."); - final var policyRating = new VulnerabilityPolicyRating(); - policyRating.setMethod(VulnerabilityPolicyRating.Method.CVSSV3); - policyRating.setSeverity(VulnerabilityPolicyRating.Severity.LOW); - policyRating.setVector("CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L"); - policyRating.setScore(1.6); - final var policy = new VulnerabilityPolicy(); - policy.setName("Foo"); - policy.setConditions(List.of("has(component.name)", "project.version != \"\"")); - policy.setAnalysis(policyAnalysis); - policy.setRatings(List.of(policyRating)); - jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(policy)); - - final var componentUuid = component.getUuid(); - final var scanToken = UUID.randomUUID().toString(); - final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); - final var scanResult = ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().addAllVulnerabilities(List.of( - createVuln(vuln.getVulnId(), vuln.getSource()) - )))) - .build(); - inputTopic.pipeInput(new TestRecord<>(scanKey, scanResult)); - assertThat(outputTopic.readValuesToList()).containsOnly(scanResult); - - qm.getPersistenceManager().evictAll(); - assertThat(component.getVulnerabilities()).satisfiesExactly( - v -> { - assertThat(v.getVulnId()).isEqualTo("CVE-100"); - assertThat(qm.getAnalysis(component, v)).satisfies(analysis -> { - assertThat(analysis.getAnalysisState()).isEqualTo(AnalysisState.NOT_AFFECTED); - assertThat(analysis.getAnalysisJustification()).isEqualTo(AnalysisJustification.CODE_NOT_REACHABLE); - assertThat(analysis.getAnalysisResponse()).isEqualTo(AnalysisResponse.WILL_NOT_FIX); - assertThat(analysis.getAnalysisDetails()).isEqualTo("Because I say so."); - assertThat(analysis.isSuppressed()).isFalse(); - assertThat(analysis.getSeverity()).isEqualTo(Severity.LOW); - assertThat(analysis.getCvssV2Vector()).isNull(); - assertThat(analysis.getCvssV2Score()).isNull(); - assertThat(analysis.getCvssV3Vector()).isEqualTo("CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:U/C:N/I:N/A:L"); - assertThat(analysis.getCvssV3Score()).isEqualByComparingTo("1.6"); - assertThat(analysis.getOwaspVector()).isNull(); - assertThat(analysis.getOwaspScore()).isNull(); - - // As no changes were made, no analysis comments should've been created. - assertThat(analysis.getAnalysisComments()).isEmpty(); - }); - }); - - // The vulnerability already existed, so no notifications to be expected. - assertThat(kafkaMockProducer.history()).isEmpty(); - } - - @Test - public void analysisThroughPolicyWithAliasesTest() { - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - qm.persist(project); - - final var component = new Component(); - component.setName("acme-lib"); - component.setVersion("1.1.0"); - component.setProject(project); - qm.persist(component); - - // Create a vulnerability for which no aliases are currently known. - // Aliases will be reported by the ScanResult. 
- final var vulnA = new Vulnerability(); - vulnA.setVulnId("CVE-100"); - vulnA.setSource(Vulnerability.Source.NVD); - qm.persist(vulnA); - - // Create a vulnerability for which an alias is already known. - // The same alias will be reported by the ScanResult. - final var vulnB = new Vulnerability(); - vulnB.setVulnId("CVE-200"); - vulnB.setSource(Vulnerability.Source.NVD); - qm.persist(vulnB); - final var vulnAliasB = new VulnerabilityAlias(); - vulnAliasB.setCveId("CVE-200"); - vulnAliasB.setGhsaId("GHSA-200"); - qm.synchronizeVulnerabilityAlias(vulnAliasB); - - // Create a policy that suppresses any finding with the alias GHSA-100 or GHSA-200. - final var policyAnalysis = new VulnerabilityPolicyAnalysis(); - policyAnalysis.setState(VulnerabilityPolicyAnalysis.State.FALSE_POSITIVE); - policyAnalysis.setSuppress(true); - final var policy = new VulnerabilityPolicy(); - policy.setName("Foo"); - policy.setConditions(List.of("vuln.aliases.exists(alias, alias.id == \"GHSA-100\" || alias.id == \"GHSA-200\")")); - policy.setAnalysis(policyAnalysis); - jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(policy)); - - // Report three vulnerabilities for the component: - // - CVE-100 with alias GHSA-100 (vuln already in DB, alias is new) - // - CVE-200 with alias GHSA-200 (vuln and alias already in DB) - // - CVE-300 without alias (vuln already in DB) - final var componentUuid = component.getUuid(); - final var scanToken = UUID.randomUUID().toString(); - final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); - final var scanResult = ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().addAllVulnerabilities(List.of( - org.cyclonedx.proto.v1_4.Vulnerability.newBuilder() - .setId("CVE-100") - .setSource(Source.newBuilder().setName("NVD")) - .addReferences(VulnerabilityReference.newBuilder() - .setId("GHSA-100") - .setSource(Source.newBuilder().setName("GITHUB"))) - .build(), - org.cyclonedx.proto.v1_4.Vulnerability.newBuilder() - .setId("CVE-200") - .setSource(Source.newBuilder().setName("NVD")) - .addReferences(VulnerabilityReference.newBuilder() - .setId("GHSA-200") - .setSource(Source.newBuilder().setName("GITHUB"))) - .build(), - org.cyclonedx.proto.v1_4.Vulnerability.newBuilder() - .setId("CVE-300") - .setSource(Source.newBuilder().setName("NVD")) - .build() - )))) - .build(); - inputTopic.pipeInput(new TestRecord<>(scanKey, scanResult)); - assertThat(outputTopic.readValuesToList()).containsOnly(scanResult); - - qm.getPersistenceManager().evictAll(); - assertThat(component.getVulnerabilities()).satisfiesExactlyInAnyOrder( - v -> { - assertThat(v.getVulnId()).isEqualTo("CVE-100"); - assertThat(qm.getAnalysis(component, v)).satisfies(analysis -> { - assertThat(analysis.getAnalysisState()).isEqualTo(AnalysisState.FALSE_POSITIVE); - assertThat(analysis.isSuppressed()).isTrue(); - }); - }, - v -> { - assertThat(v.getVulnId()).isEqualTo("CVE-200"); - assertThat(qm.getAnalysis(component, v)).satisfies(analysis -> { - assertThat(analysis.getAnalysisState()).isEqualTo(AnalysisState.FALSE_POSITIVE); - assertThat(analysis.isSuppressed()).isTrue(); - }); - }, - v -> { - assertThat(v.getVulnId()).isEqualTo("CVE-300"); - assertThat(qm.getAnalysis(component, v)).isNull(); - } - ); - } - - @Test - public void analysisThroughPolicyResetOnNoMatchTest() { - final var project = new Project(); - 
project.setName("acme-app"); - project.setVersion("1.0.0"); - qm.persist(project); - - final var component = new Component(); - component.setName("acme-lib"); - component.setVersion("1.1.0"); - component.setProject(project); - qm.persist(component); - - final var policyAnalysis = new VulnerabilityPolicyAnalysis(); - policyAnalysis.setState(VulnerabilityPolicyAnalysis.State.FALSE_POSITIVE); - policyAnalysis.setJustification(VulnerabilityPolicyAnalysis.Justification.CODE_NOT_REACHABLE); - policyAnalysis.setVendorResponse(VulnerabilityPolicyAnalysis.Response.WILL_NOT_FIX); - policyAnalysis.setSuppress(true); - final var policy = new VulnerabilityPolicy(); - policy.setName("Foo"); - policy.setConditions(List.of("component.name == \"some-other-name\"")); - policy.setAnalysis(policyAnalysis); - jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(policy)); - - // Create vulnerability with existing analysis that was previously applied by the above policy, - // but is no longer current. - final var vulnA = new Vulnerability(); - vulnA.setVulnId("CVE-100"); - vulnA.setSource(Vulnerability.Source.NVD); - vulnA.setSeverity(Severity.CRITICAL); - qm.persist(vulnA); - qm.addVulnerability(vulnA, component, AnalyzerIdentity.INTERNAL_ANALYZER); - final var analysisA = new Analysis(); - analysisA.setComponent(component); - analysisA.setVulnerability(vulnA); - analysisA.setAnalysisState(AnalysisState.NOT_AFFECTED); - analysisA.setAnalysisJustification(AnalysisJustification.CODE_NOT_REACHABLE); - analysisA.setAnalysisResponse(AnalysisResponse.WILL_NOT_FIX); - analysisA.setAnalysisDetails("Because I say so."); - analysisA.setSeverity(Severity.MEDIUM); - analysisA.setCvssV2Vector("oldCvssV2Vector"); - analysisA.setCvssV2Score(BigDecimal.valueOf(1.1)); - analysisA.setCvssV3Vector("oldCvssV3Vector"); - analysisA.setCvssV3Score(BigDecimal.valueOf(2.2)); - analysisA.setOwaspVector("oldOwaspVector"); - analysisA.setOwaspScore(BigDecimal.valueOf(3.3)); - analysisA.setSuppressed(true); - qm.getPersistenceManager().makePersistent(analysisA); - jdbi(qm).useHandle(jdbiHandle -> jdbiHandle.createUpdate(""" - UPDATE - "ANALYSIS" - SET - "VULNERABILITY_POLICY_ID" = (SELECT "ID" FROM "VULNERABILITY_POLICY" WHERE "NAME" = :vulnPolicyName) - WHERE - "ID" = :analysisId - """) - .bind("vulnPolicyName", policy.getName()) - .bind("analysisId", analysisA.getId()) - .execute()); - - // Create another vulnerability with existing analysis that was manually applied. - final var vulnB = new Vulnerability(); - vulnB.setVulnId("CVE-200"); - vulnB.setSource(Vulnerability.Source.NVD); - vulnB.setSeverity(Severity.HIGH); - qm.persist(vulnB); - qm.addVulnerability(vulnB, component, AnalyzerIdentity.INTERNAL_ANALYZER); - final var analysisB = new Analysis(); - analysisB.setComponent(component); - analysisB.setVulnerability(vulnB); - analysisB.setAnalysisState(AnalysisState.NOT_AFFECTED); - qm.getPersistenceManager().makePersistent(analysisB); - - // Report both CVE-100 and CVE-200 again. 
- final var componentUuid = component.getUuid(); - final var scanToken = UUID.randomUUID().toString(); - final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); - final var scanResult = ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().addAllVulnerabilities(List.of( - createVuln(vulnA.getVulnId(), vulnA.getSource()), - createVuln(vulnB.getVulnId(), vulnB.getSource()) - )))) - .build(); - inputTopic.pipeInput(new TestRecord<>(scanKey, scanResult)); - assertThat(outputTopic.readValuesToList()).containsOnly(scanResult); - - qm.getPersistenceManager().evictAll(); - assertThat(component.getVulnerabilities()).satisfiesExactly( - v -> { - // The analysis that was previously applied via policy must have been reverted. - assertThat(v.getVulnId()).isEqualTo("CVE-100"); - assertThat(qm.getAnalysis(component, v)).satisfies(a -> { - assertThat(a.getAnalysisState()).isEqualTo(AnalysisState.NOT_SET); - assertThat(a.getVulnerabilityPolicy()).isNull(); - assertThat(a.getAnalysisComments()).extracting(AnalysisComment::getCommenter).containsOnly("[Policy{None}]"); - assertThat(a.getAnalysisComments()).extracting(AnalysisComment::getComment).containsExactlyInAnyOrder( - "No longer covered by any policy", - "Analysis: NOT_AFFECTED → NOT_SET", - "Justification: CODE_NOT_REACHABLE → NOT_SET", - "Vendor Response: WILL_NOT_FIX → NOT_SET", - "Details: (None)", - "Severity: MEDIUM → UNASSIGNED", - "CVSSv2 Vector: oldCvssV2Vector → (None)", - "CVSSv2 Score: 1.1 → (None)", - "CVSSv3 Vector: oldCvssV3Vector → (None)", - "CVSSv3 Score: 2.2 → (None)", - "OWASP Vector: oldOwaspVector → (None)", - "OWASP Score: 3.3 → (None)", - "Unsuppressed" - ); - }); - }, - v -> { - // The manually applied analysis must not be touched! 
- assertThat(v.getVulnId()).isEqualTo("CVE-200"); - assertThat(qm.getAnalysis(component, v)).satisfies(a -> { - assertThat(a.getAnalysisState()).isEqualTo(AnalysisState.NOT_AFFECTED); - assertThat(a.getAnalysisJustification()).isNull(); - assertThat(a.getAnalysisResponse()).isNull(); - assertThat(a.getAnalysisDetails()).isNull(); - assertThat(a.getSeverity()).isNull(); - assertThat(a.getCvssV2Vector()).isNull(); - assertThat(a.getCvssV2Score()).isNull(); - assertThat(a.getCvssV3Vector()).isNull(); - assertThat(a.getCvssV3Score()).isNull(); - assertThat(a.getVulnerabilityPolicy()).isNull(); - assertThat(a.getAnalysisComments()).isEmpty(); - }); - }); - } - - @Test - public void analysisThroughPolicyWithPoliciesNotYetValidOrNotValidAnymoreTest() { - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - qm.persist(project); - - final var component = new Component(); - component.setName("acme-lib"); - component.setVersion("1.1.0"); - component.setProject(project); - qm.persist(component); - - final var notYetValidPolicyAnalysis = new VulnerabilityPolicyAnalysis(); - notYetValidPolicyAnalysis.setState(VulnerabilityPolicyAnalysis.State.FALSE_POSITIVE); - notYetValidPolicyAnalysis.setSuppress(true); - final var notYetValidPolicy = new VulnerabilityPolicy(); - notYetValidPolicy.setName("NotYetValid"); - notYetValidPolicy.setValidFrom(ZonedDateTime.ofInstant(Instant.now().plusSeconds(180), ZoneOffset.UTC)); - notYetValidPolicy.setConditions(List.of("true")); - notYetValidPolicy.setAnalysis(notYetValidPolicyAnalysis); - jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(notYetValidPolicy)); - - final var notValidAnymorePolicyAnalysis = new VulnerabilityPolicyAnalysis(); - notValidAnymorePolicyAnalysis.setState(VulnerabilityPolicyAnalysis.State.FALSE_POSITIVE); - notValidAnymorePolicyAnalysis.setSuppress(true); - final var notValidAnymorePolicy = new VulnerabilityPolicy(); - notValidAnymorePolicy.setName("NotValidAnymore"); - notValidAnymorePolicy.setValidUntil(ZonedDateTime.ofInstant(Instant.now().minusSeconds(180), ZoneOffset.UTC)); - notValidAnymorePolicy.setConditions(List.of("true")); - notValidAnymorePolicy.setAnalysis(notValidAnymorePolicyAnalysis); - jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(notValidAnymorePolicy)); - - final var vuln = new Vulnerability(); - vuln.setVulnId("CVE-100"); - vuln.setSource(Vulnerability.Source.NVD); - vuln.setSeverity(Severity.CRITICAL); - qm.persist(vuln); - - final var componentUuid = component.getUuid(); - final var scanToken = UUID.randomUUID().toString(); - final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); - final var scanResult = ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().addAllVulnerabilities(List.of( - createVuln(vuln.getVulnId(), vuln.getSource()) - )))) - .build(); - inputTopic.pipeInput(new TestRecord<>(scanKey, scanResult)); - assertThat(outputTopic.readValuesToList()).containsOnly(scanResult); - - qm.getPersistenceManager().evictAll(); - assertThat(qm.getAnalysis(component, vuln)).isNull(); - } - - @Test - public void analysisThroughPolicyWithAnalysisUpdateNotOnStateOrSuppressionTest() { - final var project = new Project(); - project.setName("acme-app"); - project.setVersion("1.0.0"); - qm.persist(project); - - final var component = new Component(); - 
component.setName("acme-lib"); - component.setVersion("1.1.0"); - component.setProject(project); - qm.persist(component); - - final var vuln = new Vulnerability(); - vuln.setVulnId("CVE-100"); - vuln.setSource(Vulnerability.Source.NVD); - vuln.setSeverity(Severity.CRITICAL); - qm.persist(vuln); - - qm.addVulnerability(vuln, component, AnalyzerIdentity.INTERNAL_ANALYZER); - - final var analysis = new Analysis(); - analysis.setComponent(component); - analysis.setVulnerability(vuln); - analysis.setAnalysisState(AnalysisState.FALSE_POSITIVE); - analysis.setSuppressed(true); - qm.persist(analysis); - - final var policyAnalysis = new VulnerabilityPolicyAnalysis(); - policyAnalysis.setState(VulnerabilityPolicyAnalysis.State.FALSE_POSITIVE); - policyAnalysis.setDetails("newDetails"); - policyAnalysis.setSuppress(true); - final var policy = new VulnerabilityPolicy(); - policy.setName("Foo"); - policy.setConditions(List.of("true")); - policy.setAnalysis(policyAnalysis); - jdbi(qm).withExtension(VulnerabilityPolicyDao.class, dao -> dao.create(policy)); - - final var componentUuid = component.getUuid(); - final var scanToken = UUID.randomUUID().toString(); - final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build(); - final var scanResult = ScanResult.newBuilder() - .setKey(scanKey) - .addScannerResults(ScannerResult.newBuilder() - .setScanner(SCANNER_INTERNAL) - .setStatus(SCAN_STATUS_SUCCESSFUL) - .setBom(Bom.newBuilder().addAllVulnerabilities(List.of( - createVuln(vuln.getVulnId(), vuln.getSource()) - )))) - .build(); - inputTopic.pipeInput(new TestRecord<>(scanKey, scanResult)); - assertThat(outputTopic.readValuesToList()).containsOnly(scanResult); - - qm.getPersistenceManager().refresh(analysis); - assertThat(analysis.getAnalysisDetails()).isEqualTo("newDetails"); - - assertThat(kafkaMockProducer.history()).noneSatisfy( - record -> assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_AUDIT_CHANGE.name())); - } - - private org.cyclonedx.proto.v1_4.Vulnerability createVuln(final String id, final String source) { - return org.cyclonedx.proto.v1_4.Vulnerability.newBuilder() - .setId(id) - .setSource(Source.newBuilder().setName(source).build()).build(); - } -} \ No newline at end of file From 7412fc246562c3a7dc8e1faf6b9c841f054b1309 Mon Sep 17 00:00:00 2001 From: nscuro Date: Thu, 28 Mar 2024 17:25:56 +0100 Subject: [PATCH 14/24] Fix missing dispatch of `BOM_PROCESSED` notification The status of the workflow step will always be `COMPLETED`, regardless of whether delayed dispatch is enabled. This is existing behavior, and the regression was caught via `BomProcessedNotificationDelayedE2ET`. 
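A rough sketch of the reworked eligibility check (type and method names as introduced by this patch; `eligibleTokens` is a placeholder name, the processor calls it `workflowTokensWithBomProcessed`):

    // Only workflow tokens whose BOM_PROCESSING step already has status
    // COMPLETED qualify for BOM_PROCESSED dispatch.
    Set<String> eligibleTokens = workflowDao.getTokensByStepAndStateAndTokenAnyOf(
            WorkflowStep.BOM_PROCESSING, WorkflowStatus.COMPLETED, workflowTokens);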
Signed-off-by: nscuro --- ...essedVulnerabilityScanResultProcessor.java | 46 ++++++++---------- .../persistence/jdbi/WorkflowDao.java | 41 ++++++++++------ ...dVulnerabilityScanResultProcessorTest.java | 47 +++++++++++++++++++ 3 files changed, 92 insertions(+), 42 deletions(-) diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java index 2c98fae37..e046ed198 100644 --- a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java +++ b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java @@ -246,43 +246,35 @@ private static List<WorkflowState> updateWorkflowStates(final Handle jdbiHandle, } private static List> createBomProcessedNotifications(final Handle jdbiHandle, final List<VulnerabilityScan> completedVulnScans) { - final int numScans = completedVulnScans.size(); - final var tokens = new ArrayList<String>(numScans); - final var statuses = new ArrayList<WorkflowStatus>(numScans); - final var failureReasons = new ArrayList<String>(numScans); - - for (final VulnerabilityScan completedVulnScan : completedVulnScans) { - if (completedVulnScan.getTargetType() != VulnerabilityScan.TargetType.PROJECT) { - // BOM_PROCESSED notifications only make sense when the scan target is a project. - continue; - } - - tokens.add(completedVulnScan.getToken()); - statuses.add(WorkflowStatus.COMPLETED); - failureReasons.add(null); - } - if (tokens.isEmpty()) { - LOGGER.debug("None of the possible %d completed vulnerability scans target a project".formatted(numScans)); + // Collect the workflow tokens for all completed scans, as long as they target a project. + // Dispatching BOM_PROCESSED notifications does not make sense when individual components, + // or even the entire portfolio, were scanned. + final Set<String> workflowTokens = completedVulnScans.stream() + .filter(vulnScan -> vulnScan.getTargetType() == VulnerabilityScan.TargetType.PROJECT) + .map(VulnerabilityScan::getToken) + .collect(Collectors.toSet()); + if (workflowTokens.isEmpty()) { + LOGGER.debug("None of the possible %d completed vulnerability scans target a project".formatted(completedVulnScans.size())); return Collections.emptyList(); } + // Ensure that all eligible workflows have a BOM_PROCESSING step with status COMPLETED. + // For example, a scan triggered via the "Reanalyze" button in the UI won't have such a step, + // hence it doesn't make sense to dispatch a BOM_PROCESSED notification for it. 
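+        // Example (sketch): a token whose BOM_PROCESSING step is still PENDING is not
+        // returned by the lookup below, so no BOM_PROCESSED notification is dispatched
+        // for it (covered by the new test for incomplete BOM_PROCESSING steps).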
final var workflowDao = jdbiHandle.attach(WorkflowDao.class); - final List<WorkflowState> updatedWorkflowStates = - workflowDao.updateAllStatesIfPending(WorkflowStep.BOM_PROCESSING, tokens, statuses, failureReasons); - if (updatedWorkflowStates.isEmpty()) { - LOGGER.debug("None of the possible %d workflow states for %s were transitioned to %s status" - .formatted(tokens.size(), WorkflowStep.BOM_PROCESSING, WorkflowStatus.COMPLETED)); + final Set<String> workflowTokensWithBomProcessed = + workflowDao.getTokensByStepAndStateAndTokenAnyOf(WorkflowStep.BOM_PROCESSING, WorkflowStatus.COMPLETED, workflowTokens); + if (workflowTokensWithBomProcessed.isEmpty()) { + LOGGER.debug("None of the possible %d workflows have %s steps with status %s" + .formatted(workflowTokens.size(), WorkflowStep.BOM_PROCESSING, WorkflowStatus.COMPLETED)); return Collections.emptyList(); } final var notificationSubjectDao = jdbiHandle.attach(NotificationSubjectDao.class); - - final Set<String> updatedWorkflowStateTokens = updatedWorkflowStates.stream() - .map(WorkflowState::getToken).map(UUID::toString).collect(Collectors.toSet()); final List notificationSubjects = - notificationSubjectDao.getForDelayedBomProcessed(updatedWorkflowStateTokens); + notificationSubjectDao.getForDelayedBomProcessed(workflowTokensWithBomProcessed); - final var notifications = new ArrayList>(updatedWorkflowStates.size()); + final var notifications = new ArrayList>(workflowTokensWithBomProcessed.size()); notificationSubjects.stream() .map(subject -> Notification.newBuilder() .setScope(SCOPE_PORTFOLIO) diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java b/src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java index b0ebe76a6..e79b4265d 100644 --- a/src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java +++ b/src/main/java/org/dependencytrack/persistence/jdbi/WorkflowDao.java @@ -25,8 +25,13 @@ import org.jdbi.v3.sqlobject.customizer.Bind; import org.jdbi.v3.sqlobject.statement.GetGeneratedKeys; import org.jdbi.v3.sqlobject.statement.SqlBatch; +import org.jdbi.v3.sqlobject.statement.SqlQuery; +import java.util.Collection; +import java.util.Collections; import java.util.List; +import java.util.Optional; +import java.util.Set; public interface WorkflowDao { @@ -46,22 +51,28 @@ List<WorkflowState> updateAllStates(@Bind WorkflowStep step, @Bind("status") List<WorkflowStatus> statuses, @Bind("failureReason") List<String> failureReasons); - @SqlBatch(""" - UPDATE "WORKFLOW_STATE" - SET "STATUS" = :status - , "FAILURE_REASON" = :failureReason - , "UPDATED_AT" = NOW() - WHERE "TOKEN" = :token - AND "STEP" = :step - AND "STATUS" = 'PENDING' - RETURNING * + default Optional<WorkflowState> updateState(final WorkflowStep step, + final String token, + final WorkflowStatus status, + final String failureReason) { + final List<WorkflowState> updatedStates = updateAllStates(step, List.of(token), List.of(status), Collections.singletonList(failureReason)); + if (updatedStates.isEmpty()) { + return Optional.empty(); + } + + return Optional.of(updatedStates.getFirst()); + } + + @SqlQuery(""" + SELECT "TOKEN" + FROM "WORKFLOW_STATE" + WHERE "STEP" = :step + AND "STATUS" = :status + AND "TOKEN" = ANY(:tokens) """) - @GetGeneratedKeys("*") - @RegisterBeanMapper(WorkflowState.class) - List<WorkflowState> updateAllStatesIfPending(@Bind WorkflowStep step, - @Bind("token") List<String> tokens, - @Bind("status") List<WorkflowStatus> statuses, - @Bind("failureReason") List<String> failureReasons); + Set<String> getTokensByStepAndStateAndTokenAnyOf(@Bind WorkflowStep step, + @Bind WorkflowStatus status, + @Bind Collection<String> tokens); @SqlBatch(""" WITH RECURSIVE diff --git 
a/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java b/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java index 5ff6f6a87..b9380f1af 100644 --- a/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java +++ b/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java @@ -30,6 +30,7 @@ import org.dependencytrack.model.VulnerabilityScan; import org.dependencytrack.model.WorkflowStatus; import org.dependencytrack.model.WorkflowStep; +import org.dependencytrack.persistence.jdbi.WorkflowDao; import org.dependencytrack.proto.notification.v1.BomConsumedOrProcessedSubject; import org.dependencytrack.proto.notification.v1.Notification; import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisCompleteSubject; @@ -46,6 +47,7 @@ import java.util.concurrent.ConcurrentLinkedQueue; import static org.assertj.core.api.Assertions.assertThat; +import static org.dependencytrack.persistence.jdbi.JdbiFactory.jdbi; import static org.dependencytrack.proto.notification.v1.Group.GROUP_BOM_PROCESSED; import static org.dependencytrack.proto.notification.v1.Group.GROUP_PROJECT_VULN_ANALYSIS_COMPLETE; import static org.dependencytrack.proto.notification.v1.Level.LEVEL_INFORMATIONAL; @@ -259,6 +261,11 @@ public void testProcessWithDelayedBomProcessedNotification() throws Exception { final UUID workflowToken = UUID.randomUUID(); qm.createWorkflowSteps(workflowToken); + // Transition the BOM_PROCESSING step to COMPLETED status; + // without it, no BOM_PROCESSED notification will be dispatched. + jdbi(qm).useExtension(WorkflowDao.class, dao -> dao.updateState(WorkflowStep.BOM_PROCESSING, + workflowToken.toString(), WorkflowStatus.COMPLETED, /* failureReason */ null)); + // Create a VulnerabilityScan, and configure it such that no more than 10% // of scanners are allowed to fail in order for the scan to be considered successful. final var vulnScan = new VulnerabilityScan(); @@ -309,6 +316,11 @@ public void testProcessWithDelayedBomProcessedNotificationWhenVulnerabilityScanF final UUID workflowToken = UUID.randomUUID(); qm.createWorkflowSteps(workflowToken); + // Transition the BOM_PROCESSING step to COMPLETED status; + // without it, no BOM_PROCESSED notification will be dispatched. + jdbi(qm).useExtension(WorkflowDao.class, dao -> dao.updateState(WorkflowStep.BOM_PROCESSING, + workflowToken.toString(), WorkflowStatus.COMPLETED, /* failureReason */ null)); + final var vulnScan = new VulnerabilityScan(); vulnScan.setToken(workflowToken.toString()); vulnScan.setTargetType(VulnerabilityScan.TargetType.PROJECT); vulnScan.setTargetIdentifier(project.getUuid()); vulnScan.setStatus(VulnerabilityScan.Status.IN_PROGRESS); vulnScan.setExpectedResults(1); vulnScan.setStartedAt(new Date()); vulnScan.setUpdatedAt(vulnScan.getStartedAt()); qm.persist(vulnScan); @@ -351,6 +363,41 @@ record -> { ); } + @Test + public void testProcessWithDelayedBomProcessedNotificationWithoutCompletedBomProcessingWorkflowStep() throws Exception { + final var project = new Project(); + project.setName("acme-app"); + qm.persist(project); + + final UUID workflowToken = UUID.randomUUID(); + qm.createWorkflowSteps(workflowToken); + + // NB: BOM_PROCESSING workflow step remains in status PENDING + + // Create a VulnerabilityScan, and configure it such that no more than 10% + // of scanners are allowed to fail in order for the scan to be considered successful. 
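+        // (Sketch of the arithmetic: with a single expected result, a 10% failure
+        //  threshold means one failed scanner result already yields a 100% failure rate.)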
+ final var vulnScan = new VulnerabilityScan(); + vulnScan.setToken(workflowToken.toString()); + vulnScan.setTargetType(VulnerabilityScan.TargetType.PROJECT); + vulnScan.setTargetIdentifier(project.getUuid()); + vulnScan.setStatus(VulnerabilityScan.Status.IN_PROGRESS); + vulnScan.setExpectedResults(1); + vulnScan.setStartedAt(new Date()); + vulnScan.setUpdatedAt(vulnScan.getStartedAt()); + qm.persist(vulnScan); + + // Create a ScanResult without any ScannerResults attached to it. + // This might happen when no scanner is capable of scanning a component, + // or when all scanners are disabled. + final var scanResult = ScanResult.newBuilder().build(); + + final var processor = new ProcessedVulnerabilityScanResultProcessor(/* shouldDispatchBomProcessedNotification */ true); + processor.process(List.of(aConsumerRecord(vulnScan.getToken(), scanResult).build())); + + assertThat(kafkaMockProducer.history()).satisfiesExactly(record -> + assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.name())); + } + private static final ConcurrentLinkedQueue EVENTS = new ConcurrentLinkedQueue<>(); public static class EventSubscriber implements Subscriber { From e0d8d1cad2e2096e45bed093e25f0fde12c2ed94 Mon Sep 17 00:00:00 2001 From: nscuro Date: Thu, 28 Mar 2024 17:54:07 +0100 Subject: [PATCH 15/24] Fix regression in `BOM_PROCESSED` notification content Signed-off-by: nscuro --- .../persistence/jdbi/NotificationSubjectDao.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java b/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java index ba27bf26a..70020af28 100644 --- a/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java +++ b/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java @@ -393,7 +393,7 @@ LEFT JOIN LATERAL ( WHERE "PT"."PROJECT_ID" = "P"."ID" ) AS "projectTags" , 'CycloneDX' AS "bomFormat" - , '(Unknown)' AS "bomSpecVersion" + , 'Unknown' AS "bomSpecVersion" , '(Omitted)' AS "bomContent" , "WFS"."TOKEN" AS "token" FROM "VULNERABILITYSCAN" AS "VS" From 5918d76098b30cbd576477f7e0e23b8229acd45f Mon Sep 17 00:00:00 2001 From: nscuro Date: Thu, 28 Mar 2024 18:20:05 +0100 Subject: [PATCH 16/24] Remove unused code Signed-off-by: nscuro --- .../NewVulnerableDependencyAnalysisEvent.java | 44 --- .../vo/ComponentVulnAnalysisComplete.java | 42 --- .../vo/ProjectVulnAnalysisComplete.java | 56 ---- .../NotificationModelConverter.java | 27 -- .../graphql/GitHubSecurityAdvisoryParser.java | 166 ---------- .../graphql/model/GitHubSecurityAdvisory.java | 212 ------------- .../graphql/model/GitHubVulnerability.java | 79 ----- .../github/graphql/model/PageableList.java | 79 ----- .../upgrade/UpgradeInitializer.java | 121 -------- .../dependencytrack/upgrade/UpgradeItems.java | 41 --- .../upgrade/v510/v510Updater.java | 51 ---- .../upgrade/v520/v520Updater.java | 60 ---- .../util/ComponentIdentificationUtil.java | 109 ------- .../util/ComponentVersion.java | 289 ------------------ .../org/dependencytrack/util/JsonUtil.java | 73 ----- .../util/NotificationUtil.java | 99 ------ .../NotificationModelConverterTest.java | 37 --- 17 files changed, 1585 deletions(-) delete mode 100644 src/main/java/org/dependencytrack/event/NewVulnerableDependencyAnalysisEvent.java delete mode 100644 src/main/java/org/dependencytrack/notification/vo/ComponentVulnAnalysisComplete.java delete mode 100644 
src/main/java/org/dependencytrack/notification/vo/ProjectVulnAnalysisComplete.java delete mode 100644 src/main/java/org/dependencytrack/parser/github/graphql/GitHubSecurityAdvisoryParser.java delete mode 100644 src/main/java/org/dependencytrack/parser/github/graphql/model/GitHubSecurityAdvisory.java delete mode 100644 src/main/java/org/dependencytrack/parser/github/graphql/model/GitHubVulnerability.java delete mode 100644 src/main/java/org/dependencytrack/parser/github/graphql/model/PageableList.java delete mode 100644 src/main/java/org/dependencytrack/upgrade/UpgradeInitializer.java delete mode 100644 src/main/java/org/dependencytrack/upgrade/UpgradeItems.java delete mode 100644 src/main/java/org/dependencytrack/upgrade/v510/v510Updater.java delete mode 100644 src/main/java/org/dependencytrack/upgrade/v520/v520Updater.java delete mode 100644 src/main/java/org/dependencytrack/util/ComponentIdentificationUtil.java delete mode 100644 src/main/java/org/dependencytrack/util/ComponentVersion.java delete mode 100644 src/main/java/org/dependencytrack/util/JsonUtil.java diff --git a/src/main/java/org/dependencytrack/event/NewVulnerableDependencyAnalysisEvent.java b/src/main/java/org/dependencytrack/event/NewVulnerableDependencyAnalysisEvent.java deleted file mode 100644 index 3b52682a4..000000000 --- a/src/main/java/org/dependencytrack/event/NewVulnerableDependencyAnalysisEvent.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. - */ -package org.dependencytrack.event; - -import alpine.event.framework.Event; -import org.dependencytrack.model.Component; - -import java.util.List; -import java.util.Objects; - -/** - * Defines an {@link Event} triggered when one or more new components have been added to a project. - *

- * The main purpose of this event is the delayed evaluation of notification criteria - * for the {@link org.dependencytrack.notification.NotificationGroup#NEW_VULNERABLE_DEPENDENCY} group. - * - * @since 4.6.0 - */ -public record NewVulnerableDependencyAnalysisEvent(List<Component> components) implements Event { - - /** - * @param components A {@link List} of {@link Component}s that are considered to be new - */ - public NewVulnerableDependencyAnalysisEvent(final List<Component> components) { - this.components = Objects.requireNonNull(components); - } - -} diff --git a/src/main/java/org/dependencytrack/notification/vo/ComponentVulnAnalysisComplete.java b/src/main/java/org/dependencytrack/notification/vo/ComponentVulnAnalysisComplete.java deleted file mode 100644 index 141b15395..000000000 --- a/src/main/java/org/dependencytrack/notification/vo/ComponentVulnAnalysisComplete.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. - */ -package org.dependencytrack.notification.vo; - -import org.dependencytrack.model.Component; -import org.dependencytrack.model.Vulnerability; - -import java.util.List; - -public class ComponentVulnAnalysisComplete { - private final List<Vulnerability> vulnerabilityList; - private final Component component; - - public ComponentVulnAnalysisComplete(List<Vulnerability> vulnerabilityList, Component component) { - this.vulnerabilityList = vulnerabilityList; - this.component = component; - } - - public List<Vulnerability> getVulnerabilityList() { - return vulnerabilityList; - } - - public Component getComponent() { - return this.component; - } -} diff --git a/src/main/java/org/dependencytrack/notification/vo/ProjectVulnAnalysisComplete.java b/src/main/java/org/dependencytrack/notification/vo/ProjectVulnAnalysisComplete.java deleted file mode 100644 index 45829b186..000000000 --- a/src/main/java/org/dependencytrack/notification/vo/ProjectVulnAnalysisComplete.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. 
- */ -package org.dependencytrack.notification.vo; - -import org.dependencytrack.model.Project; -import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisStatus; - -import java.util.List; -import java.util.UUID; - -public class ProjectVulnAnalysisComplete { - - private UUID token; - private final Project project; - private final List findingsList; - private final ProjectVulnAnalysisStatus status; - - public ProjectVulnAnalysisComplete(final UUID token, Project project, List findingsList, ProjectVulnAnalysisStatus status) { - this.token = token; - this.project = project; - this.findingsList = findingsList; - this.status = status; - } - - public UUID getToken() { - return token; - } - - public List getComponentAnalysisCompleteList() { - return findingsList; - } - - public Project getProject() { - return this.project; - } - - public ProjectVulnAnalysisStatus getStatus() { - return status; - } -} diff --git a/src/main/java/org/dependencytrack/parser/dependencytrack/NotificationModelConverter.java b/src/main/java/org/dependencytrack/parser/dependencytrack/NotificationModelConverter.java index 04add5042..41c6a69c2 100644 --- a/src/main/java/org/dependencytrack/parser/dependencytrack/NotificationModelConverter.java +++ b/src/main/java/org/dependencytrack/parser/dependencytrack/NotificationModelConverter.java @@ -30,11 +30,9 @@ import org.dependencytrack.notification.vo.AnalysisDecisionChange; import org.dependencytrack.notification.vo.BomConsumedOrProcessed; import org.dependencytrack.notification.vo.BomProcessingFailed; -import org.dependencytrack.notification.vo.ComponentVulnAnalysisComplete; import org.dependencytrack.notification.vo.NewVulnerabilityIdentified; import org.dependencytrack.notification.vo.NewVulnerableDependency; import org.dependencytrack.notification.vo.PolicyViolationIdentified; -import org.dependencytrack.notification.vo.ProjectVulnAnalysisComplete; import org.dependencytrack.notification.vo.VexConsumedOrProcessed; import org.dependencytrack.notification.vo.ViolationAnalysisDecisionChange; import org.dependencytrack.parser.common.resolver.CweResolver; @@ -42,7 +40,6 @@ import org.dependencytrack.proto.notification.v1.BomConsumedOrProcessedSubject; import org.dependencytrack.proto.notification.v1.BomProcessingFailedSubject; import org.dependencytrack.proto.notification.v1.Component; -import org.dependencytrack.proto.notification.v1.ComponentVulnAnalysisCompleteSubject; import org.dependencytrack.proto.notification.v1.Group; import org.dependencytrack.proto.notification.v1.Level; import org.dependencytrack.proto.notification.v1.NewVulnerabilitySubject; @@ -55,7 +52,6 @@ import org.dependencytrack.proto.notification.v1.PolicyViolationAnalysisDecisionChangeSubject; import org.dependencytrack.proto.notification.v1.PolicyViolationSubject; import org.dependencytrack.proto.notification.v1.Project; -import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisCompleteSubject; import org.dependencytrack.proto.notification.v1.Scope; import org.dependencytrack.proto.notification.v1.VexConsumedOrProcessedSubject; import org.dependencytrack.proto.notification.v1.Vulnerability; @@ -66,7 +62,6 @@ import java.math.BigDecimal; import java.time.ZoneOffset; import java.util.Collections; -import java.util.List; import java.util.Objects; import java.util.Optional; @@ -185,8 +180,6 @@ private static Optional convert(final Object subject) { return Optional.of(Any.pack(convert(vcop))); } else if (subject instanceof final PolicyViolationIdentified pvi) { return 
Optional.of(Any.pack(convert(pvi))); - } else if (subject instanceof final ProjectVulnAnalysisComplete pac) { - return Optional.of(Any.pack(convert(pac))); } else if (subject instanceof final org.dependencytrack.model.Project p) { return Optional.of(Any.pack(convert(p))); } @@ -324,26 +317,6 @@ private static Project convert(final org.dependencytrack.model.Project project) return builder.build(); } - private static ComponentVulnAnalysisCompleteSubject convert(ComponentVulnAnalysisComplete componentVulnAnalysisComplete) { - - Component component = convert(componentVulnAnalysisComplete.getComponent()); - ComponentVulnAnalysisCompleteSubject.Builder builder = ComponentVulnAnalysisCompleteSubject.newBuilder(); - builder.setComponent(component); - List vulnerabilities = componentVulnAnalysisComplete.getVulnerabilityList().stream().map(NotificationModelConverter::convert).toList(); - builder.addAllVulnerabilities(vulnerabilities); - return builder.build(); - } - - private static ProjectVulnAnalysisCompleteSubject convert(ProjectVulnAnalysisComplete notification) { - ProjectVulnAnalysisCompleteSubject.Builder builder = ProjectVulnAnalysisCompleteSubject.newBuilder(); - builder.setToken(notification.getToken().toString()); - builder.setProject(convert(notification.getProject())); - List componentAnalysisCompleteSubjects = notification.getComponentAnalysisCompleteList().stream().map(NotificationModelConverter::convert).toList(); - builder.addAllFindings(componentAnalysisCompleteSubjects); - builder.setStatus(notification.getStatus()); - return builder.build(); - } - private static Vulnerability convert(final org.dependencytrack.model.Vulnerability vulnerability) { final Vulnerability.Builder builder = Vulnerability.newBuilder() .setUuid(vulnerability.getUuid().toString()) diff --git a/src/main/java/org/dependencytrack/parser/github/graphql/GitHubSecurityAdvisoryParser.java b/src/main/java/org/dependencytrack/parser/github/graphql/GitHubSecurityAdvisoryParser.java deleted file mode 100644 index b7462a820..000000000 --- a/src/main/java/org/dependencytrack/parser/github/graphql/GitHubSecurityAdvisoryParser.java +++ /dev/null @@ -1,166 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. 
- */ -package org.dependencytrack.parser.github.graphql; - - -import org.apache.commons.lang3.tuple.Pair; -import org.dependencytrack.parser.github.graphql.model.GitHubSecurityAdvisory; -import org.dependencytrack.parser.github.graphql.model.GitHubVulnerability; -import org.dependencytrack.parser.github.graphql.model.PageableList; -import org.json.JSONArray; -import org.json.JSONObject; - -import java.util.ArrayList; -import java.util.List; - -import static org.dependencytrack.util.JsonUtil.jsonStringToTimestamp; - -public class GitHubSecurityAdvisoryParser { - - public PageableList parse(final JSONObject object) { - final PageableList pageableList = new PageableList(); - final List advisories = new ArrayList<>(); - final JSONObject data = object.optJSONObject("data"); - if (data != null) { - final JSONObject securityAdvisories = data.getJSONObject("securityAdvisories"); - if (securityAdvisories != null) { - final JSONArray securityAdvisoriesNodes = securityAdvisories.getJSONArray("nodes"); - if (securityAdvisoriesNodes != null) { - for (int i = 0; i < securityAdvisoriesNodes.length(); i++) { - final JSONObject securityAdvisory = securityAdvisoriesNodes.getJSONObject(i); - final GitHubSecurityAdvisory advisory = parseSecurityAdvisory(securityAdvisory); - advisories.add(advisory); - } - } - pageableList.setTotalCount(securityAdvisories.optInt("totalCount")); - final JSONObject pageInfo = securityAdvisories.getJSONObject("pageInfo"); - if (pageInfo != null) { - pageableList.setHasNextPage(pageInfo.optBoolean("hasNextPage")); - pageableList.setHasPreviousPage(pageInfo.optBoolean("hasPreviousPage")); - pageableList.setStartCursor(pageInfo.optString("startCursor")); - pageableList.setEndCursor(pageInfo.optString("endCursor")); - } - } - } - pageableList.setAdvisories(advisories); - return pageableList; - } - - private GitHubSecurityAdvisory parseSecurityAdvisory(final JSONObject object) { - final GitHubSecurityAdvisory advisory = new GitHubSecurityAdvisory(); - advisory.setDatabaseId(object.getInt("databaseId")); - advisory.setDescription(object.optString("description", null)); - advisory.setGhsaId(object.optString("ghsaId", null)); - advisory.setId(object.optString("id", null)); - advisory.setNotificationsPermalink(object.optString("notificationsPermalink", null)); - advisory.setOrigin(object.optString("origin", null)); - advisory.setPermalink(object.optString("permalink", null)); - advisory.setSeverity(object.optString("severity", null)); - advisory.setSummary(object.optString("summary", null)); - advisory.setPublishedAt(jsonStringToTimestamp(object.optString("publishedAt", null))); - advisory.setUpdatedAt(jsonStringToTimestamp(object.optString("updatedAt", null))); - advisory.setWithdrawnAt(jsonStringToTimestamp(object.optString("withdrawnAt", null))); - - final JSONArray identifiers = object.optJSONArray("identifiers"); - if (identifiers != null) { - for (int i=0; i pair = Pair.of(type, value); - advisory.addIdentifier(pair); - } - } - } - - final JSONArray references = object.optJSONArray("references"); - if (references != null) { - for (int i=0; i vulnerabilities = parseVulnerabilities(object); - advisory.setVulnerabilities(vulnerabilities); - return advisory; - } - - private List parseVulnerabilities(final JSONObject object) { - final List vulnerabilities = new ArrayList<>(); - final JSONObject vs = object.optJSONObject("vulnerabilities"); - if (vs != null) { - final JSONArray edges = vs.optJSONArray("edges"); - if (edges != null) { - for (int i=0; i> identifiers; - private String 
notificationsPermalink; - private String origin; - private String permalink; - private List references; - private String severity; - private String summary; - private ZonedDateTime publishedAt; - private ZonedDateTime updatedAt; - private ZonedDateTime withdrawnAt; - private List vulnerabilities; - private double cvssScore; - private String cvssVector; - private List cwes; - - public int getDatabaseId() { - return databaseId; - } - - public void setDatabaseId(int databaseId) { - this.databaseId = databaseId; - } - - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } - - public String getGhsaId() { - return ghsaId; - } - - public void setGhsaId(String ghsaId) { - this.ghsaId = ghsaId; - } - - public String getId() { - return id; - } - - public void setId(String id) { - this.id = id; - } - - public List> getIdentifiers() { - return identifiers; - } - - public void addIdentifier(Pair identifier) { - if (this.identifiers == null) { - this.identifiers = new ArrayList<>(); - } - this.identifiers.add(identifier); - } - - public void setIdentifiers(List> identifiers) { - this.identifiers = identifiers; - } - - public String getNotificationsPermalink() { - return notificationsPermalink; - } - - public void setNotificationsPermalink(String notificationsPermalink) { - this.notificationsPermalink = notificationsPermalink; - } - - public String getOrigin() { - return origin; - } - - public void setOrigin(String origin) { - this.origin = origin; - } - - public String getPermalink() { - return permalink; - } - - public void setPermalink(String permalink) { - this.permalink = permalink; - } - - public List getReferences() { - return references; - } - - public void addReference(String reference) { - if (this.references == null) { - this.references = new ArrayList<>(); - } - this.references.add(reference); - } - - public void setReferences(List references) { - this.references = references; - } - - public String getSeverity() { - return severity; - } - - public void setSeverity(String severity) { - this.severity = severity; - } - - public String getSummary() { - return summary; - } - - public void setSummary(String summary) { - this.summary = summary; - } - - public ZonedDateTime getPublishedAt() { - return publishedAt; - } - - public void setPublishedAt(ZonedDateTime publishedAt) { - this.publishedAt = publishedAt; - } - - public ZonedDateTime getUpdatedAt() { - return updatedAt; - } - - public void setUpdatedAt(ZonedDateTime updatedAt) { - this.updatedAt = updatedAt; - } - - public ZonedDateTime getWithdrawnAt() { - return withdrawnAt; - } - - public void setWithdrawnAt(ZonedDateTime withdrawnAt) { - this.withdrawnAt = withdrawnAt; - } - - public List getVulnerabilities() { - return vulnerabilities; - } - - public void setVulnerabilities(List vulnerabilities) { - this.vulnerabilities = vulnerabilities; - } - - public double getCvssScore() { - return cvssScore; - } - - public void setCvssScore(double cvssScore) { - this.cvssScore = cvssScore; - } - - public String getCvssVector() { - return cvssVector; - } - - public void setCvssVector(String cvssVector) { - this.cvssVector = cvssVector; - } - - public List getCwes() { - return cwes; - } - - public void addCwe(String cwe) { - if (cwes == null) { - cwes = new ArrayList<>(); - } - cwes.add(cwe); - } - - public void setCwes(List cwes) { - this.cwes = cwes; - } -} diff --git a/src/main/java/org/dependencytrack/parser/github/graphql/model/GitHubVulnerability.java 
b/src/main/java/org/dependencytrack/parser/github/graphql/model/GitHubVulnerability.java deleted file mode 100644 index 4786f49a4..000000000 --- a/src/main/java/org/dependencytrack/parser/github/graphql/model/GitHubVulnerability.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. - */ -package org.dependencytrack.parser.github.graphql.model; - -import java.time.ZonedDateTime; - -public class GitHubVulnerability { - - private String severity; - private ZonedDateTime updatedAt; - private String firstPatchedVersionIdentifier; - private String vulnerableVersionRange; - private String packageEcosystem; - private String packageName; - - public String getSeverity() { - return severity; - } - - public void setSeverity(String severity) { - this.severity = severity; - } - - public ZonedDateTime getUpdatedAt() { - return updatedAt; - } - - public void setUpdatedAt(ZonedDateTime updatedAt) { - this.updatedAt = updatedAt; - } - - public String getFirstPatchedVersionIdentifier() { - return firstPatchedVersionIdentifier; - } - - public void setFirstPatchedVersionIdentifier(String firstPatchedVersionIdentifier) { - this.firstPatchedVersionIdentifier = firstPatchedVersionIdentifier; - } - - public String getVulnerableVersionRange() { - return vulnerableVersionRange; - } - - public void setVulnerableVersionRange(String vulnerableVersionRange) { - this.vulnerableVersionRange = vulnerableVersionRange; - } - - public String getPackageEcosystem() { - return packageEcosystem; - } - - public void setPackageEcosystem(String packageEcosystem) { - this.packageEcosystem = packageEcosystem; - } - - public String getPackageName() { - return packageName; - } - - public void setPackageName(String packageName) { - this.packageName = packageName; - } -} diff --git a/src/main/java/org/dependencytrack/parser/github/graphql/model/PageableList.java b/src/main/java/org/dependencytrack/parser/github/graphql/model/PageableList.java deleted file mode 100644 index baf24bc4a..000000000 --- a/src/main/java/org/dependencytrack/parser/github/graphql/model/PageableList.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. 
- */ -package org.dependencytrack.parser.github.graphql.model; - -import java.util.List; - -public class PageableList { - - private List advisories; - private long totalCount; - private boolean hasNextPage; - private boolean hasPreviousPage; - private String startCursor; - private String endCursor; - - public List getAdvisories() { - return advisories; - } - - public void setAdvisories(List advisories) { - this.advisories = advisories; - } - - public long getTotalCount() { - return totalCount; - } - - public void setTotalCount(long totalCount) { - this.totalCount = totalCount; - } - - public boolean isHasNextPage() { - return hasNextPage; - } - - public void setHasNextPage(boolean hasNextPage) { - this.hasNextPage = hasNextPage; - } - - public boolean isHasPreviousPage() { - return hasPreviousPage; - } - - public void setHasPreviousPage(boolean hasPreviousPage) { - this.hasPreviousPage = hasPreviousPage; - } - - public String getStartCursor() { - return startCursor; - } - - public void setStartCursor(String startCursor) { - this.startCursor = startCursor; - } - - public String getEndCursor() { - return endCursor; - } - - public void setEndCursor(String endCursor) { - this.endCursor = endCursor; - } -} diff --git a/src/main/java/org/dependencytrack/upgrade/UpgradeInitializer.java b/src/main/java/org/dependencytrack/upgrade/UpgradeInitializer.java deleted file mode 100644 index c28190ffc..000000000 --- a/src/main/java/org/dependencytrack/upgrade/UpgradeInitializer.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. 
- */ -package org.dependencytrack.upgrade; - -import alpine.Config; -import alpine.common.logging.Logger; -import alpine.common.util.VersionComparator; -import alpine.model.InstalledUpgrades; -import alpine.model.SchemaVersion; -import alpine.server.persistence.PersistenceManagerFactory; -import alpine.server.upgrade.UpgradeException; -import alpine.server.upgrade.UpgradeExecutor; -import alpine.server.upgrade.UpgradeMetaProcessor; -import org.datanucleus.PersistenceNucleusContext; -import org.datanucleus.PropertyNames; -import org.datanucleus.api.jdo.JDOPersistenceManagerFactory; -import org.datanucleus.store.schema.SchemaAwareStoreManager; -import org.dependencytrack.persistence.QueryManager; - -import javax.jdo.JDOHelper; -import javax.jdo.PersistenceManager; -import javax.servlet.ServletContextEvent; -import javax.servlet.ServletContextListener; -import java.util.HashSet; -import java.util.Properties; -import java.util.Set; - -public class UpgradeInitializer implements ServletContextListener { - - private static final Logger LOGGER = Logger.getLogger(UpgradeInitializer.class); - - /** - * {@inheritDoc} - */ - @Override - public void contextInitialized(final ServletContextEvent event) { - LOGGER.info("Initializing upgrade framework"); - try { - final UpgradeMetaProcessor ump = new UpgradeMetaProcessor(); - final VersionComparator currentVersion = ump.getSchemaVersion(); - ump.close(); - if (currentVersion != null && currentVersion.isOlderThan(new VersionComparator("4.0.0"))) { - LOGGER.error("Unable to upgrade Dependency-Track versions prior to v4.0.0. Please refer to documentation for migration details. Halting."); - Runtime.getRuntime().halt(-1); - } - } catch (UpgradeException e) { - LOGGER.error("An error occurred determining database schema version. Unable to continue.", e); - Runtime.getRuntime().halt(-1); - } - - try (final JDOPersistenceManagerFactory pmf = createPersistenceManagerFactory()) { - // Ensure that the UpgradeMetaProcessor and SchemaVersion tables are created NOW, not dynamically at runtime. - final PersistenceNucleusContext ctx = pmf.getNucleusContext(); - final Set classNames = new HashSet<>(); - classNames.add(InstalledUpgrades.class.getCanonicalName()); - classNames.add(SchemaVersion.class.getCanonicalName()); - ((SchemaAwareStoreManager) ctx.getStoreManager()).createSchemaForClasses(classNames, new Properties()); - - try (final PersistenceManager pm = pmf.getPersistenceManager(); - final QueryManager qm = new QueryManager(pm)) { - final UpgradeExecutor executor = new UpgradeExecutor(qm); - try { - executor.executeUpgrades(UpgradeItems.getUpgradeItems()); - } catch (UpgradeException e) { - LOGGER.error("An error occurred performing upgrade processing. " + e.getMessage()); - } - } - } - } - - /** - * {@inheritDoc} - */ - @Override - public void contextDestroyed(final ServletContextEvent event) { - /* Intentionally blank to satisfy interface */ - } - - /** - * Create a new, dedicated {@link javax.jdo.PersistenceManagerFactory} to be used for schema - * generation and execution of schema upgrades. - *
<p>
- * Necessary because {@link UpgradeInitializer} is executed before {@link PersistenceManagerFactory} - * on application startup. The PMF created by this method does not use connection pooling, as all - * operations are performed in serial order. - * - * @return A {@link JDOPersistenceManagerFactory} - */ - private JDOPersistenceManagerFactory createPersistenceManagerFactory() { - final var dnProps = new Properties(); - dnProps.put(PropertyNames.PROPERTY_CONNECTION_URL, Config.getInstance().getProperty(Config.AlpineKey.DATABASE_URL)); - dnProps.put(PropertyNames.PROPERTY_CONNECTION_DRIVER_NAME, Config.getInstance().getProperty(Config.AlpineKey.DATABASE_DRIVER)); - dnProps.put(PropertyNames.PROPERTY_CONNECTION_USER_NAME, Config.getInstance().getProperty(Config.AlpineKey.DATABASE_USERNAME)); - dnProps.put(PropertyNames.PROPERTY_CONNECTION_PASSWORD, Config.getInstance().getPropertyOrFile(Config.AlpineKey.DATABASE_PASSWORD)); - dnProps.put(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_DATABASE, "true"); - dnProps.put(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_TABLES, "true"); - dnProps.put(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_COLUMNS, "true"); - dnProps.put(PropertyNames.PROPERTY_SCHEMA_AUTOCREATE_CONSTRAINTS, "true"); - dnProps.put(PropertyNames.PROPERTY_SCHEMA_GENERATE_DATABASE_MODE, "create"); - dnProps.put(PropertyNames.PROPERTY_QUERY_JDOQL_ALLOWALL, "true"); - dnProps.put(PropertyNames.PROPERTY_PERSISTENCE_UNIT_NAME, "Alpine"); - return (JDOPersistenceManagerFactory) JDOHelper.getPersistenceManagerFactory(dnProps); - } - -} diff --git a/src/main/java/org/dependencytrack/upgrade/UpgradeItems.java b/src/main/java/org/dependencytrack/upgrade/UpgradeItems.java deleted file mode 100644 index cfdb798fb..000000000 --- a/src/main/java/org/dependencytrack/upgrade/UpgradeItems.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. - */ -package org.dependencytrack.upgrade; - -import alpine.server.upgrade.UpgradeItem; -import org.dependencytrack.upgrade.v510.v510Updater; -import org.dependencytrack.upgrade.v520.v520Updater; - -import java.util.ArrayList; -import java.util.List; - -class UpgradeItems { - - private static final List> UPGRADE_ITEMS = new ArrayList<>(); - - static { - UPGRADE_ITEMS.add(v510Updater.class); - UPGRADE_ITEMS.add(v520Updater.class); - } - - static List> getUpgradeItems() { - return UPGRADE_ITEMS; - } - -} diff --git a/src/main/java/org/dependencytrack/upgrade/v510/v510Updater.java b/src/main/java/org/dependencytrack/upgrade/v510/v510Updater.java deleted file mode 100644 index 3cd554866..000000000 --- a/src/main/java/org/dependencytrack/upgrade/v510/v510Updater.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. - */ -package org.dependencytrack.upgrade.v510; - -import alpine.common.logging.Logger; -import alpine.persistence.AlpineQueryManager; -import alpine.server.upgrade.AbstractUpgradeItem; - -import java.sql.Connection; -import java.sql.PreparedStatement; - -public class v510Updater extends AbstractUpgradeItem { - - private static final Logger LOGGER = Logger.getLogger(v510Updater.class); - - @Override - public String getSchemaVersion() { - return "5.1.0"; - } - - @Override - public void executeUpgrade(final AlpineQueryManager qm, final Connection connection) throws Exception { - changePolicyConditionValueTypeToText(connection); - } - - private static void changePolicyConditionValueTypeToText(final Connection connection) throws Exception { - LOGGER.info("Changing type of \"POLICYCONDITION\".\"VALUE\" from VARCHAR(255) to TEXT"); - try (final PreparedStatement ps = connection.prepareStatement(""" - ALTER TABLE "POLICYCONDITION" ALTER COLUMN "VALUE" TYPE TEXT; - """)) { - ps.execute(); - } - } - -} diff --git a/src/main/java/org/dependencytrack/upgrade/v520/v520Updater.java b/src/main/java/org/dependencytrack/upgrade/v520/v520Updater.java deleted file mode 100644 index 3d2b97f2d..000000000 --- a/src/main/java/org/dependencytrack/upgrade/v520/v520Updater.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. 
- */ -package org.dependencytrack.upgrade.v520; - -import alpine.common.logging.Logger; -import alpine.persistence.AlpineQueryManager; -import alpine.server.upgrade.AbstractUpgradeItem; - -import java.sql.Connection; -import java.sql.PreparedStatement; - -public class v520Updater extends AbstractUpgradeItem { - - private static final Logger LOGGER = Logger.getLogger(v520Updater.class); - - @Override - public String getSchemaVersion() { - return "5.2.0"; - } - - @Override - public void executeUpgrade(final AlpineQueryManager qm, final Connection connection) throws Exception { - changePurlColumnLengthInIntegrityMetaComponentTable(connection); - dropStatusCheckConstraintOnIntegrityMetaComponentTable(connection); - } - - private static void changePurlColumnLengthInIntegrityMetaComponentTable(final Connection connection) throws Exception { - LOGGER.info("Changing length of \"PURL\" from VARCHAR(255) to VARCHAR(1024)"); - try (final PreparedStatement ps = connection.prepareStatement(""" - ALTER TABLE "INTEGRITY_META_COMPONENT" ALTER "PURL" TYPE VARCHAR(1024); - """)) { - ps.execute(); - } - } - - private static void dropStatusCheckConstraintOnIntegrityMetaComponentTable(final Connection connection) throws Exception { - LOGGER.info("Dropping constraint \"INTEGRITY_META_COMPONENT_STATUS_check\" if it exists on \"INTEGRITY_META_COMPONENT\" table"); - try (final PreparedStatement ps = connection.prepareStatement(""" - ALTER TABLE "INTEGRITY_META_COMPONENT" DROP CONSTRAINT IF EXISTS "INTEGRITY_META_COMPONENT_STATUS_check" RESTRICT; - """)) { - ps.execute(); - } - } -} diff --git a/src/main/java/org/dependencytrack/util/ComponentIdentificationUtil.java b/src/main/java/org/dependencytrack/util/ComponentIdentificationUtil.java deleted file mode 100644 index aec952e12..000000000 --- a/src/main/java/org/dependencytrack/util/ComponentIdentificationUtil.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. - */ -package org.dependencytrack.util; - -import com.github.packageurl.MalformedPackageURLException; -import com.github.packageurl.PackageURL; -import org.apache.commons.lang3.StringUtils; -import org.dependencytrack.model.Component; - -/** - * A collection of utilities that compare the identity of a component. 
- * - * @since 4.0.0 - */ -public class ComponentIdentificationUtil { - - private ComponentIdentificationUtil() { - } - - @SuppressWarnings("deprecation") - public static boolean doesIdentityMatch(final Component a, final org.cyclonedx.model.Component b) { - if (a == null || b == null) { - return false; - } - if (isMatch(a.getPurl(), b.getPurl())) { - return true; - } - if (isMatch(a.getPurlCoordinates(), b.getPurl())) { - return true; - } - if (b.getSwid() != null && isMatch(a.getSwidTagId(), b.getSwid().getTagId())) { - return true; - } - if (isMatch(a.getCpe(), b.getCpe())) { - return true; - } - if (StringUtils.trimToEmpty(a.getGroup()).equals(StringUtils.trimToEmpty(b.getGroup())) - && StringUtils.trimToEmpty(a.getName()).equals(StringUtils.trimToEmpty(b.getName())) - && StringUtils.trimToEmpty(a.getVersion()).equals(StringUtils.trimToEmpty(b.getVersion()))) { - return true; - } - return false; - } - - public static boolean doesIdentityMatch(final Component a, final Component b) { - if (a == null || b == null) { - return false; - } - if (isMatch(a.getPurl(), b.getPurl())) { - return true; - } - if (isMatch(a.getPurlCoordinates(), b.getPurlCoordinates())) { - return true; - } - if (isMatch(a.getSwidTagId(), b.getSwidTagId())) { - return true; - } - if (isMatch(a.getCpe(), b.getCpe())) { - return true; - } - if (StringUtils.trimToEmpty(a.getGroup()).equals(StringUtils.trimToEmpty(b.getGroup())) - && StringUtils.trimToEmpty(a.getName()).equals(StringUtils.trimToEmpty(b.getName())) - && StringUtils.trimToEmpty(a.getVersion()).equals(StringUtils.trimToEmpty(b.getVersion()))) { - return true; - } - return false; - } - - private static boolean isMatch(final PackageURL a, final PackageURL b) { - if (a != null && b != null) { - return a.canonicalize().equals(b.canonicalize()); - } - return false; - } - - private static boolean isMatch(final PackageURL a, final String b) { - if (a != null && b != null) { - try { - return a.canonicalize().equals(new PackageURL(b).canonicalize()); - } catch (MalformedPackageURLException e) { - return false; - } - } - return false; - } - - private static boolean isMatch(final String a, final String b) { - if (StringUtils.trimToNull(a) != null && StringUtils.trimToNull(b) != null) { - return StringUtils.trimToNull(a).equals(StringUtils.trimToNull(b)); - } - return false; - } -} diff --git a/src/main/java/org/dependencytrack/util/ComponentVersion.java b/src/main/java/org/dependencytrack/util/ComponentVersion.java deleted file mode 100644 index 725e51200..000000000 --- a/src/main/java/org/dependencytrack/util/ComponentVersion.java +++ /dev/null @@ -1,289 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. 
- */ -package org.dependencytrack.util; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.builder.HashCodeBuilder; - -import javax.annotation.concurrent.NotThreadSafe; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - *
<p>
- * Simple object to track the parts of a version number. The parts are contained - * in a List such that version 1.2.3 will be stored as: versionParts[0] = 1; - * versionParts[1] = 2; - * versionParts[2] = 3; - *
</p>
- *
<p>
- * Note, the parser contained in this class expects the version numbers to be - * separated by periods. If a different separator is used the parser will likely - * fail.
</p>
- * - * @author Jeremy Long - * - * Ported from DependencyVersion in Dependency-Check v5.2.1 - */ -@NotThreadSafe -public class ComponentVersion implements Iterable, Comparable { - - /** - * A list of the version parts. - */ - private List versionParts; - - /** - * Constructor for a empty DependencyVersion. - */ - public ComponentVersion() { - } - - /** - * Constructor for a DependencyVersion that will parse a version string. - * Note, this should only be used when the version passed in is - * already known to be a well formatted version number. Otherwise, - * DependencyVersionUtil.parseVersion() should be used instead. - * - * @param version the well formatted version number to parse - */ - public ComponentVersion(String version) { - parseVersion(version); - } - - /** - * Parses a version string into its sub parts: major, minor, revision, - * build, etc. Note, this should only be used to parse something that - * is already known to be a version number. - * - * @param version the version string to parse - */ - public final void parseVersion(String version) { - versionParts = new ArrayList<>(); - if (version != null) { - // https://github.com/DependencyTrack/dependency-track/issues/1374 - // handle deb versions - String lcVersion = version.toLowerCase(); - final Pattern debrx = Pattern.compile("^([0-9]+:)?(.*)(-[^-]+ubuntu[^-]+)$"); - final Matcher debmatcher = debrx.matcher(lcVersion); - if (debmatcher.matches()) { - lcVersion = debmatcher.group(2); - } - - final Pattern rx = Pattern.compile("(\\d+[a-z]{1,3}$|[a-z]{1,3}[_-]?\\d+|\\d+|(rc|release|snapshot|beta|alpha)$)", - Pattern.CASE_INSENSITIVE); - final Matcher matcher = rx.matcher(lcVersion); - while (matcher.find()) { - versionParts.add(matcher.group()); - } - if (versionParts.isEmpty()) { - versionParts.add(version); - } - } - } - - /** - * Get the value of versionParts. - * - * @return the value of versionParts - */ - public List getVersionParts() { - return versionParts; - } - - /** - * Set the value of versionParts. - * - * @param versionParts new value of versionParts - */ - public void setVersionParts(List versionParts) { - this.versionParts = versionParts; - } - - /** - * Retrieves an iterator for the version parts. - * - * @return an iterator for the version parts - */ - @Override - public Iterator iterator() { - return versionParts.iterator(); - } - - /** - * Reconstructs the version string from the split version parts. - * - * @return a string representing the version. - */ - @Override - public String toString() { - return StringUtils.join(versionParts, '.'); - } - - /** - * Compares the equality of this object to the one passed in as a parameter. - * - * @param obj the object to compare equality - * @return returns true only if the two objects are equal, otherwise false - */ - @Override - public boolean equals(Object obj) { - if (obj == null || !(obj instanceof ComponentVersion)) { - return false; - } - if (this == obj) { - return true; - } - final ComponentVersion other = (ComponentVersion) obj; - final int minVersionMatchLength = (this.versionParts.size() < other.versionParts.size()) - ? this.versionParts.size() : other.versionParts.size(); - final int maxVersionMatchLength = (this.versionParts.size() > other.versionParts.size()) - ? 
this.versionParts.size() : other.versionParts.size(); - - if (minVersionMatchLength == 1 && maxVersionMatchLength >= 3) { - return false; - } - - //TODO steal better version of code from compareTo - for (int i = 0; i < minVersionMatchLength; i++) { - final String thisPart = this.versionParts.get(i); - final String otherPart = other.versionParts.get(i); - if (!thisPart.equals(otherPart)) { - return false; - } - } - if (this.versionParts.size() > minVersionMatchLength) { - for (int i = minVersionMatchLength; i < this.versionParts.size(); i++) { - if (!"0".equals(this.versionParts.get(i))) { - return false; - } - } - } - - if (other.versionParts.size() > minVersionMatchLength) { - for (int i = minVersionMatchLength; i < other.versionParts.size(); i++) { - if (!"0".equals(other.versionParts.get(i))) { - return false; - } - } - } - - /* - * if (this.versionParts != other.versionParts && (this.versionParts == null || !this.versionParts.equals(other.versionParts))) { - * return false; - * } - */ - return true; - } - - /** - * Calculates the hashCode for this object. - * - * @return the hashCode - */ - @Override - public int hashCode() { - return new HashCodeBuilder(5, 71) - .append(versionParts) - .toHashCode(); - } - - /** - * Determines if the three most major major version parts are identical. For - * instances, if version 1.2.3.4 was compared to 1.2.3 this function would - * return true. - * - * @param version the version number to compare - * @return true if the first three major parts of the version are identical - */ - public boolean matchesAtLeastThreeLevels(ComponentVersion version) { - if (version == null) { - return false; - } - if (Math.abs(this.versionParts.size() - version.versionParts.size()) >= 3) { - return false; - } - - final int max = (this.versionParts.size() < version.versionParts.size()) - ? this.versionParts.size() : version.versionParts.size(); - - boolean ret = true; - for (int i = 0; i < max; i++) { - final String thisVersion = this.versionParts.get(i); - final String otherVersion = version.getVersionParts().get(i); - if (i >= 3) { - if (thisVersion.compareToIgnoreCase(otherVersion) >= 0) { - ret = false; - break; - } - } else if (!thisVersion.equals(otherVersion)) { - ret = false; - break; - } - } - - return ret; - } - - @Override - public int compareTo(ComponentVersion version) { - if (version == null) { - return 1; - } - final List left = this.getVersionParts(); - final List right = version.getVersionParts(); - final int max = left.size() < right.size() ? left.size() : right.size(); - - for (int i = 0; i < max; i++) { - final String lStr = left.get(i); - final String rStr = right.get(i); - if (lStr.equals(rStr)) { - continue; - } - try { - final int l = Integer.parseInt(lStr); - final int r = Integer.parseInt(rStr); - if (l < r) { - return -1; - } else if (l > r) { - return 1; - } - } catch (NumberFormatException ex) { - final int comp = left.get(i).compareTo(right.get(i)); - if (comp < 0) { - return -1; - } else if (comp > 0) { - return 1; - } - } - } - // Modified from original by Steve Springett - // Account for comparisons where one version may be 1.0.0 and another may be 1.0.0.0. 
- if (left.size() == max && right.size() == left.size()+1 && right.get(right.size()-1).equals("0")) { - return 0; - } else if (right.size() == max && left.size() == right.size()+1 && left.get(left.size()-1).equals("0")) { - return 0; - } else { - return Integer.compare(left.size(), right.size()); - } - } -} diff --git a/src/main/java/org/dependencytrack/util/JsonUtil.java b/src/main/java/org/dependencytrack/util/JsonUtil.java deleted file mode 100644 index 2fbbd0649..000000000 --- a/src/main/java/org/dependencytrack/util/JsonUtil.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * This file is part of Dependency-Track. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * Copyright (c) OWASP Foundation. All Rights Reserved. - */ -package org.dependencytrack.util; - -import javax.json.JsonObjectBuilder; -import java.math.BigDecimal; -import java.math.BigInteger; -import java.time.ZonedDateTime; -import java.time.format.DateTimeParseException; - -public final class JsonUtil { - - /** - * Private constructor. - */ - private JsonUtil() { } - - public static JsonObjectBuilder add(final JsonObjectBuilder builder, final String key, final String value) { - if (value != null) { - builder.add(key, value); - } - return builder; - } - - public static JsonObjectBuilder add(final JsonObjectBuilder builder, final String key, final BigInteger value) { - if (value != null) { - builder.add(key, value); - } - return builder; - } - - public static JsonObjectBuilder add(final JsonObjectBuilder builder, final String key, final BigDecimal value) { - if (value != null) { - builder.add(key, value); - } - return builder; - } - - public static JsonObjectBuilder add(final JsonObjectBuilder builder, final String key, final Enum value) { - if (value != null) { - builder.add(key, value.name()); - } - return builder; - } - - public static ZonedDateTime jsonStringToTimestamp(final String s) { - if (s == null) { - return null; - } - try { - return ZonedDateTime.parse(s); - } catch (DateTimeParseException e) { - return null; - } - } - -} diff --git a/src/main/java/org/dependencytrack/util/NotificationUtil.java b/src/main/java/org/dependencytrack/util/NotificationUtil.java index 1fd511d6b..629825157 100644 --- a/src/main/java/org/dependencytrack/util/NotificationUtil.java +++ b/src/main/java/org/dependencytrack/util/NotificationUtil.java @@ -28,7 +28,6 @@ import org.dependencytrack.model.AnalysisState; import org.dependencytrack.model.Component; import org.dependencytrack.model.ConfigPropertyConstants; -import org.dependencytrack.model.Finding; import org.dependencytrack.model.NotificationPublisher; import org.dependencytrack.model.Policy; import org.dependencytrack.model.PolicyCondition; @@ -37,20 +36,14 @@ import org.dependencytrack.model.Tag; import org.dependencytrack.model.ViolationAnalysis; import org.dependencytrack.model.ViolationAnalysisState; -import org.dependencytrack.model.Vulnerability; -import org.dependencytrack.model.VulnerabilityAlias; -import 
org.dependencytrack.model.VulnerabilityScan; import org.dependencytrack.notification.NotificationConstants; import org.dependencytrack.notification.NotificationGroup; import org.dependencytrack.notification.NotificationScope; import org.dependencytrack.notification.publisher.DefaultNotificationPublishers; import org.dependencytrack.notification.vo.AnalysisDecisionChange; -import org.dependencytrack.notification.vo.ComponentVulnAnalysisComplete; import org.dependencytrack.notification.vo.PolicyViolationIdentified; -import org.dependencytrack.notification.vo.ProjectVulnAnalysisComplete; import org.dependencytrack.notification.vo.ViolationAnalysisDecisionChange; import org.dependencytrack.persistence.QueryManager; -import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisStatus; import javax.jdo.FetchPlan; import javax.jdo.Query; @@ -58,17 +51,12 @@ import java.io.IOException; import java.net.URLDecoder; import java.nio.file.Path; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; import java.util.Objects; import java.util.Optional; import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; import static java.nio.charset.StandardCharsets.UTF_8; @@ -419,93 +407,6 @@ public static String generateNotificationTitle(final String messageType, final o return messageType + " on Project: [" + projectStr + "]"; } - public static Notification createProjectVulnerabilityAnalysisCompleteNotification(VulnerabilityScan vulnScan, UUID token, ProjectVulnAnalysisStatus status) { - // TODO: Convert data loading to raw SQL to avoid loading unneeded data and excessive queries. - // See #analyzeNotificationCriteria(QueryManager, PolicyViolation) for an example. - try (QueryManager qm = new QueryManager()) { - Project project = qm.getObjectByUuid(Project.class, vulnScan.getTargetIdentifier()); - if (project == null) { - // This can happen when the project was deleted before completion of the vuln scan is detected. - throw new NoSuchElementException("Project with UUID %s does not exist".formatted(vulnScan.getTargetIdentifier())); - } - - List findings = qm.getFindings(project); - List componentList = new ArrayList<>(); - ConcurrentHashMap> map = new ConcurrentHashMap<>(); - for (Finding finding : findings) { - final var componentUuid = (String) finding.getComponent().get("uuid"); - Component component = qm.getObjectByUuid(Component.class, componentUuid); - if (component == null) { - // This can happen when the project was deleted while this method is executing. - throw new NoSuchElementException("Component with UUID %s does not exist in project %s" - .formatted(componentUuid, project.getUuid())); - } - final var vulnerabilityUuid = (String) finding.getVulnerability().get("uuid"); - Vulnerability vulnerability = qm.getObjectByUuid(Vulnerability.class, vulnerabilityUuid); - if (vulnerability == null) { - // Unlikely to happen, but when in doubt it's still better to raise this exception - // instead of running into a generic NPE. 
- throw new NoSuchElementException("Vulnerability with UUID %s does not exist".formatted(vulnerabilityUuid)); - } - final List aliases = qm.detach(qm.getVulnerabilityAliases(vulnerability)); - vulnerability.setAliases(aliases); - if (map.containsKey(component.getUuid().toString())) { - List temp1 = new ArrayList<>(); - temp1.add(vulnerability); - temp1.addAll(map.get(component.getUuid().toString())); - map.remove(component.getUuid().toString()); - map.put(component.getUuid().toString(), temp1); - } else { - //component should be added to list only if not present in map - componentList.add(component); - map.put(component.getUuid().toString(), List.of(vulnerability)); - } - } - - - List componentAnalysisCompleteList = createList(componentList, map); - return new Notification() - .scope(NotificationScope.PORTFOLIO) - .group(NotificationGroup.PROJECT_VULN_ANALYSIS_COMPLETE) - .level(NotificationLevel.INFORMATIONAL) - .title(NotificationConstants.Title.PROJECT_VULN_ANALYSIS_COMPLETE) - .content("project analysis complete for project " + project.getName() + " with id: " + project.getUuid() + " and with version: " + project.getVersion() + ". Vulnerability details added to subject ") - .subject(new ProjectVulnAnalysisComplete(token, project, componentAnalysisCompleteList, status)); - } - } - - public static List createList(List componentList, Map> map) { - List componentAnalysisCompleteList = new ArrayList<>(); - for (Component component : componentList) { - List vulnerabilities = map.get(component.getUuid().toString()); - List result = new ArrayList<>(); - for (Vulnerability vulnerability : vulnerabilities) { - Vulnerability vulnerability1 = new Vulnerability(); - vulnerability1.setId(vulnerability.getId()); - vulnerability1.setVulnId(vulnerability.getVulnId()); - vulnerability1.setSource(vulnerability.getSource()); - vulnerability1.setTitle(vulnerability.getTitle()); - vulnerability1.setSubTitle(vulnerability.getSubTitle()); - vulnerability1.setRecommendation(vulnerability.getRecommendation()); - vulnerability1.setSeverity(vulnerability.getSeverity()); - vulnerability1.setCvssV2BaseScore(vulnerability.getCvssV2BaseScore()); - vulnerability1.setCvssV3BaseScore(vulnerability.getCvssV3BaseScore()); - vulnerability1.setOwaspRRLikelihoodScore(vulnerability.getOwaspRRLikelihoodScore()); - vulnerability1.setOwaspRRTechnicalImpactScore(vulnerability.getOwaspRRTechnicalImpactScore()); - vulnerability1.setOwaspRRBusinessImpactScore(vulnerability.getOwaspRRBusinessImpactScore()); - vulnerability1.setCwes(vulnerability.getCwes()); - vulnerability1.setUuid(vulnerability.getUuid()); - vulnerability1.setVulnerableSoftware(vulnerability.getVulnerableSoftware()); - if (vulnerability.getAliases() != null && !vulnerability.getAliases().isEmpty()) { - vulnerability1.setAliases(vulnerability.getAliases()); - } - result.add(vulnerability1); - } - componentAnalysisCompleteList.add(new ComponentVulnAnalysisComplete(result, component)); - } - return componentAnalysisCompleteList; - } - public static class PolicyViolationNotificationProjection { public String projectUuid; public String projectName; diff --git a/src/test/java/org/dependencytrack/parser/dependencytrack/NotificationModelConverterTest.java b/src/test/java/org/dependencytrack/parser/dependencytrack/NotificationModelConverterTest.java index 042bb500e..81dad9341 100644 --- a/src/test/java/org/dependencytrack/parser/dependencytrack/NotificationModelConverterTest.java +++ 
b/src/test/java/org/dependencytrack/parser/dependencytrack/NotificationModelConverterTest.java @@ -33,11 +33,9 @@ import org.dependencytrack.notification.vo.AnalysisDecisionChange; import org.dependencytrack.notification.vo.BomConsumedOrProcessed; import org.dependencytrack.notification.vo.BomProcessingFailed; -import org.dependencytrack.notification.vo.ComponentVulnAnalysisComplete; import org.dependencytrack.notification.vo.NewVulnerabilityIdentified; import org.dependencytrack.notification.vo.NewVulnerableDependency; import org.dependencytrack.notification.vo.PolicyViolationIdentified; -import org.dependencytrack.notification.vo.ProjectVulnAnalysisComplete; import org.dependencytrack.notification.vo.VexConsumedOrProcessed; import org.dependencytrack.notification.vo.ViolationAnalysisDecisionChange; import org.dependencytrack.persistence.CweImporter; @@ -54,8 +52,6 @@ import org.dependencytrack.proto.notification.v1.PolicyViolationAnalysisDecisionChangeSubject; import org.dependencytrack.proto.notification.v1.PolicyViolationSubject; import org.dependencytrack.proto.notification.v1.Project; -import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisCompleteSubject; -import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisStatus; import org.dependencytrack.proto.notification.v1.VexConsumedOrProcessedSubject; import org.dependencytrack.proto.notification.v1.Vulnerability; import org.dependencytrack.proto.notification.v1.VulnerabilityAnalysis; @@ -83,7 +79,6 @@ import static org.dependencytrack.proto.notification.v1.Group.GROUP_POLICY_VIOLATION; import static org.dependencytrack.proto.notification.v1.Group.GROUP_PROJECT_AUDIT_CHANGE; import static org.dependencytrack.proto.notification.v1.Group.GROUP_PROJECT_CREATED; -import static org.dependencytrack.proto.notification.v1.Group.GROUP_PROJECT_VULN_ANALYSIS_COMPLETE; import static org.dependencytrack.proto.notification.v1.Group.GROUP_REPOSITORY; import static org.dependencytrack.proto.notification.v1.Group.GROUP_VEX_CONSUMED; import static org.dependencytrack.proto.notification.v1.Group.GROUP_VEX_PROCESSED; @@ -738,36 +733,4 @@ private void assertPolicyViolation(final PolicyViolation policyViolation) { assertThat(policyViolation.getTimestamp().getSeconds()).isEqualTo(1679326314); } - @Test - public void testConvertComponentVulnAnalysisCompleteSubject() throws Exception { - final var token = UUID.randomUUID(); - final org.dependencytrack.model.Project project = createProject(); - final org.dependencytrack.model.Component component = createComponent(project); - final org.dependencytrack.model.Vulnerability vulnerability = createVulnerability(); - ComponentVulnAnalysisComplete componentVulnAnalysisComplete = new ComponentVulnAnalysisComplete(List.of(vulnerability), component); - final var alpineNotification = new alpine.notification.Notification(); - alpineNotification.setScope(NotificationScope.PORTFOLIO.name()); - alpineNotification.setLevel(NotificationLevel.INFORMATIONAL); - alpineNotification.setGroup(NotificationGroup.PROJECT_VULN_ANALYSIS_COMPLETE.name()); - alpineNotification.setTitle("Foo"); - alpineNotification.setContent("Bar"); - alpineNotification.setSubject(new ProjectVulnAnalysisComplete(token, project, List.of(componentVulnAnalysisComplete), ProjectVulnAnalysisStatus.PROJECT_VULN_ANALYSIS_STATUS_COMPLETED)); - - final Notification notification = NotificationModelConverter.convert(alpineNotification); - assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO); - 
assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL); - assertThat(notification.getGroup()).isEqualTo(GROUP_PROJECT_VULN_ANALYSIS_COMPLETE); - assertThat(notification.getTitle()).isEqualTo("Foo"); - assertThat(notification.getContent()).isEqualTo("Bar"); - assertThat(notification.getTimestamp().getSeconds()).isNotZero(); - assertThat(notification.hasSubject()).isTrue(); - assertThat(notification.getSubject().is(ProjectVulnAnalysisCompleteSubject.class)).isTrue(); - - final var subject = notification.getSubject().unpack(ProjectVulnAnalysisCompleteSubject.class); - assertProject(subject.getProject()); - assertThat(subject.getToken()).isEqualTo(token.toString()); - assertComponent(subject.getFindingsList().get(0).getComponent()); - assertVulnerability(subject.getFindingsList().get(0).getVulnerabilities(0)); - assertThat(subject.getStatus()).isEqualTo(ProjectVulnAnalysisStatus.PROJECT_VULN_ANALYSIS_STATUS_COMPLETED); - } } \ No newline at end of file From a8065df5bb9d908a2a71c6898fa28b469fb8eda3 Mon Sep 17 00:00:00 2001 From: nscuro Date: Tue, 2 Apr 2024 12:52:39 +0200 Subject: [PATCH 17/24] Fix duplicate metrics update events being dispatched for vuln scans of components Signed-off-by: nscuro --- ...essedVulnerabilityScanResultProcessor.java | 3 +- ...dVulnerabilityScanResultProcessorTest.java | 65 +++++++++++++++++++ 2 files changed, 67 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java index e046ed198..d4b920948 100644 --- a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java +++ b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java @@ -26,6 +26,7 @@ import com.google.protobuf.util.Timestamps; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.dependencytrack.event.ComponentMetricsUpdateEvent; +import org.dependencytrack.event.ComponentPolicyEvaluationEvent; import org.dependencytrack.event.ProjectMetricsUpdateEvent; import org.dependencytrack.event.ProjectPolicyEvaluationEvent; import org.dependencytrack.event.kafka.KafkaEvent; @@ -121,7 +122,7 @@ public void process(final List> records) thro case COMPONENT -> { LOGGER.debug("Triggering policy evaluation for component %s".formatted(completedVulnScan.getTargetIdentifier())); metricsUpdateEvent = new ComponentMetricsUpdateEvent(completedVulnScan.getTargetIdentifier()); - policyEvalEvent = new ComponentMetricsUpdateEvent(completedVulnScan.getTargetIdentifier()); + policyEvalEvent = new ComponentPolicyEvaluationEvent(completedVulnScan.getTargetIdentifier()); } case PROJECT -> { LOGGER.debug("Triggering policy evaluation for project %s".formatted(completedVulnScan.getTargetIdentifier())); diff --git a/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java b/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java index b9380f1af..1bd02864a 100644 --- a/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java +++ b/src/test/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessorTest.java @@ -26,8 +26,10 @@ import org.dependencytrack.event.ProjectMetricsUpdateEvent; import org.dependencytrack.event.ProjectPolicyEvaluationEvent; import 
org.dependencytrack.event.kafka.KafkaTopics; +import org.dependencytrack.model.Component; import org.dependencytrack.model.Project; import org.dependencytrack.model.VulnerabilityScan; +import org.dependencytrack.model.WorkflowState; import org.dependencytrack.model.WorkflowStatus; import org.dependencytrack.model.WorkflowStep; import org.dependencytrack.persistence.jdbi.WorkflowDao; @@ -398,6 +400,69 @@ public void testProcessWithDelayedBomProcessedNotificationWithoutCompletedBomPro assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE.name())); } + @Test + public void testProcessWithDelayedBomProcessedNotificationWhenVulnerabilityScanTargetIsComponent() throws Exception { + final var project = new Project(); + project.setName("acme-app"); + qm.persist(project); + + final var component = new Component(); + component.setProject(project); + component.setName("acme-lib"); + component.setVersion("1.0.0"); + qm.persist(component); + + final UUID workflowToken = UUID.randomUUID(); + final var workflowState = new WorkflowState(); + workflowState.setToken(workflowToken); + workflowState.setStep(WorkflowStep.VULN_ANALYSIS); + workflowState.setStatus(WorkflowStatus.PENDING); + workflowState.setStartedAt(new Date()); + workflowState.setUpdatedAt(workflowState.getStartedAt()); + qm.persist(workflowState); + + final var vulnScan = new VulnerabilityScan(); + vulnScan.setToken(workflowToken.toString()); + vulnScan.setTargetType(VulnerabilityScan.TargetType.COMPONENT); + vulnScan.setTargetIdentifier(component.getUuid()); + vulnScan.setStatus(VulnerabilityScan.Status.IN_PROGRESS); + vulnScan.setExpectedResults(1); + vulnScan.setFailureThreshold(0.1); + vulnScan.setStartedAt(new Date()); + vulnScan.setUpdatedAt(vulnScan.getStartedAt()); + qm.persist(vulnScan); + + final var scanResult = ScanResult.newBuilder() + .addScannerResults(ScannerResult.newBuilder() + .setScanner(SCANNER_INTERNAL) + .setStatus(SCAN_STATUS_SUCCESSFUL)) + .build(); + + final var processor = new ProcessedVulnerabilityScanResultProcessor(/* shouldDispatchBomProcessedNotification */ true); + processor.process(List.of(aConsumerRecord(vulnScan.getToken(), scanResult).build())); + + qm.getPersistenceManager().refreshAll(vulnScan, workflowState); + assertThat(vulnScan.getStatus()).isEqualTo(VulnerabilityScan.Status.COMPLETED); + assertThat(workflowState.getStatus()).isEqualTo(WorkflowStatus.COMPLETED); + + assertThat(kafkaMockProducer.history()).isEmpty(); + + assertThat(EVENTS).satisfiesExactly( + event -> { + assertThat(event).isInstanceOf(ComponentPolicyEvaluationEvent.class); + final var policyEvalEvent = (ComponentPolicyEvaluationEvent) event; + assertThat(policyEvalEvent.getUuid()).isEqualTo(component.getUuid()); + assertThat(policyEvalEvent.getChainIdentifier()).isEqualTo(workflowToken); + }, + event -> { + assertThat(event).isInstanceOf(ComponentMetricsUpdateEvent.class); + final var metricsUpdateEvent = (ComponentMetricsUpdateEvent) event; + assertThat(metricsUpdateEvent.getUuid()).isEqualTo(component.getUuid()); + assertThat(metricsUpdateEvent.getChainIdentifier()).isEqualTo(workflowToken); + } + ); + } + private static final ConcurrentLinkedQueue EVENTS = new ConcurrentLinkedQueue<>(); public static class EventSubscriber implements Subscriber { From bf6522f00692195eaf85272294b4d90753db3ae2 Mon Sep 17 00:00:00 2001 From: nscuro Date: Wed, 3 Apr 2024 11:32:50 +0200 Subject: [PATCH 18/24] Add property to control shutdown timeout for processors As a replacement for 
`KAFKA_STREAMS_DRAIN_TIMEOUT_DURATION`. Signed-off-by: nscuro --- .../event/kafka/processor/api/ProcessorManager.java | 7 +++++++ .../event/kafka/processor/api/ProcessorProperties.java | 2 ++ src/main/resources/application.properties | 5 +++++ 3 files changed, 14 insertions(+) diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/api/ProcessorManager.java b/src/main/java/org/dependencytrack/event/kafka/processor/api/ProcessorManager.java index f4c65152b..b25058cae 100644 --- a/src/main/java/org/dependencytrack/event/kafka/processor/api/ProcessorManager.java +++ b/src/main/java/org/dependencytrack/event/kafka/processor/api/ProcessorManager.java @@ -78,6 +78,8 @@ import static org.dependencytrack.event.kafka.processor.api.ProcessorProperties.PROPERTY_RETRY_MULTIPLIER_DEFAULT; import static org.dependencytrack.event.kafka.processor.api.ProcessorProperties.PROPERTY_RETRY_RANDOMIZATION_FACTOR; import static org.dependencytrack.event.kafka.processor.api.ProcessorProperties.PROPERTY_RETRY_RANDOMIZATION_FACTOR_DEFAULT; +import static org.dependencytrack.event.kafka.processor.api.ProcessorProperties.PROPERTY_SHUTDOWN_TIMEOUT_MS; +import static org.dependencytrack.event.kafka.processor.api.ProcessorProperties.PROPERTY_SHUTDOWN_TIMEOUT_MS_DEFAULT; public class ProcessorManager implements AutoCloseable { @@ -289,6 +291,11 @@ private ParallelStreamProcessor createParallelConsumer(final Str return Duration.ofMillis(delayMillis); }); + final long shutdownTimeoutMs = Optional.ofNullable(properties.get(PROPERTY_SHUTDOWN_TIMEOUT_MS)) + .map(Long::parseLong) + .orElse(PROPERTY_SHUTDOWN_TIMEOUT_MS_DEFAULT); + optionsBuilder.shutdownTimeout(Duration.ofMillis(shutdownTimeoutMs)); + if (Config.getInstance().getPropertyAsBoolean(Config.AlpineKey.METRICS_ENABLED)) { optionsBuilder .meterRegistry(Metrics.getRegistry()) diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/api/ProcessorProperties.java b/src/main/java/org/dependencytrack/event/kafka/processor/api/ProcessorProperties.java index 815f26d91..62c504012 100644 --- a/src/main/java/org/dependencytrack/event/kafka/processor/api/ProcessorProperties.java +++ b/src/main/java/org/dependencytrack/event/kafka/processor/api/ProcessorProperties.java @@ -36,6 +36,8 @@ final class ProcessorProperties { static final double PROPERTY_RETRY_RANDOMIZATION_FACTOR_DEFAULT = 0.3; static final String PROPERTY_RETRY_MAX_DELAY_MS = "retry.max.delay.ms"; static final long PROPERTY_RETRY_MAX_DELAY_MS_DEFAULT = 60 * 1000; // 60s + static final String PROPERTY_SHUTDOWN_TIMEOUT_MS = "shutdown.timeout.ms"; + static final long PROPERTY_SHUTDOWN_TIMEOUT_MS_DEFAULT = 10 * 1000; // 10s private ProcessorProperties() { } diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index a1ddddcb3..a75eb1ea5 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -453,6 +453,11 @@ application.id=dtrack-apiserver # alpine.kafka.processor..retry.randomization.factor=0.3 # alpine.kafka.processor..retry.max.delay.ms=60000 +# Optional +# Defines the timeout to wait for the processor to finish any pending work +# prior to being shut down. +# alpine.kafka.processor..shutdown.timeout.ms=10000 + # Optional # Allows for customization of the underlying Kafka consumer. # Refer to https://kafka.apache.org/documentation/#consumerconfigs for available options. 
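For illustration, the new property is applied per processor name. A deployment that needs a longer drain window for the vulnerability analysis result processor could override the 10-second default (10000 ms) like so; the value below is a hypothetical tuning, not a shipped default:

    alpine.kafka.processor.vuln.analysis.result.shutdown.timeout.ms=30000

On shutdown, the processor then waits up to 30 seconds for records it has already picked up; offsets not committed within that window are simply re-consumed after the next start, per Kafka's usual at-least-once behavior.
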
From 5e1c9826bbf3a9a8ceb0116133c2ce896a58fa26 Mon Sep 17 00:00:00 2001 From: nscuro Date: Thu, 11 Apr 2024 14:33:08 +0200 Subject: [PATCH 19/24] Do not block when dispatching events in `VulnerabilityScanResultProcessor` Signed-off-by: nscuro --- .../kafka/processor/VulnerabilityScanResultProcessor.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java index 8ad802e97..d029a6c64 100644 --- a/src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java +++ b/src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java @@ -26,7 +26,6 @@ import com.google.protobuf.Timestamp; import com.google.protobuf.util.Timestamps; import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.clients.producer.RecordMetadata; import org.dependencytrack.event.PortfolioVulnerabilityAnalysisEvent; import org.dependencytrack.event.kafka.KafkaEvent; import org.dependencytrack.event.kafka.KafkaEventConverter; @@ -86,7 +85,6 @@ import java.util.ServiceLoader; import java.util.Set; import java.util.UUID; -import java.util.concurrent.CompletableFuture; import java.util.function.Function; import java.util.stream.Collectors; @@ -149,8 +147,9 @@ public void process(final ConsumerRecord record) { processInternal(scanKey, scanResult, analysisLevel, isNewComponent); - final List> dispatchedEvents = eventDispatcher.dispatchAll(eventsToDispatch.get()); - CompletableFuture.allOf(dispatchedEvents.toArray(new CompletableFuture[0])).join(); + // NB: Dispatching asynchronously here as blocking comes with a latency penalty + // that is too high, given the frequency at which records are processed. 
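+ // Joining on the returned futures would stall this processing thread for a full
+ // producer round-trip per processed record.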
+ eventDispatcher.dispatchAll(eventsToDispatch.get()); } finally { eventsToDispatch.get().clear(); } From 856c0ec670117aaf635125ae5a0af14c87e6cd54 Mon Sep 17 00:00:00 2001 From: nscuro Date: Thu, 11 Apr 2024 15:43:05 +0200 Subject: [PATCH 20/24] Harmonize processor names Signed-off-by: nscuro --- ...essedVulnerabilityScanResultProcessor.java | 2 +- .../VulnerabilityScanResultProcessor.java | 2 +- src/main/resources/application.properties | 38 +++++++++---------- 3 files changed, 21 insertions(+), 21 deletions(-) diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java index d4b920948..21e04f7c3 100644 --- a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java +++ b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java @@ -75,7 +75,7 @@ */ public class ProcessedVulnerabilityScanResultProcessor implements BatchProcessor { - static final String PROCESSOR_NAME = "processed.vuln.scan.result"; + static final String PROCESSOR_NAME = "vuln.scan.result.processed"; private static final Logger LOGGER = Logger.getLogger(ProcessedVulnerabilityScanResultProcessor.class); diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java index d029a6c64..435a6e6d2 100644 --- a/src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java +++ b/src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java @@ -116,7 +116,7 @@ */ public class VulnerabilityScanResultProcessor implements Processor { - static String PROCESSOR_NAME = "vuln.analysis.result"; + static String PROCESSOR_NAME = "vuln.scan.result"; private static final Logger LOGGER = Logger.getLogger(VulnerabilityScanResultProcessor.class); diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index a75eb1ea5..54a0b4a5f 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -489,27 +489,27 @@ alpine.kafka.processor.repo.meta.analysis.result.consumer.group.id=dtrack-apiser alpine.kafka.processor.repo.meta.analysis.result.consumer.auto.offset.reset=earliest # Required -alpine.kafka.processor.vuln.analysis.result.max.concurrency=-1 -alpine.kafka.processor.vuln.analysis.result.processing.order=key -alpine.kafka.processor.vuln.analysis.result.retry.initial.delay.ms=1000 -alpine.kafka.processor.vuln.analysis.result.retry.multiplier=2 -alpine.kafka.processor.vuln.analysis.result.retry.randomization.factor=0.3 -alpine.kafka.processor.vuln.analysis.result.retry.max.delay.ms=180000 -alpine.kafka.processor.vuln.analysis.result.consumer.group.id=dtrack-apiserver-processor -alpine.kafka.processor.vuln.analysis.result.consumer.auto.offset.reset=earliest +alpine.kafka.processor.vuln.scan.result.max.concurrency=-1 +alpine.kafka.processor.vuln.scan.result.processing.order=key +alpine.kafka.processor.vuln.scan.result.retry.initial.delay.ms=1000 +alpine.kafka.processor.vuln.scan.result.retry.multiplier=2 +alpine.kafka.processor.vuln.scan.result.retry.randomization.factor=0.3 +alpine.kafka.processor.vuln.scan.result.retry.max.delay.ms=180000 
+alpine.kafka.processor.vuln.scan.result.consumer.group.id=dtrack-apiserver-processor +alpine.kafka.processor.vuln.scan.result.consumer.auto.offset.reset=earliest # Required -alpine.kafka.processor.processed.vuln.scan.result.max.batch.size=1000 -alpine.kafka.processor.processed.vuln.scan.result.max.concurrency=1 -alpine.kafka.processor.processed.vuln.scan.result.processing.order=unordered -alpine.kafka.processor.processed.vuln.scan.result.retry.initial.delay.ms=3000 -alpine.kafka.processor.processed.vuln.scan.result.retry.multiplier=2 -alpine.kafka.processor.processed.vuln.scan.result.retry.randomization.factor=0.3 -alpine.kafka.processor.processed.vuln.scan.result.retry.max.delay.ms=180000 -alpine.kafka.processor.processed.vuln.scan.result.consumer.group.id=dtrack-apiserver-processor -alpine.kafka.processor.processed.vuln.scan.result.consumer.auto.offset.reset=earliest -alpine.kafka.processor.processed.vuln.scan.result.consumer.max.poll.records=10000 -alpine.kafka.processor.processed.vuln.scan.result.consumer.fetch.min.bytes=524288 +alpine.kafka.processor.vuln.scan.result.processed.max.batch.size=1000 +alpine.kafka.processor.vuln.scan.result.processed.max.concurrency=1 +alpine.kafka.processor.vuln.scan.result.processed.processing.order=unordered +alpine.kafka.processor.vuln.scan.result.processed.retry.initial.delay.ms=3000 +alpine.kafka.processor.vuln.scan.result.processed.retry.multiplier=2 +alpine.kafka.processor.vuln.scan.result.processed.retry.randomization.factor=0.3 +alpine.kafka.processor.vuln.scan.result.processed.retry.max.delay.ms=180000 +alpine.kafka.processor.vuln.scan.result.processed.consumer.group.id=dtrack-apiserver-processor +alpine.kafka.processor.vuln.scan.result.processed.consumer.auto.offset.reset=earliest +alpine.kafka.processor.vuln.scan.result.processed.consumer.max.poll.records=10000 +alpine.kafka.processor.vuln.scan.result.processed.consumer.fetch.min.bytes=524288 # Scheduling tasks after 3 minutes (3*60*1000) of starting application task.scheduler.initial.delay=180000 From b4c5be652aafac0ab6ce7327b222cc7e2ce014d6 Mon Sep 17 00:00:00 2001 From: nscuro Date: Thu, 11 Apr 2024 16:14:31 +0200 Subject: [PATCH 21/24] Set default producer `linger.ms` to `100` This is to improve throughput, and to match the default used by Kafka Streams, which we are replacing: https://kafka.apache.org/37/documentation/streams/developer-guide/config-streams.html#default-values Signed-off-by: nscuro --- .../dependencytrack/event/kafka/KafkaProducerInitializer.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/org/dependencytrack/event/kafka/KafkaProducerInitializer.java b/src/main/java/org/dependencytrack/event/kafka/KafkaProducerInitializer.java index 9ed05f434..f018a956d 100644 --- a/src/main/java/org/dependencytrack/event/kafka/KafkaProducerInitializer.java +++ b/src/main/java/org/dependencytrack/event/kafka/KafkaProducerInitializer.java @@ -107,6 +107,7 @@ private static Producer createProducer() { properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName()); properties.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, CompressionType.SNAPPY.name); properties.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true"); + properties.put(ProducerConfig.LINGER_MS_CONFIG, "100"); properties.put(ProducerConfig.ACKS_CONFIG, "all"); if (Config.getInstance().getPropertyAsBoolean(ConfigKey.KAFKA_TLS_ENABLED)) { properties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, Config.getInstance().getProperty(ConfigKey.KAFKA_TLS_PROTOCOL)); From 
861585e6402a450f900aefaeab47672579a0a4a9 Mon Sep 17 00:00:00 2001 From: nscuro Date: Wed, 17 Apr 2024 11:46:13 +0200 Subject: [PATCH 22/24] Add annotations for new config properties Signed-off-by: nscuro --- src/main/resources/application.properties | 80 ++++++++++++++++++++++- 1 file changed, 78 insertions(+), 2 deletions(-) diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 597cb3b22..b4743d019 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -793,27 +793,103 @@ alpine.kafka.processor.repo.meta.analysis.result.consumer.group.id=dtrack-apiser # @required alpine.kafka.processor.repo.meta.analysis.result.consumer.auto.offset.reset=earliest -# Required +# @category: Kafka +# @type: integer +# @required alpine.kafka.processor.vuln.scan.result.max.concurrency=-1 + +# @category: Kafka +# @type: enum +# @valid-values: [key, partition, unordered] +# @required alpine.kafka.processor.vuln.scan.result.processing.order=key + +# @category: Kafka +# @type: integer +# @required alpine.kafka.processor.vuln.scan.result.retry.initial.delay.ms=1000 + +# @category: Kafka +# @type: integer +# @required alpine.kafka.processor.vuln.scan.result.retry.multiplier=2 + +# @category: Kafka +# @type: double +# @required alpine.kafka.processor.vuln.scan.result.retry.randomization.factor=0.3 + +# @category: Kafka +# @type: integer +# @required alpine.kafka.processor.vuln.scan.result.retry.max.delay.ms=180000 + +# @category: Kafka +# @type: string +# @required alpine.kafka.processor.vuln.scan.result.consumer.group.id=dtrack-apiserver-processor + +# @category: Kafka +# @type: enum +# @valid-values: [earliest, latest, none] +# @required alpine.kafka.processor.vuln.scan.result.consumer.auto.offset.reset=earliest -# Required +# @category: Kafka +# @type: integer +# @required alpine.kafka.processor.vuln.scan.result.processed.max.batch.size=1000 + +# @category: Kafka +# @type: integer +# @required alpine.kafka.processor.vuln.scan.result.processed.max.concurrency=1 + +# @category: Kafka +# @type: enum +# @valid-values: [key, partition, unordered] +# @required alpine.kafka.processor.vuln.scan.result.processed.processing.order=unordered + +# @category: Kafka +# @type: integer +# @required alpine.kafka.processor.vuln.scan.result.processed.retry.initial.delay.ms=3000 + +# @category: Kafka +# @type: integer +# @required alpine.kafka.processor.vuln.scan.result.processed.retry.multiplier=2 + +# @category: Kafka +# @type: double +# @required alpine.kafka.processor.vuln.scan.result.processed.retry.randomization.factor=0.3 + +# @category: Kafka +# @type: integer +# @required alpine.kafka.processor.vuln.scan.result.processed.retry.max.delay.ms=180000 + +# @category: Kafka +# @type: string +# @required alpine.kafka.processor.vuln.scan.result.processed.consumer.group.id=dtrack-apiserver-processor + +# @category: Kafka +# @type: enum +# @valid-values: [earliest, latest, none] +# @required alpine.kafka.processor.vuln.scan.result.processed.consumer.auto.offset.reset=earliest + +# @category: Kafka +# @type: integer +# @required alpine.kafka.processor.vuln.scan.result.processed.consumer.max.poll.records=10000 + +# @category: Kafka +# @type: integer +# @required alpine.kafka.processor.vuln.scan.result.processed.consumer.fetch.min.bytes=524288 # Scheduling tasks after 3 minutes (3*60*1000) of starting application From 093284b369db514cec9a365edb67d0455951a035 Mon Sep 17 00:00:00 2001 From: nscuro Date: Wed, 17 Apr 2024 11:46:49 +0200 
Subject: [PATCH 23/24] Revert unrelated `Dockerfile` change Signed-off-by: nscuro --- src/main/docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile index 9c39687c1..3c597901b 100644 --- a/src/main/docker/Dockerfile +++ b/src/main/docker/Dockerfile @@ -69,7 +69,7 @@ RUN mkdir -p ${APP_DIR} ${DATA_DIR} \ COPY --from=jre-build /opt/java/openjdk $JAVA_HOME # Copy the compiled WAR to the application directory created above -COPY --chown=${UID}:${GID} ./target/${WAR_FILENAME} ./src/main/docker/logback-json.xml ${APP_DIR} +COPY ./target/${WAR_FILENAME} ./src/main/docker/logback-json.xml ${APP_DIR} # Specify the user to run as (in numeric format for compatibility with Kubernetes/OpenShift's SCC) USER ${UID} From 7179af58998fd0408ce9ac625cc0c681187216e4 Mon Sep 17 00:00:00 2001 From: nscuro Date: Wed, 17 Apr 2024 11:47:51 +0200 Subject: [PATCH 24/24] Remove `kafka.num.stream.threads` property Signed-off-by: nscuro --- src/main/resources/application.properties | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index b4743d019..b687788bc 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -631,11 +631,6 @@ kafka.bootstrap.servers= # @valid-values: [earliest, latest, none] kafka.auto.offset.reset=earliest -# @category: Kafka -# @type: integer -# @required -kafka.num.stream.threads=3 - # @category: Kafka # @type: boolean kafka.tls.enabled=false
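To illustrate how the producer-side changes in this series interact: with `linger.ms=100`, records dispatched fire-and-forget (as in the `VulnerabilityScanResultProcessor` change of patch 19) accumulate into batches for up to 100ms before being sent, trading a small delivery delay for fewer, larger requests. A minimal, self-contained sketch follows; the producer settings mirror those in `KafkaProducerInitializer`, while the bootstrap address, topic name, and callback handling are illustrative assumptions:

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.Properties;

class LingerSketch {

    public static void main(final String[] args) {
        final var properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
        properties.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
        properties.put(ProducerConfig.ACKS_CONFIG, "all");
        // Let the producer batch records for up to 100ms, matching the
        // Kafka Streams default that this patch series replaces.
        properties.put(ProducerConfig.LINGER_MS_CONFIG, "100");

        try (final Producer<byte[], byte[]> producer = new KafkaProducer<>(properties)) {
            final var record = new ProducerRecord<>("dtrack.event.dummy", new byte[0], new byte[0]);
            // Fire-and-forget: errors surface via the callback rather than by
            // blocking on the returned future (cf. patch 19).
            producer.send(record, (metadata, exception) -> {
                if (exception != null) {
                    exception.printStackTrace();
                }
            });
        } // close() flushes any lingering batch before returning.
    }
}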