diff --git a/include/nighthawk/adaptive_load/BUILD b/include/nighthawk/adaptive_load/BUILD
index 07e6522c7..32ed22151 100644
--- a/include/nighthawk/adaptive_load/BUILD
+++ b/include/nighthawk/adaptive_load/BUILD
@@ -52,6 +52,21 @@ envoy_basic_cc_library(
     ],
 )
 
+envoy_basic_cc_library(
+    name = "metrics_evaluator",
+    hdrs = [
+        "metrics_evaluator.h",
+    ],
+    include_prefix = "nighthawk/adaptive_load",
+    deps = [
+        ":metrics_plugin",
+        "//api/adaptive_load:adaptive_load_proto_cc_proto",
+        "@envoy//include/envoy/common:base_includes",
+        "@envoy//include/envoy/config:typed_config_interface",
+        "@envoy//source/common/common:statusor_lib_with_external_headers",
+    ],
+)
+
 envoy_basic_cc_library(
     name = "metrics_plugin",
     hdrs = [
diff --git a/include/nighthawk/adaptive_load/metrics_evaluator.h b/include/nighthawk/adaptive_load/metrics_evaluator.h
new file mode 100644
index 000000000..cc969a9dd
--- /dev/null
+++ b/include/nighthawk/adaptive_load/metrics_evaluator.h
@@ -0,0 +1,90 @@
+#pragma once
+
+#include "envoy/config/core/v3/base.pb.h"
+
+#include "nighthawk/adaptive_load/metrics_plugin.h"
+
+#include "external/envoy/source/common/common/logger.h"
+#include "external/envoy/source/common/common/statusor.h"
+#include "external/envoy/source/common/protobuf/protobuf.h"
+
+#include "api/adaptive_load/adaptive_load.pb.h"
+#include "api/adaptive_load/benchmark_result.pb.h"
+#include "api/adaptive_load/metric_spec.pb.h"
+#include "api/client/options.pb.h"
+#include "api/client/output.pb.h"
+#include "api/client/service.pb.h"
+
+#include "absl/container/flat_hash_map.h"
+#include "absl/status/status.h"
+#include "absl/strings/str_join.h"
+
+namespace Nighthawk {
+
+/**
+ * A utility for calling MetricsPlugins and scoring metrics according to ThresholdSpecs.
+ *
+ * AnalyzeNighthawkBenchmark() is intended to be called repeatedly from the adaptive load controller
+ * main loop after each Nighthawk Service call. The controller maintains a set of shared
+ * MetricsPlugins that are initialized once for the whole session. AnalyzeNighthawkBenchmark() calls
+ * EvaluateMetric() and ExtractMetricSpecs() internally. The AdaptiveLoadSessionSpec is consulted
+ * for MetricSpec, ThresholdSpec, and MetricsPlugin information.
+ */
+class MetricsEvaluator {
+public:
+  virtual ~MetricsEvaluator() = default;
+
+  /**
+   * Calls a MetricsPlugin to obtain the metric value defined by the MetricSpec, then scores the
+   * value according to a ThresholdSpec if one is present.
+   *
+   * @param metric_spec The MetricSpec identifying the metric by name and plugin name.
+   * @param metrics_plugin A MetricsPlugin that will be queried. The plugin must correspond to the
+   * plugin name in the MetricSpec, and it should support the requested metric name in the
+   * MetricSpec.
+   * @param threshold_spec A proto describing the threshold and scoring function. Nullptr if the
+   * metric is informational only.
+   *
+   * @return StatusOr<MetricEvaluation> A proto containing the metric value (and its score if a
+   * threshold was specified), or an error status if the metric could not be obtained from the
+   * MetricsPlugin.
+   */
+  virtual absl::StatusOr<nighthawk::adaptive_load::MetricEvaluation>
+  EvaluateMetric(const nighthawk::adaptive_load::MetricSpec& metric_spec,
+                 MetricsPlugin& metrics_plugin,
+                 const nighthawk::adaptive_load::ThresholdSpec* threshold_spec) const PURE;
+
+  /**
+   * Extracts pointers to metric descriptors and corresponding thresholds from a top-level adaptive
+   * load session spec into an ordered list. Allows for uniform treatment of scored and
+   * informational metrics.
+   *
+   * @param spec The adaptive load session spec.
+   * @return Vector of pairs of pointers to MetricSpec and ThresholdSpec within |spec|. For
+   * informational metrics, the ThresholdSpec pointer is nullptr.
+   */
+  virtual const std::vector<std::pair<const nighthawk::adaptive_load::MetricSpec*,
+                                      const nighthawk::adaptive_load::ThresholdSpec*>>
+  ExtractMetricSpecs(const nighthawk::adaptive_load::AdaptiveLoadSessionSpec& spec) const PURE;
+
+  /**
+   * Analyzes a Nighthawk Service benchmark against configured MetricThresholds. For each
+   * MetricSpec, queries a MetricsPlugin for the current metric value. Assumes that the values from
+   * MetricsPlugins correspond timewise with the Nighthawk benchmark.
+   *
+   * @param nighthawk_response Proto returned from Nighthawk Service describing the latest single
+   * benchmark session. To be translated into scorable metrics by the "nighthawk.builtin"
+   * MetricsPlugin.
+   * @param spec Top-level proto defining the adaptive load session.
+   * @param name_to_custom_metrics_plugin_map Map from plugin names to initialized MetricsPlugins.
+   * Must include all MetricsPlugins referenced in the spec other than "nighthawk.builtin".
+   *
+   * @return StatusOr<BenchmarkResult> A proto containing all metric scores for this Nighthawk
+   * Service benchmark session, or an error propagated from a MetricsPlugin.
+   */
+  virtual absl::StatusOr<nighthawk::adaptive_load::BenchmarkResult>
+  AnalyzeNighthawkBenchmark(const nighthawk::client::ExecutionResponse& nighthawk_response,
+                            const nighthawk::adaptive_load::AdaptiveLoadSessionSpec& spec,
+                            const absl::flat_hash_map<std::string, MetricsPluginPtr>&
+                                name_to_custom_metrics_plugin_map) const PURE;
+};
+
+} // namespace Nighthawk
diff --git a/source/adaptive_load/BUILD b/source/adaptive_load/BUILD
index 3eca6c792..c6e0fcf7e 100644
--- a/source/adaptive_load/BUILD
+++ b/source/adaptive_load/BUILD
@@ -42,6 +42,27 @@ envoy_cc_library(
     ],
 )
 
+envoy_cc_library(
+    name = "metrics_evaluator_impl",
+    srcs = [
+        "metrics_evaluator_impl.cc",
+    ],
+    hdrs = [
+        "metrics_evaluator_impl.h",
+    ],
+    repository = "@envoy",
+    visibility = ["//visibility:public"],
+    deps = [
+        ":metrics_plugin_impl",
+        ":plugin_loader",
+        "//api/adaptive_load:adaptive_load_proto_cc_proto",
+        "//api/client:base_cc_proto",
+        "//include/nighthawk/adaptive_load:adaptive_load_controller",
+        "//include/nighthawk/adaptive_load:metrics_evaluator",
+        "//include/nighthawk/adaptive_load:scoring_function",
+    ],
+)
+
 envoy_cc_library(
     name = "metrics_plugin_impl",
     srcs = [
diff --git a/source/adaptive_load/metrics_evaluator_impl.cc b/source/adaptive_load/metrics_evaluator_impl.cc
new file mode 100644
index 000000000..54378edfa
--- /dev/null
+++ b/source/adaptive_load/metrics_evaluator_impl.cc
@@ -0,0 +1,114 @@
+#include "adaptive_load/metrics_evaluator_impl.h"
+
+#include <memory>
+
+#include "api/adaptive_load/metric_spec.pb.h"
+
+#include "adaptive_load/metrics_plugin_impl.h"
+#include "adaptive_load/plugin_loader.h"
+
+namespace Nighthawk {
+
+namespace {
+
+using ::nighthawk::adaptive_load::MetricSpec;
+using ::nighthawk::adaptive_load::MetricSpecWithThreshold;
+using ::nighthawk::adaptive_load::ThresholdSpec;
+
+} // namespace
+
+absl::StatusOr<nighthawk::adaptive_load::MetricEvaluation>
+MetricsEvaluatorImpl::EvaluateMetric(const MetricSpec& metric_spec, MetricsPlugin& metrics_plugin,
+                                     const ThresholdSpec* threshold_spec) const {
+  nighthawk::adaptive_load::MetricEvaluation evaluation;
+  evaluation.set_metric_id(
+      absl::StrCat(metric_spec.metrics_plugin_name(), "/", metric_spec.metric_name()));
+  const absl::StatusOr<double> metric_value_or =
+      metrics_plugin.GetMetricByName(metric_spec.metric_name());
+  if (!metric_value_or.ok()) {
+    return absl::Status(static_cast<absl::StatusCode>(metric_value_or.status().code()),
+                        absl::StrCat("Error calling MetricsPlugin '",
+                                     metric_spec.metrics_plugin_name(), "': ",
+                                     metric_value_or.status().message()));
+  }
+  const double metric_value = metric_value_or.value();
+  evaluation.set_metric_value(metric_value);
+  if (threshold_spec == nullptr) {
+    // Informational metric.
+    evaluation.set_weight(0.0);
+  } else {
+    evaluation.set_weight(threshold_spec->weight().value());
+    absl::StatusOr<ScoringFunctionPtr> scoring_function_or =
+        LoadScoringFunctionPlugin(threshold_spec->scoring_function());
+    RELEASE_ASSERT(scoring_function_or.ok(),
+                   absl::StrCat("ScoringFunction plugin loading error should have been caught "
+                                "during input validation: ",
+                                scoring_function_or.status().message()));
+    ScoringFunctionPtr scoring_function = std::move(scoring_function_or.value());
+    evaluation.set_threshold_score(scoring_function->EvaluateMetric(metric_value));
+  }
+  return evaluation;
+}
+
+const std::vector<std::pair<const nighthawk::adaptive_load::MetricSpec*,
+                            const nighthawk::adaptive_load::ThresholdSpec*>>
+MetricsEvaluatorImpl::ExtractMetricSpecs(
+    const nighthawk::adaptive_load::AdaptiveLoadSessionSpec& spec) const {
+  std::vector<std::pair<const MetricSpec*, const ThresholdSpec*>> spec_threshold_pairs;
+  for (const MetricSpecWithThreshold& metric_threshold : spec.metric_thresholds()) {
+    spec_threshold_pairs.emplace_back(&metric_threshold.metric_spec(),
+                                      &metric_threshold.threshold_spec());
+  }
+  for (const MetricSpec& metric_spec : spec.informational_metric_specs()) {
+    spec_threshold_pairs.emplace_back(&metric_spec, nullptr);
+  }
+  return spec_threshold_pairs;
+}
+
+absl::StatusOr<nighthawk::adaptive_load::BenchmarkResult>
+MetricsEvaluatorImpl::AnalyzeNighthawkBenchmark(
+    const nighthawk::client::ExecutionResponse& nighthawk_response,
+    const nighthawk::adaptive_load::AdaptiveLoadSessionSpec& spec,
+    const absl::flat_hash_map<std::string, MetricsPluginPtr>& name_to_custom_metrics_plugin_map)
+    const {
+  if (nighthawk_response.error_detail().code() != static_cast<int>(absl::StatusCode::kOk)) {
+    return absl::Status(static_cast<absl::StatusCode>(nighthawk_response.error_detail().code()),
+                        nighthawk_response.error_detail().message());
+  }
+
+  nighthawk::adaptive_load::BenchmarkResult benchmark_result;
+  *benchmark_result.mutable_nighthawk_service_output() = nighthawk_response.output();
+
+  // A map containing all available MetricsPlugins: preloaded custom plugins shared across all
+  // benchmarks, and a freshly instantiated builtin plugin for this benchmark only.
+  absl::flat_hash_map<std::string, MetricsPlugin*> name_to_plugin_map;
+  for (const auto& name_plugin_pair : name_to_custom_metrics_plugin_map) {
+    name_to_plugin_map[name_plugin_pair.first] = name_plugin_pair.second.get();
+  }
+  auto builtin_plugin =
+      std::make_unique<NighthawkStatsEmulatedMetricsPlugin>(nighthawk_response.output());
+  name_to_plugin_map["nighthawk.builtin"] = builtin_plugin.get();
+
+  const std::vector<std::pair<const MetricSpec*, const ThresholdSpec*>> spec_threshold_pairs =
+      ExtractMetricSpecs(spec);
+
+  std::vector<std::string> errors;
+  for (const std::pair<const MetricSpec*, const ThresholdSpec*>& spec_threshold_pair :
+       spec_threshold_pairs) {
+    absl::StatusOr<nighthawk::adaptive_load::MetricEvaluation> evaluation_or =
+        EvaluateMetric(*spec_threshold_pair.first,
+                       *name_to_plugin_map[spec_threshold_pair.first->metrics_plugin_name()],
+                       spec_threshold_pair.second);
+    if (!evaluation_or.ok()) {
+      errors.emplace_back(absl::StrCat("Error evaluating metric: ",
+                                       static_cast<int>(evaluation_or.status().code()), ": ",
+                                       evaluation_or.status().message()));
+      continue;
+    }
+    *benchmark_result.mutable_metric_evaluations()->Add() = evaluation_or.value();
+  }
+  if (!errors.empty()) {
+    return absl::InternalError(absl::StrJoin(errors, "\n"));
+  }
+  return benchmark_result;
+}
+
+} // namespace Nighthawk
diff --git a/source/adaptive_load/metrics_evaluator_impl.h b/source/adaptive_load/metrics_evaluator_impl.h
new file mode 100644
index 000000000..b9020c14c
--- /dev/null
+++ b/source/adaptive_load/metrics_evaluator_impl.h
@@ -0,0 +1,23 @@
+#pragma once
+
+#include "nighthawk/adaptive_load/metrics_evaluator.h"
+
+namespace Nighthawk {
+
+// Default implementation of the MetricsEvaluator interface.
+class MetricsEvaluatorImpl : public MetricsEvaluator {
+public:
+  absl::StatusOr<nighthawk::adaptive_load::MetricEvaluation>
+  EvaluateMetric(const nighthawk::adaptive_load::MetricSpec& metric_spec,
+                 MetricsPlugin& metrics_plugin,
+                 const nighthawk::adaptive_load::ThresholdSpec* threshold_spec) const override;
+
+  const std::vector<std::pair<const nighthawk::adaptive_load::MetricSpec*,
+                              const nighthawk::adaptive_load::ThresholdSpec*>>
+  ExtractMetricSpecs(const nighthawk::adaptive_load::AdaptiveLoadSessionSpec& spec) const override;
+
+  absl::StatusOr<nighthawk::adaptive_load::BenchmarkResult>
+  AnalyzeNighthawkBenchmark(const nighthawk::client::ExecutionResponse& nighthawk_response,
+                            const nighthawk::adaptive_load::AdaptiveLoadSessionSpec& spec,
+                            const absl::flat_hash_map<std::string, MetricsPluginPtr>&
+                                name_to_custom_metrics_plugin_map) const override;
+};
+
+} // namespace Nighthawk
diff --git a/test/adaptive_load/BUILD b/test/adaptive_load/BUILD
index ff10152fe..a8639f34e 100644
--- a/test/adaptive_load/BUILD
+++ b/test/adaptive_load/BUILD
@@ -30,6 +30,18 @@ envoy_cc_test(
     ],
 )
 
+envoy_cc_test(
+    name = "metrics_evaluator_test",
+    srcs = ["metrics_evaluator_test.cc"],
+    repository = "@envoy",
+    deps = [
+        ":minimal_output",
+        "//source/adaptive_load:metrics_evaluator_impl",
+        "//source/adaptive_load:scoring_function_impl",
+        "//test/adaptive_load/fake_plugins/fake_metrics_plugin",
+    ],
+)
+
 envoy_cc_test(
     name = "metrics_plugin_test",
     srcs = ["metrics_plugin_test.cc"],
diff --git a/test/adaptive_load/metrics_evaluator_test.cc b/test/adaptive_load/metrics_evaluator_test.cc
new file mode 100644
index 000000000..1bf632400
--- /dev/null
+++ b/test/adaptive_load/metrics_evaluator_test.cc
@@ -0,0 +1,368 @@
+#include "external/envoy/source/common/protobuf/protobuf.h"
+
+#include "api/adaptive_load/benchmark_result.pb.h"
+#include "api/adaptive_load/metric_spec.pb.h"
+#include "api/adaptive_load/scoring_function_impl.pb.h"
+#include "api/client/service.pb.h"
+
+#include "test/adaptive_load/fake_plugins/fake_metrics_plugin/fake_metrics_plugin.h"
+#include "test/adaptive_load/minimal_output.h"
+
+#include "adaptive_load/metrics_evaluator_impl.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+
+namespace Nighthawk {
+namespace {
+
+using ::Envoy::Protobuf::util::MessageDifferencer;
+using ::nighthawk::adaptive_load::BenchmarkResult;
+using ::nighthawk::adaptive_load::FakeMetricsPluginConfig;
+using ::nighthawk::adaptive_load::MetricEvaluation;
+using ::nighthawk::adaptive_load::MetricSpec;
+using ::nighthawk::adaptive_load::ThresholdSpec;
+using ::testing::HasSubstr;
+
+/**
+ * Creates a valid TypedExtensionConfig proto selecting the real BinaryScoringFunction plugin
+ * and configuring it with a threshold.
+ *
+ * @param lower_threshold Threshold value to set within the config proto.
+ *
+ * @return TypedExtensionConfig Full scoring function plugin spec that selects
+ * nighthawk.binary_scoring and provides a config.
+ */
+envoy::config::core::v3::TypedExtensionConfig
+MakeLowerThresholdBinaryScoringFunctionConfig(double lower_threshold) {
+  envoy::config::core::v3::TypedExtensionConfig config;
+  config.set_name("nighthawk.binary_scoring");
+  nighthawk::adaptive_load::BinaryScoringFunctionConfig inner_config;
+  inner_config.mutable_lower_threshold()->set_value(lower_threshold);
+  config.mutable_typed_config()->PackFrom(inner_config);
+  return config;
+}
+
+/**
+ * Creates a simulated Nighthawk Service response that reflects the specified send rate.
+ *
+ * @param send_rate The send rate that the proto values should represent.
+ *
+ * @return ExecutionResponse A simulated Nighthawk Service response with counters representing the
+ * specified send rate, along with other dummy counters and stats.
+ */
+nighthawk::client::ExecutionResponse MakeNighthawkResponseWithSendRate(double send_rate) {
+  nighthawk::client::ExecutionResponse response;
+  nighthawk::client::Output output = MakeSimpleNighthawkOutput({
+      /*concurrency=*/"auto",
+      /*requests_per_second=*/1024,
+      /*actual_duration_seconds=*/10,
+      /*upstream_rq_total=*/static_cast<uint32_t>(10 * 1024 * send_rate),
+      /*response_count_2xx=*/320,
+      /*min_ns=*/400,
+      /*mean_ns=*/500,
+      /*max_ns=*/600,
+      /*pstdev_ns=*/11,
+  });
+  *response.mutable_output() = output;
+  return response;
+}
+
+TEST(EvaluateMetric, SetsMetricIdForValidMetric) {
+  const std::string kMetricName = "good-metric";
+  FakeMetricsPluginConfig config;
+  FakeMetricsPluginConfig::FakeMetric* fake_metric = config.mutable_fake_metrics()->Add();
+  fake_metric->set_name(kMetricName);
+  FakeMetricsPlugin fake_plugin(config);
+
+  MetricSpec metric_spec;
+  metric_spec.set_metrics_plugin_name("nighthawk.fake_metrics_plugin");
+  metric_spec.set_metric_name(kMetricName);
+
+  MetricsEvaluatorImpl evaluator;
+  absl::StatusOr<MetricEvaluation> evaluation_or =
+      evaluator.EvaluateMetric(metric_spec, fake_plugin, /*threshold_spec=*/nullptr);
+  ASSERT_TRUE(evaluation_or.ok());
+  nighthawk::adaptive_load::MetricEvaluation evaluation = evaluation_or.value();
+  EXPECT_EQ(evaluation.metric_id(), "nighthawk.fake_metrics_plugin/good-metric");
+}
+
+TEST(EvaluateMetric, PropagatesMetricsPluginError) {
+  const int kExpectedStatusCode = static_cast<int>(absl::StatusCode::kFailedPrecondition);
+  const std::string kMetricName = "bad-metric";
+  const std::string kExpectedStatusMessage = "artificial metric error";
+  FakeMetricsPluginConfig config;
+  FakeMetricsPluginConfig::FakeMetric* fake_metric = config.mutable_fake_metrics()->Add();
+  fake_metric->set_name(kMetricName);
+  fake_metric->mutable_error_status()->set_code(kExpectedStatusCode);
+  fake_metric->mutable_error_status()->set_message(kExpectedStatusMessage);
+  FakeMetricsPlugin fake_plugin(config);
+
+  MetricSpec metric_spec;
+  metric_spec.set_metrics_plugin_name("nighthawk.fake_metrics_plugin");
+  metric_spec.set_metric_name(kMetricName);
+
+  MetricsEvaluatorImpl evaluator;
+  absl::StatusOr<MetricEvaluation> evaluation_or =
+      evaluator.EvaluateMetric(metric_spec, fake_plugin, /*threshold_spec=*/nullptr);
+  ASSERT_FALSE(evaluation_or.ok());
+  EXPECT_EQ(static_cast<int>(evaluation_or.status().code()), kExpectedStatusCode);
+  EXPECT_THAT(evaluation_or.status().message(), HasSubstr(kExpectedStatusMessage));
+}
+
+TEST(EvaluateMetric, StoresMetricValueForValidMetric) {
+  const std::string kMetricName = "good-metric";
+  const double kExpectedValue = 123.0;
+  FakeMetricsPluginConfig config;
+  FakeMetricsPluginConfig::FakeMetric* fake_metric = config.mutable_fake_metrics()->Add();
+  fake_metric->set_name(kMetricName);
+  fake_metric->set_value(kExpectedValue);
+  FakeMetricsPlugin fake_plugin(config);
+
+  MetricSpec metric_spec;
+  metric_spec.set_metrics_plugin_name("nighthawk.fake_metrics_plugin");
+  metric_spec.set_metric_name(kMetricName);
+
+  MetricsEvaluatorImpl evaluator;
+  absl::StatusOr<MetricEvaluation> evaluation_or =
+      evaluator.EvaluateMetric(metric_spec, fake_plugin, /*threshold_spec=*/nullptr);
+  ASSERT_TRUE(evaluation_or.ok());
+  EXPECT_EQ(evaluation_or.value().metric_value(), kExpectedValue);
+}
+
+TEST(EvaluateMetric, SetsWeightToZeroForValidInformationalMetric) {
+  const std::string kMetricName = "good-metric";
+  const double kExpectedValue = 123.0;
+
+  FakeMetricsPluginConfig config;
+  FakeMetricsPluginConfig::FakeMetric* fake_metric = config.mutable_fake_metrics()->Add();
+  fake_metric->set_name(kMetricName);
+  fake_metric->set_value(kExpectedValue);
+  FakeMetricsPlugin fake_plugin(config);
+
+  MetricSpec metric_spec;
+  metric_spec.set_metrics_plugin_name("nighthawk.fake_metrics_plugin");
+  metric_spec.set_metric_name(kMetricName);
+
+  MetricsEvaluatorImpl evaluator;
+  absl::StatusOr<MetricEvaluation> evaluation_or =
+      evaluator.EvaluateMetric(metric_spec, fake_plugin, /*threshold_spec=*/nullptr);
+  ASSERT_TRUE(evaluation_or.ok());
+  EXPECT_EQ(evaluation_or.value().weight(), 0.0);
+}
+
+TEST(EvaluateMetric, SetsWeightForValidScoredMetric) {
+  const std::string kMetricName = "good-metric";
+  const double kExpectedValue = 123.0;
+  const double kExpectedWeight = 1.5;
+  const double kLowerThreshold = 200.0;
+
+  FakeMetricsPluginConfig config;
+  FakeMetricsPluginConfig::FakeMetric* fake_metric = config.mutable_fake_metrics()->Add();
+  fake_metric->set_name(kMetricName);
+  fake_metric->set_value(kExpectedValue);
+  FakeMetricsPlugin fake_plugin(config);
+
+  MetricSpec metric_spec;
+  metric_spec.set_metrics_plugin_name("nighthawk.fake_metrics_plugin");
+  metric_spec.set_metric_name(kMetricName);
+
+  ThresholdSpec threshold_spec;
+  threshold_spec.mutable_weight()->set_value(kExpectedWeight);
+  *threshold_spec.mutable_scoring_function() =
+      MakeLowerThresholdBinaryScoringFunctionConfig(kLowerThreshold);
+
+  MetricsEvaluatorImpl evaluator;
+  absl::StatusOr<MetricEvaluation> evaluation_or =
+      evaluator.EvaluateMetric(metric_spec, fake_plugin, &threshold_spec);
+  ASSERT_TRUE(evaluation_or.ok());
+  EXPECT_EQ(evaluation_or.value().weight(), kExpectedWeight);
+}
+
+TEST(EvaluateMetric, SetsScoreForValidMetric) {
+  const std::string kMetricName = "good-metric";
+  const double kExpectedValue = 123.0;
+  const double kLowerThreshold = 200.0;
+
+  FakeMetricsPluginConfig config;
+  FakeMetricsPluginConfig::FakeMetric* fake_metric = config.mutable_fake_metrics()->Add();
+  fake_metric->set_name(kMetricName);
+  fake_metric->set_value(kExpectedValue);
+  FakeMetricsPlugin fake_plugin(config);
+
+  MetricSpec metric_spec;
+  metric_spec.set_metrics_plugin_name("nighthawk.fake_metrics_plugin");
+  metric_spec.set_metric_name(kMetricName);
+
+  ThresholdSpec threshold_spec;
+  *threshold_spec.mutable_scoring_function() =
+      MakeLowerThresholdBinaryScoringFunctionConfig(kLowerThreshold);
+
+  MetricsEvaluatorImpl evaluator;
+  absl::StatusOr<MetricEvaluation> evaluation_or =
+      evaluator.EvaluateMetric(metric_spec, fake_plugin, &threshold_spec);
+  ASSERT_TRUE(evaluation_or.ok());
+  EXPECT_EQ(evaluation_or.value().threshold_score(), -1.0);
+}
+
+TEST(ExtractMetricSpecs, ExtractsScoredMetricAndThresholdForValidMetric) {
+  const std::string kExpectedMetricName = "a";
+  nighthawk::adaptive_load::AdaptiveLoadSessionSpec spec;
+  nighthawk::adaptive_load::MetricSpecWithThreshold* metric_threshold =
+      spec.mutable_metric_thresholds()->Add();
+  metric_threshold->mutable_metric_spec()->set_metric_name(kExpectedMetricName);
+  nighthawk::adaptive_load::ThresholdSpec threshold_spec;
+  threshold_spec.mutable_weight()->set_value(123.0);
+  *metric_threshold->mutable_threshold_spec() = threshold_spec;
+
+  MetricsEvaluatorImpl evaluator;
+  const std::vector<std::pair<const MetricSpec*, const ThresholdSpec*>> spec_threshold_pairs =
+      evaluator.ExtractMetricSpecs(spec);
+
+  ASSERT_GT(spec_threshold_pairs.size(), 0);
+  EXPECT_EQ(spec_threshold_pairs[0].first->metric_name(), kExpectedMetricName);
+  ASSERT_NE(spec_threshold_pairs[0].second, nullptr);
+  EXPECT_TRUE(MessageDifferencer::Equivalent(*spec_threshold_pairs[0].second, threshold_spec));
+  EXPECT_EQ(spec_threshold_pairs[0].second->DebugString(), threshold_spec.DebugString());
+}
+
+TEST(ExtractMetricSpecs, ExtractsValueForValidInformationalMetric) {
+  const std::string kExpectedMetricName = "a";
+  nighthawk::adaptive_load::AdaptiveLoadSessionSpec spec;
+  nighthawk::adaptive_load::MetricSpec* metric_spec =
+      spec.mutable_informational_metric_specs()->Add();
+  metric_spec->set_metric_name(kExpectedMetricName);
+
+  MetricsEvaluatorImpl evaluator;
+  const std::vector<std::pair<const MetricSpec*, const ThresholdSpec*>> spec_threshold_pairs =
+      evaluator.ExtractMetricSpecs(spec);
+
+  ASSERT_GT(spec_threshold_pairs.size(), 0);
+  EXPECT_EQ(spec_threshold_pairs[0].first->metric_name(), kExpectedMetricName);
+  EXPECT_EQ(spec_threshold_pairs[0].second, nullptr);
+}
+
+TEST(AnalyzeNighthawkBenchmark, PropagatesNighthawkServiceError) {
+  const std::string kExpectedErrorMessage = "artificial nighthawk service error";
+  nighthawk::adaptive_load::AdaptiveLoadSessionSpec spec;
+  nighthawk::client::ExecutionResponse bad_nighthawk_response;
+  bad_nighthawk_response.mutable_error_detail()->set_code(
+      static_cast<int>(absl::StatusCode::kUnavailable));
+  bad_nighthawk_response.mutable_error_detail()->set_message(kExpectedErrorMessage);
+  absl::flat_hash_map<std::string, MetricsPluginPtr> name_to_custom_metrics_plugin_map;
+
+  MetricsEvaluatorImpl evaluator;
+  absl::StatusOr<BenchmarkResult> result_or = evaluator.AnalyzeNighthawkBenchmark(
+      bad_nighthawk_response, spec, name_to_custom_metrics_plugin_map);
+  ASSERT_FALSE(result_or.ok());
+  EXPECT_EQ(result_or.status().code(), absl::StatusCode::kUnavailable);
+  EXPECT_EQ(result_or.status().message(), kExpectedErrorMessage);
+}
+
+TEST(AnalyzeNighthawkBenchmark, StoresNighthawkResultForSuccessfulMetricEvaluation) {
+  nighthawk::adaptive_load::AdaptiveLoadSessionSpec spec;
+  nighthawk::client::ExecutionResponse nighthawk_response = MakeNighthawkResponseWithSendRate(1.0);
+  absl::flat_hash_map<std::string, MetricsPluginPtr> name_to_custom_metrics_plugin_map;
+
+  MetricsEvaluatorImpl evaluator;
+  absl::StatusOr<BenchmarkResult> result_or = evaluator.AnalyzeNighthawkBenchmark(
+      nighthawk_response, spec, name_to_custom_metrics_plugin_map);
+  ASSERT_TRUE(result_or.ok());
+
+  EXPECT_TRUE(MessageDifferencer::Equivalent(result_or.value().nighthawk_service_output(),
+                                             nighthawk_response.output()));
+  EXPECT_EQ(result_or.value().nighthawk_service_output().DebugString(),
+            nighthawk_response.output().DebugString());
+}
+
+TEST(AnalyzeNighthawkBenchmark, StoresMetricValueForSuccessfulMetricEvaluation) {
+  nighthawk::adaptive_load::AdaptiveLoadSessionSpec spec;
+
+  const std::string kMetricName = "good-metric";
+  const double kExpectedValue = 123.0;
+
+  FakeMetricsPluginConfig metrics_plugin_config;
+  FakeMetricsPluginConfig::FakeMetric* fake_metric =
+      metrics_plugin_config.mutable_fake_metrics()->Add();
+  fake_metric->set_name(kMetricName);
+  fake_metric->set_value(kExpectedValue);
+
+  MetricSpec metric_spec;
+  metric_spec.set_metrics_plugin_name("nighthawk.fake_metrics_plugin");
+  metric_spec.set_metric_name(kMetricName);
+
+  *spec.mutable_informational_metric_specs()->Add() = metric_spec;
+
+  nighthawk::client::ExecutionResponse nighthawk_response = MakeNighthawkResponseWithSendRate(1.0);
+  absl::flat_hash_map<std::string, MetricsPluginPtr> name_to_custom_metrics_plugin_map;
+  name_to_custom_metrics_plugin_map["nighthawk.fake_metrics_plugin"] =
+      std::make_unique<FakeMetricsPlugin>(metrics_plugin_config);
+
+  MetricsEvaluatorImpl evaluator;
+  absl::StatusOr<BenchmarkResult> result_or = evaluator.AnalyzeNighthawkBenchmark(
+      nighthawk_response, spec, name_to_custom_metrics_plugin_map);
+  ASSERT_TRUE(result_or.ok());
+  ASSERT_GT(result_or.value().metric_evaluations().size(), 0);
+  EXPECT_EQ(result_or.value().metric_evaluations()[0].metric_value(), kExpectedValue);
+}
+
+TEST(AnalyzeNighthawkBenchmark, PropagatesErrorFromFailedMetricEvaluation) {
+  nighthawk::adaptive_load::AdaptiveLoadSessionSpec spec;
+
+  const std::string kMetricName = "bad-metric";
+  const std::string kExpectedStatusMessage = "artificial metric error";
+
+  FakeMetricsPluginConfig metrics_plugin_config;
+  FakeMetricsPluginConfig::FakeMetric* fake_metric =
+      metrics_plugin_config.mutable_fake_metrics()->Add();
+  fake_metric->set_name(kMetricName);
+  fake_metric->mutable_error_status()->set_code(
+      static_cast<int>(absl::StatusCode::kPermissionDenied));
+  fake_metric->mutable_error_status()->set_message(kExpectedStatusMessage);
+
+  MetricSpec metric_spec;
+  metric_spec.set_metrics_plugin_name("nighthawk.fake_metrics_plugin");
+  metric_spec.set_metric_name(kMetricName);
+  *spec.mutable_informational_metric_specs()->Add() = metric_spec;
+
+  nighthawk::client::ExecutionResponse nighthawk_response = MakeNighthawkResponseWithSendRate(1.0);
+  absl::flat_hash_map<std::string, MetricsPluginPtr> name_to_custom_metrics_plugin_map;
+  name_to_custom_metrics_plugin_map["nighthawk.fake_metrics_plugin"] =
+      std::make_unique<FakeMetricsPlugin>(metrics_plugin_config);
+
+  MetricsEvaluatorImpl evaluator;
+  absl::StatusOr<BenchmarkResult> result_or = evaluator.AnalyzeNighthawkBenchmark(
+      nighthawk_response, spec, name_to_custom_metrics_plugin_map);
+  ASSERT_FALSE(result_or.ok());
+  // All errors during evaluation are rolled up into a single InternalError.
+  EXPECT_EQ(result_or.status().code(), absl::StatusCode::kInternal);
+  EXPECT_THAT(result_or.status().message(), HasSubstr(kExpectedStatusMessage));
+}
+
+TEST(AnalyzeNighthawkBenchmark, UsesBuiltinMetricsPlugin) {
+  nighthawk::adaptive_load::AdaptiveLoadSessionSpec spec;
+
+  const std::string kMetricName = "send-rate";
+  const double kExpectedSendRate = 0.5;
+
+  MetricSpec metric_spec;
+  metric_spec.set_metrics_plugin_name("nighthawk.builtin");
+  metric_spec.set_metric_name(kMetricName);
+
+  *spec.mutable_informational_metric_specs()->Add() = metric_spec;
+
+  nighthawk::client::ExecutionResponse nighthawk_response =
+      MakeNighthawkResponseWithSendRate(kExpectedSendRate);
+  absl::flat_hash_map<std::string, MetricsPluginPtr> name_to_custom_metrics_plugin_map;
+
+  MetricsEvaluatorImpl evaluator;
+  absl::StatusOr<BenchmarkResult> result_or = evaluator.AnalyzeNighthawkBenchmark(
+      nighthawk_response, spec, name_to_custom_metrics_plugin_map);
+  ASSERT_TRUE(result_or.ok());
+  ASSERT_GT(result_or.value().metric_evaluations().size(), 0);
+  EXPECT_EQ(result_or.value().metric_evaluations()[0].metric_value(), kExpectedSendRate);
+}
+
+} // namespace
+} // namespace Nighthawk
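
A minimal usage sketch of the interface added above, from the perspective of the controller main loop described in metrics_evaluator.h. The function name `AnalyzeOneIteration` and the variables `response`, `spec`, and `plugin_map` are illustrative assumptions, not part of this change; ENVOY_LOG_MISC is Envoy's generic logging macro, available via the logger header already included by metrics_evaluator.h.

#include "adaptive_load/metrics_evaluator_impl.h"

namespace Nighthawk {

// Hypothetical helper: analyzes one Nighthawk Service result against the session spec.
// `plugin_map` holds the session-wide custom MetricsPlugins; the builtin plugin is
// instantiated internally by AnalyzeNighthawkBenchmark() for each benchmark.
absl::Status AnalyzeOneIteration(
    const nighthawk::client::ExecutionResponse& response,
    const nighthawk::adaptive_load::AdaptiveLoadSessionSpec& spec,
    const absl::flat_hash_map<std::string, MetricsPluginPtr>& plugin_map) {
  MetricsEvaluatorImpl evaluator;
  const absl::StatusOr<nighthawk::adaptive_load::BenchmarkResult> result_or =
      evaluator.AnalyzeNighthawkBenchmark(response, spec, plugin_map);
  if (!result_or.ok()) {
    // Either a propagated Nighthawk Service error or the rolled-up InternalError
    // covering all failed metric evaluations.
    return result_or.status();
  }
  for (const nighthawk::adaptive_load::MetricEvaluation& evaluation :
       result_or.value().metric_evaluations()) {
    // A nonzero weight marks a scored metric; weight 0.0 marks an informational one.
    ENVOY_LOG_MISC(info, "{}: value={} score={} weight={}", evaluation.metric_id(),
                   evaluation.metric_value(), evaluation.threshold_score(),
                   evaluation.weight());
  }
  return absl::OkStatus();
}

} // namespace Nighthawk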