[ML] Switching to new datafeed preview (elastic#101780)
* [ML] Switching to new datafeed preview

* fixing wizard test button

* adding schema validator

* fixing tests

* adding check for empty detectors list

Co-authored-by: Kibana Machine <[email protected]>
2 people authored and John Dorlus committed Jun 15, 2021
1 parent a3edf98 commit 01a3a70
Showing 11 changed files with 124 additions and 296 deletions.
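As the bullet points above say, the datafeed preview now goes through the jobs API provider's `datafeedPreview` method (see the last hunk below), which accepts either the ID of a saved datafeed or an unsaved job/datafeed pair. A minimal sketch of the two calling modes — the import paths and the placeholder datafeed ID are assumptions for illustration, not taken from this diff:

```ts
import { ml } from '../services/ml_api_service'; // illustrative import path
import type { Datafeed, Job } from '../../common/types/anomaly_detection_jobs'; // illustrative import path

async function previewBothWays(jobConfig: Job, datafeedConfig: Datafeed) {
  // Mode 1: preview a datafeed that has already been saved, by ID
  // (how the job list's Datafeed preview tab calls it after this commit).
  const saved = await ml.jobs.datafeedPreview('datafeed-example'); // placeholder ID

  // Mode 2: preview an unsaved job/datafeed pair, passing undefined for the ID
  // (how the job wizards and the custom URL test button call it after this commit).
  const unsaved = await ml.jobs.datafeedPreview(undefined, jobConfig, datafeedConfig);

  return { saved, unsaved };
}
```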
@@ -17,10 +17,8 @@ import { parseInterval } from '../../../../../common/util/parse_interval';
import { replaceTokensInUrlValue, isValidLabel } from '../../../util/custom_url_utils';
import { getIndexPatternIdFromName } from '../../../util/index_utils';
import { ml } from '../../../services/ml_api_service';
import { mlJobService } from '../../../services/job_service';
import { escapeForElasticsearchQuery } from '../../../util/string_utils';
import { getSavedObjectsClient, getGetUrlGenerator } from '../../../util/dependency_cache';
import { getProcessedFields } from '../../../components/data_grid';

export function getNewCustomUrlDefaults(job, dashboards, indexPatterns) {
// Returns the settings object in the format used by the custom URL editor
@@ -266,8 +264,7 @@ function buildAppStateQueryParam(queryFieldNames) {
// Builds the full URL for testing out a custom URL configuration, which
// may contain dollar delimited partition / influencer entity tokens and
// drilldown time range settings.
export function getTestUrl(job, customUrl) {
const urlValue = customUrl.url_value;
export async function getTestUrl(job, customUrl) {
const bucketSpanSecs = parseInterval(job.analysis_config.bucket_span).asSeconds();

// By default, return configured url_value. Look to substitute any dollar-delimited
@@ -289,64 +286,55 @@ export function getTestUrl(job, customUrl) {
sort: [{ record_score: { order: 'desc' } }],
};

return new Promise((resolve, reject) => {
ml.results
.anomalySearch(
{
body,
},
[job.job_id]
)
.then((resp) => {
if (resp.hits.total.value > 0) {
const record = resp.hits.hits[0]._source;
testUrl = replaceTokensInUrlValue(customUrl, bucketSpanSecs, record, 'timestamp');
resolve(testUrl);
} else {
// No anomalies yet for this job, so do a preview of the search
// configured in the job datafeed to obtain sample docs.
mlJobService.searchPreview(job).then((response) => {
let testDoc;
const docTimeFieldName = job.data_description.time_field;

// Handle datafeeds which use aggregations or documents.
if (response.aggregations) {
// Create a dummy object which contains the fields necessary to build the URL.
const firstBucket = response.aggregations.buckets.buckets[0];
testDoc = {
[docTimeFieldName]: firstBucket.key,
};

// Look for bucket aggregations which match the tokens in the URL.
urlValue.replace(/\$([^?&$\'"]{1,40})\$/g, (match, name) => {
if (name !== 'earliest' && name !== 'latest' && firstBucket[name] !== undefined) {
const tokenBuckets = firstBucket[name];
if (tokenBuckets.buckets) {
testDoc[name] = tokenBuckets.buckets[0].key;
}
}
});
} else {
if (response.hits.total.value > 0) {
testDoc = getProcessedFields(response.hits.hits[0].fields);
}
}

if (testDoc !== undefined) {
testUrl = replaceTokensInUrlValue(
customUrl,
bucketSpanSecs,
testDoc,
docTimeFieldName
);
}
let resp;
try {
resp = await ml.results.anomalySearch(
{
body,
},
[job.job_id]
);
} catch (error) {
// search may fail if the job doesn't already exist
// ignore this error as the outer function call will raise a toast
}

resolve(testUrl);
});
}
})
.catch((resp) => {
reject(resp);
});
});
if (resp && resp.hits.total.value > 0) {
const record = resp.hits.hits[0]._source;
testUrl = replaceTokensInUrlValue(customUrl, bucketSpanSecs, record, 'timestamp');
return testUrl;
} else {
// No anomalies yet for this job, so do a preview of the search
// configured in the job datafeed to obtain sample docs.

let { datafeed_config: datafeedConfig, ...jobConfig } = job;
try {
// attempt to load the non-combined job and datafeed so they can be used in the datafeed preview
const [{ jobs }, { datafeeds }] = await Promise.all([
ml.getJobs({ jobId: job.job_id }),
ml.getDatafeeds({ datafeedId: job.datafeed_config.datafeed_id }),
]);
datafeedConfig = datafeeds[0];
jobConfig = jobs[0];
} catch (error) {
// jobs may not exist as this might be called from the AD job wizards
// ignore this error as the outer function call will raise a toast
}

if (jobConfig === undefined || datafeedConfig === undefined) {
return testUrl;
}

const preview = await ml.jobs.datafeedPreview(undefined, jobConfig, datafeedConfig);

const docTimeFieldName = job.data_description.time_field;

// Create a dummy object which contains the fields necessary to build the URL.
const firstBucket = preview[0];
if (firstBucket !== undefined) {
testUrl = replaceTokensInUrlValue(customUrl, bucketSpanSecs, firstBucket, docTimeFieldName);
}

return testUrl;
}
}
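Since `getTestUrl` is now `async`, callers have to await it. A minimal sketch of a caller — the handler name, the `window.open` call, and the error handling are assumptions for illustration, not part of this commit:

```ts
import type { CombinedJob } from '../../common/types/anomaly_detection_jobs'; // path shortened for illustration

// Hypothetical "Test" button handler for the custom URL editor.
async function onTestCustomUrl(job: CombinedJob, customUrl: { url_value: string }) {
  try {
    // getTestUrl is now async, so the result must be awaited
    const testUrl = await getTestUrl(job, customUrl);
    window.open(testUrl, '_blank', 'noopener');
  } catch (error) {
    // the real editor surfaces this via a Kibana toast, as the inline comments above note
    console.error('Failed to build a test URL for this custom URL configuration', error);
  }
}
```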
@@ -10,7 +10,7 @@ import React, { Component } from 'react';

import { EuiSpacer, EuiCallOut, EuiLoadingSpinner } from '@elastic/eui';

import { mlJobService } from '../../../../services/job_service';
import { ml } from '../../../../services/ml_api_service';
import { checkPermission } from '../../../../capabilities/check_capabilities';
import { ML_DATA_PREVIEW_COUNT } from '../../../../../../common/util/job_utils';
import { MLJobEditor } from '../ml_job_editor';
@@ -88,8 +88,8 @@ DatafeedPreviewPane.propTypes = {
function updateDatafeedPreview(job, canPreviewDatafeed) {
return new Promise((resolve, reject) => {
if (canPreviewDatafeed) {
mlJobService
.getDatafeedPreview(job.datafeed_config.datafeed_id)
ml.jobs
.datafeedPreview(job.datafeed_config.datafeed_id)
.then((resp) => {
if (Array.isArray(resp)) {
resolve(JSON.stringify(resp.slice(0, ML_DATA_PREVIEW_COUNT), null, 2));
@@ -19,15 +19,15 @@ import {

import { CombinedJob } from '../../../../../../../../common/types/anomaly_detection_jobs';
import { MLJobEditor } from '../../../../../jobs_list/components/ml_job_editor';
import { mlJobService } from '../../../../../../services/job_service';
import { ML_DATA_PREVIEW_COUNT } from '../../../../../../../../common/util/job_utils';
import { isPopulatedObject } from '../../../../../../../../common/util/object_utils';
import { isMultiBucketAggregate } from '../../../../../../../../common/types/es_client';
import { useMlApiContext } from '../../../../../../contexts/kibana';

export const DatafeedPreview: FC<{
combinedJob: CombinedJob | null;
heightOffset?: number;
}> = ({ combinedJob, heightOffset = 0 }) => {
const {
jobs: { datafeedPreview },
} = useMlApiContext();
// the ace editor requires a fixed height
const editorHeight = useMemo(() => `${window.innerHeight - 230 - heightOffset}px`, [
heightOffset,
@@ -63,18 +63,17 @@ export const DatafeedPreview: FC<{

if (combinedJob.datafeed_config && combinedJob.datafeed_config.indices.length) {
try {
const resp = await mlJobService.searchPreview(combinedJob);
let data = resp.hits.hits;
// the first item under aggregations can be any name
if (isPopulatedObject(resp.aggregations)) {
const accessor = Object.keys(resp.aggregations)[0];
const aggregate = resp.aggregations[accessor];
if (isMultiBucketAggregate(aggregate)) {
data = aggregate.buckets.slice(0, ML_DATA_PREVIEW_COUNT);
}
const { datafeed_config: datafeed, ...job } = combinedJob;
if (job.analysis_config.detectors.length === 0) {
setPreviewJsonString(
i18n.translate('xpack.ml.newJob.wizard.datafeedPreviewFlyout.noDetectors', {
defaultMessage: 'No detectors configured',
})
);
} else {
const preview = await datafeedPreview(undefined, job, datafeed);
setPreviewJsonString(JSON.stringify(preview, null, 2));
}

setPreviewJsonString(JSON.stringify(data, null, 2));
} catch (error) {
setPreviewJsonString(JSON.stringify(error, null, 2));
}
@@ -41,7 +41,6 @@ declare interface JobService {
): Promise<any>;
createResultsUrl(jobId: string[], start: number, end: number, location: string): string;
getJobAndGroupIds(): Promise<ExistingJobsAndGroups>;
searchPreview(job: CombinedJob): Promise<estypes.SearchResponse<any>>;
getJob(jobId: string): CombinedJob;
loadJobsWrapper(): Promise<CombinedJob[]>;
}
9 changes: 0 additions & 9 deletions x-pack/plugins/ml/public/application/services/job_service.js
@@ -347,11 +347,6 @@ class JobService {
return job;
}

searchPreview(combinedJob) {
const { datafeed_config: datafeed, ...job } = combinedJob;
return ml.jobs.datafeedPreview(job, datafeed);
}

openJob(jobId) {
return ml.openJob({ jobId });
}
@@ -435,10 +430,6 @@ return datafeedId;
return datafeedId;
}

getDatafeedPreview(datafeedId) {
return ml.datafeedPreview({ datafeedId });
}

// get the list of job group ids as well as how many jobs are in each group
getJobGroups() {
const groups = [];
@@ -330,8 +330,8 @@ export const jobsApiProvider = (httpService: HttpService) => ({
});
},

datafeedPreview(job: Job, datafeed: Datafeed) {
const body = JSON.stringify({ job, datafeed });
datafeedPreview(datafeedId?: string, job?: Job, datafeed?: Datafeed) {
const body = JSON.stringify({ datafeedId, job, datafeed });
return httpService.http<{
total: number;
categories: Array<{ count?: number; category: Category }>;
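The commit message's "adding schema validator" bullet points at server-side validation of this looser `{ datafeedId, job, datafeed }` request body, which the diff above does not show. A plausible sketch with `@kbn/config-schema`, treating all three fields as optional to mirror the client call — the schema name and field types are assumptions, not the commit's actual validator:

```ts
import { schema } from '@kbn/config-schema';

// Assumed shape only: mirrors the { datafeedId, job, datafeed } body the client
// now sends, with every field optional. The commit's real validator may differ.
export const datafeedPreviewBodySchema = schema.object({
  datafeedId: schema.maybe(schema.string()),
  job: schema.maybe(schema.any()),
  datafeed: schema.maybe(schema.any()),
});
```

The route handler would still have to check that either a datafeed ID or a job/datafeed pair was actually supplied before calling Elasticsearch.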