diff --git a/docs/development/core/public/kibana-plugin-public.httperrorresponse.body.md b/docs/development/core/public/kibana-plugin-public.httperrorresponse.body.md
deleted file mode 100644
index 01f8d4c951465..0000000000000
--- a/docs/development/core/public/kibana-plugin-public.httperrorresponse.body.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpErrorResponse](./kibana-plugin-public.httperrorresponse.md) > [body](./kibana-plugin-public.httperrorresponse.body.md)
-
-## HttpErrorResponse.body property
-
-Signature:
-
-```typescript
-body?: HttpBody;
-```
diff --git a/docs/development/core/public/kibana-plugin-public.httperrorresponse.md b/docs/development/core/public/kibana-plugin-public.httperrorresponse.md
index 1955bb57c50bf..aa669df796a09 100644
--- a/docs/development/core/public/kibana-plugin-public.httperrorresponse.md
+++ b/docs/development/core/public/kibana-plugin-public.httperrorresponse.md
@@ -8,15 +8,12 @@
Signature:
```typescript
-export interface HttpErrorResponse
+export interface HttpErrorResponse extends HttpResponse
```
## Properties
| Property | Type | Description |
| --- | --- | --- |
-| [body](./kibana-plugin-public.httperrorresponse.body.md) | HttpBody
| |
| [error](./kibana-plugin-public.httperrorresponse.error.md) | Error | IHttpFetchError
| |
-| [request](./kibana-plugin-public.httperrorresponse.request.md) | Request
| |
-| [response](./kibana-plugin-public.httperrorresponse.response.md) | Response
| |
diff --git a/docs/development/core/public/kibana-plugin-public.httperrorresponse.request.md b/docs/development/core/public/kibana-plugin-public.httperrorresponse.request.md
deleted file mode 100644
index fcb33fc12fbf4..0000000000000
--- a/docs/development/core/public/kibana-plugin-public.httperrorresponse.request.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpErrorResponse](./kibana-plugin-public.httperrorresponse.md) > [request](./kibana-plugin-public.httperrorresponse.request.md)
-
-## HttpErrorResponse.request property
-
-Signature:
-
-```typescript
-request?: Request;
-```
diff --git a/docs/development/core/public/kibana-plugin-public.httperrorresponse.response.md b/docs/development/core/public/kibana-plugin-public.httperrorresponse.response.md
deleted file mode 100644
index e6c7f9675a1d7..0000000000000
--- a/docs/development/core/public/kibana-plugin-public.httperrorresponse.response.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpErrorResponse](./kibana-plugin-public.httperrorresponse.md) > [response](./kibana-plugin-public.httperrorresponse.response.md)
-
-## HttpErrorResponse.response property
-
-Signature:
-
-```typescript
-response?: Response;
-```
diff --git a/docs/development/core/public/kibana-plugin-public.httpinterceptor.response.md b/docs/development/core/public/kibana-plugin-public.httpinterceptor.response.md
index 6f4205f3362fe..ca43ea31f0e2e 100644
--- a/docs/development/core/public/kibana-plugin-public.httpinterceptor.response.md
+++ b/docs/development/core/public/kibana-plugin-public.httpinterceptor.response.md
@@ -9,7 +9,7 @@ Define an interceptor to be executed after a response is received.
Signature:
```typescript
-response?(httpResponse: HttpResponse, controller: IHttpInterceptController): Promise | HttpResponse | void;
+response?(httpResponse: HttpResponse, controller: IHttpInterceptController): Promise | InterceptedHttpResponse | void;
```
## Parameters
@@ -21,5 +21,5 @@ response?(httpResponse: HttpResponse, controller: IHttpInterceptController): Pro
Returns:
-`Promise | HttpResponse | void`
+`Promise | InterceptedHttpResponse | void`
diff --git a/docs/development/core/public/kibana-plugin-public.httpinterceptor.responseerror.md b/docs/development/core/public/kibana-plugin-public.httpinterceptor.responseerror.md
index 1e7cd5e61186e..b8abd50e45461 100644
--- a/docs/development/core/public/kibana-plugin-public.httpinterceptor.responseerror.md
+++ b/docs/development/core/public/kibana-plugin-public.httpinterceptor.responseerror.md
@@ -9,7 +9,7 @@ Define an interceptor to be executed if a response interceptor throws an error o
Signature:
```typescript
-responseError?(httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptController): Promise | HttpResponse | void;
+responseError?(httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptController): Promise | InterceptedHttpResponse | void;
```
## Parameters
@@ -21,5 +21,5 @@ responseError?(httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptC
Returns:
-`Promise | HttpResponse | void`
+`Promise | InterceptedHttpResponse | void`
diff --git a/docs/development/core/public/kibana-plugin-public.httpresponse.body.md b/docs/development/core/public/kibana-plugin-public.httpresponse.body.md
deleted file mode 100644
index c590c9ec49d1b..0000000000000
--- a/docs/development/core/public/kibana-plugin-public.httpresponse.body.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpResponse](./kibana-plugin-public.httpresponse.md) > [body](./kibana-plugin-public.httpresponse.body.md)
-
-## HttpResponse.body property
-
-Signature:
-
-```typescript
-body?: HttpBody;
-```
diff --git a/docs/development/core/public/kibana-plugin-public.httpresponse.md b/docs/development/core/public/kibana-plugin-public.httpresponse.md
index b2ec48fd4d6b5..e44515cc8a1e0 100644
--- a/docs/development/core/public/kibana-plugin-public.httpresponse.md
+++ b/docs/development/core/public/kibana-plugin-public.httpresponse.md
@@ -8,14 +8,12 @@
Signature:
```typescript
-export interface HttpResponse
+export interface HttpResponse extends InterceptedHttpResponse
```
## Properties
| Property | Type | Description |
| --- | --- | --- |
-| [body](./kibana-plugin-public.httpresponse.body.md) | HttpBody
| |
-| [request](./kibana-plugin-public.httpresponse.request.md) | Request
| |
-| [response](./kibana-plugin-public.httpresponse.response.md) | Response
| |
+| [request](./kibana-plugin-public.httpresponse.request.md) | Readonly<Request>
| |
diff --git a/docs/development/core/public/kibana-plugin-public.httpresponse.request.md b/docs/development/core/public/kibana-plugin-public.httpresponse.request.md
index 4cb1ded29152e..84ab1bc7af853 100644
--- a/docs/development/core/public/kibana-plugin-public.httpresponse.request.md
+++ b/docs/development/core/public/kibana-plugin-public.httpresponse.request.md
@@ -7,5 +7,5 @@
Signature:
```typescript
-request?: Request;
+request: Readonly;
```
diff --git a/docs/development/core/public/kibana-plugin-public.httpresponse.response.md b/docs/development/core/public/kibana-plugin-public.httpresponse.response.md
deleted file mode 100644
index 44c8eb4295f1c..0000000000000
--- a/docs/development/core/public/kibana-plugin-public.httpresponse.response.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpResponse](./kibana-plugin-public.httpresponse.md) > [response](./kibana-plugin-public.httpresponse.response.md)
-
-## HttpResponse.response property
-
-Signature:
-
-```typescript
-response?: Response;
-```
diff --git a/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.body.md b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.body.md
new file mode 100644
index 0000000000000..fc6d34c0b74f2
--- /dev/null
+++ b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.body.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [InterceptedHttpResponse](./kibana-plugin-public.interceptedhttpresponse.md) > [body](./kibana-plugin-public.interceptedhttpresponse.body.md)
+
+## InterceptedHttpResponse.body property
+
+Signature:
+
+```typescript
+body?: HttpBody;
+```
diff --git a/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.md b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.md
new file mode 100644
index 0000000000000..c4a7f4d6b2afa
--- /dev/null
+++ b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.md
@@ -0,0 +1,20 @@
+
+
+[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [InterceptedHttpResponse](./kibana-plugin-public.interceptedhttpresponse.md)
+
+## InterceptedHttpResponse interface
+
+
+Signature:
+
+```typescript
+export interface InterceptedHttpResponse
+```
+
+## Properties
+
+| Property | Type | Description |
+| --- | --- | --- |
+| [body](./kibana-plugin-public.interceptedhttpresponse.body.md) | HttpBody
| |
+| [response](./kibana-plugin-public.interceptedhttpresponse.response.md) | Response
| |
+
diff --git a/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.response.md b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.response.md
new file mode 100644
index 0000000000000..dceb55113ee78
--- /dev/null
+++ b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.response.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [InterceptedHttpResponse](./kibana-plugin-public.interceptedhttpresponse.md) > [response](./kibana-plugin-public.interceptedhttpresponse.response.md)
+
+## InterceptedHttpResponse.response property
+
+Signature:
+
+```typescript
+response?: Response;
+```
diff --git a/docs/development/core/public/kibana-plugin-public.md b/docs/development/core/public/kibana-plugin-public.md
index eeea889e262b3..e787621c3aaf9 100644
--- a/docs/development/core/public/kibana-plugin-public.md
+++ b/docs/development/core/public/kibana-plugin-public.md
@@ -61,6 +61,7 @@ The plugin integrates with the core system via lifecycle events: `setup`
| [IContextContainer](./kibana-plugin-public.icontextcontainer.md) | An object that handles registration of context providers and configuring handlers with context. |
| [IHttpFetchError](./kibana-plugin-public.ihttpfetcherror.md) | |
| [IHttpInterceptController](./kibana-plugin-public.ihttpinterceptcontroller.md) | Used to halt a request Promise chain in a [HttpInterceptor](./kibana-plugin-public.httpinterceptor.md). |
+| [InterceptedHttpResponse](./kibana-plugin-public.interceptedhttpresponse.md) | |
| [LegacyCoreSetup](./kibana-plugin-public.legacycoresetup.md) | Setup interface exposed to the legacy platform via the ui/new_platform
module. |
| [LegacyCoreStart](./kibana-plugin-public.legacycorestart.md) | Start interface exposed to the legacy platform via the ui/new_platform
module. |
| [LegacyNavLink](./kibana-plugin-public.legacynavlink.md) | |
diff --git a/package.json b/package.json
index 674092c95161a..9bbe9a48a6c53 100644
--- a/package.json
+++ b/package.json
@@ -171,7 +171,7 @@
"hapi": "^17.5.3",
"hapi-auth-cookie": "^9.0.0",
"history": "^4.9.0",
- "hjson": "3.1.2",
+ "hjson": "3.2.0",
"hoek": "^5.0.4",
"http-proxy-agent": "^2.1.0",
"https-proxy-agent": "^2.2.2",
@@ -443,7 +443,7 @@
"strip-ansi": "^3.0.1",
"supertest": "^3.1.0",
"supertest-as-promised": "^4.0.2",
- "tree-kill": "^1.1.0",
+ "tree-kill": "^1.2.1",
"typescript": "3.5.3",
"typings-tester": "^0.3.2",
"vinyl-fs": "^3.0.3",
diff --git a/packages/kbn-dev-utils/package.json b/packages/kbn-dev-utils/package.json
index 2e7e4c5500a3c..5c64be294f707 100644
--- a/packages/kbn-dev-utils/package.json
+++ b/packages/kbn-dev-utils/package.json
@@ -17,7 +17,7 @@
"getopts": "^2.2.5",
"moment": "^2.20.1",
"rxjs": "^6.2.1",
- "tree-kill": "^1.2.0",
+ "tree-kill": "^1.2.1",
"tslib": "^1.9.3"
},
"devDependencies": {
diff --git a/packages/kbn-dev-utils/src/proc_runner/proc.ts b/packages/kbn-dev-utils/src/proc_runner/proc.ts
index 3b7d595e4b8cf..f29fb5f4b17f6 100644
--- a/packages/kbn-dev-utils/src/proc_runner/proc.ts
+++ b/packages/kbn-dev-utils/src/proc_runner/proc.ts
@@ -26,7 +26,7 @@ import chalk from 'chalk';
import treeKill from 'tree-kill';
import { promisify } from 'util';
-const treeKillAsync = promisify(treeKill);
+const treeKillAsync = promisify((...args: [number, string, any]) => treeKill(...args));
import { ToolingLog } from '../tooling_log';
import { observeLines } from './observe_lines';
diff --git a/packages/kbn-es-query/src/es_query/__tests__/from_filters.js b/packages/kbn-es-query/src/es_query/__tests__/from_filters.js
index 59e5f4d6faf8a..676992e4dddc8 100644
--- a/packages/kbn-es-query/src/es_query/__tests__/from_filters.js
+++ b/packages/kbn-es-query/src/es_query/__tests__/from_filters.js
@@ -55,6 +55,32 @@ describe('build query', function () {
expect(result.filter).to.eql(expectedESQueries);
});
+ it('should remove disabled filters', function () {
+ const filters = [
+ {
+ match_all: {},
+ meta: { type: 'match_all', negate: true, disabled: true },
+ },
+ ];
+
+ const expectedESQueries = [];
+
+ const result = buildQueryFromFilters(filters);
+
+ expect(result.must_not).to.eql(expectedESQueries);
+ });
+
+ it('should remove falsy filters', function () {
+ const filters = [null, undefined];
+
+ const expectedESQueries = [];
+
+ const result = buildQueryFromFilters(filters);
+
+ expect(result.must_not).to.eql(expectedESQueries);
+ expect(result.must).to.eql(expectedESQueries);
+ });
+
it('should place negated filters in the must_not clause', function () {
const filters = [
{
diff --git a/packages/kbn-es-query/src/es_query/from_filters.js b/packages/kbn-es-query/src/es_query/from_filters.js
index b8193b7469a20..10f9cf82fc972 100644
--- a/packages/kbn-es-query/src/es_query/from_filters.js
+++ b/packages/kbn-es-query/src/es_query/from_filters.js
@@ -60,6 +60,7 @@ const cleanFilter = function (filter) {
};
export function buildQueryFromFilters(filters = [], indexPattern, ignoreFilterIfFieldNotInIndex) {
+ filters = filters.filter(filter => filter && !_.get(filter, ['meta', 'disabled']));
return {
must: [],
filter: filters
diff --git a/packages/kbn-es/package.json b/packages/kbn-es/package.json
index 5521d57c22e86..5280c671450fa 100644
--- a/packages/kbn-es/package.json
+++ b/packages/kbn-es/package.json
@@ -17,7 +17,7 @@
"node-fetch": "^2.6.0",
"simple-git": "^1.91.0",
"tar-fs": "^1.16.3",
- "tree-kill": "^1.1.0",
+ "tree-kill": "^1.2.1",
"yauzl": "^2.10.0"
}
}
diff --git a/src/core/public/chrome/ui/header/header.tsx b/src/core/public/chrome/ui/header/header.tsx
index f24b0ed1681aa..4e73f49527856 100644
--- a/src/core/public/chrome/ui/header/header.tsx
+++ b/src/core/public/chrome/ui/header/header.tsx
@@ -406,12 +406,26 @@ class HeaderUI extends Component {
data-test-subj="navDrawer"
isLocked={isLocked}
onIsLockedUpdate={onIsLockedUpdate}
+ aria-label={i18n.translate('core.ui.primaryNav.screenReaderLabel', {
+ defaultMessage: 'Primary',
+ })}
>
-
-
+ <>
+
-
-
+
+ >
);
diff --git a/src/core/public/http/http_service.test.ts b/src/core/public/http/http_service.test.ts
index dddd2cc5ec36f..13906b91ed8df 100644
--- a/src/core/public/http/http_service.test.ts
+++ b/src/core/public/http/http_service.test.ts
@@ -24,6 +24,7 @@ import fetchMock from 'fetch-mock/es5/client';
import { readFileSync } from 'fs';
import { join } from 'path';
import { setup, SetupTap } from '../../../test_utils/public/http_test_setup';
+import { HttpResponse } from './types';
function delay(duration: number) {
return new Promise(r => setTimeout(r, duration));
@@ -394,12 +395,12 @@ describe('interception', () => {
const unusedSpy = jest.fn();
- http.intercept({ response: unusedSpy });
http.intercept({
responseError(response, controller) {
controller.halt();
},
});
+ http.intercept({ response: unusedSpy, responseError: unusedSpy });
http.post('/my/path').then(unusedSpy, unusedSpy);
await delay(1000);
@@ -416,21 +417,21 @@ describe('interception', () => {
request: unusedSpy,
requestError: usedSpy,
response: unusedSpy,
- responseError: usedSpy,
+ responseError: unusedSpy,
});
http.intercept({
request() {
throw new Error('Interception Error');
},
response: unusedSpy,
- responseError: usedSpy,
+ responseError: unusedSpy,
});
- http.intercept({ request: usedSpy, response: unusedSpy, responseError: usedSpy });
+ http.intercept({ request: usedSpy, response: unusedSpy, responseError: unusedSpy });
await expect(http.fetch('/my/path')).rejects.toThrow(/Interception Error/);
expect(fetchMock.called()).toBe(false);
expect(unusedSpy).toHaveBeenCalledTimes(0);
- expect(usedSpy).toHaveBeenCalledTimes(5);
+ expect(usedSpy).toHaveBeenCalledTimes(2);
});
it('should succeed if request throws but caught by interceptor', async () => {
@@ -458,26 +459,76 @@ describe('interception', () => {
expect(usedSpy).toHaveBeenCalledTimes(4);
});
- describe('request availability during interception', () => {
- it('should not be available to responseError when request throws', async () => {
- expect.assertions(3);
+ it('should accumulate request information', async () => {
+ const routes = ['alpha', 'beta', 'gamma'];
+ const createRequest = jest.fn(
+ (request: Request) => new Request(`/api/${routes.shift()}`, request)
+ );
- let spiedRequest: Request | undefined;
+ http.intercept({
+ request: createRequest,
+ });
+ http.intercept({
+ requestError(httpErrorRequest) {
+ return httpErrorRequest.request;
+ },
+ });
+ http.intercept({
+ request(request) {
+ throw new Error('Invalid');
+ },
+ });
+ http.intercept({
+ request: createRequest,
+ });
+ http.intercept({
+ request: createRequest,
+ });
- http.intercept({
- request() {
- throw new Error('Internal Server Error');
- },
- responseError({ request }) {
- spiedRequest = request;
- },
- });
+ await expect(http.fetch('/my/route')).resolves.toEqual({ foo: 'bar' });
+ expect(fetchMock.called()).toBe(true);
+ expect(routes.length).toBe(0);
+ expect(createRequest.mock.calls[0][0].url).toContain('/my/route');
+ expect(createRequest.mock.calls[1][0].url).toContain('/api/alpha');
+ expect(createRequest.mock.calls[2][0].url).toContain('/api/beta');
+ expect(fetchMock.lastCall()!.request.url).toContain('/api/gamma');
+ });
- await expect(http.fetch('/my/path')).rejects.toThrow();
- expect(fetchMock.called()).toBe(false);
- expect(spiedRequest).toBeUndefined();
+ it('should accumulate response information', async () => {
+ const bodies = ['alpha', 'beta', 'gamma'];
+ const createResponse = jest.fn((httpResponse: HttpResponse) => ({
+ body: bodies.shift(),
+ }));
+
+ http.intercept({
+ response: createResponse,
+ });
+ http.intercept({
+ response: createResponse,
});
+ http.intercept({
+ response(httpResponse) {
+ throw new Error('Invalid');
+ },
+ });
+ http.intercept({
+ responseError({ error, ...httpResponse }) {
+ return httpResponse;
+ },
+ });
+ http.intercept({
+ response: createResponse,
+ });
+
+ await expect(http.fetch('/my/route')).resolves.toEqual('gamma');
+ expect(fetchMock.called()).toBe(true);
+ expect(bodies.length).toBe(0);
+ expect(createResponse.mock.calls[0][0].body).toEqual({ foo: 'bar' });
+ expect(createResponse.mock.calls[1][0].body).toBe('alpha');
+ expect(createResponse.mock.calls[2][0].body).toBe('beta');
+ });
+ describe('request availability during interception', () => {
it('should be available to responseError when response throws', async () => {
let spiedRequest: Request | undefined;
@@ -514,22 +565,6 @@ describe('interception', () => {
await expect(http.fetch('/my/path')).rejects.toThrow();
expect(spiedResponse).toBeDefined();
});
-
- it('should not be available to responseError when request throws', async () => {
- let spiedResponse: Response | undefined;
-
- http.intercept({
- request() {
- throw new Error('Internal Server Error');
- },
- responseError({ response }) {
- spiedResponse = response;
- },
- });
-
- await expect(http.fetch('/my/path')).rejects.toThrow();
- expect(spiedResponse).toBeUndefined();
- });
});
it('should actually halt request interceptors in reverse order', async () => {
diff --git a/src/core/public/http/http_setup.ts b/src/core/public/http/http_setup.ts
index 5ca3b23c5a69c..a10358926de1f 100644
--- a/src/core/public/http/http_setup.ts
+++ b/src/core/public/http/http_setup.ts
@@ -110,15 +110,14 @@ export const setup = (
(promise, interceptor) =>
promise.then(
async (current: Request) => {
+ next = current;
checkHalt(controller);
if (!interceptor.request) {
return current;
}
- next = (await interceptor.request(current, controller)) || current;
-
- return next;
+ return (await interceptor.request(current, controller)) || current;
},
async error => {
checkHalt(controller, error);
@@ -155,17 +154,21 @@ export const setup = (
(promise, interceptor) =>
promise.then(
async httpResponse => {
+ current = httpResponse;
checkHalt(controller);
if (!interceptor.response) {
return httpResponse;
}
- current = (await interceptor.response(httpResponse, controller)) || httpResponse;
-
- return current;
+ return {
+ ...httpResponse,
+ ...((await interceptor.response(httpResponse, controller)) || {}),
+ };
},
async error => {
+ const request = error.request || (current && current.request);
+
checkHalt(controller, error);
if (!interceptor.responseError) {
@@ -176,7 +179,7 @@ export const setup = (
const next = await interceptor.responseError(
{
error,
- request: error.request || (current && current.request),
+ request,
response: error.response || (current && current.response),
body: error.body || (current && current.body),
},
@@ -189,17 +192,14 @@ export const setup = (
throw error;
}
- return next;
+ return { ...next, request };
} catch (err) {
checkHalt(controller, err);
throw err;
}
}
),
- responsePromise.then(httpResponse => {
- current = httpResponse;
- return httpResponse;
- })
+ responsePromise
);
return finalHttpResponse.body;
@@ -249,18 +249,23 @@ export const setup = (
// We wrap the interception in a separate promise to ensure that when
// a halt is called we do not resolve or reject, halting handling of the promise.
return new Promise(async (resolve, reject) => {
- try {
- const value = await interceptResponse(
- interceptRequest(initialRequest, controller).then(fetcher),
- controller
- );
-
- resolve(value);
- } catch (err) {
+ function rejectIfNotHalted(err: any) {
if (!(err instanceof HttpInterceptHaltError)) {
reject(err);
}
}
+
+ try {
+ const request = await interceptRequest(initialRequest, controller);
+
+ try {
+ resolve(await interceptResponse(fetcher(request), controller));
+ } catch (err) {
+ rejectIfNotHalted(err);
+ }
+ } catch (err) {
+ rejectIfNotHalted(err);
+ }
});
}
diff --git a/src/core/public/http/types.ts b/src/core/public/http/types.ts
index 793d03c6fde80..96500d566b3e5 100644
--- a/src/core/public/http/types.ts
+++ b/src/core/public/http/types.ts
@@ -226,12 +226,16 @@ export type HttpHandler = (path: string, options?: HttpFetchOptions) => Promise<
export type HttpBody = BodyInit | null | any;
/** @public */
-export interface HttpResponse {
- request?: Request;
+export interface InterceptedHttpResponse {
response?: Response;
body?: HttpBody;
}
+/** @public */
+export interface HttpResponse extends InterceptedHttpResponse {
+ request: Readonly;
+}
+
/** @public */
export interface IHttpFetchError extends Error {
readonly request: Request;
@@ -248,11 +252,8 @@ export interface IHttpFetchError extends Error {
}
/** @public */
-export interface HttpErrorResponse {
+export interface HttpErrorResponse extends HttpResponse {
error: Error | IHttpFetchError;
- request?: Request;
- response?: Response;
- body?: HttpBody;
}
/** @public */
export interface HttpErrorRequest {
@@ -295,7 +296,7 @@ export interface HttpInterceptor {
response?(
httpResponse: HttpResponse,
controller: IHttpInterceptController
- ): Promise | HttpResponse | void;
+ ): Promise | InterceptedHttpResponse | void;
/**
* Define an interceptor to be executed if a response interceptor throws an error or returns a rejected Promise.
@@ -305,7 +306,7 @@ export interface HttpInterceptor {
responseError?(
httpErrorResponse: HttpErrorResponse,
controller: IHttpInterceptController
- ): Promise | HttpResponse | void;
+ ): Promise | InterceptedHttpResponse | void;
}
/**
diff --git a/src/core/public/index.ts b/src/core/public/index.ts
index 054012fb82761..f1085b86395b8 100644
--- a/src/core/public/index.ts
+++ b/src/core/public/index.ts
@@ -113,6 +113,7 @@ export {
IBasePath,
IHttpInterceptController,
IHttpFetchError,
+ InterceptedHttpResponse,
} from './http';
export {
diff --git a/src/core/public/public.api.md b/src/core/public/public.api.md
index 2eae59b709867..ec8a22fe5953c 100644
--- a/src/core/public/public.api.md
+++ b/src/core/public/public.api.md
@@ -426,15 +426,9 @@ export interface HttpErrorRequest {
}
// @public (undocumented)
-export interface HttpErrorResponse {
- // (undocumented)
- body?: HttpBody;
+export interface HttpErrorResponse extends HttpResponse {
// (undocumented)
error: Error | IHttpFetchError;
- // (undocumented)
- request?: Request;
- // (undocumented)
- response?: Response;
}
// @public
@@ -463,8 +457,8 @@ export interface HttpHeadersInit {
export interface HttpInterceptor {
request?(request: Request, controller: IHttpInterceptController): Promise | Request | void;
requestError?(httpErrorRequest: HttpErrorRequest, controller: IHttpInterceptController): Promise | Request | void;
- response?(httpResponse: HttpResponse, controller: IHttpInterceptController): Promise | HttpResponse | void;
- responseError?(httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptController): Promise | HttpResponse | void;
+ response?(httpResponse: HttpResponse, controller: IHttpInterceptController): Promise | InterceptedHttpResponse | void;
+ responseError?(httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptController): Promise | InterceptedHttpResponse | void;
}
// @public
@@ -486,13 +480,9 @@ export interface HttpRequestInit {
}
// @public (undocumented)
-export interface HttpResponse {
- // (undocumented)
- body?: HttpBody;
- // (undocumented)
- request?: Request;
+export interface HttpResponse extends InterceptedHttpResponse {
// (undocumented)
- response?: Response;
+ request: Readonly;
}
// @public (undocumented)
@@ -563,6 +553,14 @@ export interface IHttpInterceptController {
halted: boolean;
}
+// @public (undocumented)
+export interface InterceptedHttpResponse {
+ // (undocumented)
+ body?: HttpBody;
+ // (undocumented)
+ response?: Response;
+}
+
// @public
export type IToasts = Pick;
diff --git a/src/legacy/core_plugins/input_control_vis/public/control/list_control_factory.js b/src/legacy/core_plugins/input_control_vis/public/control/list_control_factory.js
index 7d20d07ba05f6..cd4eac04df0e2 100644
--- a/src/legacy/core_plugins/input_control_vis/public/control/list_control_factory.js
+++ b/src/legacy/core_plugins/input_control_vis/public/control/list_control_factory.js
@@ -123,12 +123,12 @@ class ListControl extends Control {
this.useTimeFilter,
ancestorFilters
);
- this.abortController.signal.addEventListener('abort', () => searchSource.cancelQueued());
+ const abortSignal = this.abortController.signal;
this.lastQuery = query;
let resp;
try {
- resp = await searchSource.fetch();
+ resp = await searchSource.fetch({ abortSignal });
} catch(error) {
// If the fetch was aborted then no need to surface this error in the UI
if (error.name === 'AbortError') return;
diff --git a/src/legacy/core_plugins/input_control_vis/public/control/range_control_factory.js b/src/legacy/core_plugins/input_control_vis/public/control/range_control_factory.js
index cb1c3111addf5..7febe228d614c 100644
--- a/src/legacy/core_plugins/input_control_vis/public/control/range_control_factory.js
+++ b/src/legacy/core_plugins/input_control_vis/public/control/range_control_factory.js
@@ -66,11 +66,11 @@ class RangeControl extends Control {
const aggs = minMaxAgg(indexPattern.fields.getByName(fieldName));
const searchSource = createSearchSource(this.kbnApi, null, indexPattern, aggs, this.useTimeFilter);
- this.abortController.signal.addEventListener('abort', () => searchSource.cancelQueued());
+ const abortSignal = this.abortController.signal;
let resp;
try {
- resp = await searchSource.fetch();
+ resp = await searchSource.fetch({ abortSignal });
} catch(error) {
// If the fetch was aborted then no need to surface this error in the UI
if (error.name === 'AbortError') return;
diff --git a/src/legacy/core_plugins/interpreter/public/functions/esaggs.ts b/src/legacy/core_plugins/interpreter/public/functions/esaggs.ts
index 071861548a055..6fcfde0a5b06b 100644
--- a/src/legacy/core_plugins/interpreter/public/functions/esaggs.ts
+++ b/src/legacy/core_plugins/interpreter/public/functions/esaggs.ts
@@ -29,7 +29,6 @@ import chrome from 'ui/chrome';
import { TimeRange } from 'src/plugins/data/public';
import { SearchSource } from '../../../../ui/public/courier/search_source';
// @ts-ignore
-import { SearchSourceProvider } from '../../../../ui/public/courier/search_source';
import { FilterBarQueryFilterProvider } from '../../../../ui/public/filter_manager/query_filter';
import { buildTabularInspectorData } from '../../../../ui/public/inspector/build_tabular_inspector_data';
@@ -100,8 +99,8 @@ const handleCourierRequest = async ({
return aggs.toDsl(metricsAtAllLevels);
});
- requestSearchSource.onRequestStart((paramSearchSource: SearchSource, searchRequest: unknown) => {
- return aggs.onSearchRequestStart(paramSearchSource, searchRequest);
+ requestSearchSource.onRequestStart((paramSearchSource: SearchSource, options: any) => {
+ return aggs.onSearchRequestStart(paramSearchSource, options);
});
if (timeRange) {
@@ -118,7 +117,7 @@ const handleCourierRequest = async ({
const queryHash = calculateObjectHash(reqBody);
// We only need to reexecute the query, if forceFetch was true or the hash of the request body has changed
// since the last request
- const shouldQuery = forceFetch || searchSource.lastQuery !== queryHash;
+ const shouldQuery = forceFetch || (searchSource as any).lastQuery !== queryHash;
if (shouldQuery) {
inspectorAdapters.requests.reset();
@@ -139,18 +138,13 @@ const handleCourierRequest = async ({
request.stats(getRequestInspectorStats(requestSearchSource));
try {
- // Abort any in-progress requests before fetching again
- if (abortSignal) {
- abortSignal.addEventListener('abort', () => requestSearchSource.cancelQueued());
- }
-
- const response = await requestSearchSource.fetch();
+ const response = await requestSearchSource.fetch({ abortSignal });
- searchSource.lastQuery = queryHash;
+ (searchSource as any).lastQuery = queryHash;
request.stats(getResponseInspectorStats(searchSource, response)).ok({ json: response });
- searchSource.rawResponse = response;
+ (searchSource as any).rawResponse = response;
} catch (e) {
// Log any error during request to the inspector
request.error({ json: e });
@@ -166,7 +160,7 @@ const handleCourierRequest = async ({
// Note that rawResponse is not deeply cloned here, so downstream applications using courier
// must take care not to mutate it, or it could have unintended side effects, e.g. displaying
// response data incorrectly in the inspector.
- let resp = searchSource.rawResponse;
+ let resp = (searchSource as any).rawResponse;
for (const agg of aggs.aggs) {
if (has(agg, 'type.postFlightRequest')) {
resp = await agg.type.postFlightRequest(
@@ -180,7 +174,7 @@ const handleCourierRequest = async ({
}
}
- searchSource.finalResponse = resp;
+ (searchSource as any).finalResponse = resp;
const parsedTimeRange = timeRange ? getTime(aggs.indexPattern, timeRange) : null;
const tabifyParams = {
@@ -191,23 +185,24 @@ const handleCourierRequest = async ({
const tabifyCacheHash = calculateObjectHash({ tabifyAggs: aggs, ...tabifyParams });
// We only need to reexecute tabify, if either we did a new request or some input params to tabify changed
- const shouldCalculateNewTabify = shouldQuery || searchSource.lastTabifyHash !== tabifyCacheHash;
+ const shouldCalculateNewTabify =
+ shouldQuery || (searchSource as any).lastTabifyHash !== tabifyCacheHash;
if (shouldCalculateNewTabify) {
- searchSource.lastTabifyHash = tabifyCacheHash;
- searchSource.tabifiedResponse = tabifyAggResponse(
+ (searchSource as any).lastTabifyHash = tabifyCacheHash;
+ (searchSource as any).tabifiedResponse = tabifyAggResponse(
aggs,
- searchSource.finalResponse,
+ (searchSource as any).finalResponse,
tabifyParams
);
}
inspectorAdapters.data.setTabularLoader(
- () => buildTabularInspectorData(searchSource.tabifiedResponse, queryFilter),
+ () => buildTabularInspectorData((searchSource as any).tabifiedResponse, queryFilter),
{ returnsFormattedValues: true }
);
- return searchSource.tabifiedResponse;
+ return (searchSource as any).tabifiedResponse;
};
export const esaggs = (): ExpressionFunction => ({
@@ -249,7 +244,6 @@ export const esaggs = (): ExpressionFunction ({
const { visData, visConfig, params } = config;
const visType = config.visType || visConfig.type;
const $injector = await chrome.dangerouslyGetActiveInjector();
+ const $rootScope = $injector.get('$rootScope') as any;
const Private = $injector.get('Private') as any;
const Vis = Private(VisProvider);
if (handlers.vis) {
// special case in visualize, we need to render first (without executing the expression), for maps to work
if (visConfig) {
- handlers.vis.setCurrentState({ type: visType, params: visConfig });
+ $rootScope.$apply(() => {
+ handlers.vis.setCurrentState({ type: visType, params: visConfig });
+ });
}
} else {
handlers.vis = new Vis({
diff --git a/src/legacy/core_plugins/kibana/public/context/api/__tests__/_stubs.js b/src/legacy/core_plugins/kibana/public/context/api/__tests__/_stubs.js
index b93cc8e936fd3..ecb22b20e4d86 100644
--- a/src/legacy/core_plugins/kibana/public/context/api/__tests__/_stubs.js
+++ b/src/legacy/core_plugins/kibana/public/context/api/__tests__/_stubs.js
@@ -19,6 +19,7 @@
import sinon from 'sinon';
import moment from 'moment';
+import { SearchSource } from 'ui/courier';
export function createIndexPatternsStub() {
return {
@@ -31,7 +32,10 @@ export function createIndexPatternsStub() {
};
}
-export function createSearchSourceStubProvider(hits, timeField) {
+/**
+ * A stubbed search source with a `fetch` method that returns all of `_stubHits`.
+ */
+export function createSearchSourceStub(hits, timeField) {
const searchSourceStub = {
_stubHits: hits,
_stubTimeField: timeField,
@@ -41,13 +45,37 @@ export function createSearchSourceStubProvider(hits, timeField) {
}),
};
- searchSourceStub.setParent = sinon.stub().returns(searchSourceStub);
- searchSourceStub.setField = sinon.stub().returns(searchSourceStub);
- searchSourceStub.getField = sinon.spy(key => {
+ searchSourceStub.setParent = sinon.stub(SearchSource.prototype, 'setParent').returns(searchSourceStub);
+ searchSourceStub.setField = sinon.stub(SearchSource.prototype, 'setField').returns(searchSourceStub);
+ searchSourceStub.getField = sinon.stub(SearchSource.prototype, 'getField').callsFake(key => {
const previousSetCall = searchSourceStub.setField.withArgs(key).lastCall;
return previousSetCall ? previousSetCall.args[1] : null;
});
- searchSourceStub.fetch = sinon.spy(() => {
+ searchSourceStub.fetch = sinon.stub(SearchSource.prototype, 'fetch').callsFake(() => Promise.resolve({
+ hits: {
+ hits: searchSourceStub._stubHits,
+ total: searchSourceStub._stubHits.length,
+ },
+ }));
+
+ searchSourceStub._restore = () => {
+ searchSourceStub.setParent.restore();
+ searchSourceStub.setField.restore();
+ searchSourceStub.getField.restore();
+ searchSourceStub.fetch.restore();
+ };
+
+ return searchSourceStub;
+}
+
+/**
+ * A stubbed search source with a `fetch` method that returns a filtered set of `_stubHits`.
+ */
+export function createContextSearchSourceStub(hits, timeField = '@timestamp') {
+ const searchSourceStub = createSearchSourceStub(hits, timeField);
+
+ searchSourceStub.fetch.restore();
+ searchSourceStub.fetch = sinon.stub(SearchSource.prototype, 'fetch').callsFake(() => {
const timeField = searchSourceStub._stubTimeField;
const lastQuery = searchSourceStub.setField.withArgs('query').lastCall.args[1];
const timeRange = lastQuery.query.constant_score.filter.range[timeField];
@@ -71,7 +99,5 @@ export function createSearchSourceStubProvider(hits, timeField) {
});
});
- return function SearchSourceStubProvider() {
- return searchSourceStub;
- };
+ return searchSourceStub;
}
diff --git a/src/legacy/core_plugins/kibana/public/context/api/__tests__/anchor.js b/src/legacy/core_plugins/kibana/public/context/api/__tests__/anchor.js
index 582de1c8fa74c..46e66177b516a 100644
--- a/src/legacy/core_plugins/kibana/public/context/api/__tests__/anchor.js
+++ b/src/legacy/core_plugins/kibana/public/context/api/__tests__/anchor.js
@@ -19,55 +19,34 @@
import expect from '@kbn/expect';
import ngMock from 'ng_mock';
-import sinon from 'sinon';
-import { createIndexPatternsStub } from './_stubs';
-import { SearchSourceProvider } from 'ui/courier';
+import { createIndexPatternsStub, createSearchSourceStub } from './_stubs';
import { fetchAnchorProvider } from '../anchor';
-function createSearchSourceStubProvider(hits) {
- const searchSourceStub = {
- _stubHits: hits,
- };
-
- searchSourceStub.setParent = sinon.stub().returns(searchSourceStub);
- searchSourceStub.setField = sinon.stub().returns(searchSourceStub);
- searchSourceStub.fetch = sinon.spy(() => Promise.resolve({
- hits: {
- hits: searchSourceStub._stubHits,
- total: searchSourceStub._stubHits.length,
- },
- }));
-
- return function SearchSourceStubProvider() {
- return searchSourceStub;
- };
-}
-
describe('context app', function () {
beforeEach(ngMock.module('kibana'));
describe('function fetchAnchor', function () {
let fetchAnchor;
- let SearchSourceStub;
+ let searchSourceStub;
beforeEach(ngMock.module(function createServiceStubs($provide) {
$provide.value('indexPatterns', createIndexPatternsStub());
}));
beforeEach(ngMock.inject(function createPrivateStubs(Private) {
- SearchSourceStub = createSearchSourceStubProvider([
+ searchSourceStub = createSearchSourceStub([
{ _id: 'hit1' },
]);
- Private.stub(SearchSourceProvider, SearchSourceStub);
-
fetchAnchor = Private(fetchAnchorProvider);
}));
- it('should use the `fetch` method of the SearchSource', function () {
- const searchSourceStub = new SearchSourceStub();
+ afterEach(() => {
+ searchSourceStub._restore();
+ });
+ it('should use the `fetch` method of the SearchSource', function () {
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
.then(() => {
expect(searchSourceStub.fetch.calledOnce).to.be(true);
@@ -75,8 +54,6 @@ describe('context app', function () {
});
it('should configure the SearchSource to not inherit from the implicit root', function () {
- const searchSourceStub = new SearchSourceStub();
-
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
.then(() => {
const setParentSpy = searchSourceStub.setParent;
@@ -86,8 +63,6 @@ describe('context app', function () {
});
it('should set the SearchSource index pattern', function () {
- const searchSourceStub = new SearchSourceStub();
-
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
.then(() => {
const setFieldSpy = searchSourceStub.setField;
@@ -96,8 +71,6 @@ describe('context app', function () {
});
it('should set the SearchSource version flag to true', function () {
- const searchSourceStub = new SearchSourceStub();
-
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
.then(() => {
const setVersionSpy = searchSourceStub.setField.withArgs('version');
@@ -107,8 +80,6 @@ describe('context app', function () {
});
it('should set the SearchSource size to 1', function () {
- const searchSourceStub = new SearchSourceStub();
-
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
.then(() => {
const setSizeSpy = searchSourceStub.setField.withArgs('size');
@@ -118,8 +89,6 @@ describe('context app', function () {
});
it('should set the SearchSource query to an ids query', function () {
- const searchSourceStub = new SearchSourceStub();
-
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
.then(() => {
const setQuerySpy = searchSourceStub.setField.withArgs('query');
@@ -140,8 +109,6 @@ describe('context app', function () {
});
it('should set the SearchSource sort order', function () {
- const searchSourceStub = new SearchSourceStub();
-
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
.then(() => {
const setSortSpy = searchSourceStub.setField.withArgs('sort');
@@ -154,7 +121,6 @@ describe('context app', function () {
});
it('should reject with an error when no hits were found', function () {
- const searchSourceStub = new SearchSourceStub();
searchSourceStub._stubHits = [];
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
@@ -169,7 +135,6 @@ describe('context app', function () {
});
it('should return the first hit after adding an anchor marker', function () {
- const searchSourceStub = new SearchSourceStub();
searchSourceStub._stubHits = [
{ property1: 'value1' },
{ property2: 'value2' },
diff --git a/src/legacy/core_plugins/kibana/public/context/api/__tests__/predecessors.js b/src/legacy/core_plugins/kibana/public/context/api/__tests__/predecessors.js
index 88efc8efc5d30..2bf3da42e24e5 100644
--- a/src/legacy/core_plugins/kibana/public/context/api/__tests__/predecessors.js
+++ b/src/legacy/core_plugins/kibana/public/context/api/__tests__/predecessors.js
@@ -22,8 +22,7 @@ import ngMock from 'ng_mock';
import moment from 'moment';
import * as _ from 'lodash';
-import { createIndexPatternsStub, createSearchSourceStubProvider } from './_stubs';
-import { SearchSourceProvider } from 'ui/courier';
+import { createIndexPatternsStub, createContextSearchSourceStub } from './_stubs';
import { fetchContextProvider } from '../context';
@@ -38,16 +37,14 @@ describe('context app', function () {
describe('function fetchPredecessors', function () {
let fetchPredecessors;
- let getSearchSourceStub;
+ let searchSourceStub;
beforeEach(ngMock.module(function createServiceStubs($provide) {
$provide.value('indexPatterns', createIndexPatternsStub());
}));
beforeEach(ngMock.inject(function createPrivateStubs(Private) {
- getSearchSourceStub = createSearchSourceStubProvider([], '@timestamp', MS_PER_DAY * 8);
- Private.stub(SearchSourceProvider, getSearchSourceStub);
-
+ searchSourceStub = createContextSearchSourceStub([], '@timestamp', MS_PER_DAY * 8);
fetchPredecessors = (indexPatternId, timeField, sortDir, timeValIso, timeValNr, tieBreakerField, tieBreakerValue, size) => {
const anchor = {
_source: {
@@ -69,8 +66,11 @@ describe('context app', function () {
};
}));
+ afterEach(() => {
+ searchSourceStub._restore();
+ });
+
it('should perform exactly one query when enough hits are returned', function () {
- const searchSourceStub = getSearchSourceStub();
searchSourceStub._stubHits = [
searchSourceStub._createStubHit(MS_PER_DAY * 3000 + 2),
searchSourceStub._createStubHit(MS_PER_DAY * 3000 + 1),
@@ -97,7 +97,6 @@ describe('context app', function () {
});
it('should perform multiple queries with the last being unrestricted when too few hits are returned', function () {
- const searchSourceStub = getSearchSourceStub();
searchSourceStub._stubHits = [
searchSourceStub._createStubHit(MS_PER_DAY * 3010),
searchSourceStub._createStubHit(MS_PER_DAY * 3002),
@@ -134,7 +133,6 @@ describe('context app', function () {
});
it('should perform multiple queries until the expected hit count is returned', function () {
- const searchSourceStub = getSearchSourceStub();
searchSourceStub._stubHits = [
searchSourceStub._createStubHit(MS_PER_DAY * 1700),
searchSourceStub._createStubHit(MS_PER_DAY * 1200),
@@ -185,8 +183,6 @@ describe('context app', function () {
});
it('should configure the SearchSource to not inherit from the implicit root', function () {
- const searchSourceStub = getSearchSourceStub();
-
return fetchPredecessors(
'INDEX_PATTERN_ID',
'@timestamp',
@@ -206,8 +202,6 @@ describe('context app', function () {
});
it('should set the tiebreaker sort order to the opposite as the time field', function () {
- const searchSourceStub = getSearchSourceStub();
-
return fetchPredecessors(
'INDEX_PATTERN_ID',
'@timestamp',
diff --git a/src/legacy/core_plugins/kibana/public/context/api/__tests__/successors.js b/src/legacy/core_plugins/kibana/public/context/api/__tests__/successors.js
index 57f7673d31183..b8bec40f2859c 100644
--- a/src/legacy/core_plugins/kibana/public/context/api/__tests__/successors.js
+++ b/src/legacy/core_plugins/kibana/public/context/api/__tests__/successors.js
@@ -22,8 +22,7 @@ import ngMock from 'ng_mock';
import moment from 'moment';
import * as _ from 'lodash';
-import { createIndexPatternsStub, createSearchSourceStubProvider } from './_stubs';
-import { SearchSourceProvider } from 'ui/courier';
+import { createIndexPatternsStub, createContextSearchSourceStub } from './_stubs';
import { fetchContextProvider } from '../context';
@@ -37,15 +36,14 @@ describe('context app', function () {
describe('function fetchSuccessors', function () {
let fetchSuccessors;
- let getSearchSourceStub;
+ let searchSourceStub;
beforeEach(ngMock.module(function createServiceStubs($provide) {
$provide.value('indexPatterns', createIndexPatternsStub());
}));
beforeEach(ngMock.inject(function createPrivateStubs(Private) {
- getSearchSourceStub = createSearchSourceStubProvider([], '@timestamp');
- Private.stub(SearchSourceProvider, getSearchSourceStub);
+ searchSourceStub = createContextSearchSourceStub([], '@timestamp');
fetchSuccessors = (indexPatternId, timeField, sortDir, timeValIso, timeValNr, tieBreakerField, tieBreakerValue, size) => {
const anchor = {
@@ -68,8 +66,11 @@ describe('context app', function () {
};
}));
+ afterEach(() => {
+ searchSourceStub._restore();
+ });
+
it('should perform exactly one query when enough hits are returned', function () {
- const searchSourceStub = getSearchSourceStub();
searchSourceStub._stubHits = [
searchSourceStub._createStubHit(MS_PER_DAY * 5000),
searchSourceStub._createStubHit(MS_PER_DAY * 4000),
@@ -96,7 +97,6 @@ describe('context app', function () {
});
it('should perform multiple queries with the last being unrestricted when too few hits are returned', function () {
- const searchSourceStub = getSearchSourceStub();
searchSourceStub._stubHits = [
searchSourceStub._createStubHit(MS_PER_DAY * 3010),
searchSourceStub._createStubHit(MS_PER_DAY * 3002),
@@ -133,7 +133,6 @@ describe('context app', function () {
});
it('should perform multiple queries until the expected hit count is returned', function () {
- const searchSourceStub = getSearchSourceStub();
searchSourceStub._stubHits = [
searchSourceStub._createStubHit(MS_PER_DAY * 3000),
searchSourceStub._createStubHit(MS_PER_DAY * 3000 - 1),
@@ -187,8 +186,6 @@ describe('context app', function () {
});
it('should configure the SearchSource to not inherit from the implicit root', function () {
- const searchSourceStub = getSearchSourceStub();
-
return fetchSuccessors(
'INDEX_PATTERN_ID',
'@timestamp',
@@ -208,8 +205,6 @@ describe('context app', function () {
});
it('should set the tiebreaker sort order to the same as the time field', function () {
- const searchSourceStub = getSearchSourceStub();
-
return fetchSuccessors(
'INDEX_PATTERN_ID',
'@timestamp',
diff --git a/src/legacy/core_plugins/kibana/public/context/api/anchor.js b/src/legacy/core_plugins/kibana/public/context/api/anchor.js
index bab75e14e8ed3..02a309eaa0165 100644
--- a/src/legacy/core_plugins/kibana/public/context/api/anchor.js
+++ b/src/legacy/core_plugins/kibana/public/context/api/anchor.js
@@ -21,11 +21,9 @@ import _ from 'lodash';
import { i18n } from '@kbn/i18n';
-import { SearchSourceProvider } from 'ui/courier';
-
-export function fetchAnchorProvider(indexPatterns, Private) {
- const SearchSource = Private(SearchSourceProvider);
+import { SearchSource } from 'ui/courier';
+export function fetchAnchorProvider(indexPatterns) {
return async function fetchAnchor(
indexPatternId,
anchorId,
diff --git a/src/legacy/core_plugins/kibana/public/context/api/context.ts b/src/legacy/core_plugins/kibana/public/context/api/context.ts
index baecf8a673521..48ac59f1f0855 100644
--- a/src/legacy/core_plugins/kibana/public/context/api/context.ts
+++ b/src/legacy/core_plugins/kibana/public/context/api/context.ts
@@ -18,8 +18,7 @@
*/
// @ts-ignore
-import { SearchSourceProvider, SearchSource } from 'ui/courier';
-import { IPrivate } from 'ui/private';
+import { SearchSource } from 'ui/courier';
import { Filter } from '@kbn/es-query';
import { IndexPatterns, IndexPattern } from 'ui/index_patterns';
import { reverseSortDir, SortDirection } from './utils/sorting';
@@ -42,9 +41,7 @@ const DAY_MILLIS = 24 * 60 * 60 * 1000;
// look from 1 day up to 10000 days into the past and future
const LOOKUP_OFFSETS = [0, 1, 7, 30, 365, 10000].map(days => days * DAY_MILLIS);
-function fetchContextProvider(indexPatterns: IndexPatterns, Private: IPrivate) {
- const SearchSourcePrivate: any = Private(SearchSourceProvider);
-
+function fetchContextProvider(indexPatterns: IndexPatterns) {
return {
fetchSurroundingDocs,
};
@@ -116,7 +113,7 @@ function fetchContextProvider(indexPatterns: IndexPatterns, Private: IPrivate) {
}
async function createSearchSource(indexPattern: IndexPattern, filters: Filter[]) {
- return new SearchSourcePrivate()
+ return new SearchSource()
.setParent(false)
.setField('index', indexPattern)
.setField('filter', filters);
diff --git a/src/legacy/core_plugins/kibana/public/dashboard/__tests__/get_saved_dashboard_mock.ts b/src/legacy/core_plugins/kibana/public/dashboard/__tests__/get_saved_dashboard_mock.ts
index f9f5cfe0214b2..01468eadffb84 100644
--- a/src/legacy/core_plugins/kibana/public/dashboard/__tests__/get_saved_dashboard_mock.ts
+++ b/src/legacy/core_plugins/kibana/public/dashboard/__tests__/get_saved_dashboard_mock.ts
@@ -17,6 +17,7 @@
* under the License.
*/
+import { searchSourceMock } from '../../../../../ui/public/courier/search_source/mocks';
import { SavedObjectDashboard } from '../saved_dashboard/saved_dashboard';
export function getSavedDashboardMock(
@@ -26,10 +27,7 @@ export function getSavedDashboardMock(
id: '123',
title: 'my dashboard',
panelsJSON: '[]',
- searchSource: {
- getOwnField: (param: any) => param,
- setField: () => {},
- },
+ searchSource: searchSourceMock,
copyOnSave: false,
timeRestore: false,
timeTo: 'now',
diff --git a/src/legacy/core_plugins/kibana/public/dev_tools/partials/dev_tools_app.html b/src/legacy/core_plugins/kibana/public/dev_tools/partials/dev_tools_app.html
index e9424534cd9d2..6c076092c76d5 100644
--- a/src/legacy/core_plugins/kibana/public/dev_tools/partials/dev_tools_app.html
+++ b/src/legacy/core_plugins/kibana/public/dev_tools/partials/dev_tools_app.html
@@ -1,9 +1,9 @@
-
+
+
diff --git a/src/legacy/core_plugins/kibana/public/discover/controllers/discover.js b/src/legacy/core_plugins/kibana/public/discover/controllers/discover.js
index bd07b53e44801..735189dbd4c86 100644
--- a/src/legacy/core_plugins/kibana/public/discover/controllers/discover.js
+++ b/src/legacy/core_plugins/kibana/public/discover/controllers/discover.js
@@ -229,7 +229,10 @@ function discoverController(
// the saved savedSearch
const savedSearch = $route.current.locals.savedSearch;
+
+ let abortController;
$scope.$on('$destroy', () => {
+ if (abortController) abortController.abort();
savedSearch.destroy();
subscriptions.unsubscribe();
});
@@ -752,7 +755,8 @@ function discoverController(
$scope.updateTime();
// Abort any in-progress requests before fetching again
- $scope.searchSource.cancelQueued();
+ if (abortController) abortController.abort();
+ abortController = new AbortController();
$scope.updateDataSource()
.then(setupVisualization)
@@ -760,7 +764,9 @@ function discoverController(
$state.save();
$scope.fetchStatus = fetchStatuses.LOADING;
logInspectorRequest();
- return $scope.searchSource.fetch();
+ return $scope.searchSource.fetch({
+ abortSignal: abortController.signal
+ });
})
.then(onResults)
.catch((error) => {
@@ -1039,8 +1045,8 @@ function discoverController(
);
visSavedObject.vis = $scope.vis;
- $scope.searchSource.onRequestStart((searchSource, searchRequest) => {
- return $scope.vis.getAggConfig().onSearchRequestStart(searchSource, searchRequest);
+ $scope.searchSource.onRequestStart((searchSource, options) => {
+ return $scope.vis.getAggConfig().onSearchRequestStart(searchSource, options);
});
$scope.searchSource.setField('aggs', function () {
diff --git a/src/legacy/core_plugins/kibana/public/discover/embeddable/search_embeddable.ts b/src/legacy/core_plugins/kibana/public/discover/embeddable/search_embeddable.ts
index d5bf868f3bf72..eaec11ff893ed 100644
--- a/src/legacy/core_plugins/kibana/public/discover/embeddable/search_embeddable.ts
+++ b/src/legacy/core_plugins/kibana/public/discover/embeddable/search_embeddable.ts
@@ -102,12 +102,13 @@ export class SearchEmbeddable extends Embeddable
private inspectorAdaptors: Adapters;
private searchScope?: SearchScope;
private panelTitle: string = '';
- private filtersSearchSource: SearchSource;
+ private filtersSearchSource?: SearchSource;
private searchInstance?: JQLite;
private autoRefreshFetchSubscription?: Subscription;
private subscription?: Subscription;
public readonly type = SEARCH_EMBEDDABLE_TYPE;
private filterGen: FilterManager;
+ private abortController?: AbortController;
private prevTimeRange?: TimeRange;
private prevFilters?: Filter[];
@@ -193,7 +194,7 @@ export class SearchEmbeddable extends Embeddable
if (this.autoRefreshFetchSubscription) {
this.autoRefreshFetchSubscription.unsubscribe();
}
- this.savedSearch.searchSource.cancelQueued();
+ if (this.abortController) this.abortController.abort();
}
private initializeSearchScope() {
@@ -273,7 +274,8 @@ export class SearchEmbeddable extends Embeddable
const { searchSource } = this.savedSearch;
// Abort any in-progress requests
- searchSource.cancelQueued();
+ if (this.abortController) this.abortController.abort();
+ this.abortController = new AbortController();
searchSource.setField('size', config.get('discover:sampleSize'));
searchSource.setField(
@@ -299,7 +301,9 @@ export class SearchEmbeddable extends Embeddable
try {
// Make the request
- const resp = await searchSource.fetch();
+ const resp = await searchSource.fetch({
+ abortSignal: this.abortController.signal,
+ });
this.searchScope.isLoading = false;
@@ -337,8 +341,8 @@ export class SearchEmbeddable extends Embeddable
searchScope.sharedItemTitle = this.panelTitle;
if (isFetchRequired) {
- this.filtersSearchSource.setField('filter', this.input.filters);
- this.filtersSearchSource.setField('query', this.input.query);
+ this.filtersSearchSource!.setField('filter', this.input.filters);
+ this.filtersSearchSource!.setField('query', this.input.query);
this.fetch();
diff --git a/src/legacy/core_plugins/kibana/public/doc_viewer/doc_viewer_directive.ts b/src/legacy/core_plugins/kibana/public/doc_viewer/doc_viewer_directive.ts
index 202fca6ee7b52..fa6145c45f55f 100644
--- a/src/legacy/core_plugins/kibana/public/doc_viewer/doc_viewer_directive.ts
+++ b/src/legacy/core_plugins/kibana/public/doc_viewer/doc_viewer_directive.ts
@@ -22,15 +22,26 @@ import { uiModules } from 'ui/modules';
import { DocViewer } from './doc_viewer';
uiModules.get('apps/discover').directive('docViewer', (reactDirective: any) => {
- return reactDirective(DocViewer, undefined, {
- restrict: 'E',
- scope: {
- hit: '=',
- indexPattern: '=',
- filter: '=?',
- columns: '=?',
- onAddColumn: '=?',
- onRemoveColumn: '=?',
- },
- });
+ return reactDirective(
+ DocViewer,
+ [
+ 'hit',
+ ['indexPattern', { watchDepth: 'reference' }],
+ ['filter', { watchDepth: 'reference' }],
+ ['columns', { watchDepth: 'collection' }],
+ ['onAddColumn', { watchDepth: 'reference' }],
+ ['onRemoveColumn', { watchDepth: 'reference' }],
+ ],
+ {
+ restrict: 'E',
+ scope: {
+ hit: '=',
+ indexPattern: '=',
+ filter: '=?',
+ columns: '=?',
+ onAddColumn: '=?',
+ onRemoveColumn: '=?',
+ },
+ }
+ );
});
diff --git a/src/legacy/core_plugins/kibana/public/management/sections/index_patterns/edit_index_pattern/edit_index_pattern.js b/src/legacy/core_plugins/kibana/public/management/sections/index_patterns/edit_index_pattern/edit_index_pattern.js
index f362347118dd8..e08f3a064da52 100644
--- a/src/legacy/core_plugins/kibana/public/management/sections/index_patterns/edit_index_pattern/edit_index_pattern.js
+++ b/src/legacy/core_plugins/kibana/public/management/sections/index_patterns/edit_index_pattern/edit_index_pattern.js
@@ -27,7 +27,7 @@ import { fatalError, toastNotifications } from 'ui/notify';
import uiRoutes from 'ui/routes';
import { uiModules } from 'ui/modules';
import template from './edit_index_pattern.html';
-import { FieldWildcardProvider } from 'ui/field_wildcard';
+import { fieldWildcardMatcher } from 'ui/field_wildcard';
import { IndexPatternListFactory } from 'ui/management/index_pattern_list';
import React from 'react';
import { render, unmountComponentAtNode } from 'react-dom';
@@ -173,10 +173,9 @@ uiModules.get('apps/management')
.controller('managementIndexPatternsEdit', function (
$scope, $location, $route, Promise, config, indexPatterns, Private, AppState, confirmModal) {
const $state = $scope.state = new AppState();
- const { fieldWildcardMatcher } = Private(FieldWildcardProvider);
const indexPatternListProvider = Private(IndexPatternListFactory)();
- $scope.fieldWildcardMatcher = fieldWildcardMatcher;
+ $scope.fieldWildcardMatcher = (...args) => fieldWildcardMatcher(...args, config.get('metaFields'));
$scope.editSectionsProvider = Private(IndicesEditSectionsProvider);
$scope.kbnUrl = Private(KbnUrlProvider);
$scope.indexPattern = $route.current.locals.indexPattern;
diff --git a/src/legacy/ui/public/_index.scss b/src/legacy/ui/public/_index.scss
index 2ce9a0a8aa06f..98675402b43cc 100644
--- a/src/legacy/ui/public/_index.scss
+++ b/src/legacy/ui/public/_index.scss
@@ -13,7 +13,6 @@
@import './courier/index';
@import './collapsible_sidebar/index';
@import './directives/index';
-@import './error_allow_explicit_index/index';
@import './error_auto_create_index/index';
@import './error_url_overflow/index';
@import './exit_full_screen/index';
diff --git a/src/legacy/ui/public/agg_types/__tests__/metrics/parent_pipeline.js b/src/legacy/ui/public/agg_types/__tests__/metrics/parent_pipeline.js
index 3c8fde7eb7135..e4ca6075c624b 100644
--- a/src/legacy/ui/public/agg_types/__tests__/metrics/parent_pipeline.js
+++ b/src/legacy/ui/public/agg_types/__tests__/metrics/parent_pipeline.js
@@ -203,7 +203,6 @@ describe('parent pipeline aggs', function () {
});
const searchSource = {};
- const request = {};
const customMetricSpy = sinon.spy();
const customMetric = aggConfig.params.customMetric;
@@ -211,9 +210,9 @@ describe('parent pipeline aggs', function () {
customMetric.type.params[0].modifyAggConfigOnSearchRequestStart = customMetricSpy;
aggConfig.type.params.forEach(param => {
- param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource, request);
+ param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource);
});
- expect(customMetricSpy.calledWith(customMetric, searchSource, request)).to.be(true);
+ expect(customMetricSpy.calledWith(customMetric, searchSource)).to.be(true);
});
});
});
diff --git a/src/legacy/ui/public/agg_types/__tests__/metrics/sibling_pipeline.js b/src/legacy/ui/public/agg_types/__tests__/metrics/sibling_pipeline.js
index fef69155d2351..aba5db9cedadf 100644
--- a/src/legacy/ui/public/agg_types/__tests__/metrics/sibling_pipeline.js
+++ b/src/legacy/ui/public/agg_types/__tests__/metrics/sibling_pipeline.js
@@ -145,7 +145,6 @@ describe('sibling pipeline aggs', function () {
init();
const searchSource = {};
- const request = {};
const customMetricSpy = sinon.spy();
const customBucketSpy = sinon.spy();
const { customMetric, customBucket } = aggConfig.params;
@@ -155,10 +154,10 @@ describe('sibling pipeline aggs', function () {
customBucket.type.params[0].modifyAggConfigOnSearchRequestStart = customBucketSpy;
aggConfig.type.params.forEach(param => {
- param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource, request);
+ param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource);
});
- expect(customMetricSpy.calledWith(customMetric, searchSource, request)).to.be(true);
- expect(customBucketSpy.calledWith(customBucket, searchSource, request)).to.be(true);
+ expect(customMetricSpy.calledWith(customMetric, searchSource)).to.be(true);
+ expect(customBucketSpy.calledWith(customBucket, searchSource)).to.be(true);
});
});
diff --git a/src/legacy/ui/public/agg_types/agg_config.ts b/src/legacy/ui/public/agg_types/agg_config.ts
index 9898682b5d558..a5b1aa7cf9c0b 100644
--- a/src/legacy/ui/public/agg_types/agg_config.ts
+++ b/src/legacy/ui/public/agg_types/agg_config.ts
@@ -238,14 +238,14 @@ export class AggConfig {
* @param {Courier.SearchRequest} searchRequest
* @return {Promise}
*/
- onSearchRequestStart(searchSource: any, searchRequest: any) {
+ onSearchRequestStart(searchSource: any, options: any) {
if (!this.type) {
return Promise.resolve();
}
return Promise.all(
this.type.params.map((param: any) =>
- param.modifyAggConfigOnSearchRequestStart(this, searchSource, searchRequest)
+ param.modifyAggConfigOnSearchRequestStart(this, searchSource, options)
)
);
}
diff --git a/src/legacy/ui/public/agg_types/agg_configs.ts b/src/legacy/ui/public/agg_types/agg_configs.ts
index e90d91eb7fd7f..675d37d05c33c 100644
--- a/src/legacy/ui/public/agg_types/agg_configs.ts
+++ b/src/legacy/ui/public/agg_types/agg_configs.ts
@@ -307,12 +307,10 @@ export class AggConfigs {
return _.find(reqAgg.getResponseAggs(), { id });
}
- onSearchRequestStart(searchSource: any, searchRequest: any) {
+ onSearchRequestStart(searchSource: any, options: any) {
return Promise.all(
// @ts-ignore
- this.getRequestAggs().map((agg: AggConfig) =>
- agg.onSearchRequestStart(searchSource, searchRequest)
- )
+ this.getRequestAggs().map((agg: AggConfig) => agg.onSearchRequestStart(searchSource, options))
);
}
}
diff --git a/src/legacy/ui/public/agg_types/buckets/histogram.ts b/src/legacy/ui/public/agg_types/buckets/histogram.ts
index 516f17be0643e..23edefc67d506 100644
--- a/src/legacy/ui/public/agg_types/buckets/histogram.ts
+++ b/src/legacy/ui/public/agg_types/buckets/histogram.ts
@@ -92,7 +92,7 @@ export const histogramBucketAgg = new BucketAggType({
modifyAggConfigOnSearchRequestStart(
aggConfig: IBucketHistogramAggConfig,
searchSource: any,
- searchRequest: any
+ options: any
) {
const field = aggConfig.getField();
const aggBody = field.scripted
@@ -111,10 +111,8 @@ export const histogramBucketAgg = new BucketAggType({
},
});
- searchRequest.whenAborted(() => childSearchSource.cancelQueued());
-
return childSearchSource
- .fetch()
+ .fetch(options)
.then((resp: any) => {
aggConfig.setAutoBounds({
min: _.get(resp, 'aggregations.minAgg.value'),
diff --git a/src/legacy/ui/public/agg_types/buckets/terms.ts b/src/legacy/ui/public/agg_types/buckets/terms.ts
index ad470c8f64b84..bc6dd4860561e 100644
--- a/src/legacy/ui/public/agg_types/buckets/terms.ts
+++ b/src/legacy/ui/public/agg_types/buckets/terms.ts
@@ -111,9 +111,6 @@ export const termsBucketAgg = new BucketAggType({
if (aggConfig.params.otherBucket) {
const filterAgg = buildOtherBucketAgg(aggConfigs, aggConfig, resp);
if (!filterAgg) return resp;
- if (abortSignal) {
- abortSignal.addEventListener('abort', () => nestedSearchSource.cancelQueued());
- }
nestedSearchSource.setField('aggs', filterAgg);
@@ -134,7 +131,7 @@ export const termsBucketAgg = new BucketAggType({
});
request.stats(getRequestInspectorStats(nestedSearchSource));
- const response = await nestedSearchSource.fetch();
+ const response = await nestedSearchSource.fetch({ abortSignal });
request.stats(getResponseInspectorStats(nestedSearchSource, response)).ok({ json: response });
resp = mergeOtherBucketAggResponse(aggConfigs, resp, response, aggConfig, filterAgg());
}
diff --git a/src/legacy/ui/public/agg_types/param_types/base.ts b/src/legacy/ui/public/agg_types/param_types/base.ts
index 88fc24eeb53f5..bc8ed5d485bd4 100644
--- a/src/legacy/ui/public/agg_types/param_types/base.ts
+++ b/src/legacy/ui/public/agg_types/param_types/base.ts
@@ -46,18 +46,17 @@ export class BaseParamType implements AggParam {
/**
* A function that will be called before an aggConfig is serialized and sent to ES.
- * Allows aggConfig to retrieve values needed for serialization by creating a {SearchRequest}
+ * Allows aggConfig to retrieve values needed for serialization
* Example usage: an aggregation needs to know the min/max of a field to determine an appropriate interval
*
- * @param {AggConfig} aggconfig
+ * @param {AggConfig} aggConfig
* @param {Courier.SearchSource} searchSource
- * @param {Courier.SearchRequest} searchRequest
* @returns {Promise|undefined}
*/
modifyAggConfigOnSearchRequestStart: (
- aggconfig: AggConfig,
+ aggConfig: AggConfig,
searchSource?: SearchSource,
- searchRequest?: any
+ options?: any
) => void;
constructor(config: Record) {
diff --git a/src/legacy/ui/public/chrome/directives/kbn_chrome.html b/src/legacy/ui/public/chrome/directives/kbn_chrome.html
index ced89287d310f..541082e68de58 100644
--- a/src/legacy/ui/public/chrome/directives/kbn_chrome.html
+++ b/src/legacy/ui/public/chrome/directives/kbn_chrome.html
@@ -1,9 +1,9 @@
diff --git a/src/legacy/ui/public/courier/fetch/__tests__/call_client.js b/src/legacy/ui/public/courier/fetch/__tests__/call_client.js
deleted file mode 100644
index 1a473446df872..0000000000000
--- a/src/legacy/ui/public/courier/fetch/__tests__/call_client.js
+++ /dev/null
@@ -1,349 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import sinon from 'sinon';
-import expect from '@kbn/expect';
-import ngMock from 'ng_mock';
-import NoDigestPromises from 'test_utils/no_digest_promises';
-import { delay } from 'bluebird';
-
-import { CallClientProvider } from '../call_client';
-import { RequestStatus } from '../req_status';
-import { SearchRequestProvider } from '../request';
-import { addSearchStrategy } from '../../search_strategy';
-
-describe('callClient', () => {
- NoDigestPromises.activateForSuite();
-
- const ABORTED = RequestStatus.ABORTED;
-
- let SearchRequest;
- let callClient;
- let fakeSearch;
- let searchRequests;
- let esRequestDelay;
- let esShouldError;
- let esPromiseAbortSpy;
-
- const createSearchRequest = (id, overrides = {}, errorHandler = () => {}) => {
- const { source: overrideSource, ...rest } = overrides;
-
- const source = {
- _flatten: () => Promise.resolve({
- index: id
- }),
- requestIsStopped: () => {},
- getField: () => 'indexPattern',
- getPreferredSearchStrategyId: () => undefined,
- ...overrideSource
- };
-
- const searchRequest = new SearchRequest({ source, errorHandler, ...rest });
- searchRequest.__testId__ = id;
- return searchRequest;
- };
-
- beforeEach(ngMock.module('kibana'));
-
- beforeEach(ngMock.module(function stubEs($provide) {
- esRequestDelay = 0;
- esShouldError = false;
-
- $provide.service('es', (Promise) => {
- fakeSearch = sinon.spy(({ index }) => {
- const esPromise = new Promise((resolve, reject) => {
- if (esShouldError) {
- return reject('fake es error');
- }
-
- setTimeout(() => {
- resolve(index);
- }, esRequestDelay);
- });
-
- esPromise.abort = esPromiseAbortSpy = sinon.spy();
- return esPromise;
- });
-
- return {
- search: fakeSearch
- };
- });
- }));
-
- beforeEach(ngMock.inject(Private => {
- callClient = Private(CallClientProvider);
- SearchRequest = Private(SearchRequestProvider);
- }));
-
- describe('basic contract', () => {
- it('returns a promise', () => {
- searchRequests = [ createSearchRequest() ];
- const callingClient = callClient(searchRequests);
- expect(callingClient.then).to.be.a('function');
- });
-
- it(`resolves the promise with the 'responses' property of the es.search() result`, () => {
- searchRequests = [ createSearchRequest(1) ];
-
- return callClient(searchRequests).then(results => {
- expect(results).to.eql([1]);
- });
- });
-
- describe('for failing requests', () => {
- beforeEach(() => {
- addSearchStrategy({
- id: 'fail',
- isViable: indexPattern => {
- return indexPattern.type === 'fail';
- },
- search: () => {
- return {
- searching: Promise.reject(new Error('Search failed')),
- failedSearchRequests: [],
- abort: () => {},
- };
- },
- });
- });
-
- it(`still bubbles up the failure`, () => {
- const searchRequestFail1 = createSearchRequest('fail1', {
- source: {
- getField: () => ({ type: 'fail' }),
- },
- });
-
- const searchRequestFail2 = createSearchRequest('fail2', {
- source: {
- getField: () => ({ type: 'fail' }),
- },
- });
-
- searchRequests = [ searchRequestFail1, searchRequestFail2 ];
-
- return callClient(searchRequests).then(results => {
- expect(results).to.eql([
- { error: new Error('Search failed') },
- { error: new Error('Search failed') },
- ]);
- });
- });
- });
- });
-
- describe('implementation', () => {
- it('calls searchRequest.whenAborted() as part of setup', async () => {
- const whenAbortedSpy = sinon.spy();
- const searchRequest = createSearchRequest();
- searchRequest.whenAborted = whenAbortedSpy;
- searchRequests = [ searchRequest ];
-
- return callClient(searchRequests).then(() => {
- expect(whenAbortedSpy.callCount).to.be(1);
- });
- });
- });
-
- describe('aborting at different points in the request lifecycle:', () => {
- it('while the search body is being formed rejects with an AbortError', () => {
- const searchRequest = createSearchRequest(1, {
- source: {
- _flatten: () => {
- return new Promise(resolve => {
- setTimeout(() => {
- resolve({});
- }, 100);
- });
- },
- requestIsStopped: () => {},
- },
- });
-
- searchRequests = [ searchRequest ];
- const callingClient = callClient(searchRequests);
-
- // Abort the request while the search body is being formed.
- setTimeout(() => {
- searchRequest.abort();
- }, 20);
-
- return callingClient.catch(error => {
- expect(error.name).to.be('AbortError');
- });
- });
-
- it('while the search is in flight rejects with an AbortError', () => {
- esRequestDelay = 100;
-
- const searchRequest = createSearchRequest();
- searchRequests = [ searchRequest ];
- const callingClient = callClient(searchRequests);
-
- // Abort the request while the search is in flight..
- setTimeout(() => {
- searchRequest.abort();
- }, 80);
-
- return callingClient.catch(error => {
- expect(error.name).to.be('AbortError');
- });
- });
- });
-
- describe('aborting number of requests:', () => {
- it(`aborting all searchRequests rejects with an AbortError`, () => {
- const searchRequest1 = createSearchRequest();
- const searchRequest2 = createSearchRequest();
- searchRequests = [ searchRequest1, searchRequest2 ];
- const callingClient = callClient(searchRequests);
-
- searchRequest1.abort();
- searchRequest2.abort();
-
- return callingClient.catch(error => {
- expect(error.name).to.be('AbortError');
- });
- });
-
- it(`aborting all searchRequests calls abort() on the promise returned by searchStrategy.search()`, () => {
- esRequestDelay = 100;
-
- const searchRequest1 = createSearchRequest();
- const searchRequest2 = createSearchRequest();
- searchRequests = [ searchRequest1, searchRequest2 ];
-
- const callingClient = callClient(searchRequests);
-
- return Promise.all([
- delay(70).then(() => {
- // At this point we expect the request to be in flight.
- expect(esPromiseAbortSpy.callCount).to.be(0);
- searchRequest1.abort();
- searchRequest2.abort();
- }),
- callingClient.catch(() => {
- expect(esPromiseAbortSpy.callCount).to.be(1);
- }),
- ]);
- });
-
- it('aborting some searchRequests rejects with an AbortError', () => {
- const searchRequest1 = createSearchRequest(1);
- const searchRequest2 = createSearchRequest(2);
- searchRequests = [ searchRequest1, searchRequest2 ];
- const callingClient = callClient(searchRequests);
- searchRequest2.abort();
-
- return callingClient.catch(error => {
- expect(error.name).to.be('AbortError');
- });
- });
- });
-
- describe('searchRequests with multiple searchStrategies map correctly to their responses', () => {
- const search = ({ searchRequests }) => {
- return {
- searching: Promise.resolve(searchRequests.map(searchRequest => searchRequest.__testId__)),
- failedSearchRequests: [],
- abort: () => {},
- };
- };
-
- const searchStrategyA = {
- id: 'a',
- isViable: indexPattern => {
- return indexPattern.type === 'a';
- },
- search,
- };
-
- const searchStrategyB = {
- id: 'b',
- isViable: indexPattern => {
- return indexPattern.type === 'b';
- },
- search,
- };
-
- let searchRequestA;
- let searchRequestB;
- let searchRequestA2;
-
- beforeEach(() => {
- addSearchStrategy(searchStrategyA);
- addSearchStrategy(searchStrategyB);
-
- searchRequestA = createSearchRequest('a', {
- source: {
- getField: () => ({ type: 'a' }),
- getSearchStrategyForSearchRequest: () => {},
- getPreferredSearchStrategyId: () => {},
- },
- });
-
- searchRequestB = createSearchRequest('b', {
- source: {
- getField: () => ({ type: 'b' }),
- getSearchStrategyForSearchRequest: () => {},
- getPreferredSearchStrategyId: () => {},
- },
- });
-
- searchRequestA2 = createSearchRequest('a2', {
- source: {
- getField: () => ({ type: 'a' }),
- getSearchStrategyForSearchRequest: () => {},
- getPreferredSearchStrategyId: () => {},
- },
- });
- });
-
- it('if the searchRequests are reordered by the searchStrategies', () => {
- // Add requests in an order which will be reordered by the strategies.
- searchRequests = [ searchRequestA, searchRequestB, searchRequestA2 ];
- const callingClient = callClient(searchRequests);
-
- return callingClient.then(results => {
- expect(results).to.eql(['a', 'b', 'a2']);
- });
- });
-
- it('if one is aborted after being provided', () => {
- // Add requests in an order which will be reordered by the strategies.
- searchRequests = [ searchRequestA, searchRequestB, searchRequestA2 ];
- const callingClient = callClient(searchRequests);
- searchRequestA2.abort();
-
- return callingClient.then(results => {
- expect(results).to.eql(['a', 'b', ABORTED]);
- });
- });
-
- it(`if one is already aborted when it's provided`, () => {
- searchRequests = [ searchRequestA, searchRequestB, ABORTED, searchRequestA2 ];
- const callingClient = callClient(searchRequests);
-
- return callingClient.then(results => {
- expect(results).to.eql(['a', 'b', ABORTED, 'a2']);
- });
- });
- });
-});
diff --git a/src/legacy/ui/public/courier/fetch/__tests__/fetch_now.js b/src/legacy/ui/public/courier/fetch/__tests__/fetch_now.js
deleted file mode 100644
index 19032ce1f4ca3..0000000000000
--- a/src/legacy/ui/public/courier/fetch/__tests__/fetch_now.js
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import sinon from 'sinon';
-import expect from '@kbn/expect';
-import ngMock from 'ng_mock';
-
-import { CallClientProvider } from '../call_client';
-import { CallResponseHandlersProvider } from '../call_response_handlers';
-import { ContinueIncompleteProvider } from '../continue_incomplete';
-import { FetchNowProvider } from '../fetch_now';
-
-function mockRequest() {
- return {
- strategy: 'mock',
- started: true,
- aborted: false,
- handleFailure: sinon.spy(),
- retry: sinon.spy(function () { return this; }),
- continue: sinon.spy(function () { return this; }),
- start: sinon.spy(function () { return this; })
- };
-}
-
-describe('FetchNowProvider', () => {
-
- let Promise;
- let $rootScope;
- let fetchNow;
- let request;
- let requests;
- let fakeResponses;
-
- beforeEach(ngMock.module('kibana', (PrivateProvider) => {
- function FakeResponsesProvider(Promise) {
- fakeResponses = sinon.spy(function () {
- return Promise.map(requests, mockRequest => {
- return { mockRequest };
- });
- });
- return fakeResponses;
- }
-
- PrivateProvider.swap(CallClientProvider, FakeResponsesProvider);
- PrivateProvider.swap(CallResponseHandlersProvider, FakeResponsesProvider);
- PrivateProvider.swap(ContinueIncompleteProvider, FakeResponsesProvider);
- }));
-
- beforeEach(ngMock.inject((Private, $injector) => {
- $rootScope = $injector.get('$rootScope');
- Promise = $injector.get('Promise');
- fetchNow = Private(FetchNowProvider);
- request = mockRequest();
- requests = [ request ];
- }));
-
- describe('when request has not started', () => {
- beforeEach(() => requests.forEach(req => req.started = false));
-
- it('starts request', () => {
- fetchNow(requests);
- expect(request.start.called).to.be(true);
- expect(request.continue.called).to.be(false);
- });
-
- it('waits for returned promise from start() to be fulfilled', () => {
- request.start = sinon.stub().returns(Promise.resolve(request));
- fetchNow(requests);
-
- expect(request.start.callCount).to.be(1);
- expect(fakeResponses.callCount).to.be(0);
- $rootScope.$apply();
- expect(fakeResponses.callCount).to.be(3);
- });
-
- it('invokes request failure handler if starting fails', () => {
- request.start = sinon.stub().returns(Promise.reject('some error'));
- fetchNow(requests);
- $rootScope.$apply();
- sinon.assert.calledWith(request.handleFailure, 'some error');
- });
- });
-
- describe('when request has already started', () => {
- it('continues request', () => {
- fetchNow(requests);
- expect(request.start.called).to.be(false);
- expect(request.continue.called).to.be(true);
- });
- it('waits for returned promise to be fulfilled', () => {
- request.continue = sinon.stub().returns(Promise.resolve(request));
- fetchNow(requests);
-
- expect(request.continue.callCount).to.be(1);
- expect(fakeResponses.callCount).to.be(0);
- $rootScope.$apply();
- expect(fakeResponses.callCount).to.be(3);
- });
- it('invokes request failure handler if continuing fails', () => {
- request.continue = sinon.stub().returns(Promise.reject('some error'));
- fetchNow(requests);
- $rootScope.$apply();
- sinon.assert.calledWith(request.handleFailure, 'some error');
- });
- });
-});
diff --git a/src/legacy/ui/public/courier/fetch/call_client.js b/src/legacy/ui/public/courier/fetch/call_client.js
index 7ba73e741c074..971ae4c49a604 100644
--- a/src/legacy/ui/public/courier/fetch/call_client.js
+++ b/src/legacy/ui/public/courier/fetch/call_client.js
@@ -17,187 +17,37 @@
* under the License.
*/
-import { ErrorAllowExplicitIndexProvider } from '../../error_allow_explicit_index';
-import { assignSearchRequestsToSearchStrategies } from '../search_strategy';
-import { IsRequestProvider } from './is_request';
-import { RequestStatus } from './req_status';
-import { SerializeFetchParamsProvider } from './request/serialize_fetch_params';
-import { i18n } from '@kbn/i18n';
-import { createDefer } from 'ui/promises';
-
-export function CallClientProvider(Private, Promise, es, config, sessionId, esShardTimeout) {
- const errorAllowExplicitIndex = Private(ErrorAllowExplicitIndexProvider);
- const isRequest = Private(IsRequestProvider);
- const serializeFetchParams = Private(SerializeFetchParamsProvider);
-
- const ABORTED = RequestStatus.ABORTED;
-
- function callClient(searchRequests) {
- // get the actual list of requests that we will be fetching
- const requestsToFetch = searchRequests.filter(isRequest);
- let requestsToFetchCount = requestsToFetch.length;
-
- if (requestsToFetchCount === 0) {
- return Promise.resolve([]);
- }
-
- // This is how we'll provide the consumer with search responses. Resolved by
- // respondToSearchRequests.
- const defer = createDefer(Promise);
-
- const abortableSearches = [];
- let areAllSearchRequestsAborted = false;
-
- // When we traverse our search requests and send out searches, some of them may fail. We'll
- // store those that don't fail here.
- const activeSearchRequests = [];
-
- // Respond to each searchRequest with the response or ABORTED.
- const respondToSearchRequests = (responsesInOriginalRequestOrder = []) => {
- // We map over searchRequests because if we were originally provided an ABORTED
- // request then we'll return that value.
- return Promise.map(searchRequests, function (searchRequest, searchRequestIndex) {
- if (searchRequest.aborted) {
- return ABORTED;
- }
-
- const status = searchRequests[searchRequestIndex];
-
- if (status === ABORTED) {
- return ABORTED;
- }
-
- const activeSearchRequestIndex = activeSearchRequests.indexOf(searchRequest);
- const isFailedSearchRequest = activeSearchRequestIndex === -1;
-
- if (isFailedSearchRequest) {
- return ABORTED;
- }
-
- return responsesInOriginalRequestOrder[searchRequestIndex];
- })
- .then(
- (res) => defer.resolve(res),
- (err) => defer.reject(err)
- );
- };
-
- // handle a request being aborted while being fetched
- const requestWasAborted = Promise.method(function (searchRequest, index) {
- if (searchRequests[index] === ABORTED) {
- defer.reject(new Error(
- i18n.translate('common.ui.courier.fetch.requestWasAbortedTwiceErrorMessage', {
- defaultMessage: 'Request was aborted twice?',
- })
- ));
- }
-
- requestsToFetchCount--;
-
- if (requestsToFetchCount !== 0) {
- // We can't resolve early unless all searchRequests have been aborted.
- return;
- }
-
- abortableSearches.forEach(({ abort }) => {
- abort();
- });
-
- areAllSearchRequestsAborted = true;
-
- return respondToSearchRequests();
- });
-
- // attach abort handlers, close over request index
- searchRequests.forEach(function (searchRequest, index) {
- if (!isRequest(searchRequest)) {
- return;
- }
-
- searchRequest.whenAborted(function () {
- requestWasAborted(searchRequest, index).catch(defer.reject);
- });
- });
-
- const searchStrategiesWithRequests = assignSearchRequestsToSearchStrategies(requestsToFetch);
-
- // We're going to create a new async context here, so that the logic within it can execute
- // asynchronously after we've returned a reference to defer.promise.
- Promise.resolve().then(async () => {
- // Execute each request using its search strategy.
- for (let i = 0; i < searchStrategiesWithRequests.length; i++) {
- const searchStrategyWithSearchRequests = searchStrategiesWithRequests[i];
- const { searchStrategy, searchRequests } = searchStrategyWithSearchRequests;
- const {
- searching,
- abort,
- failedSearchRequests,
- } = await searchStrategy.search({ searchRequests, es, Promise, serializeFetchParams, config, sessionId, esShardTimeout });
-
- // Collect searchRequests which have successfully been sent.
- searchRequests.forEach(searchRequest => {
- if (failedSearchRequests.includes(searchRequest)) {
- return;
- }
-
- activeSearchRequests.push(searchRequest);
- });
-
- abortableSearches.push({
- searching,
- abort,
- requestsCount: searchRequests.length,
- });
- }
-
- try {
- // The request was aborted while we were doing the above logic.
- if (areAllSearchRequestsAborted) {
- return;
- }
-
- const segregatedResponses = await Promise.all(abortableSearches.map(async ({ searching, requestsCount }) => {
- return searching.catch((e) => {
- // Duplicate errors so that they correspond to the original requests.
- return new Array(requestsCount).fill({ error: e });
- });
- }));
-
- // Assigning searchRequests to strategies means that the responses come back in a different
- // order than the original searchRequests. So we'll put them back in order so that we can
- // use the order to associate each response with the original request.
- const responsesInOriginalRequestOrder = new Array(searchRequests.length);
- segregatedResponses.forEach((responses, strategyIndex) => {
- responses.forEach((response, responseIndex) => {
- const searchRequest = searchStrategiesWithRequests[strategyIndex].searchRequests[responseIndex];
- const requestIndex = searchRequests.indexOf(searchRequest);
- responsesInOriginalRequestOrder[requestIndex] = response;
- });
- });
-
- await respondToSearchRequests(responsesInOriginalRequestOrder);
- } catch(error) {
- if (errorAllowExplicitIndex.test(error)) {
- return errorAllowExplicitIndex.takeover();
- }
-
- defer.reject(error);
- }
+import { groupBy } from 'lodash';
+import { getSearchStrategyForSearchRequest, getSearchStrategyById } from '../search_strategy';
+import { handleResponse } from './handle_response';
+
+export function callClient(searchRequests, requestsOptions = [], { es, config, esShardTimeout } = {}) {
+ // Correlate the options with the request that they're associated with
+ const requestOptionEntries = searchRequests.map((request, i) => [request, requestsOptions[i]]);
+ const requestOptionsMap = new Map(requestOptionEntries);
+
+ // Group the requests by the strategy used to search that specific request
+ const searchStrategyMap = groupBy(searchRequests, (request, i) => {
+ const searchStrategy = getSearchStrategyForSearchRequest(request, requestsOptions[i]);
+ return searchStrategy.id;
+ });
+
+ // Execute each search strategy with the group of requests, but return the responses in the same
+ // order in which they were received. We use a map to correlate the original request with its
+ // response.
+ const requestResponseMap = new Map();
+ Object.keys(searchStrategyMap).forEach(searchStrategyId => {
+ const searchStrategy = getSearchStrategyById(searchStrategyId);
+ const requests = searchStrategyMap[searchStrategyId];
+ const { searching, abort } = searchStrategy.search({ searchRequests: requests, es, config, esShardTimeout });
+ requests.forEach((request, i) => {
+ const response = searching.then(results => handleResponse(request, results[i]));
+ const { abortSignal } = requestOptionsMap.get(request) || {};
+ if (abortSignal) abortSignal.addEventListener('abort', abort);
+ requestResponseMap.set(request, response);
});
+ }, []);
+ return searchRequests.map(request => requestResponseMap.get(request));
+}
- // Return the promise which acts as our vehicle for providing search responses to the consumer.
- // However, if there are any errors, notify the searchRequests of them *instead* of bubbling
- // them up to the consumer.
- return defer.promise.catch((err) => {
- // By returning the return value of this catch() without rethrowing the error, we delegate
- // error-handling to the searchRequest instead of the consumer.
- searchRequests.forEach((searchRequest, index) => {
- if (searchRequests[index] !== ABORTED) {
- searchRequest.handleFailure(err);
- }
- });
- });
- }
- return callClient;
-}
diff --git a/src/legacy/ui/public/courier/fetch/call_client.test.js b/src/legacy/ui/public/courier/fetch/call_client.test.js
new file mode 100644
index 0000000000000..463d6c59e479e
--- /dev/null
+++ b/src/legacy/ui/public/courier/fetch/call_client.test.js
@@ -0,0 +1,128 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { callClient } from './call_client';
+import { handleResponse } from './handle_response';
+
+const mockResponses = [{}, {}];
+const mockAbortFns = [jest.fn(), jest.fn()];
+const mockSearchFns = [
+ jest.fn(({ searchRequests }) => ({
+ searching: Promise.resolve(Array(searchRequests.length).fill(mockResponses[0])),
+ abort: mockAbortFns[0]
+ })),
+ jest.fn(({ searchRequests }) => ({
+ searching: Promise.resolve(Array(searchRequests.length).fill(mockResponses[1])),
+ abort: mockAbortFns[1]
+ }))
+];
+const mockSearchStrategies = mockSearchFns.map((search, i) => ({ search, id: i }));
+
+jest.mock('./handle_response', () => ({
+ handleResponse: jest.fn((request, response) => response)
+}));
+
+jest.mock('../search_strategy', () => ({
+ getSearchStrategyForSearchRequest: request => mockSearchStrategies[request._searchStrategyId],
+ getSearchStrategyById: id => mockSearchStrategies[id]
+}));
+
+describe('callClient', () => {
+ beforeEach(() => {
+ handleResponse.mockClear();
+ mockAbortFns.forEach(fn => fn.mockClear());
+ mockSearchFns.forEach(fn => fn.mockClear());
+ });
+
+ test('Executes each search strategy with its group of matching requests', () => {
+ const searchRequests = [{
+ _searchStrategyId: 0
+ }, {
+ _searchStrategyId: 1
+ }, {
+ _searchStrategyId: 0
+ }, {
+ _searchStrategyId: 1
+ }];
+
+ callClient(searchRequests);
+
+ expect(mockSearchFns[0]).toBeCalled();
+ expect(mockSearchFns[0].mock.calls[0][0].searchRequests).toEqual([searchRequests[0], searchRequests[2]]);
+ expect(mockSearchFns[1]).toBeCalled();
+ expect(mockSearchFns[1].mock.calls[0][0].searchRequests).toEqual([searchRequests[1], searchRequests[3]]);
+ });
+
+ test('Passes the additional arguments it is given to the search strategy', () => {
+ const searchRequests = [{
+ _searchStrategyId: 0
+ }];
+ const args = { es: {}, config: {}, esShardTimeout: 0 };
+
+ callClient(searchRequests, [], args);
+
+ expect(mockSearchFns[0]).toBeCalled();
+ expect(mockSearchFns[0].mock.calls[0][0]).toEqual({ searchRequests, ...args });
+ });
+
+ test('Returns the responses in the original order', async () => {
+ const searchRequests = [{
+ _searchStrategyId: 1
+ }, {
+ _searchStrategyId: 0
+ }];
+
+ const responses = await Promise.all(callClient(searchRequests));
+
+ expect(responses).toEqual([mockResponses[1], mockResponses[0]]);
+ });
+
+ test('Calls handleResponse with each request and response', async () => {
+ const searchRequests = [{
+ _searchStrategyId: 0
+ }, {
+ _searchStrategyId: 1
+ }];
+
+ const responses = callClient(searchRequests);
+ await Promise.all(responses);
+
+ expect(handleResponse).toBeCalledTimes(2);
+ expect(handleResponse).toBeCalledWith(searchRequests[0], mockResponses[0]);
+ expect(handleResponse).toBeCalledWith(searchRequests[1], mockResponses[1]);
+ });
+
+ test('If passed an abortSignal, calls abort on the strategy if the signal is aborted', () => {
+ const searchRequests = [{
+ _searchStrategyId: 0
+ }, {
+ _searchStrategyId: 1
+ }];
+ const abortController = new AbortController();
+ const requestOptions = [{
+ abortSignal: abortController.signal
+ }];
+
+ callClient(searchRequests, requestOptions);
+ abortController.abort();
+
+ expect(mockAbortFns[0]).toBeCalled();
+ expect(mockAbortFns[1]).not.toBeCalled();
+ });
+});
diff --git a/src/legacy/ui/public/courier/fetch/call_response_handlers.js b/src/legacy/ui/public/courier/fetch/call_response_handlers.js
deleted file mode 100644
index aaf82168e385f..0000000000000
--- a/src/legacy/ui/public/courier/fetch/call_response_handlers.js
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-import React from 'react';
-import { i18n } from '@kbn/i18n';
-import { EuiSpacer } from '@elastic/eui';
-import { toastNotifications } from '../../notify';
-import { RequestFailure } from './errors';
-import { RequestStatus } from './req_status';
-import { SearchError } from '../search_strategy/search_error';
-import { ShardFailureOpenModalButton } from './components/shard_failure_open_modal_button';
-
-export function CallResponseHandlersProvider(Promise) {
- const ABORTED = RequestStatus.ABORTED;
- const INCOMPLETE = RequestStatus.INCOMPLETE;
-
- function callResponseHandlers(searchRequests, responses) {
- return Promise.map(searchRequests, function (searchRequest, index) {
- if (searchRequest === ABORTED || searchRequest.aborted) {
- return ABORTED;
- }
-
- const response = responses[index];
-
- if (response.timed_out) {
- toastNotifications.addWarning({
- title: i18n.translate('common.ui.courier.fetch.requestTimedOutNotificationMessage', {
- defaultMessage: 'Data might be incomplete because your request timed out',
- }),
- });
- }
-
- if (response._shards && response._shards.failed) {
- const title = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationMessage', {
- defaultMessage: '{shardsFailed} of {shardsTotal} shards failed',
- values: {
- shardsFailed: response._shards.failed,
- shardsTotal: response._shards.total,
- },
- });
- const description = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationDescription', {
- defaultMessage: 'The data you are seeing might be incomplete or wrong.',
- });
-
- const text = (
- <>
- {description}
-
-
- >
- );
-
- toastNotifications.addWarning({
- title,
- text,
- });
- }
-
- function progress() {
- if (searchRequest.isIncomplete()) {
- return INCOMPLETE;
- }
-
- searchRequest.complete();
- return response;
- }
-
- if (response.error) {
- if (searchRequest.filterError(response)) {
- return progress();
- } else {
- return searchRequest.handleFailure(
- response.error instanceof SearchError
- ? response.error
- : new RequestFailure(null, response)
- );
- }
- }
-
- return Promise.try(() => searchRequest.handleResponse(response)).then(progress);
- });
- }
-
- return callResponseHandlers;
-}
diff --git a/src/legacy/ui/public/courier/fetch/continue_incomplete.js b/src/legacy/ui/public/courier/fetch/continue_incomplete.js
deleted file mode 100644
index b40ebdb886748..0000000000000
--- a/src/legacy/ui/public/courier/fetch/continue_incomplete.js
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import { RequestStatus } from './req_status';
-
-export function ContinueIncompleteProvider() {
- const INCOMPLETE = RequestStatus.INCOMPLETE;
-
- function continueIncompleteRequests(searchRequests, responses, fetchSearchResults) {
- const incompleteSearchRequests = [];
-
- responses.forEach(function (response, index) {
- if (response === INCOMPLETE) {
- incompleteSearchRequests.push(searchRequests[index]);
- }
- });
-
- if (!incompleteSearchRequests.length) {
- return responses;
- }
-
- return fetchSearchResults(incompleteSearchRequests)
- .then(function (completedResponses) {
- return responses.map(function (prevResponse) {
- if (prevResponse !== INCOMPLETE) {
- return prevResponse;
- }
-
- return completedResponses.shift();
- });
- });
- }
-
- return continueIncompleteRequests;
-}
diff --git a/src/legacy/ui/public/courier/fetch/fetch_now.js b/src/legacy/ui/public/courier/fetch/fetch_now.js
deleted file mode 100644
index de5704d4380f4..0000000000000
--- a/src/legacy/ui/public/courier/fetch/fetch_now.js
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import { fatalError } from '../../notify';
-import { CallClientProvider } from './call_client';
-import { CallResponseHandlersProvider } from './call_response_handlers';
-import { ContinueIncompleteProvider } from './continue_incomplete';
-import { RequestStatus } from './req_status';
-import { i18n } from '@kbn/i18n';
-
-/**
- * Fetch now provider should be used if you want the results searched and returned immediately.
- * This can be slightly inefficient if a large number of requests are queued up, we can batch these
- * by using fetchSoon. This introduces a slight delay which allows other requests to queue up before
- * sending out requests in a batch.
- *
- * @param Private
- * @param Promise
- * @return {fetchNow}
- * @constructor
- */
-export function FetchNowProvider(Private, Promise) {
- // core tasks
- const callClient = Private(CallClientProvider);
- const callResponseHandlers = Private(CallResponseHandlersProvider);
- const continueIncomplete = Private(ContinueIncompleteProvider);
-
- const ABORTED = RequestStatus.ABORTED;
- const INCOMPLETE = RequestStatus.INCOMPLETE;
-
- function fetchNow(searchRequests) {
- return fetchSearchResults(searchRequests.map(function (searchRequest) {
- if (!searchRequest.started) {
- return searchRequest;
- }
-
- return searchRequest.retry();
- }))
- .catch(error => {
- // If any errors occur after the search requests have resolved, then we kill Kibana.
- fatalError(error, 'Courier fetch');
- });
- }
-
- function fetchSearchResults(searchRequests) {
- function replaceAbortedRequests() {
- searchRequests = searchRequests.map(searchRequest => {
- if (searchRequest.aborted) {
- return ABORTED;
- }
-
- return searchRequest;
- });
- }
-
- replaceAbortedRequests();
- return startRequests(searchRequests)
- .then(function () {
- replaceAbortedRequests();
- return callClient(searchRequests)
- .catch(() => {
- // Silently swallow errors that result from search requests so the consumer can surface
- // them as notifications instead of courier forcing fatal errors.
- });
- })
- .then(function (responses) {
- replaceAbortedRequests();
- return callResponseHandlers(searchRequests, responses);
- })
- .then(function (responses) {
- replaceAbortedRequests();
- return continueIncomplete(searchRequests, responses, fetchSearchResults);
- })
- .then(function (responses) {
- replaceAbortedRequests();
- return responses.map(function (resp) {
- switch (resp) {
- case ABORTED:
- return null;
- case INCOMPLETE:
- throw new Error(
- i18n.translate('common.ui.courier.fetch.failedToClearRequestErrorMessage', {
- defaultMessage: 'Failed to clear incomplete or duplicate request from responses.',
- })
- );
- default:
- return resp;
- }
- });
- });
- }
-
- function startRequests(searchRequests) {
- return Promise.map(searchRequests, function (searchRequest) {
- if (searchRequest === ABORTED) {
- return searchRequest;
- }
-
- return new Promise(function (resolve) {
- const action = searchRequest.started ? searchRequest.continue : searchRequest.start;
- resolve(action.call(searchRequest));
- })
- .catch(err => searchRequest.handleFailure(err));
- });
- }
-
- return fetchNow;
-}
diff --git a/src/legacy/ui/public/courier/fetch/fetch_soon.js b/src/legacy/ui/public/courier/fetch/fetch_soon.js
index 266d4a6d3c9e6..ef02beddcb59a 100644
--- a/src/legacy/ui/public/courier/fetch/fetch_soon.js
+++ b/src/legacy/ui/public/courier/fetch/fetch_soon.js
@@ -17,41 +17,54 @@
* under the License.
*/
-import _ from 'lodash';
-import { searchRequestQueue } from '../search_request_queue';
-import { FetchNowProvider } from './fetch_now';
+import { callClient } from './call_client';
/**
- * This is usually the right fetch provider to use, rather than FetchNowProvider, as this class introduces
- * a slight delay in the request process to allow multiple requests to queue up (e.g. when a dashboard
- * is loading).
+ * This function introduces a slight delay in the request process to allow multiple requests to queue
+ * up (e.g. when a dashboard is loading).
*/
-export function FetchSoonProvider(Private, Promise, config) {
-
- const fetchNow = Private(FetchNowProvider);
+export async function fetchSoon(request, options, { es, config, esShardTimeout }) {
+ const delay = config.get('courier:batchSearches') ? 50 : 0;
+ return delayedFetch(request, options, { es, config, esShardTimeout }, delay);
+}
- const fetch = () => fetchNow(searchRequestQueue.getPending());
- const debouncedFetch = _.debounce(fetch, {
- wait: 10,
- maxWait: 50
+/**
+ * Delays executing a function for a given amount of time, and returns a promise that resolves
+ * with the result.
+ * @param fn The function to invoke
+ * @param ms The number of milliseconds to wait
+ * @return Promise A promise that resolves with the result of executing the function
+ */
+function delay(fn, ms) {
+ return new Promise(resolve => {
+ setTimeout(() => resolve(fn()), ms);
});
+}
- /**
- * Fetch a list of requests
- * @param {array} requests - the requests to fetch
- * @async
- */
- this.fetchSearchRequests = (requests) => {
- requests.forEach(req => req._setFetchRequested());
- config.get('courier:batchSearches') ? debouncedFetch() : fetch();
- return Promise.all(requests.map(req => req.getCompletePromise()));
- };
+// The current batch/queue of requests to fetch
+let requestsToFetch = [];
+let requestOptions = [];
- /**
- * Return a promise that resembles the success of the fetch completing so we can execute
- * logic based on this state change. Individual errors are routed to their respective requests.
+// The in-progress fetch (if there is one)
+let fetchInProgress = null;
+
+/**
+ * Delay fetching for a given amount of time, while batching up the requests to be fetched.
+ * Returns a promise that resolves with the response for the given request.
+ * @param request The request to fetch
+ * @param ms The number of milliseconds to wait (and batch requests)
+ * @return Promise The response for the given request
*/
- this.fetchQueued = () => {
- return this.fetchSearchRequests(searchRequestQueue.getStartable());
- };
+async function delayedFetch(request, options, { es, config, esShardTimeout }, ms) {
+ const i = requestsToFetch.length;
+ requestsToFetch = [...requestsToFetch, request];
+ requestOptions = [...requestOptions, options];
+ const responses = await (fetchInProgress = fetchInProgress || delay(() => {
+ const response = callClient(requestsToFetch, requestOptions, { es, config, esShardTimeout });
+ requestsToFetch = [];
+ requestOptions = [];
+ fetchInProgress = null;
+ return response;
+ }, ms));
+ return responses[i];
}
diff --git a/src/legacy/ui/public/courier/fetch/fetch_soon.test.js b/src/legacy/ui/public/courier/fetch/fetch_soon.test.js
new file mode 100644
index 0000000000000..824a4ab7e12e3
--- /dev/null
+++ b/src/legacy/ui/public/courier/fetch/fetch_soon.test.js
@@ -0,0 +1,140 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { fetchSoon } from './fetch_soon';
+import { callClient } from './call_client';
+
+function getMockConfig(config) {
+ const entries = Object.entries(config);
+ return new Map(entries);
+}
+
+const mockResponses = {
+ 'foo': {},
+ 'bar': {},
+ 'baz': {},
+};
+
+jest.useFakeTimers();
+
+jest.mock('./call_client', () => ({
+ callClient: jest.fn(requests => {
+ // Allow a request object to specify which mockResponse it wants to receive (_mockResponseId)
+ // in addition to how long to simulate waiting before returning a response (_waitMs)
+ const responses = requests.map(request => {
+ const waitMs = requests.reduce((total, request) => request._waitMs || 0, 0);
+ return new Promise(resolve => {
+ resolve(mockResponses[request._mockResponseId]);
+ }, waitMs);
+ });
+ return Promise.resolve(responses);
+ })
+}));
+
+describe('fetchSoon', () => {
+ beforeEach(() => {
+ callClient.mockClear();
+ });
+
+ test('should delay by 0ms if config is set to not batch searches', () => {
+ const config = getMockConfig({
+ 'courier:batchSearches': false
+ });
+ const request = {};
+ const options = {};
+
+ fetchSoon(request, options, { config });
+
+ expect(callClient).not.toBeCalled();
+ jest.advanceTimersByTime(0);
+ expect(callClient).toBeCalled();
+ });
+
+ test('should delay by 50ms if config is set to batch searches', () => {
+ const config = getMockConfig({
+ 'courier:batchSearches': true
+ });
+ const request = {};
+ const options = {};
+
+ fetchSoon(request, options, { config });
+
+ expect(callClient).not.toBeCalled();
+ jest.advanceTimersByTime(0);
+ expect(callClient).not.toBeCalled();
+ jest.advanceTimersByTime(50);
+ expect(callClient).toBeCalled();
+ });
+
+ test('should send a batch of requests to callClient', () => {
+ const config = getMockConfig({
+ 'courier:batchSearches': true
+ });
+ const requests = [{ foo: 1 }, { foo: 2 }];
+ const options = [{ bar: 1 }, { bar: 2 }];
+
+ requests.forEach((request, i) => {
+ fetchSoon(request, options[i], { config });
+ });
+
+ jest.advanceTimersByTime(50);
+ expect(callClient).toBeCalledTimes(1);
+ expect(callClient.mock.calls[0][0]).toEqual(requests);
+ expect(callClient.mock.calls[0][1]).toEqual(options);
+ });
+
+ test('should return the response to the corresponding call for multiple batched requests', async () => {
+ const config = getMockConfig({
+ 'courier:batchSearches': true
+ });
+ const requests = [{ _mockResponseId: 'foo' }, { _mockResponseId: 'bar' }];
+
+ const promises = requests.map(request => {
+ return fetchSoon(request, {}, { config });
+ });
+ jest.advanceTimersByTime(50);
+ const results = await Promise.all(promises);
+
+ expect(results).toEqual([mockResponses.foo, mockResponses.bar]);
+ });
+
+ test('should wait for the previous batch to start before starting a new batch', () => {
+ const config = getMockConfig({
+ 'courier:batchSearches': true
+ });
+ const firstBatch = [{ foo: 1 }, { foo: 2 }];
+ const secondBatch = [{ bar: 1 }, { bar: 2 }];
+
+ firstBatch.forEach(request => {
+ fetchSoon(request, {}, { config });
+ });
+ jest.advanceTimersByTime(50);
+ secondBatch.forEach(request => {
+ fetchSoon(request, {}, { config });
+ });
+
+ expect(callClient).toBeCalledTimes(1);
+ expect(callClient.mock.calls[0][0]).toEqual(firstBatch);
+
+ jest.advanceTimersByTime(50);
+
+ expect(callClient).toBeCalledTimes(2);
+ expect(callClient.mock.calls[1][0]).toEqual(secondBatch);
+ });
+});
diff --git a/src/legacy/ui/public/courier/fetch/get_search_params.js b/src/legacy/ui/public/courier/fetch/get_search_params.js
index 7561661d321fa..dd55201ba5540 100644
--- a/src/legacy/ui/public/courier/fetch/get_search_params.js
+++ b/src/legacy/ui/public/courier/fetch/get_search_params.js
@@ -17,6 +17,8 @@
* under the License.
*/
+const sessionId = Date.now();
+
export function getMSearchParams(config) {
return {
rest_total_hits_as_int: true,
@@ -25,13 +27,13 @@ export function getMSearchParams(config) {
};
}
-export function getSearchParams(config, sessionId, esShardTimeout) {
+export function getSearchParams(config, esShardTimeout) {
return {
rest_total_hits_as_int: true,
ignore_unavailable: true,
ignore_throttled: getIgnoreThrottled(config),
max_concurrent_shard_requests: getMaxConcurrentShardRequests(config),
- preference: getPreference(config, sessionId),
+ preference: getPreference(config),
timeout: getTimeout(esShardTimeout),
};
}
@@ -45,7 +47,7 @@ export function getMaxConcurrentShardRequests(config) {
return maxConcurrentShardRequests > 0 ? maxConcurrentShardRequests : undefined;
}
-export function getPreference(config, sessionId) {
+export function getPreference(config) {
const setRequestPreference = config.get('courier:setRequestPreference');
if (setRequestPreference === 'sessionId') return sessionId;
return setRequestPreference === 'custom' ? config.get('courier:customRequestPreference') : undefined;
diff --git a/src/legacy/ui/public/courier/fetch/get_search_params.test.js b/src/legacy/ui/public/courier/fetch/get_search_params.test.js
index 9129aea05f428..380d1da963ddf 100644
--- a/src/legacy/ui/public/courier/fetch/get_search_params.test.js
+++ b/src/legacy/ui/public/courier/fetch/get_search_params.test.js
@@ -99,10 +99,10 @@ describe('getSearchParams', () => {
test('includes timeout according to esShardTimeout if greater than 0', () => {
const config = getConfigStub();
- let searchParams = getSearchParams(config, null, 0);
+ let searchParams = getSearchParams(config, 0);
expect(searchParams.timeout).toBe(undefined);
- searchParams = getSearchParams(config, null, 100);
+ searchParams = getSearchParams(config, 100);
expect(searchParams.timeout).toBe('100ms');
});
});
diff --git a/src/legacy/ui/public/courier/fetch/handle_response.js b/src/legacy/ui/public/courier/fetch/handle_response.js
new file mode 100644
index 0000000000000..fb2797369d78f
--- /dev/null
+++ b/src/legacy/ui/public/courier/fetch/handle_response.js
@@ -0,0 +1,67 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+import React from 'react';
+import { toastNotifications } from '../../notify/toasts';
+import { i18n } from '@kbn/i18n';
+import { EuiSpacer } from '@elastic/eui';
+import { ShardFailureOpenModalButton } from './components/shard_failure_open_modal_button';
+
+export function handleResponse(request, response) {
+ if (response.timed_out) {
+ toastNotifications.addWarning({
+ title: i18n.translate('common.ui.courier.fetch.requestTimedOutNotificationMessage', {
+ defaultMessage: 'Data might be incomplete because your request timed out',
+ }),
+ });
+ }
+
+ if (response._shards && response._shards.failed) {
+ const title = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationMessage', {
+ defaultMessage: '{shardsFailed} of {shardsTotal} shards failed',
+ values: {
+ shardsFailed: response._shards.failed,
+ shardsTotal: response._shards.total,
+ },
+ });
+ const description = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationDescription', {
+ defaultMessage: 'The data you are seeing might be incomplete or wrong.',
+ });
+
+ const text = (
+ <>
+ {description}
+
+
+ >
+ );
+
+ toastNotifications.addWarning({
+ title,
+ text,
+ });
+ }
+
+ return response;
+}
diff --git a/src/legacy/ui/public/courier/fetch/handle_response.test.js b/src/legacy/ui/public/courier/fetch/handle_response.test.js
new file mode 100644
index 0000000000000..0836832e6c05a
--- /dev/null
+++ b/src/legacy/ui/public/courier/fetch/handle_response.test.js
@@ -0,0 +1,74 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { handleResponse } from './handle_response';
+import { toastNotifications } from '../../notify/toasts';
+
+jest.mock('../../notify/toasts', () => {
+ return {
+ toastNotifications: {
+ addWarning: jest.fn()
+ }
+ };
+});
+
+jest.mock('@kbn/i18n', () => {
+ return {
+ i18n: {
+ translate: (id, { defaultMessage }) => defaultMessage
+ }
+ };
+});
+
+describe('handleResponse', () => {
+ beforeEach(() => {
+ toastNotifications.addWarning.mockReset();
+ });
+
+ test('should notify if timed out', () => {
+ const request = { body: {} };
+ const response = {
+ timed_out: true
+ };
+ const result = handleResponse(request, response);
+ expect(result).toBe(response);
+ expect(toastNotifications.addWarning).toBeCalled();
+ expect(toastNotifications.addWarning.mock.calls[0][0].title).toMatch('request timed out');
+ });
+
+ test('should notify if shards failed', () => {
+ const request = { body: {} };
+ const response = {
+ _shards: {
+ failed: true
+ }
+ };
+ const result = handleResponse(request, response);
+ expect(result).toBe(response);
+ expect(toastNotifications.addWarning).toBeCalled();
+ expect(toastNotifications.addWarning.mock.calls[0][0].title).toMatch('shards failed');
+ });
+
+ test('returns the response', () => {
+ const request = {};
+ const response = {};
+ const result = handleResponse(request, response);
+ expect(result).toBe(response);
+ });
+});
diff --git a/src/legacy/ui/public/courier/fetch/index.js b/src/legacy/ui/public/courier/fetch/index.js
index a5daaca5cb2c3..7b89dea1a110c 100644
--- a/src/legacy/ui/public/courier/fetch/index.js
+++ b/src/legacy/ui/public/courier/fetch/index.js
@@ -17,5 +17,5 @@
* under the License.
*/
-export { FetchSoonProvider } from './fetch_soon';
+export * from './fetch_soon';
export * from './get_search_params';
diff --git a/src/legacy/ui/public/courier/fetch/is_request.js b/src/legacy/ui/public/courier/fetch/is_request.js
deleted file mode 100644
index 73c54d6f4bca1..0000000000000
--- a/src/legacy/ui/public/courier/fetch/is_request.js
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import { SearchRequestProvider } from './request';
-
-export function IsRequestProvider(Private) {
- const SearchRequest = Private(SearchRequestProvider);
-
- return function isRequest(obj) {
- return obj instanceof SearchRequest;
- };
-}
diff --git a/src/legacy/ui/public/courier/fetch/req_status.js b/src/legacy/ui/public/courier/fetch/req_status.js
deleted file mode 100644
index d56bc6d3ad360..0000000000000
--- a/src/legacy/ui/public/courier/fetch/req_status.js
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-export const RequestStatus = {
- ABORTED: 'aborted',
- INCOMPLETE: 'incomplete',
-};
diff --git a/src/legacy/ui/public/courier/fetch/request/index.js b/src/legacy/ui/public/courier/fetch/request/index.js
deleted file mode 100644
index 6647d0e5b2e10..0000000000000
--- a/src/legacy/ui/public/courier/fetch/request/index.js
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-export { SearchRequestProvider } from './search_request';
diff --git a/src/legacy/ui/public/courier/fetch/request/search_request/__tests__/search_request.js b/src/legacy/ui/public/courier/fetch/request/search_request/__tests__/search_request.js
deleted file mode 100644
index ecac8cd474098..0000000000000
--- a/src/legacy/ui/public/courier/fetch/request/search_request/__tests__/search_request.js
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import ngMock from 'ng_mock';
-import sinon from 'sinon';
-import expect from '@kbn/expect';
-
-import { SearchRequestProvider } from '../search_request';
-import { searchRequestQueue } from '../../../../search_request_queue';
-
-describe('ui/courier/fetch search request', () => {
- beforeEach(ngMock.module('kibana'));
-
- afterEach(() => {
- searchRequestQueue.removeAll();
- });
-
- it('throws exception when created without errorHandler', ngMock.inject((Private) => {
- const SearchReq = Private(SearchRequestProvider);
-
- let caughtError = false;
- try {
- new SearchReq({ source: {} });
- } catch(error) {
- caughtError = true;
- }
- expect(caughtError).to.be(true);
- }));
-
- describe('start', () => {
- it('calls this.source.requestIsStarting(request)', ngMock.inject((Private) => {
- const SearchReq = Private(SearchRequestProvider);
-
- const spy = sinon.spy(() => Promise.resolve());
- const source = { requestIsStarting: spy };
-
- const req = new SearchReq({ source, errorHandler: () => {} });
- expect(req.start()).to.have.property('then').a('function');
- sinon.assert.calledOnce(spy);
- sinon.assert.calledWithExactly(spy, req);
- }));
- });
-
- describe('clone', () => {
- it('returns a search request with identical constructor arguments', ngMock.inject((Private) => {
- const SearchRequest = Private(SearchRequestProvider);
-
- const source = {};
- const errorHandler = () => {};
- const defer = {};
-
- const originalRequest = new SearchRequest({ source, errorHandler, defer });
- const clonedRequest = originalRequest.clone();
-
- expect(clonedRequest).not.to.be(originalRequest);
- expect(clonedRequest.source).to.be(source);
- expect(clonedRequest.errorHandler).to.be(errorHandler);
- expect(clonedRequest.defer).to.be(defer);
- }));
-
- });
-});
diff --git a/src/legacy/ui/public/courier/fetch/request/search_request/index.js b/src/legacy/ui/public/courier/fetch/request/search_request/index.js
deleted file mode 100644
index 6647d0e5b2e10..0000000000000
--- a/src/legacy/ui/public/courier/fetch/request/search_request/index.js
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-export { SearchRequestProvider } from './search_request';
diff --git a/src/legacy/ui/public/courier/fetch/request/search_request/search_request.js b/src/legacy/ui/public/courier/fetch/request/search_request/search_request.js
deleted file mode 100644
index a6ce562e462d8..0000000000000
--- a/src/legacy/ui/public/courier/fetch/request/search_request/search_request.js
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import moment from 'moment';
-
-import { searchRequestQueue } from '../../../search_request_queue';
-
-import { createDefer } from 'ui/promises';
-import { i18n } from '@kbn/i18n';
-
-export function SearchRequestProvider(Promise) {
- class SearchRequest {
- constructor({ source, defer, errorHandler }) {
- if (!errorHandler) {
- throw new Error(
- i18n.translate('common.ui.courier.fetch.requireErrorHandlerErrorMessage', {
- defaultMessage: '{errorHandler} is required',
- values: { errorHandler: 'errorHandler' }
- })
- );
- }
-
- this.errorHandler = errorHandler;
- this.source = source;
- this.defer = defer || createDefer(Promise);
- this.abortedDefer = createDefer(Promise);
- this.type = 'search';
-
- // Track execution time.
- this.moment = undefined;
- this.ms = undefined;
-
- // Lifecycle state.
- this.started = false;
- this.stopped = false;
- this._isFetchRequested = false;
-
- searchRequestQueue.add(this);
- }
-
- /**
- * Called by the searchPoll to find requests that should be sent to the
- * fetchSoon module. When a module is sent to fetchSoon its _isFetchRequested flag
- * is set, and this consults that flag so requests are not send to fetchSoon
- * multiple times.
- *
- * @return {Boolean}
- */
- canStart() {
- if (this.source._fetchDisabled) {
- return false;
- }
-
- if (this.stopped) {
- return false;
- }
-
- if (this._isFetchRequested) {
- return false;
- }
-
- return true;
- }
-
- /**
- * Used to find requests that were previously sent to the fetchSoon module but
- * have not been started yet, so they can be started.
- *
- * @return {Boolean}
- */
- isFetchRequestedAndPending() {
- if (this.started) {
- return false;
- }
-
- return this._isFetchRequested;
- }
-
- /**
- * Called by the fetchSoon module when this request has been sent to
- * be fetched. At that point the request is somewhere between `ready-to-start`
- * and `started`. The fetch module then waits a short period of time to
- * allow requests to build up in the request queue, and then immediately
- * fetches all requests that return true from `isFetchRequestedAndPending()`
- *
- * @return {undefined}
- */
- _setFetchRequested() {
- this._isFetchRequested = true;
- }
-
- start() {
- if (this.started) {
- throw new TypeError(
- i18n.translate('common.ui.courier.fetch.unableStartRequestErrorMessage', {
- defaultMessage: 'Unable to start request because it has already started',
- })
- );
- }
-
- this.started = true;
- this.moment = moment();
-
- return this.source.requestIsStarting(this);
- }
-
- getFetchParams() {
- return this.source._flatten();
- }
-
- filterError() {
- return false;
- }
-
- handleResponse(resp) {
- this.success = true;
- this.resp = resp;
- }
-
- handleFailure(error) {
- this.success = false;
- this.resp = error;
- this.resp = (error && error.resp) || error;
- return this.errorHandler(this, error);
- }
-
- isIncomplete() {
- return false;
- }
-
- continue() {
- throw new Error(
- i18n.translate('common.ui.courier.fetch.unableContinueRequestErrorMessage', {
- defaultMessage: 'Unable to continue {type} request',
- values: { type: this.type }
- })
- );
- }
-
- retry() {
- const clone = this.clone();
- this.abort();
- return clone;
- }
-
- _markStopped() {
- if (this.stopped) return;
- this.stopped = true;
- this.source.requestIsStopped(this);
- searchRequestQueue.remove(this);
- }
-
- abort() {
- this._markStopped();
- this.aborted = true;
- const error = new Error('The request was aborted.');
- error.name = 'AbortError';
- this.abortedDefer.resolve(error);
- this.abortedDefer = null;
- this.defer.reject(error);
- this.defer = null;
- }
-
- whenAborted(cb) {
- this.abortedDefer.promise.then(cb);
- }
-
- complete() {
- this._markStopped();
- this.ms = this.moment.diff() * -1;
- this.defer.resolve(this.resp);
- }
-
- getCompletePromise() {
- return this.defer.promise;
- }
-
- getCompleteOrAbortedPromise() {
- return Promise.race([ this.defer.promise, this.abortedDefer.promise ]);
- }
-
- clone = () => {
- const { source, defer, errorHandler } = this;
- return new SearchRequest({ source, defer, errorHandler });
- };
- }
-
- return SearchRequest;
-}
diff --git a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/index.js b/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/index.js
deleted file mode 100644
index 807d53086e106..0000000000000
--- a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/index.js
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-export { SerializeFetchParamsProvider } from './serialize_fetch_params_provider';
diff --git a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.js b/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.js
deleted file mode 100644
index ba8912c966e3e..0000000000000
--- a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.js
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import { getPreference, getTimeout } from '../../get_search_params';
-
-/**
- *
- * @param requestsFetchParams {Array.}
- * @param Promise
- * @param sessionId
- * @return {Promise.}
- */
-export function serializeFetchParams(
- requestsFetchParams,
- Promise,
- sessionId,
- config,
- esShardTimeout) {
- const promises = requestsFetchParams.map(function (fetchParams) {
- return Promise.resolve(fetchParams.index)
- .then(function (indexPattern) {
- const body = {
- timeout: getTimeout(esShardTimeout),
- ...fetchParams.body || {},
- };
-
- const index = (indexPattern && indexPattern.getIndex) ? indexPattern.getIndex() : indexPattern;
-
- const header = {
- index,
- search_type: fetchParams.search_type,
- ignore_unavailable: true,
- preference: getPreference(config, sessionId)
- };
-
- return `${JSON.stringify(header)}\n${JSON.stringify(body)}`;
- });
- });
-
- return Promise.all(promises).then(function (requests) {
- return requests.join('\n') + '\n';
- });
-}
-
diff --git a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.test.js b/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.test.js
deleted file mode 100644
index 5f4c5bf9ef45a..0000000000000
--- a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.test.js
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import { serializeFetchParams } from './serialize_fetch_params';
-import _ from 'lodash';
-
-const DEFAULT_SESSION_ID = '1';
-
-function serializeFetchParamsWithDefaults(paramOverrides) {
- const paramDefaults = {
- requestFetchParams: [],
- Promise,
- sessionId: DEFAULT_SESSION_ID,
- config: {
- get: () => {
- return 'sessionId';
- }
- },
- timeout: 100,
- };
- const params = { ...paramDefaults, ...paramOverrides };
-
- return serializeFetchParams(
- params.requestFetchParams,
- Promise,
- params.sessionId,
- params.config,
- params.timeout,
- );
-}
-
-describe('when indexList is not empty', () => {
- test('includes the index', () => {
- const requestFetchParams = [
- {
- index: ['logstash-123'],
- type: 'blah',
- search_type: 'blah2',
- body: { foo: 'bar', $foo: 'bar' }
- }
- ];
- return serializeFetchParamsWithDefaults({ requestFetchParams }).then(value => {
- expect(_.includes(value, '"index":["logstash-123"]')).toBe(true);
- });
- });
-});
-
-describe('headers', () => {
-
- const requestFetchParams = [
- {
- index: ['logstash-123'],
- type: 'blah',
- search_type: 'blah2',
- body: { foo: 'bar' }
- }
- ];
-
- const getHeader = async (paramOverrides) => {
- const request = await serializeFetchParamsWithDefaults(paramOverrides);
- const requestParts = request.split('\n');
- if (requestParts.length < 2) {
- throw new Error('fetch Body does not contain expected format header newline body.');
- }
- return JSON.parse(requestParts[0]);
- };
-
- describe('search request preference', () => {
- test('should be set to sessionId when courier:setRequestPreference is "sessionId"', async () => {
- const config = {
- get: () => {
- return 'sessionId';
- }
- };
- const header = await getHeader({ requestFetchParams, config });
- expect(header.preference).toBe(DEFAULT_SESSION_ID);
- });
-
- test('should be set to custom string when courier:setRequestPreference is "custom"', async () => {
- const CUSTOM_PREFERENCE = '_local';
- const config = {
- get: (key) => {
- if (key === 'courier:setRequestPreference') {
- return 'custom';
- } else if (key === 'courier:customRequestPreference') {
- return CUSTOM_PREFERENCE;
- }
- }
- };
- const header = await getHeader({ requestFetchParams, config });
- expect(header.preference).toBe(CUSTOM_PREFERENCE);
- });
-
- test('should not be set when courier:setRequestPreference is "none"', async () => {
- const config = {
- get: () => {
- return 'none';
- }
- };
- const header = await getHeader({ requestFetchParams, config });
- expect(header.preference).toBe(undefined);
- });
- });
-});
-
-describe('body', () => {
- const requestFetchParams = [
- {
- index: ['logstash-123'],
- type: 'blah',
- search_type: 'blah2',
- body: { foo: 'bar' }
- }
- ];
-
- const getBody = async (paramOverrides) => {
- const request = await serializeFetchParamsWithDefaults(paramOverrides);
- const requestParts = request.split('\n');
- if (requestParts.length < 2) {
- throw new Error('fetch Body does not contain expected format: header newline body.');
- }
- return JSON.parse(requestParts[1]);
- };
-
- describe('timeout', () => {
- test('should set a timeout as specified', async () => {
- const request = await getBody({ requestFetchParams, timeout: 200 });
- expect(request).toHaveProperty('timeout', '200ms');
- });
-
- test('should not set a timeout when timeout is 0', async () => {
- const request = await getBody({ requestFetchParams, timeout: 0 });
- expect(request.timeout).toBe(undefined);
- });
- });
-});
diff --git a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params_provider.js b/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params_provider.js
deleted file mode 100644
index 4ddcc05b927ff..0000000000000
--- a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params_provider.js
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import { serializeFetchParams } from './serialize_fetch_params';
-
-export function SerializeFetchParamsProvider(Promise, sessionId, config, esShardTimeout) {
- return (fetchParams) => (
- serializeFetchParams(
- fetchParams,
- Promise,
- sessionId,
- config,
- esShardTimeout)
- );
-}
diff --git a/src/legacy/ui/public/courier/index.js b/src/legacy/ui/public/courier/index.js
index cb14298a9a3b4..5647af3d0d645 100644
--- a/src/legacy/ui/public/courier/index.js
+++ b/src/legacy/ui/public/courier/index.js
@@ -17,7 +17,7 @@
* under the License.
*/
-export { SearchSourceProvider } from './search_source';
+export { SearchSource } from './search_source';
export {
addSearchStrategy,
diff --git a/src/legacy/ui/public/courier/search_poll/search_poll.js b/src/legacy/ui/public/courier/search_poll/search_poll.js
index 91c866c14aa49..f00c2a32e0ec6 100644
--- a/src/legacy/ui/public/courier/search_poll/search_poll.js
+++ b/src/legacy/ui/public/courier/search_poll/search_poll.js
@@ -19,98 +19,50 @@
import _ from 'lodash';
-import { fatalError } from '../../notify';
-import '../../promises';
-import { searchRequestQueue } from '../search_request_queue';
-import { FetchSoonProvider } from '../fetch';
import { timefilter } from 'ui/timefilter';
-export function SearchPollProvider(Private, Promise) {
- const fetchSoon = Private(FetchSoonProvider);
-
- class SearchPoll {
- constructor() {
- this._isPolling = false;
- this._intervalInMs = undefined;
- this._timerId = null;
- this._searchPromise = null;
- this._isIntervalFasterThanSearch = false;
- }
-
- setIntervalInMs = intervalInMs => {
- this._intervalInMs = _.parseInt(intervalInMs);
- };
-
- resume = () => {
- this._isPolling = true;
- this.resetTimer();
- };
-
- pause = () => {
- this._isPolling = false;
- this.clearTimer();
- };
-
- resetTimer = () => {
- // Cancel the pending search and schedule a new one.
- this.clearTimer();
-
- if (this._isPolling) {
- this._timerId = setTimeout(this._search, this._intervalInMs);
- }
- };
+export class SearchPoll {
+ constructor() {
+ this._isPolling = false;
+ this._intervalInMs = undefined;
+ this._timerId = null;
+ }
- clearTimer = () => {
- // Cancel the pending search, if there is one.
- if (this._timerId) {
- clearTimeout(this._timerId);
- this._timerId = null;
- }
- };
+ setIntervalInMs = intervalInMs => {
+ this._intervalInMs = _.parseInt(intervalInMs);
+ };
- _search = () => {
- // If our interval is faster than the rate at which searches return results, then trigger
- // a new search as soon as the results come back.
- if (this._searchPromise) {
- this._isIntervalFasterThanSearch = true;
- return;
- }
+ resume = () => {
+ this._isPolling = true;
+ this.resetTimer();
+ };
- // Schedule another search.
- this.resetTimer();
+ pause = () => {
+ this._isPolling = false;
+ this.clearTimer();
+ };
- // We use resolve() here instead of try() because the latter won't trigger a $digest
- // when the promise resolves.
- this._searchPromise = Promise.resolve().then(() => {
- timefilter.notifyShouldFetch();
- const requests = searchRequestQueue.getInactive();
+ resetTimer = () => {
+ // Cancel the pending search and schedule a new one.
+ this.clearTimer();
- // The promise returned from fetchSearchRequests() only resolves when the requests complete.
- // We want to continue even if the requests abort so we return a different promise.
- fetchSoon.fetchSearchRequests(requests);
+ if (this._isPolling) {
+ this._timerId = setTimeout(this._search, this._intervalInMs);
+ }
+ };
- return Promise.all(
- requests.map(request => request.getCompleteOrAbortedPromise())
- );
- })
- .then(() => {
- this._searchPromise = null;
+ clearTimer = () => {
+ // Cancel the pending search, if there is one.
+ if (this._timerId) {
+ clearTimeout(this._timerId);
+ this._timerId = null;
+ }
+ };
- // If the search response comes back before the interval fires, then we'll wait
- // for the interval and let it kick off the next search. But if the interval fires before
- // the search returns results, then we'll need to wait for the search to return results
- // and then kick off another search again. A new search will also reset the interval.
- if (this._isIntervalFasterThanSearch) {
- this._isIntervalFasterThanSearch = false;
- this._search();
- }
- })
- .catch(err => {
- // If there was a problem, then kill Kibana.
- fatalError(err);
- });
- };
- }
+ _search = () => {
+ // Schedule another search.
+ this.resetTimer();
- return new SearchPoll();
+ timefilter.notifyShouldFetch();
+ };
}
diff --git a/src/legacy/ui/public/courier/search_request_queue/__tests__/search_request_queue.js b/src/legacy/ui/public/courier/search_request_queue/__tests__/search_request_queue.js
deleted file mode 100644
index f6b4e4bef20c2..0000000000000
--- a/src/legacy/ui/public/courier/search_request_queue/__tests__/search_request_queue.js
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import ngMock from 'ng_mock';
-import expect from '@kbn/expect';
-import sinon from 'sinon';
-
-import { searchRequestQueue } from '../search_request_queue';
-
-describe('Courier Request Queue', function () {
- beforeEach(ngMock.module('kibana'));
- beforeEach(() => searchRequestQueue.removeAll());
- after(() => searchRequestQueue.removeAll());
-
- class MockReq {
- constructor(startable = true) {
- this.source = {};
- this.canStart = sinon.stub().returns(startable);
- }
- }
-
- describe('#getStartable()', function () {
- it('returns only startable requests', function () {
- searchRequestQueue.add(new MockReq(false));
- searchRequestQueue.add(new MockReq(true));
- expect(searchRequestQueue.getStartable()).to.have.length(1);
- });
- });
-
- // Note: I'm not convinced this discrepancy between how we calculate startable vs inactive requests makes any sense.
- // I'm only testing here that the current, (very old) code continues to behave how it always did, but it may turn out
- // that we can clean this up, or remove this.
- describe('#getInactive()', function () {
- it('returns only requests with started = false', function () {
- searchRequestQueue.add({ started: true });
- searchRequestQueue.add({ started: false });
- searchRequestQueue.add({ started: true });
- expect(searchRequestQueue.getInactive()).to.have.length(1);
- });
- });
-});
diff --git a/src/legacy/ui/public/courier/search_request_queue/index.js b/src/legacy/ui/public/courier/search_request_queue/index.js
deleted file mode 100644
index 785a59fce73d5..0000000000000
--- a/src/legacy/ui/public/courier/search_request_queue/index.js
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-export { searchRequestQueue } from './search_request_queue';
diff --git a/src/legacy/ui/public/courier/search_request_queue/search_request_queue.js b/src/legacy/ui/public/courier/search_request_queue/search_request_queue.js
deleted file mode 100644
index 80d74cdad94fe..0000000000000
--- a/src/legacy/ui/public/courier/search_request_queue/search_request_queue.js
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-class SearchRequestQueue {
- constructor() {
- // Queue of pending requests, requests are removed as they are processed by fetch.[sourceType]().
- this._searchRequests = [];
- }
-
- getCount() {
- return this._searchRequests.length;
- }
-
- add(searchRequest) {
- this._searchRequests.push(searchRequest);
- }
-
- remove(searchRequest) {
- // Remove all matching search requests.
- this._searchRequests = this._searchRequests.filter(
- existingSearchRequest => existingSearchRequest !== searchRequest
- );
- }
-
- removeAll() {
- this._searchRequests.length = 0;
- }
-
- abortAll() {
- this._searchRequests.forEach(searchRequest => searchRequest.abort());
- }
-
- getAll() {
- return this._searchRequests;
- }
-
- getSearchRequestAt(index) {
- return this._searchRequests[index];
- }
-
- getInactive() {
- return this._searchRequests.filter(searchRequest => !searchRequest.started);
- }
-
- getStartable() {
- return this._searchRequests.filter(searchRequest => searchRequest.canStart());
- }
-
- getPending() {
- return this._searchRequests.filter(searchRequest => searchRequest.isFetchRequestedAndPending());
- }
-}
-
-export const searchRequestQueue = new SearchRequestQueue();
diff --git a/src/legacy/ui/public/courier/search_source/__tests__/normalize_sort_request.js b/src/legacy/ui/public/courier/search_source/__tests__/normalize_sort_request.js
index ca3d21a330ce1..279e389dec114 100644
--- a/src/legacy/ui/public/courier/search_source/__tests__/normalize_sort_request.js
+++ b/src/legacy/ui/public/courier/search_source/__tests__/normalize_sort_request.js
@@ -20,18 +20,17 @@
import '../../../private';
import ngMock from 'ng_mock';
import expect from '@kbn/expect';
-import { NormalizeSortRequestProvider } from '../_normalize_sort_request';
+import { normalizeSortRequest } from '../_normalize_sort_request';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import _ from 'lodash';
describe('SearchSource#normalizeSortRequest', function () {
- let normalizeSortRequest;
let indexPattern;
let normalizedSort;
+ const defaultSortOptions = { unmapped_type: 'boolean' };
beforeEach(ngMock.module('kibana'));
beforeEach(ngMock.inject(function (Private) {
- normalizeSortRequest = Private(NormalizeSortRequestProvider);
indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
normalizedSort = [{
@@ -44,7 +43,7 @@ describe('SearchSource#normalizeSortRequest', function () {
it('should return an array', function () {
const sortable = { someField: 'desc' };
- const result = normalizeSortRequest(sortable, indexPattern);
+ const result = normalizeSortRequest(sortable, indexPattern, defaultSortOptions);
expect(result).to.be.an(Array);
expect(result).to.eql(normalizedSort);
// ensure object passed in is not mutated
@@ -53,7 +52,7 @@ describe('SearchSource#normalizeSortRequest', function () {
});
it('should make plain string sort into the more verbose format', function () {
- const result = normalizeSortRequest([{ someField: 'desc' }], indexPattern);
+ const result = normalizeSortRequest([{ someField: 'desc' }], indexPattern, defaultSortOptions);
expect(result).to.eql(normalizedSort);
});
@@ -64,7 +63,7 @@ describe('SearchSource#normalizeSortRequest', function () {
unmapped_type: 'boolean'
}
}];
- const result = normalizeSortRequest(sortState, indexPattern);
+ const result = normalizeSortRequest(sortState, indexPattern, defaultSortOptions);
expect(result).to.eql(normalizedSort);
});
@@ -86,11 +85,11 @@ describe('SearchSource#normalizeSortRequest', function () {
}
};
- let result = normalizeSortRequest(sortState, indexPattern);
+ let result = normalizeSortRequest(sortState, indexPattern, defaultSortOptions);
expect(result).to.eql([normalizedSort]);
sortState[fieldName] = { order: direction };
- result = normalizeSortRequest([sortState], indexPattern);
+ result = normalizeSortRequest([sortState], indexPattern, defaultSortOptions);
expect(result).to.eql([normalizedSort]);
});
@@ -105,7 +104,7 @@ describe('SearchSource#normalizeSortRequest', function () {
order: direction,
unmapped_type: 'boolean'
};
- const result = normalizeSortRequest([sortState], indexPattern);
+ const result = normalizeSortRequest([sortState], indexPattern, defaultSortOptions);
expect(result).to.eql([normalizedSort]);
});
@@ -118,7 +117,7 @@ describe('SearchSource#normalizeSortRequest', function () {
}
}];
- const result = normalizeSortRequest(sortable, indexPattern);
+ const result = normalizeSortRequest(sortable, indexPattern, defaultSortOptions);
expect(_.isEqual(result, expected)).to.be.ok();
});
diff --git a/src/legacy/ui/public/courier/search_source/__tests__/search_source.js b/src/legacy/ui/public/courier/search_source/__tests__/search_source.js
deleted file mode 100644
index ccb3c55b7a381..0000000000000
--- a/src/legacy/ui/public/courier/search_source/__tests__/search_source.js
+++ /dev/null
@@ -1,351 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import ngMock from 'ng_mock';
-import expect from '@kbn/expect';
-import sinon from 'sinon';
-
-import { searchRequestQueue } from '../../search_request_queue';
-import { SearchSourceProvider } from '../search_source';
-import StubIndexPattern from 'test_utils/stub_index_pattern';
-
-function timeout() {
- return new Promise(resolve => {
- setTimeout(resolve);
- });
-}
-
-describe('SearchSource', function () {
- require('test_utils/no_digest_promises').activateForSuite();
-
- let config;
- let SearchSource;
- let indexPattern;
- let indexPattern2;
-
- beforeEach(ngMock.module('kibana'));
- beforeEach(ngMock.inject(function (Private, _config_) {
- config = _config_;
- SearchSource = Private(SearchSourceProvider);
-
- indexPattern = new StubIndexPattern('test-*', cfg => cfg, null, []);
- indexPattern2 = new StubIndexPattern('test2-*', cfg => cfg, null, []);
- expect(indexPattern).to.not.be(indexPattern2);
- }));
- beforeEach(() => searchRequestQueue.removeAll());
- after(() => searchRequestQueue.removeAll());
-
- describe('#onResults()', function () {
- it('adds a request to the searchRequestQueue', function () {
- const searchSource = new SearchSource();
-
- expect(searchRequestQueue.getCount()).to.be(0);
- searchSource.onResults();
- expect(searchRequestQueue.getCount()).to.be(1);
- });
-
- it('returns a promise that is resolved with the results', function () {
- const searchSource = new SearchSource();
- const fakeResults = {};
-
- const promise = searchSource.onResults().then((results) => {
- expect(results).to.be(fakeResults);
- });
-
- const searchRequest = searchRequestQueue.getSearchRequestAt(0);
- searchRequest.defer.resolve(fakeResults);
- return promise;
- });
- });
-
- describe('#destroy()', function () {
- it('aborts all startable requests', function () {
- const searchSource = new SearchSource();
- searchSource.onResults();
- const searchRequest = searchRequestQueue.getSearchRequestAt(0);
- sinon.stub(searchRequest, 'canStart').returns(true);
- searchSource.destroy();
- expect(searchRequestQueue.getCount()).to.be(0);
- });
-
- it('aborts all non-startable requests', function () {
- const searchSource = new SearchSource();
- searchSource.onResults();
- const searchRequest = searchRequestQueue.getSearchRequestAt(0);
- sinon.stub(searchRequest, 'canStart').returns(false);
- searchSource.destroy();
- expect(searchRequestQueue.getCount()).to.be(0);
- });
- });
-
- describe('#setField()', function () {
- it('sets the value for the property', function () {
- const searchSource = new SearchSource();
- searchSource.setField('aggs', 5);
- expect(searchSource.getField('aggs')).to.be(5);
- });
-
- it('throws an error if the property is not accepted', function () {
- const searchSource = new SearchSource();
- expect(() => searchSource.setField('index', 5)).to.throwError();
- });
- });
-
- describe('#getField()', function () {
- it('gets the value for the property', function () {
- const searchSource = new SearchSource();
- searchSource.setField('aggs', 5);
- expect(searchSource.getField('aggs')).to.be(5);
- });
-
- it('throws an error if the property is not accepted', function () {
- const searchSource = new SearchSource();
- expect(() => searchSource.getField('unacceptablePropName')).to.throwError();
- });
- });
-
- describe(`#setField('index')`, function () {
- describe('auto-sourceFiltering', function () {
- describe('new index pattern assigned', function () {
- it('generates a searchSource filter', function () {
- const searchSource = new SearchSource();
- expect(searchSource.getField('index')).to.be(undefined);
- expect(searchSource.getField('source')).to.be(undefined);
- searchSource.setField('index', indexPattern);
- expect(searchSource.getField('index')).to.be(indexPattern);
- expect(searchSource.getField('source')).to.be.a('function');
- });
-
- it('removes created searchSource filter on removal', function () {
- const searchSource = new SearchSource();
- searchSource.setField('index', indexPattern);
- searchSource.setField('index', null);
- expect(searchSource.getField('index')).to.be(undefined);
- expect(searchSource.getField('source')).to.be(undefined);
- });
- });
-
- describe('new index pattern assigned over another', function () {
- it('replaces searchSource filter with new', function () {
- const searchSource = new SearchSource();
- searchSource.setField('index', indexPattern);
- const searchSourceFilter1 = searchSource.getField('source');
- searchSource.setField('index', indexPattern2);
- expect(searchSource.getField('index')).to.be(indexPattern2);
- expect(searchSource.getField('source')).to.be.a('function');
- expect(searchSource.getField('source')).to.not.be(searchSourceFilter1);
- });
-
- it('removes created searchSource filter on removal', function () {
- const searchSource = new SearchSource();
- searchSource.setField('index', indexPattern);
- searchSource.setField('index', indexPattern2);
- searchSource.setField('index', null);
- expect(searchSource.getField('index')).to.be(undefined);
- expect(searchSource.getField('source')).to.be(undefined);
- });
- });
-
- describe('ip assigned before custom searchSource filter', function () {
- it('custom searchSource filter becomes new searchSource', function () {
- const searchSource = new SearchSource();
- const football = {};
- searchSource.setField('index', indexPattern);
- expect(searchSource.getField('source')).to.be.a('function');
- searchSource.setField('source', football);
- expect(searchSource.getField('index')).to.be(indexPattern);
- expect(searchSource.getField('source')).to.be(football);
- });
-
- it('custom searchSource stays after removal', function () {
- const searchSource = new SearchSource();
- const football = {};
- searchSource.setField('index', indexPattern);
- searchSource.setField('source', football);
- searchSource.setField('index', null);
- expect(searchSource.getField('index')).to.be(undefined);
- expect(searchSource.getField('source')).to.be(football);
- });
- });
-
- describe('ip assigned after custom searchSource filter', function () {
- it('leaves the custom filter in place', function () {
- const searchSource = new SearchSource();
- const football = {};
- searchSource.setField('source', football);
- searchSource.setField('index', indexPattern);
- expect(searchSource.getField('index')).to.be(indexPattern);
- expect(searchSource.getField('source')).to.be(football);
- });
-
- it('custom searchSource stays after removal', function () {
- const searchSource = new SearchSource();
- const football = {};
- searchSource.setField('source', football);
- searchSource.setField('index', indexPattern);
- searchSource.setField('index', null);
- expect(searchSource.getField('index')).to.be(undefined);
- expect(searchSource.getField('source')).to.be(football);
- });
- });
- });
- });
-
- describe('#onRequestStart()', () => {
- it('should be called when starting a request', async () => {
- const searchSource = new SearchSource();
- const fn = sinon.spy();
- searchSource.onRequestStart(fn);
- const request = {};
- searchSource.requestIsStarting(request);
- await timeout();
- expect(fn.calledWith(searchSource, request)).to.be(true);
- });
-
- it('should not be called on parent searchSource', async () => {
- const parent = new SearchSource();
- const searchSource = new SearchSource().setParent(parent);
-
- const fn = sinon.spy();
- searchSource.onRequestStart(fn);
- const parentFn = sinon.spy();
- parent.onRequestStart(parentFn);
- const request = {};
- searchSource.requestIsStarting(request);
- await timeout();
- expect(fn.calledWith(searchSource, request)).to.be(true);
- expect(parentFn.notCalled).to.be(true);
- });
-
- it('should be called on parent searchSource if callParentStartHandlers is true', async () => {
- const parent = new SearchSource();
- const searchSource = new SearchSource().setParent(parent, { callParentStartHandlers: true });
-
- const fn = sinon.spy();
- searchSource.onRequestStart(fn);
- const parentFn = sinon.spy();
- parent.onRequestStart(parentFn);
- const request = {};
- searchSource.requestIsStarting(request);
- await timeout();
- expect(fn.calledWith(searchSource, request)).to.be(true);
- expect(parentFn.calledWith(searchSource, request)).to.be(true);
- });
- });
-
- describe('#_mergeProp', function () {
- describe('filter', function () {
- let searchSource;
- let state;
-
- beforeEach(function () {
- searchSource = new SearchSource();
- state = {};
- });
-
- [null, undefined].forEach(falsyValue => {
- it(`ignores ${falsyValue} filter`, function () {
- searchSource._mergeProp(state, falsyValue, 'filter');
- expect(state.filters).to.be(undefined);
- });
- });
-
- [false, 0, '', NaN].forEach(falsyValue => {
- it(`doesn't add ${falsyValue} filter`, function () {
- searchSource._mergeProp(state, falsyValue, 'filter');
- expect(state.filters).to.be.empty();
- });
- });
-
- it('adds "meta.disabled: undefined" filter', function () {
- const filter = {
- meta: {}
- };
- searchSource._mergeProp(state, filter, 'filter');
- expect(state.filters).to.eql([filter]);
- });
-
- it('adds "meta.disabled: false" filter', function () {
- const filter = {
- meta: {
- disabled: false
- }
- };
- searchSource._mergeProp(state, filter, 'filter');
- expect(state.filters).to.eql([filter]);
- });
-
- it(`doesn't add "meta.disabled: true" filter`, function () {
- const filter = {
- meta: {
- disabled: true
- }
- };
- searchSource._mergeProp(state, filter, 'filter');
- expect(state.filters).to.be.empty();
- });
-
- describe('when courier:ignoreFilterIfFieldNotInIndex is false', function () {
- it('adds filter for non-existent field', function () {
- config.set('courier:ignoreFilterIfFieldNotInIndex', false);
- const filter = {
- meta: {
- key: 'bar'
- }
- };
- state.index = {
- fields: []
- };
- searchSource._mergeProp(state, filter, 'filter');
- expect(state.filters).to.eql([ filter ]);
- });
- });
-
- describe('when courier:ignoreFilterIfFieldNotInIndex is true', function () {
- it(`doesn't add filter for non-existent field`, function () {
- config.set('courier:ignoreFilterIfFieldNotInIndex', true);
- const filter = {
- meta: {
- key: 'bar'
- }
- };
- state.index = {
- fields: []
- };
- searchSource._mergeProp(state, filter, 'filter');
- expect(state.filters).to.be.empty();
- });
-
- it(`adds filter for existent field`, function () {
- config.set('courier:ignoreFilterIfFieldNotInIndex', true);
- const filter = {
- meta: {
- key: 'bar'
- }
- };
- state.index = {
- fields: [{ name: 'bar' }]
- };
- searchSource._mergeProp(state, filter, 'filter');
- expect(state.filters).to.eql([ filter ]);
- });
- });
- });
- });
-});
diff --git a/src/legacy/ui/public/courier/search_source/_normalize_sort_request.js b/src/legacy/ui/public/courier/search_source/_normalize_sort_request.js
index 2b5025f14fef7..3e5d7a1374115 100644
--- a/src/legacy/ui/public/courier/search_source/_normalize_sort_request.js
+++ b/src/legacy/ui/public/courier/search_source/_normalize_sort_request.js
@@ -19,59 +19,55 @@
import _ from 'lodash';
-export function NormalizeSortRequestProvider(config) {
- const defaultSortOptions = config.get('sort:options');
-
- /**
+/**
* Decorate queries with default parameters
* @param {query} query object
* @returns {object}
*/
- return function (sortObject, indexPattern) {
- // [].concat({}) -> [{}], [].concat([{}]) -> [{}]
- return [].concat(sortObject).map(function (sortable) {
- return normalize(sortable, indexPattern);
- });
- };
+export function normalizeSortRequest(sortObject, indexPattern, defaultSortOptions) {
+ // [].concat({}) -> [{}], [].concat([{}]) -> [{}]
+ return [].concat(sortObject).map(function (sortable) {
+ return normalize(sortable, indexPattern, defaultSortOptions);
+ });
+}
- /*
+/*
Normalize the sort description to the more verbose format:
{ someField: "desc" } into { someField: { "order": "desc"}}
*/
- function normalize(sortable, indexPattern) {
- const normalized = {};
- let sortField = _.keys(sortable)[0];
- let sortValue = sortable[sortField];
- const indexField = indexPattern.fields.getByName(sortField);
+function normalize(sortable, indexPattern, defaultSortOptions) {
+ const normalized = {};
+ let sortField = _.keys(sortable)[0];
+ let sortValue = sortable[sortField];
+ const indexField = indexPattern.fields.getByName(sortField);
- if (indexField && indexField.scripted && indexField.sortable) {
- let direction;
- if (_.isString(sortValue)) direction = sortValue;
- if (_.isObject(sortValue) && sortValue.order) direction = sortValue.order;
+ if (indexField && indexField.scripted && indexField.sortable) {
+ let direction;
+ if (_.isString(sortValue)) direction = sortValue;
+ if (_.isObject(sortValue) && sortValue.order) direction = sortValue.order;
- sortField = '_script';
- sortValue = {
- script: {
- source: indexField.script,
- lang: indexField.lang
- },
- type: castSortType(indexField.type),
- order: direction
- };
- } else {
- if (_.isString(sortValue)) {
- sortValue = { order: sortValue };
- }
- sortValue = _.defaults({}, sortValue, defaultSortOptions);
-
- if (sortField === '_score') {
- delete sortValue.unmapped_type;
- }
+ sortField = '_script';
+ sortValue = {
+ script: {
+ source: indexField.script,
+ lang: indexField.lang
+ },
+ type: castSortType(indexField.type),
+ order: direction
+ };
+ } else {
+ if (_.isString(sortValue)) {
+ sortValue = { order: sortValue };
}
+ sortValue = _.defaults({}, sortValue, defaultSortOptions);
- normalized[sortField] = sortValue;
- return normalized;
+ if (sortField === '_score') {
+ delete sortValue.unmapped_type;
+ }
}
+
+ normalized[sortField] = sortValue;
+ return normalized;
}
// The ES API only supports sort scripts of type 'number' and 'string'
diff --git a/src/legacy/ui/public/courier/search_source/index.js b/src/legacy/ui/public/courier/search_source/index.js
index 5ec7cc315db1c..dcae7b3d2ff05 100644
--- a/src/legacy/ui/public/courier/search_source/index.js
+++ b/src/legacy/ui/public/courier/search_source/index.js
@@ -17,4 +17,4 @@
* under the License.
*/
-export { SearchSourceProvider } from './search_source';
+export { SearchSource } from './search_source';
diff --git a/src/legacy/ui/public/courier/search_source/mocks.ts b/src/legacy/ui/public/courier/search_source/mocks.ts
new file mode 100644
index 0000000000000..bf546c1b9e7c2
--- /dev/null
+++ b/src/legacy/ui/public/courier/search_source/mocks.ts
@@ -0,0 +1,58 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"), you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export const searchSourceMock = {
+ setPreferredSearchStrategyId: jest.fn(),
+ getPreferredSearchStrategyId: jest.fn(),
+ setFields: jest.fn(),
+ setField: jest.fn(),
+ getId: jest.fn(),
+ getFields: jest.fn(),
+ getField: jest.fn(),
+ getOwnField: jest.fn(),
+ create: jest.fn(),
+ createCopy: jest.fn(),
+ createChild: jest.fn(),
+ setParent: jest.fn(),
+ getParent: jest.fn(),
+ fetch: jest.fn(),
+ onRequestStart: jest.fn(),
+ getSearchRequestBody: jest.fn(),
+ destroy: jest.fn(),
+ history: [],
+};
diff --git a/src/legacy/ui/public/courier/search_source/search_source.d.ts b/src/legacy/ui/public/courier/search_source/search_source.d.ts
index 11406ff3da824..674e7ace0594c 100644
--- a/src/legacy/ui/public/courier/search_source/search_source.d.ts
+++ b/src/legacy/ui/public/courier/search_source/search_source.d.ts
@@ -17,4 +17,23 @@
* under the License.
*/
-export type SearchSource = any;
+export declare class SearchSource {
+ setPreferredSearchStrategyId: (searchStrategyId: string) => void;
+ getPreferredSearchStrategyId: () => string;
+ setFields: (newFields: any) => SearchSource;
+ setField: (field: string, value: any) => SearchSource;
+ getId: () => string;
+ getFields: () => any;
+ getField: (field: string) => any;
+ getOwnField: () => any;
+ create: () => SearchSource;
+ createCopy: () => SearchSource;
+ createChild: (options?: any) => SearchSource;
+ setParent: (parent: SearchSource | boolean) => SearchSource;
+ getParent: () => SearchSource | undefined;
+ fetch: (options?: any) => Promise;
+ onRequestStart: (handler: (searchSource: SearchSource, options: any) => void) => void;
+ getSearchRequestBody: () => any;
+ destroy: () => void;
+ history: any[];
+}
diff --git a/src/legacy/ui/public/courier/search_source/search_source.js b/src/legacy/ui/public/courier/search_source/search_source.js
index 2ff4b6d574ca3..16efb1230e50e 100644
--- a/src/legacy/ui/public/courier/search_source/search_source.js
+++ b/src/legacy/ui/public/courier/search_source/search_source.js
@@ -71,16 +71,16 @@
import _ from 'lodash';
import angular from 'angular';
-import { buildEsQuery, getEsQueryConfig, filterMatchesIndex } from '@kbn/es-query';
+import { buildEsQuery, getEsQueryConfig } from '@kbn/es-query';
-import { createDefer } from 'ui/promises';
-import { NormalizeSortRequestProvider } from './_normalize_sort_request';
-import { SearchRequestProvider } from '../fetch/request';
+import { normalizeSortRequest } from './_normalize_sort_request';
-import { searchRequestQueue } from '../search_request_queue';
-import { FetchSoonProvider } from '../fetch';
-import { FieldWildcardProvider } from '../../field_wildcard';
+import { fetchSoon } from '../fetch';
+import { fieldWildcardFilter } from '../../field_wildcard';
import { getHighlightRequest } from '../../../../../plugins/data/common/field_formats';
+import { npSetup } from 'ui/new_platform';
+import chrome from '../../chrome';
+import { RequestFailure } from '../fetch/errors';
import { filterDocvalueFields } from './filter_docvalue_fields';
const FIELDS = [
@@ -114,327 +114,242 @@ function isIndexPattern(val) {
return Boolean(val && typeof val.getIndex === 'function');
}
-export function SearchSourceProvider(Promise, Private, config) {
- const SearchRequest = Private(SearchRequestProvider);
- const normalizeSortRequest = Private(NormalizeSortRequestProvider);
- const fetchSoon = Private(FetchSoonProvider);
- const { fieldWildcardFilter } = Private(FieldWildcardProvider);
- const getConfig = (...args) => config.get(...args);
+const esShardTimeout = npSetup.core.injectedMetadata.getInjectedVar('esShardTimeout');
+const config = npSetup.core.uiSettings;
+const getConfig = (...args) => config.get(...args);
+const forIp = Symbol('for which index pattern?');
- const forIp = Symbol('for which index pattern?');
+export class SearchSource {
+ constructor(initialFields) {
+ this._id = _.uniqueId('data_source');
- class SearchSource {
- constructor(initialFields) {
- this._id = _.uniqueId('data_source');
+ this._searchStrategyId = undefined;
+ this._fields = parseInitialFields(initialFields);
+ this._parent = undefined;
- this._searchStrategyId = undefined;
- this._fields = parseInitialFields(initialFields);
- this._parent = undefined;
-
- this.history = [];
- this._requestStartHandlers = [];
- this._inheritOptions = {};
-
- this._filterPredicates = [
- (filter) => {
- // remove null/undefined filters
- return filter;
- },
- (filter) => {
- const disabled = _.get(filter, 'meta.disabled');
- return disabled === undefined || disabled === false;
- },
- (filter, data) => {
- const index = data.index || this.getField('index');
- return !config.get('courier:ignoreFilterIfFieldNotInIndex') || filterMatchesIndex(filter, index);
- }
- ];
- }
+ this.history = [];
+ this._requestStartHandlers = [];
+ this._inheritOptions = {};
+ }
- /*****
+ /*****
* PUBLIC API
*****/
- setPreferredSearchStrategyId(searchStrategyId) {
- this._searchStrategyId = searchStrategyId;
- }
-
- getPreferredSearchStrategyId() {
- return this._searchStrategyId;
- }
-
- setFields(newFields) {
- this._fields = newFields;
- return this;
- }
-
- setField = (field, value) => {
- if (!FIELDS.includes(field)) {
- throw new Error(`Can't set field '${field}' on SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`);
- }
+ setPreferredSearchStrategyId(searchStrategyId) {
+ this._searchStrategyId = searchStrategyId;
+ }
- if (field === 'index') {
- const fields = this._fields;
+ getPreferredSearchStrategyId() {
+ return this._searchStrategyId;
+ }
- const hasSource = fields.source;
- const sourceCameFromIp = hasSource && fields.source.hasOwnProperty(forIp);
- const sourceIsForOurIp = sourceCameFromIp && fields.source[forIp] === fields.index;
- if (sourceIsForOurIp) {
- delete fields.source;
- }
+ setFields(newFields) {
+ this._fields = newFields;
+ return this;
+ }
- if (value === null || value === undefined) {
- delete fields.index;
- return this;
- }
+ setField(field, value) {
+ if (!FIELDS.includes(field)) {
+ throw new Error(`Can't set field '${field}' on SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`);
+ }
- if (!isIndexPattern(value)) {
- throw new TypeError('expected indexPattern to be an IndexPattern duck.');
- }
+ if (field === 'index') {
+ const fields = this._fields;
- fields[field] = value;
- if (!fields.source) {
- // imply source filtering based on the index pattern, but allow overriding
- // it by simply setting another field for "source". When index is changed
- fields.source = function () {
- return value.getSourceFiltering();
- };
- fields.source[forIp] = value;
- }
+ const hasSource = fields.source;
+ const sourceCameFromIp = hasSource && fields.source.hasOwnProperty(forIp);
+ const sourceIsForOurIp = sourceCameFromIp && fields.source[forIp] === fields.index;
+ if (sourceIsForOurIp) {
+ delete fields.source;
+ }
+ if (value === null || value === undefined) {
+ delete fields.index;
return this;
}
- if (value == null) {
- delete this._fields[field];
- return this;
+ if (!isIndexPattern(value)) {
+ throw new TypeError('expected indexPattern to be an IndexPattern duck.');
}
- this._fields[field] = value;
- return this;
- };
+ fields[field] = value;
+ if (!fields.source) {
+ // imply source filtering based on the index pattern, but allow overriding
+ // it by simply setting another field for "source". When index is changed
+ fields.source = function () {
+ return value.getSourceFiltering();
+ };
+ fields.source[forIp] = value;
+ }
- getId() {
- return this._id;
+ return this;
}
- getFields() {
- return _.clone(this._fields);
+ if (value == null) {
+ delete this._fields[field];
+ return this;
}
- /**
- * Get fields from the fields
- */
- getField = field => {
- if (!FIELDS.includes(field)) {
- throw new Error(`Can't get field '${field}' from SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`);
- }
+ this._fields[field] = value;
+ return this;
+ }
- let searchSource = this;
+ getId() {
+ return this._id;
+ }
- while (searchSource) {
- const value = searchSource._fields[field];
- if (value !== void 0) {
- return value;
- }
+ getFields() {
+ return _.clone(this._fields);
+ }
- searchSource = searchSource.getParent();
- }
- };
+ /**
+ * Get fields from the fields
+ */
+ getField(field) {
+ if (!FIELDS.includes(field)) {
+ throw new Error(`Can't get field '${field}' from SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`);
+ }
- /**
- * Get the field from our own fields, don't traverse up the chain
- */
- getOwnField(field) {
- if (!FIELDS.includes(field)) {
- throw new Error(`Can't get field '${field}' from SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`);
- }
+ let searchSource = this;
- const value = this._fields[field];
+ while (searchSource) {
+ const value = searchSource._fields[field];
if (value !== void 0) {
return value;
}
- }
- create() {
- return new SearchSource();
+ searchSource = searchSource.getParent();
}
+ }
- createCopy() {
- const json = angular.toJson(this._fields);
- const newSearchSource = new SearchSource(json);
- // when serializing the internal fields we lose the internal classes used in the index
- // pattern, so we have to set it again to workaround this behavior
- newSearchSource.setField('index', this.getField('index'));
- newSearchSource.setParent(this.getParent());
- return newSearchSource;
+ /**
+ * Get the field from our own fields, don't traverse up the chain
+ */
+ getOwnField(field) {
+ if (!FIELDS.includes(field)) {
+ throw new Error(`Can't get field '${field}' from SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`);
}
- createChild(options = {}) {
- const childSearchSource = new SearchSource();
- childSearchSource.setParent(this, options);
- return childSearchSource;
+ const value = this._fields[field];
+ if (value !== void 0) {
+ return value;
}
+ }
- /**
+ create() {
+ return new SearchSource();
+ }
+
+ createCopy() {
+ const json = angular.toJson(this._fields);
+ const newSearchSource = new SearchSource(json);
+ // when serializing the internal fields we lose the internal classes used in the index
+ // pattern, so we have to set it again to workaround this behavior
+ newSearchSource.setField('index', this.getField('index'));
+ newSearchSource.setParent(this.getParent());
+ return newSearchSource;
+ }
+
+ createChild(options = {}) {
+ const childSearchSource = new SearchSource();
+ childSearchSource.setParent(this, options);
+ return childSearchSource;
+ }
+
+ /**
* Set a searchSource that this source should inherit from
* @param {SearchSource} searchSource - the parent searchSource
* @return {this} - chainable
*/
- setParent(parent, options = {}) {
- this._parent = parent;
- this._inheritOptions = options;
- return this;
- }
+ setParent(parent, options = {}) {
+ this._parent = parent;
+ this._inheritOptions = options;
+ return this;
+ }
- /**
+ /**
* Get the parent of this SearchSource
* @return {undefined|searchSource}
*/
- getParent() {
- return this._parent || undefined;
- }
+ getParent() {
+ return this._parent || undefined;
+ }
- /**
+ /**
* Fetch this source and reject the returned Promise on error
*
* @async
*/
- fetch() {
- const self = this;
- let req = _.first(self._myStartableQueued());
-
- if (!req) {
- const errorHandler = (request, error) => {
- request.defer.reject(error);
- request.abort();
- };
- req = self._createRequest({ errorHandler });
- }
+ async fetch(options) {
+ const $injector = await chrome.dangerouslyGetActiveInjector();
+ const es = $injector.get('es');
- fetchSoon.fetchSearchRequests([req]);
- return req.getCompletePromise();
- }
+ await this.requestIsStarting(options);
- /**
- * Fetch all pending requests for this source ASAP
- * @async
- */
- fetchQueued() {
- return fetchSoon.fetchSearchRequests(this._myStartableQueued());
- }
+ const searchRequest = await this._flatten();
+ this.history = [searchRequest];
- /**
- * Cancel all pending requests for this searchSource
- * @return {undefined}
- */
- cancelQueued() {
- searchRequestQueue.getAll()
- .filter(req => req.source === this)
- .forEach(req => req.abort());
+ const response = await fetchSoon(searchRequest, {
+ ...(this._searchStrategyId && { searchStrategyId: this._searchStrategyId }),
+ ...options,
+ }, { es, config, esShardTimeout });
+
+ if (response.error) {
+ throw new RequestFailure(null, response);
}
- /**
+ return response;
+ }
+
+ /**
* Add a handler that will be notified whenever requests start
* @param {Function} handler
* @return {undefined}
*/
- onRequestStart(handler) {
- this._requestStartHandlers.push(handler);
- }
+ onRequestStart(handler) {
+ this._requestStartHandlers.push(handler);
+ }
- /**
+ /**
* Called by requests of this search source when they are started
* @param {Courier.Request} request
+ * @param options
* @return {Promise}
*/
- requestIsStarting(request) {
- this.activeFetchCount = (this.activeFetchCount || 0) + 1;
- this.history = [request];
-
- const handlers = [...this._requestStartHandlers];
- // If callparentStartHandlers has been set to true, we also call all
- // handlers of parent search sources.
- if (this._inheritOptions.callParentStartHandlers) {
- let searchSource = this.getParent();
- while (searchSource) {
- handlers.push(...searchSource._requestStartHandlers);
- searchSource = searchSource.getParent();
- }
+ requestIsStarting(options) {
+ const handlers = [...this._requestStartHandlers];
+ // If callparentStartHandlers has been set to true, we also call all
+ // handlers of parent search sources.
+ if (this._inheritOptions.callParentStartHandlers) {
+ let searchSource = this.getParent();
+ while (searchSource) {
+ handlers.push(...searchSource._requestStartHandlers);
+ searchSource = searchSource.getParent();
}
-
- return Promise
- .map(handlers, fn => fn(this, request))
- .then(_.noop);
}
- /**
- * Put a request in to the courier that this Source should
- * be fetched on the next run of the courier
- * @return {Promise}
- */
- onResults() {
- const self = this;
-
- return new Promise(function (resolve, reject) {
- const defer = createDefer(Promise);
- defer.promise.then(resolve, reject);
-
- const errorHandler = (request, error) => {
- reject(error);
- request.abort();
- };
- self._createRequest({ defer, errorHandler });
- });
- }
-
- async getSearchRequestBody() {
- const searchRequest = await this._flatten();
- return searchRequest.body;
- }
+ return Promise.all(handlers.map(fn => fn(this, options)));
+ }
- /**
- * Called by requests of this search source when they are done
- * @param {Courier.Request} request
- * @return {undefined}
- */
- requestIsStopped() {
- this.activeFetchCount -= 1;
- }
+ async getSearchRequestBody() {
+ const searchRequest = await this._flatten();
+ return searchRequest.body;
+ }
- /**
+ /**
* Completely destroy the SearchSource.
* @return {undefined}
*/
- destroy() {
- this.cancelQueued();
- this._requestStartHandlers.length = 0;
- }
+ destroy() {
+ this._requestStartHandlers.length = 0;
+ }
- /******
+ /******
* PRIVATE APIS
******/
- _myStartableQueued() {
- return searchRequestQueue
- .getStartable()
- .filter(req => req.source === this);
- }
-
- /**
- * Create a common search request object, which should
- * be put into the pending request queue, for this search
- * source
- *
- * @param {Deferred} defer - the deferred object that should be resolved
- * when the request is complete
- * @return {SearchRequest}
- */
- _createRequest({ defer, errorHandler }) {
- return new SearchRequest({ source: this, defer, errorHandler });
- }
-
- /**
+ /**
* Used to merge properties into the data within ._flatten().
* The data is passed in and modified by the function
*
@@ -443,192 +358,184 @@ export function SearchSourceProvider(Promise, Private, config) {
* @param {*} key - The key of `val`
* @return {undefined}
*/
- _mergeProp(data, val, key) {
- if (typeof val === 'function') {
- const source = this;
- return Promise.cast(val(this))
- .then(function (newVal) {
- return source._mergeProp(data, newVal, key);
- });
- }
-
- if (val == null || !key || !_.isString(key)) return;
-
- switch (key) {
- case 'filter':
- let filters = Array.isArray(val) ? val : [val];
-
- filters = filters.filter(filter => {
- return this._filterPredicates.every(predicate => predicate(filter, data));
- });
+ _mergeProp(data, val, key) {
+ if (typeof val === 'function') {
+ const source = this;
+ return Promise.resolve(val(this))
+ .then(function (newVal) {
+ return source._mergeProp(data, newVal, key);
+ });
+ }
- data.filters = [...(data.filters || []), ...filters];
- return;
- case 'index':
- case 'type':
- case 'id':
- case 'highlightAll':
- if (key && data[key] == null) {
- data[key] = val;
- }
- return;
- case 'searchAfter':
- key = 'search_after';
- addToBody();
- break;
- case 'source':
- key = '_source';
- addToBody();
- break;
- case 'sort':
- val = normalizeSortRequest(val, this.getField('index'));
- addToBody();
- break;
- case 'query':
- data.query = (data.query || []).concat(val);
- break;
- case 'fields':
- data[key] = _.uniq([...(data[key] || []), ...val]);
- break;
- default:
- addToBody();
- }
+ if (val == null || !key || !_.isString(key)) return;
+
+ switch (key) {
+ case 'filter':
+ const filters = Array.isArray(val) ? val : [val];
+ data.filters = [...(data.filters || []), ...filters];
+ return;
+ case 'index':
+ case 'type':
+ case 'id':
+ case 'highlightAll':
+ if (key && data[key] == null) {
+ data[key] = val;
+ }
+ return;
+ case 'searchAfter':
+ key = 'search_after';
+ addToBody();
+ break;
+ case 'source':
+ key = '_source';
+ addToBody();
+ break;
+ case 'sort':
+ val = normalizeSortRequest(val, this.getField('index'), config.get('sort:options'));
+ addToBody();
+ break;
+ case 'query':
+ data.query = (data.query || []).concat(val);
+ break;
+ case 'fields':
+ data[key] = _.uniq([...(data[key] || []), ...val]);
+ break;
+ default:
+ addToBody();
+ }
- /**
+ /**
* Add the key and val to the body of the request
*/
- function addToBody() {
- data.body = data.body || {};
- // ignore if we already have a value
- if (data.body[key] == null) {
- data.body[key] = val;
- }
+ function addToBody() {
+ data.body = data.body || {};
+ // ignore if we already have a value
+ if (data.body[key] == null) {
+ data.body[key] = val;
}
}
+ }
- /**
+ /**
* Walk the inheritance chain of a source and return it's
* flat representation (taking into account merging rules)
* @returns {Promise}
* @resolved {Object|null} - the flat data of the SearchSource
*/
- _flatten() {
- // the merged data of this dataSource and it's ancestors
- const flatData = {};
-
- // function used to write each property from each data object in the chain to flat data
- const root = this;
-
- // start the chain at this source
- let current = this;
-
- // call the ittr and return it's promise
- return (function ittr() {
- // iterate the _fields object (not array) and
- // pass each key:value pair to source._mergeProp. if _mergeProp
- // returns a promise, then wait for it to complete and call _mergeProp again
- return Promise.all(_.map(current._fields, function ittr(value, key) {
- if (Promise.is(value)) {
- return value.then(function (value) {
- return ittr(value, key);
- });
- }
-
- const prom = root._mergeProp(flatData, value, key);
- return Promise.is(prom) ? prom : null;
- }))
- .then(function () {
- // move to this sources parent
- const parent = current.getParent();
- // keep calling until we reach the top parent
- if (parent) {
- current = parent;
- return ittr();
- }
+ _flatten() {
+ // the merged data of this dataSource and it's ancestors
+ const flatData = {};
+
+ // function used to write each property from each data object in the chain to flat data
+ const root = this;
+
+ // start the chain at this source
+ let current = this;
+
+ // call the ittr and return it's promise
+ return (function ittr() {
+ // iterate the _fields object (not array) and
+ // pass each key:value pair to source._mergeProp. if _mergeProp
+ // returns a promise, then wait for it to complete and call _mergeProp again
+ return Promise.all(_.map(current._fields, function ittr(value, key) {
+ if (value instanceof Promise) {
+ return value.then(function (value) {
+ return ittr(value, key);
});
- }())
- .then(function () {
- // This is down here to prevent the circular dependency
- flatData.body = flatData.body || {};
-
- const computedFields = flatData.index.getComputedFields();
-
- flatData.body.stored_fields = computedFields.storedFields;
- flatData.body.script_fields = flatData.body.script_fields || {};
- _.extend(flatData.body.script_fields, computedFields.scriptFields);
-
- const defaultDocValueFields = computedFields.docvalueFields ? computedFields.docvalueFields : [];
- flatData.body.docvalue_fields = flatData.body.docvalue_fields || defaultDocValueFields;
+ }
- if (flatData.body._source) {
- // exclude source fields for this index pattern specified by the user
- const filter = fieldWildcardFilter(flatData.body._source.excludes);
- flatData.body.docvalue_fields = flatData.body.docvalue_fields.filter(
- docvalueField => filter(docvalueField.field)
- );
+ const prom = root._mergeProp(flatData, value, key);
+ return prom instanceof Promise ? prom : null;
+ }))
+ .then(function () {
+ // move to this sources parent
+ const parent = current.getParent();
+ // keep calling until we reach the top parent
+ if (parent) {
+ current = parent;
+ return ittr();
}
+ });
+ }())
+ .then(function () {
+ // This is down here to prevent the circular dependency
+ flatData.body = flatData.body || {};
+
+ const computedFields = flatData.index.getComputedFields();
+
+ flatData.body.stored_fields = computedFields.storedFields;
+ flatData.body.script_fields = flatData.body.script_fields || {};
+ _.extend(flatData.body.script_fields, computedFields.scriptFields);
+
+ const defaultDocValueFields = computedFields.docvalueFields ? computedFields.docvalueFields : [];
+ flatData.body.docvalue_fields = flatData.body.docvalue_fields || defaultDocValueFields;
+
+ if (flatData.body._source) {
+ // exclude source fields for this index pattern specified by the user
+ const filter = fieldWildcardFilter(flatData.body._source.excludes, config.get('metaFields'));
+ flatData.body.docvalue_fields = flatData.body.docvalue_fields.filter(
+ docvalueField => filter(docvalueField.field)
+ );
+ }
- // if we only want to search for certain fields
- const fields = flatData.fields;
- if (fields) {
- // filter out the docvalue_fields, and script_fields to only include those that we are concerned with
- flatData.body.docvalue_fields = filterDocvalueFields(flatData.body.docvalue_fields, fields);
- flatData.body.script_fields = _.pick(flatData.body.script_fields, fields);
-
- // request the remaining fields from both stored_fields and _source
- const remainingFields = _.difference(fields, _.keys(flatData.body.script_fields));
- flatData.body.stored_fields = remainingFields;
- _.set(flatData.body, '_source.includes', remainingFields);
- }
+ // if we only want to search for certain fields
+ const fields = flatData.fields;
+ if (fields) {
+ // filter out the docvalue_fields, and script_fields to only include those that we are concerned with
+ flatData.body.docvalue_fields = filterDocvalueFields(flatData.body.docvalue_fields, fields);
+ flatData.body.script_fields = _.pick(flatData.body.script_fields, fields);
+
+ // request the remaining fields from both stored_fields and _source
+ const remainingFields = _.difference(fields, _.keys(flatData.body.script_fields));
+ flatData.body.stored_fields = remainingFields;
+ _.set(flatData.body, '_source.includes', remainingFields);
+ }
- const esQueryConfigs = getEsQueryConfig(config);
- flatData.body.query = buildEsQuery(flatData.index, flatData.query, flatData.filters, esQueryConfigs);
+ const esQueryConfigs = getEsQueryConfig(config);
+ flatData.body.query = buildEsQuery(flatData.index, flatData.query, flatData.filters, esQueryConfigs);
- if (flatData.highlightAll != null) {
- if (flatData.highlightAll && flatData.body.query) {
- flatData.body.highlight = getHighlightRequest(flatData.body.query, getConfig);
- }
- delete flatData.highlightAll;
+ if (flatData.highlightAll != null) {
+ if (flatData.highlightAll && flatData.body.query) {
+ flatData.body.highlight = getHighlightRequest(flatData.body.query, getConfig);
}
+ delete flatData.highlightAll;
+ }
- /**
+ /**
* Translate a filter into a query to support es 3+
* @param {Object} filter - The filter to translate
* @return {Object} the query version of that filter
*/
- const translateToQuery = function (filter) {
- if (!filter) return;
+ const translateToQuery = function (filter) {
+ if (!filter) return;
- if (filter.query) {
- return filter.query;
- }
+ if (filter.query) {
+ return filter.query;
+ }
- return filter;
- };
+ return filter;
+ };
- // re-write filters within filter aggregations
- (function recurse(aggBranch) {
- if (!aggBranch) return;
- Object.keys(aggBranch).forEach(function (id) {
- const agg = aggBranch[id];
+ // re-write filters within filter aggregations
+ (function recurse(aggBranch) {
+ if (!aggBranch) return;
+ Object.keys(aggBranch).forEach(function (id) {
+ const agg = aggBranch[id];
- if (agg.filters) {
- // translate filters aggregations
- const filters = agg.filters.filters;
+ if (agg.filters) {
+ // translate filters aggregations
+ const filters = agg.filters.filters;
- Object.keys(filters).forEach(function (filterId) {
- filters[filterId] = translateToQuery(filters[filterId]);
- });
- }
+ Object.keys(filters).forEach(function (filterId) {
+ filters[filterId] = translateToQuery(filters[filterId]);
+ });
+ }
- recurse(agg.aggs || agg.aggregations);
- });
- }(flatData.body.aggs || flatData.body.aggregations));
+ recurse(agg.aggs || agg.aggregations);
+ });
+ }(flatData.body.aggs || flatData.body.aggregations));
- return flatData;
- });
- }
+ return flatData;
+ });
}
-
- return SearchSource;
}
diff --git a/src/legacy/ui/public/courier/search_source/search_source.test.js b/src/legacy/ui/public/courier/search_source/search_source.test.js
new file mode 100644
index 0000000000000..be08261ba9d2c
--- /dev/null
+++ b/src/legacy/ui/public/courier/search_source/search_source.test.js
@@ -0,0 +1,193 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { SearchSource } from '../search_source';
+
+jest.mock('ui/new_platform', () => ({
+ npSetup: {
+ core: {
+ injectedMetadata: {
+ getInjectedVar: () => 0,
+ }
+ }
+ }
+}));
+
+jest.mock('../fetch', () => ({
+ fetchSoon: jest.fn(),
+}));
+
+const indexPattern = { title: 'foo', getIndex: () => 'foo' };
+const indexPattern2 = { title: 'foo', getIndex: () => 'foo' };
+
+describe('SearchSource', function () {
+ describe('#setField()', function () {
+ it('sets the value for the property', function () {
+ const searchSource = new SearchSource();
+ searchSource.setField('aggs', 5);
+ expect(searchSource.getField('aggs')).toBe(5);
+ });
+
+ it('throws an error if the property is not accepted', function () {
+ const searchSource = new SearchSource();
+ expect(() => searchSource.setField('index', 5)).toThrow();
+ });
+ });
+
+ describe('#getField()', function () {
+ it('gets the value for the property', function () {
+ const searchSource = new SearchSource();
+ searchSource.setField('aggs', 5);
+ expect(searchSource.getField('aggs')).toBe(5);
+ });
+
+ it('throws an error if the property is not accepted', function () {
+ const searchSource = new SearchSource();
+ expect(() => searchSource.getField('unacceptablePropName')).toThrow();
+ });
+ });
+
+ describe(`#setField('index')`, function () {
+ describe('auto-sourceFiltering', function () {
+ describe('new index pattern assigned', function () {
+ it('generates a searchSource filter', function () {
+ const searchSource = new SearchSource();
+ expect(searchSource.getField('index')).toBe(undefined);
+ expect(searchSource.getField('source')).toBe(undefined);
+ searchSource.setField('index', indexPattern);
+ expect(searchSource.getField('index')).toBe(indexPattern);
+ expect(typeof searchSource.getField('source')).toBe('function');
+ });
+
+ it('removes created searchSource filter on removal', function () {
+ const searchSource = new SearchSource();
+ searchSource.setField('index', indexPattern);
+ searchSource.setField('index', null);
+ expect(searchSource.getField('index')).toBe(undefined);
+ expect(searchSource.getField('source')).toBe(undefined);
+ });
+ });
+
+ describe('new index pattern assigned over another', function () {
+ it('replaces searchSource filter with new', function () {
+ const searchSource = new SearchSource();
+ searchSource.setField('index', indexPattern);
+ const searchSourceFilter1 = searchSource.getField('source');
+ searchSource.setField('index', indexPattern2);
+ expect(searchSource.getField('index')).toBe(indexPattern2);
+ expect(typeof searchSource.getField('source')).toBe('function');
+ expect(searchSource.getField('source')).not.toBe(searchSourceFilter1);
+ });
+
+ it('removes created searchSource filter on removal', function () {
+ const searchSource = new SearchSource();
+ searchSource.setField('index', indexPattern);
+ searchSource.setField('index', indexPattern2);
+ searchSource.setField('index', null);
+ expect(searchSource.getField('index')).toBe(undefined);
+ expect(searchSource.getField('source')).toBe(undefined);
+ });
+ });
+
+ describe('ip assigned before custom searchSource filter', function () {
+ it('custom searchSource filter becomes new searchSource', function () {
+ const searchSource = new SearchSource();
+ const football = {};
+ searchSource.setField('index', indexPattern);
+ expect(typeof searchSource.getField('source')).toBe('function');
+ searchSource.setField('source', football);
+ expect(searchSource.getField('index')).toBe(indexPattern);
+ expect(searchSource.getField('source')).toBe(football);
+ });
+
+ it('custom searchSource stays after removal', function () {
+ const searchSource = new SearchSource();
+ const football = {};
+ searchSource.setField('index', indexPattern);
+ searchSource.setField('source', football);
+ searchSource.setField('index', null);
+ expect(searchSource.getField('index')).toBe(undefined);
+ expect(searchSource.getField('source')).toBe(football);
+ });
+ });
+
+ describe('ip assigned after custom searchSource filter', function () {
+ it('leaves the custom filter in place', function () {
+ const searchSource = new SearchSource();
+ const football = {};
+ searchSource.setField('source', football);
+ searchSource.setField('index', indexPattern);
+ expect(searchSource.getField('index')).toBe(indexPattern);
+ expect(searchSource.getField('source')).toBe(football);
+ });
+
+ it('custom searchSource stays after removal', function () {
+ const searchSource = new SearchSource();
+ const football = {};
+ searchSource.setField('source', football);
+ searchSource.setField('index', indexPattern);
+ searchSource.setField('index', null);
+ expect(searchSource.getField('index')).toBe(undefined);
+ expect(searchSource.getField('source')).toBe(football);
+ });
+ });
+ });
+ });
+
+ describe('#onRequestStart()', () => {
+ it('should be called when starting a request', () => {
+ const searchSource = new SearchSource();
+ const fn = jest.fn();
+ searchSource.onRequestStart(fn);
+ const options = {};
+ searchSource.requestIsStarting(options);
+ expect(fn).toBeCalledWith(searchSource, options);
+ });
+
+ it('should not be called on parent searchSource', () => {
+ const parent = new SearchSource();
+ const searchSource = new SearchSource().setParent(parent);
+
+ const fn = jest.fn();
+ searchSource.onRequestStart(fn);
+ const parentFn = jest.fn();
+ parent.onRequestStart(parentFn);
+ const options = {};
+ searchSource.requestIsStarting(options);
+
+ expect(fn).toBeCalledWith(searchSource, options);
+ expect(parentFn).not.toBeCalled();
+ });
+
+ it('should be called on parent searchSource if callParentStartHandlers is true', () => {
+ const parent = new SearchSource();
+ const searchSource = new SearchSource().setParent(parent, { callParentStartHandlers: true });
+
+ const fn = jest.fn();
+ searchSource.onRequestStart(fn);
+ const parentFn = jest.fn();
+ parent.onRequestStart(parentFn);
+ const options = {};
+ searchSource.requestIsStarting(options);
+
+ expect(fn).toBeCalledWith(searchSource, options);
+ expect(parentFn).toBeCalledWith(searchSource, options);
+ });
+ });
+});
diff --git a/src/legacy/ui/public/courier/search_strategy/default_search_strategy.js b/src/legacy/ui/public/courier/search_strategy/default_search_strategy.js
index 4b1f488ece128..7d9865c137e62 100644
--- a/src/legacy/ui/public/courier/search_strategy/default_search_strategy.js
+++ b/src/legacy/ui/public/courier/search_strategy/default_search_strategy.js
@@ -19,48 +19,13 @@
import { addSearchStrategy } from './search_strategy_registry';
import { isDefaultTypeIndexPattern } from './is_default_type_index_pattern';
-import { SearchError } from './search_error';
-import { getSearchParams, getMSearchParams } from '../fetch/get_search_params';
-
-function getAllFetchParams(searchRequests, Promise) {
- return Promise.map(searchRequests, (searchRequest) => {
- return Promise.try(searchRequest.getFetchParams, void 0, searchRequest)
- .then((fetchParams) => {
- return (searchRequest.fetchParams = fetchParams);
- })
- .then(value => ({ resolved: value }))
- .catch(error => ({ rejected: error }));
- });
-}
-
-async function serializeAllFetchParams(fetchParams, searchRequests, serializeFetchParams) {
- const searchRequestsWithFetchParams = [];
- const failedSearchRequests = [];
-
- // Gather the fetch param responses from all the successful requests.
- fetchParams.forEach((result, index) => {
- if (result.resolved) {
- searchRequestsWithFetchParams.push(result.resolved);
- } else {
- const searchRequest = searchRequests[index];
-
- searchRequest.handleFailure(result.rejected);
- failedSearchRequests.push(searchRequest);
- }
- });
-
- return {
- serializedFetchParams: await serializeFetchParams(searchRequestsWithFetchParams),
- failedSearchRequests,
- };
-}
+import { getSearchParams, getMSearchParams, getPreference, getTimeout } from '../fetch/get_search_params';
export const defaultSearchStrategy = {
id: 'default',
search: params => {
- const { config } = params;
- return config.get('courier:batchSearches') ? msearch(params) : search(params);
+ return params.config.get('courier:batchSearches') ? msearch(params) : search(params);
},
isViable: (indexPattern) => {
@@ -72,79 +37,43 @@ export const defaultSearchStrategy = {
},
};
-async function msearch({ searchRequests, es, Promise, serializeFetchParams, config }) {
- // Flatten the searchSource within each searchRequest to get the fetch params,
- // e.g. body, filters, index pattern, query.
- const allFetchParams = await getAllFetchParams(searchRequests, Promise);
-
- // Serialize the fetch params into a format suitable for the body of an ES query.
- const {
- serializedFetchParams,
- failedSearchRequests,
- } = await serializeAllFetchParams(allFetchParams, searchRequests, serializeFetchParams);
-
- if (serializedFetchParams.trim() === '') {
- return {
- failedSearchRequests,
+function msearch({ searchRequests, es, config, esShardTimeout }) {
+ const inlineRequests = searchRequests.map(({ index, body, search_type: searchType }) => {
+ const inlineHeader = {
+ index: index.title || index,
+ search_type: searchType,
+ ignore_unavailable: true,
+ preference: getPreference(config)
};
- }
- const msearchParams = {
- ...getMSearchParams(config),
- body: serializedFetchParams,
- };
-
- const searching = es.msearch(msearchParams);
+ const inlineBody = {
+ ...body,
+ timeout: getTimeout(esShardTimeout)
+ };
+ return `${JSON.stringify(inlineHeader)}\n${JSON.stringify(inlineBody)}`;
+ });
+ const searching = es.msearch({
+ ...getMSearchParams(config),
+ body: `${inlineRequests.join('\n')}\n`,
+ });
return {
- // Munge data into shape expected by consumer.
- searching: new Promise((resolve, reject) => {
- // Unwrap the responses object returned by the ES client.
- searching.then(({ responses }) => {
- resolve(responses);
- }).catch(error => {
- // Format ES client error as a SearchError.
- const { statusCode, displayName, message, path } = error;
-
- const searchError = new SearchError({
- status: statusCode,
- title: displayName,
- message,
- path,
- });
-
- reject(searchError);
- });
- }),
- abort: searching.abort,
- failedSearchRequests,
+ searching: searching.then(({ responses }) => responses),
+ abort: searching.abort
};
}
-function search({ searchRequests, es, Promise, config, sessionId, esShardTimeout }) {
- const failedSearchRequests = [];
+function search({ searchRequests, es, config, esShardTimeout }) {
const abortController = new AbortController();
- const searchParams = getSearchParams(config, sessionId, esShardTimeout);
- const promises = searchRequests.map(async searchRequest => {
- return searchRequest.getFetchParams()
- .then(fetchParams => {
- const { index, body } = searchRequest.fetchParams = fetchParams;
- const promise = es.search({ index: index.title || index, body, ...searchParams });
- abortController.signal.addEventListener('abort', promise.abort);
- return promise;
- }, error => {
- searchRequest.handleFailure(error);
- failedSearchRequests.push(searchRequest);
- })
- .catch(({ response }) => {
- // Copying the _msearch behavior where the errors for individual requests are returned
- // instead of thrown
- return JSON.parse(response);
- });
+ const searchParams = getSearchParams(config, esShardTimeout);
+ const promises = searchRequests.map(({ index, body }) => {
+ const searching = es.search({ index: index.title || index, body, ...searchParams })
+ .catch(({ response }) => JSON.parse(response));
+ abortController.signal.addEventListener('abort', searching.abort);
+ return searching;
});
return {
searching: Promise.all(promises),
abort: () => abortController.abort(),
- failedSearchRequests
};
}
diff --git a/src/legacy/ui/public/courier/search_strategy/default_search_strategy.test.js b/src/legacy/ui/public/courier/search_strategy/default_search_strategy.test.js
index dc8732032ba22..953ca4fe800f1 100644
--- a/src/legacy/ui/public/courier/search_strategy/default_search_strategy.test.js
+++ b/src/legacy/ui/public/courier/search_strategy/default_search_strategy.test.js
@@ -18,7 +18,6 @@
*/
import { defaultSearchStrategy } from './default_search_strategy';
-import Bluebird from 'bluebird';
const { search } = defaultSearchStrategy;
@@ -29,14 +28,12 @@ function getConfigStub(config = {}) {
}
describe('defaultSearchStrategy', function () {
-
describe('search', function () {
-
let searchArgs;
beforeEach(() => {
- const msearchMock = jest.fn().mockReturnValue(Bluebird.resolve([]));
- const searchMock = jest.fn().mockReturnValue(Bluebird.resolve([]));
+ const msearchMock = jest.fn().mockReturnValue(Promise.resolve([]));
+ const searchMock = jest.fn().mockReturnValue(Promise.resolve([]));
searchArgs = {
searchRequests: [],
@@ -44,8 +41,6 @@ describe('defaultSearchStrategy', function () {
msearch: msearchMock,
search: searchMock,
},
- Promise: Bluebird,
- serializeFetchParams: () => Bluebird.resolve('pretend this is a valid request body'),
};
});
@@ -78,7 +73,5 @@ describe('defaultSearchStrategy', function () {
await search(searchArgs);
expect(searchArgs.es.msearch.mock.calls[0][0]).toHaveProperty('ignore_throttled', false);
});
-
});
-
});
diff --git a/src/legacy/ui/public/courier/search_strategy/index.js b/src/legacy/ui/public/courier/search_strategy/index.js
index 3f6d172426d0d..229d0cbb1da5d 100644
--- a/src/legacy/ui/public/courier/search_strategy/index.js
+++ b/src/legacy/ui/public/courier/search_strategy/index.js
@@ -18,9 +18,10 @@
*/
export {
- assignSearchRequestsToSearchStrategies,
addSearchStrategy,
hasSearchStategyForIndexPattern,
+ getSearchStrategyById,
+ getSearchStrategyForSearchRequest,
} from './search_strategy_registry';
export { isDefaultTypeIndexPattern } from './is_default_type_index_pattern';
diff --git a/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.js b/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.js
index 3af93e4f16509..e67d39ea27aa6 100644
--- a/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.js
+++ b/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.js
@@ -19,7 +19,7 @@
import { noOpSearchStrategy } from './no_op_search_strategy';
-const searchStrategies = [];
+export const searchStrategies = [];
export const addSearchStrategy = searchStrategy => {
if (searchStrategies.includes(searchStrategy)) {
@@ -29,28 +29,26 @@ export const addSearchStrategy = searchStrategy => {
searchStrategies.push(searchStrategy);
};
-const getSearchStrategyByViability = indexPattern => {
+export const getSearchStrategyByViability = indexPattern => {
return searchStrategies.find(searchStrategy => {
return searchStrategy.isViable(indexPattern);
});
};
-const getSearchStrategyById = searchStrategyId => {
+export const getSearchStrategyById = searchStrategyId => {
return searchStrategies.find(searchStrategy => {
return searchStrategy.id === searchStrategyId;
});
};
-const getSearchStrategyForSearchRequest = searchRequest => {
+export const getSearchStrategyForSearchRequest = (searchRequest, { searchStrategyId } = {}) => {
// Allow the searchSource to declare the correct strategy with which to execute its searches.
- const preferredSearchStrategyId = searchRequest.source.getPreferredSearchStrategyId();
- if (preferredSearchStrategyId != null) {
- return getSearchStrategyById(preferredSearchStrategyId);
+ if (searchStrategyId != null) {
+ return getSearchStrategyById(searchStrategyId);
}
// Otherwise try to match it to a strategy.
- const indexPattern = searchRequest.source.getField('index');
- const viableSearchStrategy = getSearchStrategyByViability(indexPattern);
+ const viableSearchStrategy = getSearchStrategyByViability(searchRequest.index);
if (viableSearchStrategy) {
return viableSearchStrategy;
@@ -60,47 +58,6 @@ const getSearchStrategyForSearchRequest = searchRequest => {
return noOpSearchStrategy;
};
-
-/**
- * Build a structure like this:
- *
- * [{
- * searchStrategy: rollupSearchStrategy,
- * searchRequests: [],
- * }, {
- * searchStrategy: defaultSearchStrategy,
- * searchRequests: [],
- * }]
- *
- * We use an array of objects to preserve the order of the search requests, which we use to
- * deterministically associate each response with the originating request.
- */
-export const assignSearchRequestsToSearchStrategies = searchRequests => {
- const searchStrategiesWithRequests = [];
- const searchStrategyById = {};
-
- searchRequests.forEach(searchRequest => {
- const matchingSearchStrategy = getSearchStrategyForSearchRequest(searchRequest);
- const { id } = matchingSearchStrategy;
- let searchStrategyWithRequest = searchStrategyById[id];
-
- // Create the data structure if we don't already have it.
- if (!searchStrategyWithRequest) {
- searchStrategyWithRequest = {
- searchStrategy: matchingSearchStrategy,
- searchRequests: [],
- };
-
- searchStrategyById[id] = searchStrategyWithRequest;
- searchStrategiesWithRequests.push(searchStrategyWithRequest);
- }
-
- searchStrategyWithRequest.searchRequests.push(searchRequest);
- });
-
- return searchStrategiesWithRequests;
-};
-
export const hasSearchStategyForIndexPattern = indexPattern => {
return Boolean(getSearchStrategyByViability(indexPattern));
};
diff --git a/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.test.js b/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.test.js
index 5f7e14082d577..362d303eb6203 100644
--- a/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.test.js
+++ b/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.test.js
@@ -17,79 +17,98 @@
* under the License.
*/
+import { noOpSearchStrategy } from './no_op_search_strategy';
import {
- assignSearchRequestsToSearchStrategies,
+ searchStrategies,
addSearchStrategy,
+ getSearchStrategyByViability,
+ getSearchStrategyById,
+ getSearchStrategyForSearchRequest,
+ hasSearchStategyForIndexPattern
} from './search_strategy_registry';
-import { noOpSearchStrategy } from './no_op_search_strategy';
+const mockSearchStrategies = [{
+ id: 0,
+ isViable: index => index === 0
+}, {
+ id: 1,
+ isViable: index => index === 1
+}];
+
+describe('Search strategy registry', () => {
+ beforeEach(() => {
+ searchStrategies.length = 0;
+ });
+
+ describe('addSearchStrategy', () => {
+ it('adds a search strategy', () => {
+ addSearchStrategy(mockSearchStrategies[0]);
+ expect(searchStrategies.length).toBe(1);
+ });
+
+ it('does not add a search strategy if it is already included', () => {
+ addSearchStrategy(mockSearchStrategies[0]);
+ addSearchStrategy(mockSearchStrategies[0]);
+ expect(searchStrategies.length).toBe(1);
+ });
+ });
+
+ describe('getSearchStrategyByViability', () => {
+ beforeEach(() => {
+ mockSearchStrategies.forEach(addSearchStrategy);
+ });
+
+ it('returns the viable strategy', () => {
+ expect(getSearchStrategyByViability(0)).toBe(mockSearchStrategies[0]);
+ expect(getSearchStrategyByViability(1)).toBe(mockSearchStrategies[1]);
+ });
+
+ it('returns undefined if there is no viable strategy', () => {
+ expect(getSearchStrategyByViability(-1)).toBe(undefined);
+ });
+ });
+
+ describe('getSearchStrategyById', () => {
+ beforeEach(() => {
+ mockSearchStrategies.forEach(addSearchStrategy);
+ });
+
+ it('returns the strategy by ID', () => {
+ expect(getSearchStrategyById(0)).toBe(mockSearchStrategies[0]);
+ expect(getSearchStrategyById(1)).toBe(mockSearchStrategies[1]);
+ });
-describe('SearchStrategyRegistry', () => {
- describe('assignSearchRequestsToSearchStrategies', () => {
- test('associates search requests with valid search strategies', () => {
- const searchStrategyA = {
- id: 'a',
- isViable: indexPattern => {
- return indexPattern === 'a';
- },
- };
-
- addSearchStrategy(searchStrategyA);
-
- const searchStrategyB = {
- id: 'b',
- isViable: indexPattern => {
- return indexPattern === 'b';
- },
- };
-
- addSearchStrategy(searchStrategyB);
-
- const searchRequest0 = {
- id: 0,
- source: { getField: () => 'b', getPreferredSearchStrategyId: () => {} },
- };
-
- const searchRequest1 = {
- id: 1,
- source: { getField: () => 'a', getPreferredSearchStrategyId: () => {} },
- };
-
- const searchRequest2 = {
- id: 2,
- source: { getField: () => 'a', getPreferredSearchStrategyId: () => {} },
- };
-
- const searchRequest3 = {
- id: 3,
- source: { getField: () => 'b', getPreferredSearchStrategyId: () => {} },
- };
-
- const searchRequests = [ searchRequest0, searchRequest1, searchRequest2, searchRequest3];
- const searchStrategiesWithSearchRequests = assignSearchRequestsToSearchStrategies(searchRequests);
-
- expect(searchStrategiesWithSearchRequests).toEqual([{
- searchStrategy: searchStrategyB,
- searchRequests: [ searchRequest0, searchRequest3 ],
- }, {
- searchStrategy: searchStrategyA,
- searchRequests: [ searchRequest1, searchRequest2 ],
- }]);
+ it('returns undefined if there is no strategy with that ID', () => {
+ expect(getSearchStrategyById(-1)).toBe(undefined);
});
+ });
- test(`associates search requests with noOpSearchStrategy when a viable one can't be found`, () => {
- const searchRequest0 = {
- id: 0,
- source: { getField: () => {}, getPreferredSearchStrategyId: () => {} },
- };
+ describe('getSearchStrategyForSearchRequest', () => {
+ beforeEach(() => {
+ mockSearchStrategies.forEach(addSearchStrategy);
+ });
- const searchRequests = [ searchRequest0 ];
- const searchStrategiesWithSearchRequests = assignSearchRequestsToSearchStrategies(searchRequests);
+ it('returns the strategy by ID if provided', () => {
+ expect(getSearchStrategyForSearchRequest({}, { searchStrategyId: 1 })).toBe(mockSearchStrategies[1]);
+ });
+
+ it('returns the strategy by viability if there is one', () => {
+ expect(getSearchStrategyForSearchRequest({ index: 1 })).toBe(mockSearchStrategies[1]);
+ });
+
+ it('returns the no op strategy if there is no viable strategy', () => {
+ expect(getSearchStrategyForSearchRequest({ index: 3 })).toBe(noOpSearchStrategy);
+ });
+ });
+
+ describe('hasSearchStategyForIndexPattern', () => {
+ beforeEach(() => {
+ mockSearchStrategies.forEach(addSearchStrategy);
+ });
- expect(searchStrategiesWithSearchRequests).toEqual([{
- searchStrategy: noOpSearchStrategy,
- searchRequests: [ searchRequest0 ],
- }]);
+ it('returns whether there is a search strategy for this index pattern', () => {
+ expect(hasSearchStategyForIndexPattern(0)).toBe(true);
+ expect(hasSearchStategyForIndexPattern(-1)).toBe(false);
});
});
});
diff --git a/src/legacy/ui/public/error_allow_explicit_index/_error_allow_explicit_index.scss b/src/legacy/ui/public/error_allow_explicit_index/_error_allow_explicit_index.scss
deleted file mode 100644
index 769abea150199..0000000000000
--- a/src/legacy/ui/public/error_allow_explicit_index/_error_allow_explicit_index.scss
+++ /dev/null
@@ -1,3 +0,0 @@
-.kbnError--multi-allow-explicit-index {
- padding: $euiSizeL;
-}
diff --git a/src/legacy/ui/public/error_allow_explicit_index/_index.scss b/src/legacy/ui/public/error_allow_explicit_index/_index.scss
deleted file mode 100644
index 84cb111127679..0000000000000
--- a/src/legacy/ui/public/error_allow_explicit_index/_index.scss
+++ /dev/null
@@ -1 +0,0 @@
-@import './error_allow_explicit_index';
diff --git a/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.html b/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.html
deleted file mode 100644
index e61383b11101a..0000000000000
--- a/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.html
+++ /dev/null
@@ -1,48 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.js b/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.js
deleted file mode 100644
index 35763d8dd0385..0000000000000
--- a/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.js
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import { i18n } from '@kbn/i18n';
-import { get } from 'lodash';
-
-import uiRoutes from '../routes';
-import { KbnUrlProvider } from '../url';
-
-import template from './error_allow_explicit_index.html';
-
-uiRoutes
- .when('/error/multi.allow_explicit_index', {
- template,
- k7Breadcrumbs: () => [{ text: i18n.translate('common.ui.errorAllowExplicitIndex.breadcrumbs.errorText', { defaultMessage: 'Error' }) }],
- });
-
-export function ErrorAllowExplicitIndexProvider(Private, Promise) {
- const kbnUrl = Private(KbnUrlProvider);
-
- return new (class ErrorAllowExplicitIndex {
- test(error) {
- if (!error || error.status !== 400) {
- return false;
- }
-
- const type = get(error, 'body.error.type');
- const reason = get(error, 'body.error.reason');
-
- return (
- type === 'illegal_argument_exception' &&
- String(reason).includes('explicit index')
- );
- }
-
- takeover() {
- kbnUrl.change('/error/multi.allow_explicit_index');
- return Promise.halt();
- }
- });
-}
diff --git a/src/legacy/ui/public/error_allow_explicit_index/index.js b/src/legacy/ui/public/error_allow_explicit_index/index.js
deleted file mode 100644
index a832fde31c987..0000000000000
--- a/src/legacy/ui/public/error_allow_explicit_index/index.js
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-export { ErrorAllowExplicitIndexProvider } from './error_allow_explicit_index';
diff --git a/src/legacy/ui/public/field_wildcard/__tests__/field_wildcard.js b/src/legacy/ui/public/field_wildcard/__tests__/field_wildcard.js
index aeffdbc8bfa6c..a15c602b7ba83 100644
--- a/src/legacy/ui/public/field_wildcard/__tests__/field_wildcard.js
+++ b/src/legacy/ui/public/field_wildcard/__tests__/field_wildcard.js
@@ -20,19 +20,12 @@
import expect from '@kbn/expect';
import ngMock from 'ng_mock';
-import { FieldWildcardProvider } from '../../field_wildcard';
+import { fieldWildcardFilter, makeRegEx } from '../../field_wildcard';
describe('fieldWildcard', function () {
- let fieldWildcardFilter;
- let makeRegEx;
+ const metaFields = ['_id', '_type', '_source'];
beforeEach(ngMock.module('kibana'));
- beforeEach(ngMock.inject(function (config, Private) {
- config.set('metaFields', ['_id', '_type', '_source']);
- const fieldWildcard = Private(FieldWildcardProvider);
- fieldWildcardFilter = fieldWildcard.fieldWildcardFilter;
- makeRegEx = fieldWildcard.makeRegEx;
- }));
describe('makeRegEx', function () {
it('matches * in any position', function () {
@@ -70,7 +63,7 @@ describe('fieldWildcard', function () {
});
it('filters nothing when given an empty array', function () {
- const filter = fieldWildcardFilter([]);
+ const filter = fieldWildcardFilter([], metaFields);
const original = [
'foo',
'bar',
@@ -82,7 +75,7 @@ describe('fieldWildcard', function () {
});
it('does not filter metaFields', function () {
- const filter = fieldWildcardFilter([ '_*' ]);
+ const filter = fieldWildcardFilter([ '_*' ], metaFields);
const original = [
'_id',
@@ -97,7 +90,7 @@ describe('fieldWildcard', function () {
const filter = fieldWildcardFilter([
'f*',
'*4'
- ]);
+ ], metaFields);
const original = [
'foo',
@@ -114,7 +107,7 @@ describe('fieldWildcard', function () {
'f*',
'*4',
'undefined'
- ]);
+ ], metaFields);
const original = [
'foo',
diff --git a/src/legacy/ui/public/field_wildcard/field_wildcard.js b/src/legacy/ui/public/field_wildcard/field_wildcard.js
index f73997d40a4e4..656641b20a98c 100644
--- a/src/legacy/ui/public/field_wildcard/field_wildcard.js
+++ b/src/legacy/ui/public/field_wildcard/field_wildcard.js
@@ -19,31 +19,25 @@
import { escapeRegExp, memoize } from 'lodash';
-export function FieldWildcardProvider(config) {
- const metaFields = config.get('metaFields');
+export const makeRegEx = memoize(function makeRegEx(glob) {
+ return new RegExp('^' + glob.split('*').map(escapeRegExp).join('.*') + '$');
+});
- const makeRegEx = memoize(function makeRegEx(glob) {
- return new RegExp('^' + glob.split('*').map(escapeRegExp).join('.*') + '$');
- });
-
- // Note that this will return an essentially noop function if globs is undefined.
- function fieldWildcardMatcher(globs = []) {
- return function matcher(val) {
- // do not test metaFields or keyword
- if (metaFields.indexOf(val) !== -1) {
- return false;
- }
- return globs.some(p => makeRegEx(p).test(val));
- };
- }
-
- // Note that this will return an essentially noop function if globs is undefined.
- function fieldWildcardFilter(globs = []) {
- const matcher = fieldWildcardMatcher(globs);
- return function filter(val) {
- return !matcher(val);
- };
- }
+// Note that this will return an essentially noop function if globs is undefined.
+export function fieldWildcardMatcher(globs = [], metaFields) {
+ return function matcher(val) {
+ // do not test metaFields or keyword
+ if (metaFields.indexOf(val) !== -1) {
+ return false;
+ }
+ return globs.some(p => makeRegEx(p).test(val));
+ };
+}
- return { makeRegEx, fieldWildcardMatcher, fieldWildcardFilter };
+// Note that this will return an essentially noop function if globs is undefined.
+export function fieldWildcardFilter(globs = [], metaFields = []) {
+ const matcher = fieldWildcardMatcher(globs, metaFields);
+ return function filter(val) {
+ return !matcher(val);
+ };
}
diff --git a/src/legacy/ui/public/field_wildcard/index.js b/src/legacy/ui/public/field_wildcard/index.js
index d03643f8804d8..db9f830e450b8 100644
--- a/src/legacy/ui/public/field_wildcard/index.js
+++ b/src/legacy/ui/public/field_wildcard/index.js
@@ -17,4 +17,4 @@
* under the License.
*/
-export { FieldWildcardProvider } from './field_wildcard';
+export * from './field_wildcard';
diff --git a/src/legacy/ui/public/legacy_compat/angular_config.tsx b/src/legacy/ui/public/legacy_compat/angular_config.tsx
index 28d57e9f8e8c9..8eac31e24530c 100644
--- a/src/legacy/ui/public/legacy_compat/angular_config.tsx
+++ b/src/legacy/ui/public/legacy_compat/angular_config.tsx
@@ -64,7 +64,6 @@ export const configureAppAngularModule = (angularModule: IModule) => {
.value('buildNum', legacyMetadata.buildNum)
.value('buildSha', legacyMetadata.buildSha)
.value('serverName', legacyMetadata.serverName)
- .value('sessionId', Date.now())
.value('esUrl', getEsUrl(newPlatform))
.value('uiCapabilities', capabilities.get())
.config(setupCompileProvider(newPlatform))
diff --git a/src/legacy/ui/public/management/components/sidebar_nav.tsx b/src/legacy/ui/public/management/components/sidebar_nav.tsx
index ef232c7ef7eda..f0ac787e0ef44 100644
--- a/src/legacy/ui/public/management/components/sidebar_nav.tsx
+++ b/src/legacy/ui/public/management/components/sidebar_nav.tsx
@@ -19,6 +19,7 @@
import { EuiIcon, EuiSideNav, IconType } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
+import { i18n } from '@kbn/i18n';
import React from 'react';
import { IndexedArray } from 'ui/indexed_array';
@@ -73,6 +74,9 @@ export class SidebarNav extends React.Component {
- if (this.searchSource) {
- this.searchSource.cancelQueued();
- }
- };
+ this.destroy = () => {};
/**
* Delete this object from Elasticsearch
diff --git a/src/legacy/ui/public/vis/vis.js b/src/legacy/ui/public/vis/vis.js
index c34fc1b10378e..c1fff1556e3ad 100644
--- a/src/legacy/ui/public/vis/vis.js
+++ b/src/legacy/ui/public/vis/vis.js
@@ -33,14 +33,13 @@ import '../render_complete/directive';
import { AggConfigs } from '../agg_types/agg_configs';
import { PersistedState } from '../persisted_state';
import { updateVisualizationConfig } from './vis_update';
-import { SearchSourceProvider } from '../courier/search_source';
+import { SearchSource } from '../courier';
import { start as visualizations } from '../../../core_plugins/visualizations/public/np_ready/public/legacy';
import '../directives/bind';
export function VisProvider(Private, getAppState) {
const visTypes = visualizations.types;
- const SearchSource = Private(SearchSourceProvider);
class Vis extends EventEmitter {
constructor(indexPattern, visState) {
diff --git a/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.test.ts b/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.test.ts
index 9d6b56c32f1cb..c73f787457a03 100644
--- a/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.test.ts
+++ b/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.test.ts
@@ -18,6 +18,7 @@
*/
jest.mock('ui/new_platform');
+import { searchSourceMock } from '../../courier/search_source/mocks';
import { mockDataLoaderFetch, timefilter } from './embedded_visualize_handler.test.mocks';
import _ from 'lodash';
@@ -85,7 +86,7 @@ describe('EmbeddedVisualizeHandler', () => {
inspectorAdapters: {},
query: undefined,
queryFilter: null,
- searchSource: undefined,
+ searchSource: searchSourceMock,
timeRange: undefined,
uiState: undefined,
};
@@ -96,7 +97,7 @@ describe('EmbeddedVisualizeHandler', () => {
{
vis: mockVis,
title: 'My Vis',
- searchSource: undefined,
+ searchSource: searchSourceMock,
destroy: () => ({}),
copyOnSave: false,
save: () => Promise.resolve('123'),
@@ -128,7 +129,7 @@ describe('EmbeddedVisualizeHandler', () => {
{
vis: mockVis,
title: 'My Vis',
- searchSource: undefined,
+ searchSource: searchSourceMock,
destroy: () => ({}),
copyOnSave: false,
save: () => Promise.resolve('123'),
diff --git a/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.ts b/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.ts
index 119ec8a004239..bc2152911d1ec 100644
--- a/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.ts
+++ b/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.ts
@@ -518,9 +518,9 @@ export class EmbeddedVisualizeHandler {
// If the data loader was aborted then no need to surface this error in the UI
if (error && error.name === 'AbortError') return;
- // TODO: come up with a general way to cancel execution of pipeline expressions.
- if (this.dataLoaderParams.searchSource && this.dataLoaderParams.searchSource.cancelQueued) {
- this.dataLoaderParams.searchSource.cancelQueued();
+ // Cancel execution of pipeline expressions
+ if (this.abortController) {
+ this.abortController.abort();
}
this.vis.requestError = error;
diff --git a/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.test.ts b/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.test.ts
index f644d7f52d458..0f9f04c87fc6f 100644
--- a/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.test.ts
+++ b/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.test.ts
@@ -28,7 +28,7 @@ import {
} from './build_pipeline';
import { Vis, VisState } from 'ui/vis';
import { AggConfig } from 'ui/agg_types/agg_config';
-import { SearchSource } from 'ui/courier';
+import { searchSourceMock } from 'ui/courier/search_source/mocks';
jest.mock('ui/new_platform');
jest.mock('ui/agg_types/buckets/date_histogram', () => ({
@@ -348,10 +348,7 @@ describe('visualize loader pipeline helpers: build pipeline', () => {
toExpression: () => 'testing custom expressions',
},
};
- const searchSource: SearchSource = {
- getField: () => null,
- };
- const expression = await buildPipeline(vis, { searchSource });
+ const expression = await buildPipeline(vis, { searchSource: searchSourceMock });
expect(expression).toMatchSnapshot();
});
});
diff --git a/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.ts b/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.ts
index e8f1faf915eaf..a0d5b7b36d7f6 100644
--- a/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.ts
+++ b/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.ts
@@ -442,18 +442,9 @@ export const buildVislibDimensions = async (
} else if (xAgg.type.name === 'histogram') {
const intervalParam = xAgg.type.paramByName('interval');
const output = { params: {} as any };
- const searchRequest = {
- whenAborted: (fn: any) => {
- if (params.abortSignal) {
- params.abortSignal.addEventListener('abort', fn);
- }
- },
- };
- await intervalParam.modifyAggConfigOnSearchRequestStart(
- xAgg,
- params.searchSource,
- searchRequest
- );
+ await intervalParam.modifyAggConfigOnSearchRequestStart(xAgg, params.searchSource, {
+ abortSignal: params.abortSignal,
+ });
intervalParam.write(xAgg, output);
dimensions.x.params.interval = output.params.interval;
}
diff --git a/test/functional/apps/management/index.js b/test/functional/apps/management/index.js
index 4d4031b4e489b..e68920f06372a 100644
--- a/test/functional/apps/management/index.js
+++ b/test/functional/apps/management/index.js
@@ -42,6 +42,8 @@ export default function ({ getService, loadTestFile }) {
loadTestFile(require.resolve('./_kibana_settings'));
loadTestFile(require.resolve('./_scripted_fields'));
loadTestFile(require.resolve('./_scripted_fields_preview'));
+ loadTestFile(require.resolve('./_mgmt_import_saved_objects'));
+
});
describe('', function () {
diff --git a/x-pack/legacy/plugins/apm/public/new-platform/plugin.tsx b/x-pack/legacy/plugins/apm/public/new-platform/plugin.tsx
index 39912ec2ca8b4..477559784bf59 100644
--- a/x-pack/legacy/plugins/apm/public/new-platform/plugin.tsx
+++ b/x-pack/legacy/plugins/apm/public/new-platform/plugin.tsx
@@ -24,7 +24,7 @@ import { MatchedRouteProvider } from '../context/MatchedRouteContext';
export const REACT_APP_ROOT_ID = 'react-apm-root';
-const MainContainer = styled.div`
+const MainContainer = styled.main`
min-width: ${px(unit * 50)};
padding: ${px(units.plus)};
`;
diff --git a/x-pack/legacy/plugins/canvas/scripts/shareable_runtime.js b/x-pack/legacy/plugins/canvas/scripts/shareable_runtime.js
index d4d7276ebbc18..11723587b057d 100644
--- a/x-pack/legacy/plugins/canvas/scripts/shareable_runtime.js
+++ b/x-pack/legacy/plugins/canvas/scripts/shareable_runtime.js
@@ -61,7 +61,7 @@ run(
'webpack-dev-server',
'--config',
webpackConfig,
- '--progress',
+ ...(process.stdout.isTTY ? ['--progress'] : []),
'--hide-modules',
'--display-entrypoints',
'false',
diff --git a/x-pack/legacy/plugins/code/index.ts b/x-pack/legacy/plugins/code/index.ts
index d0d17aa9a802b..34a2102861c91 100644
--- a/x-pack/legacy/plugins/code/index.ts
+++ b/x-pack/legacy/plugins/code/index.ts
@@ -78,7 +78,7 @@ export const code = (kibana: any) =>
// Set up with the new platform plugin lifecycle API.
const plugin = codePlugin(initializerContext);
- plugin.setup(coreSetup);
+ await plugin.setup(coreSetup, initializerContext.legacy.http);
// @ts-ignore
const kbnServer = this.kbnServer;
diff --git a/x-pack/legacy/plugins/code/public/components/admin_page/admin.tsx b/x-pack/legacy/plugins/code/public/components/admin_page/admin.tsx
index 68c96b904e98a..ca97c7a091c22 100644
--- a/x-pack/legacy/plugins/code/public/components/admin_page/admin.tsx
+++ b/x-pack/legacy/plugins/code/public/components/admin_page/admin.tsx
@@ -131,7 +131,7 @@ class AdminPage extends React.PureComponent {
public render() {
return (
-
-
+
);
}
}
diff --git a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_adapter.ts b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_adapter.ts
index 6d70c8386c31d..9d168e604c1b3 100644
--- a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_adapter.ts
+++ b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_adapter.ts
@@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { Request } from 'hapi';
+import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server';
import util from 'util';
import Boom from 'boom';
import { ServiceHandlerAdapter, ServiceRegisterOptions } from '../service_handler_adapter';
@@ -48,7 +48,7 @@ export class ClusterNodeAdapter implements ServiceHandlerAdapter {
private readonly nonCodeAdapter: NonCodeNodeAdapter = new NonCodeNodeAdapter('', this.log);
constructor(
- private readonly server: CodeServerRouter,
+ private readonly router: CodeServerRouter,
private readonly log: Logger,
serverOptions: ServerOptions,
esClient: EsClient
@@ -113,17 +113,25 @@ export class ClusterNodeAdapter implements ServiceHandlerAdapter {
const d = serviceDefinition[method];
const path = `${options.routePrefix}/${d.routePath || method}`;
- this.server.route({
+ this.router.route({
method: 'post',
path,
- handler: async (req: Request) => {
- const { context, params } = req.payload as RequestPayload;
+ npHandler: async (
+ ctx: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) => {
+ const { context, params } = req.body as RequestPayload;
this.log.debug(`Receiving RPC call ${req.url.path} ${util.inspect(params)}`);
try {
const data = await localHandler(params, context);
- return { data };
+ return res.ok({ body: { data } });
} catch (e) {
- throw Boom.boomify(e);
+ if (Boom.isBoom(e)) {
+ throw e;
+ } else {
+ throw Boom.boomify(e, { statusCode: 500 });
+ }
}
},
});
diff --git a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_endpoint.ts b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_endpoint.ts
index adb7e9b93fbad..e23b5a9027e75 100644
--- a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_endpoint.ts
+++ b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_endpoint.ts
@@ -4,13 +4,13 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { Request } from 'hapi';
+import { KibanaRequest } from 'src/core/server';
import { LocalEndpoint } from '../local_endpoint';
import { CodeNode } from './code_nodes';
export class ClusterNodeEndpoint extends LocalEndpoint {
constructor(
- public readonly httpRequest: Request,
+ public readonly httpRequest: KibanaRequest,
public readonly resource: string,
public readonly codeNode: CodeNode
) {
diff --git a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_resource_locator.ts b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_resource_locator.ts
index 27f5c57214112..6ac0b830905bb 100644
--- a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_resource_locator.ts
+++ b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_resource_locator.ts
@@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { Request } from 'hapi';
+import { KibanaRequest } from 'src/core/server';
import Boom from 'boom';
import { Endpoint, ResourceLocator } from '../resource_locator';
import { ClusterService } from './cluster_service';
@@ -26,7 +26,7 @@ export class ClusterResourceLocator implements ResourceLocator {
return RepositoryUtils.buildRepository(url).uri;
}
- async locate(req: Request, resource: string): Promise {
+ async locate(req: KibanaRequest, resource: string): Promise {
// to be compatible with
if (resource.trim() === '') {
return new LocalEndpoint(req, resource);
@@ -58,7 +58,7 @@ export class ClusterResourceLocator implements ResourceLocator {
/**
* Return undefined to let NodeRepositoriesService enqueue the clone job in cluster mode.
*/
- async allocate(req: Request, resource: string): Promise {
+ async allocate(req: KibanaRequest, resource: string): Promise {
// make the cluster service synchronize the meta data and allocate new resources to nodes
await this.clusterService.pollClusterState();
return undefined;
diff --git a/x-pack/legacy/plugins/code/server/distributed/code_services.test.ts b/x-pack/legacy/plugins/code/server/distributed/code_services.test.ts
index 5f5319730c258..bcc2e7b21e672 100644
--- a/x-pack/legacy/plugins/code/server/distributed/code_services.test.ts
+++ b/x-pack/legacy/plugins/code/server/distributed/code_services.test.ts
@@ -4,7 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { Request, Server } from 'hapi';
+import { KibanaRequest } from 'src/core/server';
+import { httpServiceMock, httpServerMock } from 'src/core/server/mocks';
import { createTestHapiServer } from '../test_utils';
import { LocalHandlerAdapter } from './local_handler_adapter';
import { CodeServerRouter } from '../security';
@@ -17,12 +18,13 @@ import { Logger } from '../log';
import { ConsoleLoggerFactory } from '../utils/console_logger_factory';
const log: Logger = new ConsoleLoggerFactory().getLogger(['test']);
-let hapiServer: Server = createTestHapiServer();
+let hapiServer = createTestHapiServer();
-let server: CodeServerRouter = new CodeServerRouter(hapiServer);
+const routerMock = httpServiceMock.createRouter();
+let router: CodeServerRouter = new CodeServerRouter(routerMock);
beforeEach(async () => {
hapiServer = createTestHapiServer();
- server = new CodeServerRouter(hapiServer);
+ router = new CodeServerRouter(routerMock);
});
const TestDefinition = {
test1: {
@@ -49,13 +51,13 @@ test('local adapter should work', async () => {
const services = new CodeServices(new LocalHandlerAdapter());
services.registerHandler(TestDefinition, testServiceHandler);
const testApi = services.serviceFor(TestDefinition);
- const endpoint = await services.locate({} as Request, '');
+ const endpoint = await services.locate(httpServerMock.createKibanaRequest(), '');
const { result } = await testApi.test1(endpoint, { name: 'tester' });
expect(result).toBe(`hello tester`);
});
-test('multi-node adapter should register routes', async () => {
- const services = new CodeServices(new CodeNodeAdapter(server, log));
+test.skip('multi-node adapter should register routes', async () => {
+ const services = new CodeServices(new CodeNodeAdapter(router, log));
services.registerHandler(TestDefinition, testServiceHandler);
const prefix = DEFAULT_SERVICE_OPTION.routePrefix;
@@ -70,8 +72,8 @@ test('multi-node adapter should register routes', async () => {
expect(data.result).toBe(`hello tester`);
});
-test('non-code-node could send request to code-node', async () => {
- const codeNode = new CodeServices(new CodeNodeAdapter(server, log));
+test.skip('non-code-node could send request to code-node', async () => {
+ const codeNode = new CodeServices(new CodeNodeAdapter(router, log));
const codeNodeUrl = 'http://localhost:5601';
const nonCodeNodeAdapter = new NonCodeNodeAdapter(codeNodeUrl, log);
const nonCodeNode = new CodeServices(nonCodeNodeAdapter);
@@ -80,13 +82,13 @@ test('non-code-node could send request to code-node', async () => {
baseUrl: string,
path: string,
payload: RequestPayload,
- originRequest: Request
+ originRequest: KibanaRequest
) => {
expect(baseUrl).toBe(codeNodeUrl);
const response = await hapiServer.inject({
method: 'POST',
url: path,
- headers: originRequest.headers,
+ headers: originRequest.headers as any,
payload,
});
expect(response.statusCode).toBe(200);
@@ -96,11 +98,13 @@ test('non-code-node could send request to code-node', async () => {
nonCodeNode.registerHandler(TestDefinition, null);
const testApi = nonCodeNode.serviceFor(TestDefinition);
const fakeRequest = ({
- path: 'fakePath',
+ route: {
+ path: 'fakePath',
+ },
headers: {
fakeHeader: 'fakeHeaderValue',
},
- } as unknown) as Request;
+ } as unknown) as KibanaRequest;
const fakeResource = 'fakeResource';
const endpoint = await nonCodeNode.locate(fakeRequest, fakeResource);
const { result } = await testApi.test1(endpoint, { name: 'tester' });
@@ -108,5 +112,5 @@ test('non-code-node could send request to code-node', async () => {
const context = await testApi.test2(endpoint, {});
expect(context.resource).toBe(fakeResource);
- expect(context.path).toBe(fakeRequest.path);
+ expect(context.path).toBe(fakeRequest.route.path);
});
diff --git a/x-pack/legacy/plugins/code/server/distributed/code_services.ts b/x-pack/legacy/plugins/code/server/distributed/code_services.ts
index 480cab11ed84e..a2abe402a8e52 100644
--- a/x-pack/legacy/plugins/code/server/distributed/code_services.ts
+++ b/x-pack/legacy/plugins/code/server/distributed/code_services.ts
@@ -4,6 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
+import { KibanaRequest } from 'src/core/server';
import { ServiceDefinition, ServiceHandlerFor, ServiceMethodMap } from './service_definition';
import {
DEFAULT_SERVICE_OPTION,
@@ -11,7 +12,6 @@ import {
ServiceRegisterOptions,
} from './service_handler_adapter';
import { Endpoint } from './resource_locator';
-import { RequestFacade } from '../../';
export class CodeServices {
constructor(private readonly adapter: ServiceHandlerAdapter) {}
@@ -32,11 +32,11 @@ export class CodeServices {
await this.adapter.stop();
}
- public allocate(req: RequestFacade, resource: string): Promise {
+ public allocate(req: KibanaRequest, resource: string): Promise {
return this.adapter.locator.allocate(req, resource);
}
- public locate(req: RequestFacade, resource: string): Promise {
+ public locate(req: KibanaRequest, resource: string): Promise {
return this.adapter.locator.locate(req, resource);
}
diff --git a/x-pack/legacy/plugins/code/server/distributed/local_endpoint.ts b/x-pack/legacy/plugins/code/server/distributed/local_endpoint.ts
index 689ecc7fc641b..a7da90544fed3 100644
--- a/x-pack/legacy/plugins/code/server/distributed/local_endpoint.ts
+++ b/x-pack/legacy/plugins/code/server/distributed/local_endpoint.ts
@@ -4,17 +4,17 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { Request } from 'hapi';
+import { KibanaRequest } from 'src/core/server';
import { Endpoint } from './resource_locator';
import { RequestContext } from './service_definition';
export class LocalEndpoint implements Endpoint {
- constructor(readonly httpRequest: Request, readonly resource: string) {}
+ constructor(readonly httpRequest: KibanaRequest, readonly resource: string) {}
toContext(): RequestContext {
return {
resource: this.resource,
- path: this.httpRequest.path,
+ path: this.httpRequest.route.path,
} as RequestContext;
}
}
diff --git a/x-pack/legacy/plugins/code/server/distributed/local_handler_adapter.ts b/x-pack/legacy/plugins/code/server/distributed/local_handler_adapter.ts
index f4d9b6f1815a0..4f51ee2938366 100644
--- a/x-pack/legacy/plugins/code/server/distributed/local_handler_adapter.ts
+++ b/x-pack/legacy/plugins/code/server/distributed/local_handler_adapter.ts
@@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { Request } from 'hapi';
+import { KibanaRequest } from 'src/core/server';
import { ServiceHandlerAdapter } from './service_handler_adapter';
import { ServiceDefinition, ServiceHandlerFor, ServiceMethodMap } from './service_definition';
import { Endpoint, ResourceLocator } from './resource_locator';
@@ -45,7 +45,7 @@ export class LocalHandlerAdapter implements ServiceHandlerAdapter {
}
locator: ResourceLocator = {
- async locate(httpRequest: Request, resource: string): Promise {
+ async locate(httpRequest: KibanaRequest, resource: string): Promise {
return Promise.resolve(new LocalEndpoint(httpRequest, resource));
},
@@ -53,7 +53,7 @@ export class LocalHandlerAdapter implements ServiceHandlerAdapter {
return Promise.resolve(true);
},
- async allocate(httpRequest: Request, resource: string): Promise {
+ async allocate(httpRequest: KibanaRequest, resource: string): Promise {
return Promise.resolve(new LocalEndpoint(httpRequest, resource));
},
};
diff --git a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_adapter.ts b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_adapter.ts
index 2778d29955e79..a7d2edf4b0308 100644
--- a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_adapter.ts
+++ b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_adapter.ts
@@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { Request } from 'hapi';
+import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server';
import util from 'util';
import Boom from 'boom';
import {
@@ -31,10 +31,10 @@ export interface RequestPayload {
export class CodeNodeAdapter implements ServiceHandlerAdapter {
localAdapter: LocalHandlerAdapter = new LocalHandlerAdapter();
- constructor(private readonly server: CodeServerRouter, private readonly log: Logger) {}
+ constructor(private readonly router: CodeServerRouter, private readonly log: Logger) {}
locator: ResourceLocator = {
- async locate(httpRequest: Request, resource: string): Promise {
+ async locate(httpRequest: KibanaRequest, resource: string): Promise {
return Promise.resolve(new LocalEndpoint(httpRequest, resource));
},
@@ -42,7 +42,7 @@ export class CodeNodeAdapter implements ServiceHandlerAdapter {
return Promise.resolve(false);
},
- async allocate(httpRequest: Request, resource: string): Promise {
+ async allocate(httpRequest: KibanaRequest, resource: string): Promise {
return Promise.resolve(new LocalEndpoint(httpRequest, resource));
},
};
@@ -70,11 +70,16 @@ export class CodeNodeAdapter implements ServiceHandlerAdapter {
const d = serviceDefinition[method];
const path = `${options.routePrefix}/${d.routePath || method}`;
// register routes, receive requests from non-code node.
- this.server.route({
+ this.router.route({
method: 'post',
path,
- handler: async (req: Request) => {
- const { context, params } = req.payload as RequestPayload;
+ npHandler: async (
+ ctx: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) => {
+ // @ts-ignore
+ const { context, params } = req.body as RequestPayload;
this.log.debug(`Receiving RPC call ${req.url.path} ${util.inspect(params)}`);
const endpoint: Endpoint = {
toContext(): RequestContext {
@@ -83,7 +88,7 @@ export class CodeNodeAdapter implements ServiceHandlerAdapter {
};
try {
const data = await serviceMethodMap[method](endpoint, params);
- return { data };
+ return res.ok({ body: data });
} catch (e) {
if (!Boom.isBoom(e)) {
throw Boom.boomify(e, { statusCode: 500 });
diff --git a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_endpoint.ts b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_endpoint.ts
index 048b7c81dfe6f..03c4917dfb732 100644
--- a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_endpoint.ts
+++ b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_endpoint.ts
@@ -4,12 +4,12 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { Request } from 'hapi';
+import { KibanaRequest } from 'src/core/server';
import { LocalEndpoint } from '../local_endpoint';
export class CodeNodeEndpoint extends LocalEndpoint {
constructor(
- public readonly httpRequest: Request,
+ public readonly httpRequest: KibanaRequest,
public readonly resource: string,
public readonly codeNodeUrl: string
) {
diff --git a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_resource_locator.ts b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_resource_locator.ts
index b11ffeba394cf..e4b3d21b80ec7 100644
--- a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_resource_locator.ts
+++ b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_resource_locator.ts
@@ -4,14 +4,14 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { Request } from 'hapi';
+import { KibanaRequest } from 'src/core/server';
import { Endpoint, ResourceLocator } from '../resource_locator';
import { CodeNodeEndpoint } from './code_node_endpoint';
export class CodeNodeResourceLocator implements ResourceLocator {
constructor(private readonly codeNodeUrl: string) {}
- async locate(httpRequest: Request, resource: string): Promise {
+ async locate(httpRequest: KibanaRequest, resource: string): Promise {
return Promise.resolve(new CodeNodeEndpoint(httpRequest, resource, this.codeNodeUrl));
}
@@ -19,7 +19,7 @@ export class CodeNodeResourceLocator implements ResourceLocator {
return Promise.resolve(false);
}
- allocate(req: Request, resource: string): Promise {
+ allocate(req: KibanaRequest, resource: string): Promise {
return this.locate(req, resource);
}
}
diff --git a/x-pack/legacy/plugins/code/server/distributed/multinode/non_code_node_adapter.ts b/x-pack/legacy/plugins/code/server/distributed/multinode/non_code_node_adapter.ts
index 648dffd01663e..1221651bc51e2 100644
--- a/x-pack/legacy/plugins/code/server/distributed/multinode/non_code_node_adapter.ts
+++ b/x-pack/legacy/plugins/code/server/distributed/multinode/non_code_node_adapter.ts
@@ -7,7 +7,7 @@
import Wreck from '@hapi/wreck';
import util from 'util';
import Boom from 'boom';
-import { Request } from 'hapi';
+import { KibanaRequest } from 'src/core/server';
import * as http from 'http';
import {
DEFAULT_SERVICE_OPTION,
@@ -23,8 +23,8 @@ import { Logger } from '../../log';
const pickHeaders = ['authorization'];
-function filterHeaders(originRequest: Request) {
- const result: { [name: string]: string } = {};
+function filterHeaders(originRequest: KibanaRequest) {
+ const result: { [name: string]: string | string[] | undefined } = {};
for (const header of pickHeaders) {
if (originRequest.headers[header]) {
result[header] = originRequest.headers[header];
@@ -82,7 +82,12 @@ export class NonCodeNodeAdapter implements ServiceHandlerAdapter {
return dispatchedHandler as ServiceMethodMap;
}
- async requestFn(baseUrl: string, path: string, payload: RequestPayload, originRequest: Request) {
+ async requestFn(
+ baseUrl: string,
+ path: string,
+ payload: RequestPayload,
+ originRequest: KibanaRequest
+ ) {
const opt = {
baseUrl,
payload: JSON.stringify(payload),
diff --git a/x-pack/legacy/plugins/code/server/distributed/resource_locator.ts b/x-pack/legacy/plugins/code/server/distributed/resource_locator.ts
index 9dc6300675cb6..287e36982cbfd 100644
--- a/x-pack/legacy/plugins/code/server/distributed/resource_locator.ts
+++ b/x-pack/legacy/plugins/code/server/distributed/resource_locator.ts
@@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { Request } from 'hapi';
+import { KibanaRequest } from 'src/core/server';
import { RequestContext } from './service_definition';
export interface Endpoint {
@@ -12,7 +12,7 @@ export interface Endpoint {
}
export interface ResourceLocator {
- locate(req: Request, resource: string): Promise;
+ locate(req: KibanaRequest, resource: string): Promise;
/**
* Returns whether the resource resides on the local node. This should support both url and uri of the repository.
@@ -25,5 +25,5 @@ export interface ResourceLocator {
* Allocates the resource to nodes and returns the endpoint corresponds to the allocated node.
* If the resource cannot be allocated to any node, it returns undefined.
*/
- allocate(req: Request, resource: string): Promise;
+ allocate(req: KibanaRequest, resource: string): Promise;
}
diff --git a/x-pack/legacy/plugins/code/server/init_es.ts b/x-pack/legacy/plugins/code/server/init_es.ts
index 39ae05bf26877..0b12cddb73983 100644
--- a/x-pack/legacy/plugins/code/server/init_es.ts
+++ b/x-pack/legacy/plugins/code/server/init_es.ts
@@ -4,17 +4,15 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { Server } from 'hapi';
+import { IClusterClient } from 'src/core/server';
import { RepositoryIndexInitializerFactory } from './indexer';
import { RepositoryConfigController } from './repository_config_controller';
import { EsClientWithInternalRequest } from './utils/esclient_with_internal_request';
import { EsClient } from './lib/esqueue';
import { Logger } from './log';
-export async function initEs(server: Server, log: Logger) {
- // wait until elasticsearch is ready
- await server.plugins.elasticsearch.waitUntilReady();
- const esClient: EsClient = new EsClientWithInternalRequest(server);
+export async function initEs(cluster: IClusterClient, log: Logger) {
+ const esClient: EsClient = new EsClientWithInternalRequest(cluster);
const repoConfigController = new RepositoryConfigController(esClient);
const repoIndexInitializerFactory = new RepositoryIndexInitializerFactory(esClient, log);
return {
diff --git a/x-pack/legacy/plugins/code/server/init_workers.ts b/x-pack/legacy/plugins/code/server/init_workers.ts
index c4385cd711c5c..f20adf375f9a3 100644
--- a/x-pack/legacy/plugins/code/server/init_workers.ts
+++ b/x-pack/legacy/plugins/code/server/init_workers.ts
@@ -5,7 +5,6 @@
*/
import checkDiskSpace from 'check-disk-space';
-import { Server } from 'hapi';
import { IndexerType } from '../model';
import { DiskWatermarkService } from './disk_watermark';
@@ -22,7 +21,6 @@ import { CloneScheduler, IndexScheduler, UpdateScheduler } from './scheduler';
import { Logger } from './log';
export function initWorkers(
- server: Server,
log: Logger,
esClient: EsClient,
queue: Esqueue,
diff --git a/x-pack/legacy/plugins/code/server/plugin.ts b/x-pack/legacy/plugins/code/server/plugin.ts
index 390b0ddc1256c..737d0b5c6686b 100644
--- a/x-pack/legacy/plugins/code/server/plugin.ts
+++ b/x-pack/legacy/plugins/code/server/plugin.ts
@@ -6,7 +6,7 @@
import crypto from 'crypto';
import * as _ from 'lodash';
-import { CoreSetup } from 'src/core/server';
+import { CoreSetup, IRouter } from 'src/core/server';
import { RepositoryIndexInitializerFactory, tryMigrateIndices } from './indexer';
import { Esqueue } from './lib/esqueue';
@@ -55,6 +55,18 @@ import { NodeRepositoriesService } from './distributed/cluster/node_repositories
import { initCodeUsageCollector } from './usage_collector';
import { PluginSetupContract } from '../../../../plugins/code/server/index';
+declare module 'src/core/server' {
+ interface RequestHandlerContext {
+ code: {
+ codeServices: CodeServices | null;
+ // @deprecated
+ legacy: {
+ securityPlugin: any;
+ };
+ };
+ }
+}
+
export class CodePlugin {
private isCodeNode = false;
@@ -67,15 +79,30 @@ export class CodePlugin {
private codeServices: CodeServices | null = null;
private nodeService: NodeRepositoriesService | null = null;
+ private rndString: string | null = null;
+ private router: IRouter | null = null;
+
constructor(private readonly initContext: PluginSetupContract) {
this.log = {} as Logger;
this.serverOptions = {} as ServerOptions;
}
- public setup(core: CoreSetup) {
+ public async setup(core: CoreSetup, npHttp: any) {
const { server } = core.http as any;
this.serverOptions = new ServerOptions(this.initContext.legacy.config, server.config());
this.log = new Logger(this.initContext.legacy.logger, this.serverOptions.verbose);
+
+ this.router = npHttp.createRouter();
+ this.rndString = crypto.randomBytes(20).toString('hex');
+
+ npHttp.registerRouteHandlerContext('code', () => {
+ return {
+ codeServices: this.codeServices,
+ legacy: {
+ securityPlugin: server.plugins.security,
+ },
+ };
+ });
}
// TODO: CodeStart will not have the register route api.
@@ -83,16 +110,17 @@ export class CodePlugin {
public async start(core: CoreSetup) {
// called after all plugins are set up
const { server } = core.http as any;
- const codeServerRouter = new CodeServerRouter(server);
+ const codeServerRouter = new CodeServerRouter(this.router!);
const codeNodeUrl = this.serverOptions.codeNodeUrl;
- const rndString = crypto.randomBytes(20).toString('hex');
- checkRoute(server, rndString);
+
+ checkRoute(this.router!, this.rndString!);
+
if (this.serverOptions.clusterEnabled) {
this.initDevMode(server);
this.codeServices = await this.initClusterNode(server, codeServerRouter);
} else if (codeNodeUrl) {
const checkResult = await this.retryUntilAvailable(
- async () => await checkCodeNode(codeNodeUrl, this.log, rndString),
+ async () => await checkCodeNode(codeNodeUrl, this.log, this.rndString!),
5000
);
if (checkResult.me) {
@@ -115,7 +143,7 @@ export class CodePlugin {
private async initClusterNode(server: any, codeServerRouter: CodeServerRouter) {
this.log.info('Initializing Code plugin as cluster-node');
const { esClient, repoConfigController, repoIndexInitializerFactory } = await initEs(
- server,
+ this.initContext.legacy.elasticsearch.adminClient$,
this.log
);
const clusterNodeAdapter = new ClusterNodeAdapter(
@@ -139,7 +167,6 @@ export class CodePlugin {
);
this.lspService = lspService;
const { indexScheduler, updateScheduler, cloneWorker } = initWorkers(
- server,
this.log,
esClient,
this.queue!,
@@ -159,18 +186,18 @@ export class CodePlugin {
);
await this.nodeService.start();
+ this.initRoutes(server, codeServices, repoIndexInitializerFactory, repoConfigController);
+
// Execute index version checking and try to migrate index data if necessary.
await tryMigrateIndices(esClient, this.log);
- this.initRoutes(server, codeServices, repoIndexInitializerFactory, repoConfigController);
-
return codeServices;
}
private async initCodeNode(server: any, codeServices: CodeServices) {
this.isCodeNode = true;
const { esClient, repoConfigController, repoIndexInitializerFactory } = await initEs(
- server,
+ this.initContext.legacy.elasticsearch.adminClient$,
this.log
);
@@ -186,7 +213,6 @@ export class CodePlugin {
);
this.lspService = lspService;
const { indexScheduler, updateScheduler } = initWorkers(
- server,
this.log,
esClient,
this.queue!,
@@ -198,14 +224,14 @@ export class CodePlugin {
this.indexScheduler = indexScheduler;
this.updateScheduler = updateScheduler;
- // Execute index version checking and try to migrate index data if necessary.
- await tryMigrateIndices(esClient, this.log);
-
this.initRoutes(server, codeServices, repoIndexInitializerFactory, repoConfigController);
// TODO: extend the usage collection to cluster mode.
initCodeUsageCollector(server, esClient, lspService);
+ // Execute index version checking and try to migrate index data if necessary.
+ await tryMigrateIndices(esClient, this.log);
+
return codeServices;
}
@@ -235,7 +261,10 @@ export class CodePlugin {
codeServices.registerHandler(LspServiceDefinition, null, LspServiceDefinitionOption);
codeServices.registerHandler(WorkspaceDefinition, null);
codeServices.registerHandler(SetupDefinition, null);
- const { repoConfigController, repoIndexInitializerFactory } = await initEs(server, this.log);
+ const { repoConfigController, repoIndexInitializerFactory } = await initEs(
+ this.initContext.legacy.elasticsearch.adminClient$,
+ this.log
+ );
this.initRoutes(server, codeServices, repoIndexInitializerFactory, repoConfigController);
return codeServices;
}
@@ -246,7 +275,7 @@ export class CodePlugin {
repoIndexInitializerFactory: RepositoryIndexInitializerFactory,
repoConfigController: RepositoryConfigController
) {
- const codeServerRouter = new CodeServerRouter(server);
+ const codeServerRouter = new CodeServerRouter(this.router!);
repositoryRoute(
codeServerRouter,
codeServices,
@@ -264,7 +293,7 @@ export class CodePlugin {
fileRoute(codeServerRouter, codeServices);
workspaceRoute(codeServerRouter, this.serverOptions, codeServices);
symbolByQnameRoute(codeServerRouter, this.log);
- installRoute(codeServerRouter, codeServices, this.serverOptions);
+ installRoute(server, codeServerRouter, codeServices, this.serverOptions);
lspRoute(codeServerRouter, codeServices, this.serverOptions, this.log);
setupRoute(codeServerRouter, codeServices);
statusRoute(codeServerRouter, codeServices);
diff --git a/x-pack/legacy/plugins/code/server/routes/check.ts b/x-pack/legacy/plugins/code/server/routes/check.ts
index ad89d6281b4ff..7e585ffc34922 100644
--- a/x-pack/legacy/plugins/code/server/routes/check.ts
+++ b/x-pack/legacy/plugins/code/server/routes/check.ts
@@ -4,10 +4,16 @@
* you may not use this file except in compliance with the Elastic License.
*/
+import { schema } from '@kbn/config-schema';
import fetch from 'node-fetch';
+import {
+ IRouter,
+ KibanaRequest,
+ KibanaResponseFactory,
+ RequestHandlerContext,
+} from 'src/core/server';
import { Logger } from '../log';
-import { ServerFacade } from '../..';
export async function checkCodeNode(url: string, log: Logger, rndStr: string) {
try {
@@ -24,13 +30,22 @@ export async function checkCodeNode(url: string, log: Logger, rndStr: string) {
return null;
}
-export function checkRoute(server: ServerFacade, rndStr: string) {
- server.route({
- method: 'GET',
- path: '/api/code/codeNode',
- options: { auth: false },
- handler(req: any) {
- return { me: req.query.rndStr === rndStr };
+export function checkRoute(router: IRouter, rndStr: string) {
+ router.get(
+ {
+ path: '/api/code/codeNode',
+ validate: {
+ query: schema.object({}, { allowUnknowns: true }),
+ },
+ options: {
+ authRequired: false,
+ },
},
- });
+ (context: RequestHandlerContext, req: KibanaRequest, res: KibanaResponseFactory) => {
+ return res.ok({
+ // @ts-ignore
+ body: { me: req.query.rndStr === rndStr },
+ });
+ }
+ );
}
diff --git a/x-pack/legacy/plugins/code/server/routes/file.ts b/x-pack/legacy/plugins/code/server/routes/file.ts
index 10a9050fa0a90..47cc16f7a6574 100644
--- a/x-pack/legacy/plugins/code/server/routes/file.ts
+++ b/x-pack/legacy/plugins/code/server/routes/file.ts
@@ -4,9 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import Boom from 'boom';
-
-import { RequestFacade, RequestQueryFacade, ResponseToolkitFacade } from '../../';
+import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server';
import { DEFAULT_TREE_CHILDREN_LIMIT } from '../git_operations';
import { CodeServerRouter } from '../security';
import { RepositoryObjectClient } from '../search';
@@ -20,14 +18,15 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices)
const gitService = codeServices.serviceFor(GitServiceDefinition);
async function getRepoUriFromMeta(
- req: RequestFacade,
+ context: RequestHandlerContext,
+ req: KibanaRequest,
repoUri: string
): Promise {
- const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req));
+ const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req));
try {
const repo = await repoObjectClient.getRepository(repoUri);
- await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repo.uri);
+ await getReferenceHelper(context.core.savedObjects.client).ensureReference(repo.uri);
return repo.uri;
} catch (e) {
return undefined;
@@ -37,23 +36,27 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices)
router.route({
path: '/api/code/repo/{uri*3}/tree/{ref}/{path*}',
method: 'GET',
- async handler(req: RequestFacade) {
- const { uri, path, ref } = req.params;
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const { uri, path, ref } = req.params as any;
const revision = decodeRevisionString(ref);
- const queries = req.query as RequestQueryFacade;
+ const queries = req.query as any;
const limit = queries.limit
? parseInt(queries.limit as string, 10)
: DEFAULT_TREE_CHILDREN_LIMIT;
const skip = queries.skip ? parseInt(queries.skip as string, 10) : 0;
const withParents = 'parents' in queries;
const flatten = 'flatten' in queries;
- const repoUri = await getRepoUriFromMeta(req, uri);
+ const repoUri = await getRepoUriFromMeta(context, req, uri);
if (!repoUri) {
- return Boom.notFound(`repo ${uri} not found`);
+ return res.notFound({ body: `repo ${uri} not found` });
}
const endpoint = await codeServices.locate(req, uri);
try {
- return await gitService.fileTree(endpoint, {
+ const filetree = await gitService.fileTree(endpoint, {
uri: repoUri,
path,
revision,
@@ -62,11 +65,15 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices)
withParents,
flatten,
});
+ return res.ok({ body: filetree });
} catch (e) {
if (e.isBoom) {
- return e;
+ return res.customError({
+ body: e.error,
+ statusCode: e.statusCode ? e.statusCode : 500,
+ });
} else {
- return Boom.internal(e.message || e.name);
+ return res.internalError({ body: e.message || e.name });
}
}
},
@@ -75,46 +82,59 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices)
router.route({
path: '/api/code/repo/{uri*3}/blob/{ref}/{path*}',
method: 'GET',
- async handler(req: RequestFacade, h: ResponseToolkitFacade) {
- const { uri, path, ref } = req.params;
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const { uri, path, ref } = req.params as any;
const revision = decodeRevisionString(ref);
- const repoUri = await getRepoUriFromMeta(req, uri);
+ const repoUri = await getRepoUriFromMeta(context, req, uri);
if (!repoUri) {
- return Boom.notFound(`repo ${uri} not found`);
+ return res.notFound({ body: `repo ${uri} not found` });
}
const endpoint = await codeServices.locate(req, uri);
try {
const blob = await gitService.blob(endpoint, {
uri,
path,
- line: (req.query as RequestQueryFacade).line as string,
+ line: (req.query as any).line as string,
revision: decodeURIComponent(revision),
});
if (blob.imageType) {
- const response = h.response(blob.content);
- response.type(blob.imageType);
- return response;
+ return res.ok({
+ body: blob.content,
+ headers: { 'Content-Type': blob.imageType },
+ });
} else if (blob.isBinary) {
- return h
- .response('')
- .type('application/octet-stream')
- .code(204);
+ return res.noContent({
+ headers: { 'Content-Type': 'application/octet-stream' },
+ });
} else {
if (blob.content) {
- return h
- .response(blob.content)
- .type('text/plain')
- .header('lang', blob.lang!);
+ return res.ok({
+ body: blob.content,
+ headers: {
+ 'Content-Type': 'text/plain',
+ lang: blob.lang!,
+ },
+ });
} else {
- return h.response('').type(`text/big`);
+ return res.ok({
+ body: blob.content,
+ headers: { 'Content-Type': 'text/big' },
+ });
}
}
} catch (e) {
if (e.isBoom) {
- return e;
+ return res.customError({
+ body: e.error,
+ statusCode: e.statusCode ? e.statusCode : 500,
+ });
} else {
- return Boom.internal(e.message || e.name);
+ return res.internalError({ body: e.message || e.name });
}
}
},
@@ -123,27 +143,40 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices)
router.route({
path: '/app/code/repo/{uri*3}/raw/{ref}/{path*}',
method: 'GET',
- async handler(req: RequestFacade, h: ResponseToolkitFacade) {
- const { uri, path, ref } = req.params;
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const { uri, path, ref } = req.params as any;
const revision = decodeRevisionString(ref);
- const repoUri = await getRepoUriFromMeta(req, uri);
+ const repoUri = await getRepoUriFromMeta(context, req, uri);
if (!repoUri) {
- return Boom.notFound(`repo ${uri} not found`);
+ return res.notFound({ body: `repo ${uri} not found` });
}
const endpoint = await codeServices.locate(req, uri);
try {
const blob = await gitService.raw(endpoint, { uri: repoUri, path, revision });
if (blob.isBinary) {
- return h.response(blob.content).encoding('binary');
+ return res.ok({
+ body: blob.content,
+ headers: { 'Content-Transfer-Encoding': 'binary' },
+ });
} else {
- return h.response(blob.content).type('text/plain');
+ return res.ok({
+ body: blob.content,
+ headers: { 'Content-Type': 'text/plain' },
+ });
}
} catch (e) {
if (e.isBoom) {
- return e;
+ return res.customError({
+ body: e.error,
+ statusCode: e.statusCode ? e.statusCode : 500,
+ });
} else {
- return Boom.internal(e.message || e.name);
+ return res.internalError({ body: e.message || e.name });
}
}
},
@@ -152,33 +185,47 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices)
router.route({
path: '/api/code/repo/{uri*3}/history/{ref}',
method: 'GET',
- handler: historyHandler,
+ npHandler: historyHandler,
});
router.route({
path: '/api/code/repo/{uri*3}/history/{ref}/{path*}',
method: 'GET',
- handler: historyHandler,
+ npHandler: historyHandler,
});
- async function historyHandler(req: RequestFacade) {
- const { uri, ref, path } = req.params;
+ async function historyHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const { uri, ref, path } = req.params as any;
const revision = decodeRevisionString(ref);
- const queries = req.query as RequestQueryFacade;
+ const queries = req.query as any;
const count = queries.count ? parseInt(queries.count as string, 10) : 10;
const after = queries.after !== undefined;
try {
- const repoUri = await getRepoUriFromMeta(req, uri);
+ const repoUri = await getRepoUriFromMeta(context, req, uri);
if (!repoUri) {
- return Boom.notFound(`repo ${uri} not found`);
+ return res.notFound({ body: `repo ${uri} not found` });
}
const endpoint = await codeServices.locate(req, uri);
- return await gitService.history(endpoint, { uri: repoUri, path, revision, count, after });
+ const history = await gitService.history(endpoint, {
+ uri: repoUri,
+ path,
+ revision,
+ count,
+ after,
+ });
+ return res.ok({ body: history });
} catch (e) {
if (e.isBoom) {
- return e;
+ return res.customError({
+ body: e.error,
+ statusCode: e.statusCode ? e.statusCode : 500,
+ });
} else {
- return Boom.internal(e.message || e.name);
+ return res.internalError({ body: e.message || e.name });
}
}
}
@@ -186,21 +233,29 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices)
router.route({
path: '/api/code/repo/{uri*3}/references',
method: 'GET',
- async handler(req: RequestFacade) {
- const uri = req.params.uri;
- const repoUri = await getRepoUriFromMeta(req, uri);
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const { uri } = req.params as any;
+ const repoUri = await getRepoUriFromMeta(context, req, uri);
if (!repoUri) {
- return Boom.notFound(`repo ${uri} not found`);
+ return res.badRequest({ body: `repo ${uri} not found` });
}
const endpoint = await codeServices.locate(req, uri);
try {
- return await gitService.branchesAndTags(endpoint, { uri: repoUri });
+ const branchesAndTags = await gitService.branchesAndTags(endpoint, { uri: repoUri });
+ return res.ok({ body: branchesAndTags });
} catch (e) {
if (e.isBoom) {
- return e;
+ return res.customError({
+ body: e.error,
+ statusCode: e.statusCode ? e.statusCode : 500,
+ });
} else {
- return Boom.internal(e.message || e.name);
+ return res.internalError({ body: e.message || e.name });
}
}
},
@@ -209,23 +264,31 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices)
router.route({
path: '/api/code/repo/{uri*3}/diff/{revision}',
method: 'GET',
- async handler(req: RequestFacade) {
- const { uri, revision } = req.params;
- const repoUri = await getRepoUriFromMeta(req, uri);
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const { uri, revision } = req.params as any;
+ const repoUri = await getRepoUriFromMeta(context, req, uri);
if (!repoUri) {
- return Boom.notFound(`repo ${uri} not found`);
+ return res.notFound({ body: `repo ${uri} not found` });
}
const endpoint = await codeServices.locate(req, uri);
try {
- return await gitService.commitDiff(endpoint, {
+ const diff = await gitService.commitDiff(endpoint, {
uri: repoUri,
revision: decodeRevisionString(revision),
});
+ return res.ok({ body: diff });
} catch (e) {
if (e.isBoom) {
- return e;
+ return res.customError({
+ body: e.error,
+ statusCode: e.statusCode ? e.statusCode : 500,
+ });
} else {
- return Boom.internal(e.message || e.name);
+ return res.internalError({ body: e.message || e.name });
}
}
},
@@ -234,25 +297,33 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices)
router.route({
path: '/api/code/repo/{uri*3}/blame/{revision}/{path*}',
method: 'GET',
- async handler(req: RequestFacade) {
- const { uri, path, revision } = req.params;
- const repoUri = await getRepoUriFromMeta(req, uri);
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const { uri, path, revision } = req.params as any;
+ const repoUri = await getRepoUriFromMeta(context, req, uri);
if (!repoUri) {
- return Boom.notFound(`repo ${uri} not found`);
+ return res.notFound({ body: `repo ${uri} not found` });
}
const endpoint = await codeServices.locate(req, uri);
try {
- return await gitService.blame(endpoint, {
+ const blames = await gitService.blame(endpoint, {
uri: repoUri,
revision: decodeRevisionString(decodeURIComponent(revision)),
path,
});
+ return res.ok({ body: blames });
} catch (e) {
if (e.isBoom) {
- return e;
+ return res.customError({
+ body: e.error,
+ statusCode: e.statusCode ? e.statusCode : 500,
+ });
} else {
- return Boom.internal(e.message || e.name);
+ return res.internalError({ body: e.message || e.name });
}
}
},
diff --git a/x-pack/legacy/plugins/code/server/routes/index.ts b/x-pack/legacy/plugins/code/server/routes/index.ts
index 27f40de552a3e..82973ac1d2791 100644
--- a/x-pack/legacy/plugins/code/server/routes/index.ts
+++ b/x-pack/legacy/plugins/code/server/routes/index.ts
@@ -8,7 +8,6 @@ export * from './check';
export * from './file';
export * from './install';
export * from './lsp';
-export * from './redirect';
export * from './repository';
export * from './search';
export * from './setup';
diff --git a/x-pack/legacy/plugins/code/server/routes/install.ts b/x-pack/legacy/plugins/code/server/routes/install.ts
index 338f305cba858..28ccc4012ceec 100644
--- a/x-pack/legacy/plugins/code/server/routes/install.ts
+++ b/x-pack/legacy/plugins/code/server/routes/install.ts
@@ -4,9 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import * as Boom from 'boom';
-
-import { RequestFacade } from '../..';
+import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server';
+import { ServerFacade } from '../..';
import { enabledLanguageServers, LanguageServerDefinition } from '../lsp/language_servers';
import { CodeServerRouter } from '../security';
import { CodeServices } from '../distributed/code_services';
@@ -15,12 +14,13 @@ import { Endpoint } from '../distributed/resource_locator';
import { ServerOptions } from '../server_options';
export function installRoute(
+ server: ServerFacade,
router: CodeServerRouter,
codeServices: CodeServices,
options: ServerOptions
) {
const lspService = codeServices.serviceFor(LspServiceDefinition);
- const kibanaVersion = router.server.config().get('pkg.version') as string;
+ const kibanaVersion = server.config().get('pkg.version') as string;
const status = async (endpoint: Endpoint, def: LanguageServerDefinition) => ({
name: def.name,
status: await lspService.languageServerStatus(endpoint, { langName: def.name }),
@@ -35,23 +35,35 @@ export function installRoute(
router.route({
path: '/api/code/install',
- async handler(req: RequestFacade) {
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
const endpoint = await codeServices.locate(req, '');
- return await Promise.all(enabledLanguageServers(options).map(def => status(endpoint, def)));
+ const installRes = await Promise.all(
+ enabledLanguageServers(options).map(def => status(endpoint, def))
+ );
+ return res.ok({ body: installRes });
},
method: 'GET',
});
router.route({
path: '/api/code/install/{name}',
- async handler(req: RequestFacade) {
- const name = req.params.name;
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const { name } = req.params as any;
const def = enabledLanguageServers(options).find(d => d.name === name);
const endpoint = await codeServices.locate(req, '');
if (def) {
- return await status(endpoint, def);
+ const installRes = await status(endpoint, def);
+ return res.ok({ body: installRes });
} else {
- return Boom.notFound(`language server ${name} not found.`);
+ return res.notFound({ body: `language server ${name} not found.` });
}
},
method: 'GET',
diff --git a/x-pack/legacy/plugins/code/server/routes/lsp.ts b/x-pack/legacy/plugins/code/server/routes/lsp.ts
index 10acb1e3863e8..6b8af10f9f11e 100644
--- a/x-pack/legacy/plugins/code/server/routes/lsp.ts
+++ b/x-pack/legacy/plugins/code/server/routes/lsp.ts
@@ -4,10 +4,10 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import Boom from 'boom';
import { ResponseError } from 'vscode-jsonrpc';
import { ResponseMessage } from 'vscode-jsonrpc/lib/messages';
import { SymbolLocator } from '@elastic/lsp-extension';
+import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server';
import {
LanguageServerStartFailed,
@@ -22,7 +22,6 @@ import { ServerOptions } from '../server_options';
import { EsClientWithRequest } from '../utils/esclient_with_request';
import { promiseTimeout } from '../utils/timeout';
-import { RequestFacade, ResponseToolkitFacade } from '../..';
import { CodeServices } from '../distributed/code_services';
import { GitServiceDefinition, LspServiceDefinition } from '../distributed/apis';
import { findTitleFromHover, groupFiles } from '../utils/lsp_utils';
@@ -32,7 +31,7 @@ import { SymbolSearchResult } from '../../model';
const LANG_SERVER_ERROR = 'language server error';
export function lspRoute(
- server: CodeServerRouter,
+ router: CodeServerRouter,
codeServices: CodeServices,
serverOptions: ServerOptions,
log: Logger
@@ -40,23 +39,29 @@ export function lspRoute(
const lspService = codeServices.serviceFor(LspServiceDefinition);
const gitService = codeServices.serviceFor(GitServiceDefinition);
- server.route({
+ router.route({
path: '/api/code/lsp/textDocument/{method}',
- async handler(req: RequestFacade, h: ResponseToolkitFacade) {
- if (typeof req.payload === 'object' && req.payload != null) {
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ if (typeof req.body === 'object' && req.body != null) {
+ // @ts-ignore
const method = req.params.method;
if (method) {
try {
- const params = (req.payload as unknown) as any;
+ const params = (req.body as unknown) as any;
const uri = params.textDocument.uri;
const { repoUri } = parseLspUrl(uri)!;
- await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri);
+ await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri);
const endpoint = await codeServices.locate(req, repoUri);
const requestPromise = lspService.sendRequest(endpoint, {
method: `textDocument/${method}`,
- params: req.payload,
+ params: req.body,
});
- return await promiseTimeout(serverOptions.lsp.requestTimeoutMs, requestPromise);
+ const result = await promiseTimeout(serverOptions.lsp.requestTimeoutMs, requestPromise);
+ return res.ok({ body: result });
} catch (error) {
if (error instanceof ResponseError) {
// hide some errors;
@@ -67,39 +72,48 @@ export function lspRoute(
) {
log.debug(error);
}
- return h
- .response({ error: { code: error.code, msg: LANG_SERVER_ERROR } })
- .type('json')
- .code(500); // different code for LS errors and other internal errors.
+ return res.custom({
+ statusCode: 500,
+ body: { error: { code: 500, msg: LANG_SERVER_ERROR } },
+ });
} else if (error.isBoom) {
- return error;
+ return res.customError({
+ body: error.error,
+ statusCode: error.statusCode ? error.statusCode : 500,
+ });
} else {
log.error(error);
- return h
- .response({ error: { code: error.code || 500, msg: LANG_SERVER_ERROR } })
- .type('json')
- .code(500);
+ return res.custom({
+ statusCode: 500,
+ body: { error: { code: 500, msg: LANG_SERVER_ERROR } },
+ });
}
}
} else {
- return h.response('missing `method` in request').code(400);
+ return res.badRequest({ body: 'missing `method` in request' });
}
} else {
- return h.response('json body required').code(400); // bad request
+ return res.badRequest({ body: 'json body required' });
}
},
method: 'POST',
});
- server.route({
+ router.route({
path: '/api/code/lsp/findDefinitions',
method: 'POST',
- async handler(req: RequestFacade, h: ResponseToolkitFacade) {
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
// @ts-ignore
- const { textDocument, position } = req.payload;
+ const { textDocument, position } = req.body as any;
+ // @ts-ignore
+ const { qname } = req.params as any;
const { uri } = textDocument;
const { repoUri } = parseLspUrl(uri);
- await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri);
+ await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri);
const endpoint = await codeServices.locate(req, repoUri);
const response: ResponseMessage = await promiseTimeout(
serverOptions.lsp.requestTimeoutMs,
@@ -116,16 +130,16 @@ export function lspRoute(
},
});
const title: string = await findTitleFromHover(hover, uri, position);
- const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(req), log);
+ const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(context, req), log);
const locators = response.result as SymbolLocator[];
const locations = [];
- const repoScope = await getReferenceHelper(req.getSavedObjectsClient()).findReferences();
+ const repoScope = await getReferenceHelper(context.core.savedObjects.client).findReferences();
for (const locator of locators) {
if (locator.location) {
locations.push(locator.location);
} else if (locator.qname && repoScope.length > 0) {
- const searchResults = await symbolSearchClient.findByQname(req.params.qname, repoScope);
+ const searchResults = await symbolSearchClient.findByQname(qname, repoScope);
for (const symbol of searchResults.symbols) {
locations.push(symbol.symbolInformation.location);
}
@@ -135,20 +149,23 @@ export function lspRoute(
const ep = await codeServices.locate(req, loc.uri);
return await gitService.blob(ep, loc);
});
- return { title, files, uri, position };
+ return res.ok({ body: { title, files, uri, position } });
},
});
- server.route({
+ router.route({
path: '/api/code/lsp/findReferences',
method: 'POST',
- async handler(req: RequestFacade, h: ResponseToolkitFacade) {
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
try {
- // @ts-ignore
- const { textDocument, position } = req.payload;
+ const { textDocument, position } = req.body as any;
const { uri } = textDocument;
const { repoUri } = parseLspUrl(uri);
- await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri);
+ await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri);
const endpoint = await codeServices.locate(req, repoUri);
const response: ResponseMessage = await promiseTimeout(
serverOptions.lsp.requestTimeoutMs,
@@ -169,21 +186,24 @@ export function lspRoute(
const ep = await codeServices.locate(req, loc.uri);
return await gitService.blob(ep, loc);
});
- return { title, files, uri, position };
+ return res.ok({ body: { title, files, uri, position } });
} catch (error) {
log.error(error);
if (error instanceof ResponseError) {
- return h
- .response({ error: { code: error.code, msg: LANG_SERVER_ERROR } })
- .type('json')
- .code(500); // different code for LS errors and other internal errors.
+ return res.custom({
+ statusCode: 500,
+ body: { error: { code: error.code, msg: LANG_SERVER_ERROR } },
+ });
} else if (error.isBoom) {
- return error;
+ return res.customError({
+ body: error.error,
+ statusCode: error.statusCode ? error.statusCode : 500,
+ });
} else {
- return h
- .response({ error: { code: 500, msg: LANG_SERVER_ERROR } })
- .type('json')
- .code(500);
+ return res.custom({
+ statusCode: 500,
+ body: { error: { code: 500, msg: LANG_SERVER_ERROR } },
+ });
}
}
},
@@ -194,21 +214,26 @@ export function symbolByQnameRoute(router: CodeServerRouter, log: Logger) {
router.route({
path: '/api/code/lsp/symbol/{qname}',
method: 'GET',
- async handler(req: RequestFacade) {
- try {
- const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(req), log);
- const repoScope = await getReferenceHelper(req.getSavedObjectsClient()).findReferences();
- if (repoScope.length === 0) {
- return {
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ // @ts-ignore
+ const { qname } = req.params as any;
+ const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(context, req), log);
+ const repoScope = await getReferenceHelper(context.core.savedObjects.client).findReferences();
+ if (repoScope.length === 0) {
+ return res.ok({
+ body: {
symbols: [],
total: 0,
took: 0,
- } as SymbolSearchResult;
- }
- return await symbolSearchClient.findByQname(req.params.qname, repoScope);
- } catch (error) {
- return Boom.internal(`Search Exception`);
+ } as SymbolSearchResult,
+ });
}
+ const symbol = await symbolSearchClient.findByQname(qname, repoScope);
+ return res.ok({ body: symbol });
},
});
}
diff --git a/x-pack/legacy/plugins/code/server/routes/redirect.ts b/x-pack/legacy/plugins/code/server/routes/redirect.ts
deleted file mode 100644
index 2882a37334836..0000000000000
--- a/x-pack/legacy/plugins/code/server/routes/redirect.ts
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { RequestFacade, ServerFacade } from '../../';
-import { Logger } from '../log';
-
-export function redirectRoute(server: ServerFacade, redirectUrl: string, log: Logger) {
- const proxyHandler = {
- proxy: {
- passThrough: true,
- async mapUri(request: RequestFacade) {
- let uri;
- uri = `${redirectUrl}${request.path}`;
- if (request.url.search) {
- uri += request.url.search;
- }
- log.info(`redirect ${request.path}${request.url.search || ''} to ${uri}`);
- return {
- uri,
- };
- },
- },
- };
-
- server.route({
- path: '/api/code/{p*}',
- method: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'],
- handler: proxyHandler,
- });
-
- server.route({
- path: '/api/code/lsp/{p*}',
- method: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'],
- handler: proxyHandler,
- });
-}
diff --git a/x-pack/legacy/plugins/code/server/routes/repository.ts b/x-pack/legacy/plugins/code/server/routes/repository.ts
index 862586b406de4..d9e8edb4d2f50 100644
--- a/x-pack/legacy/plugins/code/server/routes/repository.ts
+++ b/x-pack/legacy/plugins/code/server/routes/repository.ts
@@ -4,10 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import Boom from 'boom';
-
import { i18n } from '@kbn/i18n';
-import { RequestFacade, ResponseToolkitFacade } from '../..';
+import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server';
+
import { validateGitUrl } from '../../common/git_url_utils';
import { RepositoryUtils } from '../../common/repository_utils';
import { RepositoryConfig, RepositoryUri, WorkerReservedProgress } from '../../model';
@@ -36,8 +35,12 @@ export function repositoryRoute(
path: '/api/code/repo',
requireAdmin: true,
method: 'POST',
- async handler(req: RequestFacade, h: ResponseToolkitFacade) {
- const repoUrl: string = (req.payload as any).url;
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const repoUrl: string = (req.body as any).url;
// Reject the request if the url is an invalid git url.
try {
@@ -49,11 +52,11 @@ export function repositoryRoute(
} catch (error) {
log.error(`Validate git url ${repoUrl} error.`);
log.error(error);
- return Boom.badRequest(error);
+ return res.badRequest({ body: error });
}
const repo = RepositoryUtils.buildRepository(repoUrl);
- const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req));
+ const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req));
try {
// Check if the repository already exists
@@ -61,28 +64,32 @@ export function repositoryRoute(
// distinguish between that the repository exists in the current space and that the repository exists in
// another space, and return the default message if error happens during reference checking.
try {
- const hasRef = await getReferenceHelper(req.getSavedObjectsClient()).hasReference(
+ const hasRef = await getReferenceHelper(context.core.savedObjects.client).hasReference(
repo.uri
);
if (!hasRef) {
- return Boom.conflict(
- i18n.translate('xpack.code.repositoryManagement.repoOtherSpaceImportedMessage', {
- defaultMessage: 'The repository has already been imported in another space!',
- })
- );
+ return res.custom({
+ statusCode: 409, // conflict
+ body: i18n.translate(
+ 'xpack.code.repositoryManagement.repoOtherSpaceImportedMessage',
+ {
+ defaultMessage: 'The repository has already been imported in another space!',
+ }
+ ),
+ });
}
} catch (e) {
log.error(`Failed to check reference for ${repo.uri} in current space`);
}
const msg = `Repository ${repoUrl} already exists. Skip clone.`;
log.info(msg);
- return h.response(msg).code(304); // Not Modified
+ return res.custom({ statusCode: 304, body: msg });
} catch (error) {
log.info(`Repository ${repoUrl} does not exist. Go ahead with clone.`);
try {
// create the reference first, and make the creation idempotent, to avoid potential dangling repositories
// which have no references from any space, in case the writes to ES may fail independently
- await getReferenceHelper(req.getSavedObjectsClient()).createReference(repo.uri);
+ await getReferenceHelper(context.core.savedObjects.client).createReference(repo.uri);
// Create the index for the repository
const initializer = (await repoIndexInitializerFactory.create(
@@ -105,12 +112,12 @@ export function repositoryRoute(
if (endpoint) {
await repositoryService.clone(endpoint, payload);
}
- return repo;
+ return res.ok({ body: repo });
} catch (error2) {
const msg = `Issue repository clone request for ${repoUrl} error`;
log.error(msg);
log.error(error2);
- return Boom.badRequest(msg);
+ return res.badRequest({ body: msg });
}
}
},
@@ -121,12 +128,16 @@ export function repositoryRoute(
path: '/api/code/repo/{uri*3}',
requireAdmin: true,
method: 'DELETE',
- async handler(req: RequestFacade, h: ResponseToolkitFacade) {
- const repoUri: string = req.params.uri as string;
- const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req));
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const { uri: repoUri } = req.params as any;
+ const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req));
try {
// make sure the repo belongs to the current space
- getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri);
+ getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri);
// Check if the repository already exists. If not, an error will be thrown.
await repoObjectClient.getRepository(repoUri);
@@ -139,7 +150,7 @@ export function repositoryRoute(
if (status.progress !== WorkerReservedProgress.ERROR) {
const msg = `Repository ${repoUri} is already in delete.`;
log.info(msg);
- return h.response(msg).code(304); // Not Modified
+ return res.custom({ statusCode: 304, body: msg });
}
} catch (error) {
// Do nothing here since this error is expected.
@@ -151,15 +162,14 @@ export function repositoryRoute(
};
const endpoint = await codeServices.locate(req, repoUri);
await repositoryService.delete(endpoint, payload);
-
// delete the reference last to avoid dangling repositories
- await getReferenceHelper(req.getSavedObjectsClient()).deleteReference(repoUri);
- return {};
+ await getReferenceHelper(context.core.savedObjects.client).deleteReference(repoUri);
+ return res.ok();
} catch (error) {
const msg = `Issue repository delete request for ${repoUri} error`;
log.error(msg);
log.error(error);
- return Boom.notFound(msg);
+ return res.notFound({ body: msg });
}
},
});
@@ -168,17 +178,22 @@ export function repositoryRoute(
router.route({
path: '/api/code/repo/{uri*3}',
method: 'GET',
- async handler(req: RequestFacade) {
- const repoUri = req.params.uri as string;
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const { uri: repoUri } = req.params as any;
try {
- await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri);
- const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req));
- return await repoObjectClient.getRepository(repoUri);
+ await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri);
+ const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req));
+ const repo = await repoObjectClient.getRepository(repoUri);
+ return res.ok({ body: repo });
} catch (error) {
const msg = `Get repository ${repoUri} error`;
log.error(msg);
log.error(error);
- return Boom.notFound(msg);
+ return res.notFound({ body: msg });
}
},
});
@@ -186,15 +201,20 @@ export function repositoryRoute(
router.route({
path: '/api/code/repo/status/{uri*3}',
method: 'GET',
- async handler(req: RequestFacade) {
- const repoUri = req.params.uri as string;
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const { uri: repoUri } = req.params as any;
try {
- const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req));
-
+ const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req));
let gitStatus = null;
let indexStatus = null;
let deleteStatus = null;
- const hasRef = await getReferenceHelper(req.getSavedObjectsClient()).hasReference(repoUri);
+ const hasRef = await getReferenceHelper(context.core.savedObjects.client).hasReference(
+ repoUri
+ );
if (hasRef) {
try {
@@ -215,16 +235,17 @@ export function repositoryRoute(
log.debug(`Get repository delete status ${repoUri} error: ${error}`);
}
}
- return {
+ const status = {
gitStatus,
indexStatus,
deleteStatus,
};
+ return res.ok({ body: status });
} catch (error) {
const msg = `Get repository status ${repoUri} error`;
log.error(msg);
log.error(error);
- return Boom.notFound(msg);
+ return res.notFound({ body: msg });
}
},
});
@@ -233,16 +254,21 @@ export function repositoryRoute(
router.route({
path: '/api/code/repos',
method: 'GET',
- async handler(req: RequestFacade) {
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
try {
- const uris = await getReferenceHelper(req.getSavedObjectsClient()).findReferences();
- const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req));
- return await repoObjectClient.getRepositories(uris);
+ const uris = await getReferenceHelper(context.core.savedObjects.client).findReferences();
+ const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req));
+ const repo = await repoObjectClient.getRepositories(uris);
+ return res.ok({ body: repo });
} catch (error) {
const msg = `Get all repositories error`;
log.error(msg);
log.error(error);
- return Boom.notFound(msg);
+ return res.notFound({ body: msg });
}
},
});
@@ -254,12 +280,16 @@ export function repositoryRoute(
path: '/api/code/repo/index/{uri*3}',
method: 'POST',
requireAdmin: true,
- async handler(req: RequestFacade) {
- const repoUri = req.params.uri as string;
- const reindex: boolean = (req.payload as any).reindex;
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const { uri: repoUri } = req.params as any;
+ const reindex: boolean = (req.body as any).reindex;
try {
- await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri);
- const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req));
+ await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri);
+ const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req));
const cloneStatus = await repoObjectClient.getRepositoryGitStatus(repoUri);
const payload = {
@@ -269,12 +299,12 @@ export function repositoryRoute(
};
const endpoint = await codeServices.locate(req, repoUri);
await repositoryService.index(endpoint, payload);
- return {};
+ return res.ok();
} catch (error) {
const msg = `Index repository ${repoUri} error`;
log.error(msg);
log.error(error);
- return Boom.notFound(msg);
+ return res.notFound({ body: msg });
}
},
});
@@ -284,29 +314,33 @@ export function repositoryRoute(
path: '/api/code/repo/config/{uri*3}',
method: 'PUT',
requireAdmin: true,
- async handler(req: RequestFacade) {
- const config: RepositoryConfig = req.payload as RepositoryConfig;
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const config: RepositoryConfig = req.body as RepositoryConfig;
const repoUri: RepositoryUri = config.uri;
- const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req));
+ const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req));
try {
// Check if the repository exists
- await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri);
+ await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri);
await repoObjectClient.getRepository(repoUri);
} catch (error) {
- return Boom.badRequest(`Repository not existed for ${repoUri}`);
+ return res.badRequest({ body: `Repository not existed for ${repoUri}` });
}
try {
// Persist to elasticsearch
await repoObjectClient.setRepositoryConfig(repoUri, config);
repoConfigController.resetConfigCache(repoUri);
- return {};
+ return res.ok();
} catch (error) {
const msg = `Update repository config for ${repoUri} error`;
log.error(msg);
log.error(error);
- return Boom.badRequest(msg);
+ return res.notFound({ body: msg });
}
},
});
@@ -315,14 +349,19 @@ export function repositoryRoute(
router.route({
path: '/api/code/repo/config/{uri*3}',
method: 'GET',
- async handler(req: RequestFacade) {
- const repoUri = req.params.uri as string;
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const { uri: repoUri } = req.params as any;
try {
- await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri);
- const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req));
- return await repoObjectClient.getRepositoryConfig(repoUri);
+ await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri);
+ const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req));
+ const config = await repoObjectClient.getRepositoryConfig(repoUri);
+ return res.ok({ body: config });
} catch (error) {
- return Boom.notFound(`Repository config ${repoUri} not exist`);
+ return res.notFound({ body: `Repository config ${repoUri} not exist` });
}
},
});
diff --git a/x-pack/legacy/plugins/code/server/routes/search.ts b/x-pack/legacy/plugins/code/server/routes/search.ts
index 86bdc931cff7a..5c2b731b33c42 100644
--- a/x-pack/legacy/plugins/code/server/routes/search.ts
+++ b/x-pack/legacy/plugins/code/server/routes/search.ts
@@ -4,9 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import Boom from 'boom';
-
-import { RequestFacade, RequestQueryFacade } from '../../';
+import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server';
import {
CommitSearchRequest,
DocumentSearchRequest,
@@ -32,9 +30,13 @@ export function repositorySearchRoute(router: CodeServerRouter, log: Logger) {
router.route({
path: '/api/code/search/repo',
method: 'GET',
- async handler(req: RequestFacade) {
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
let page = 1;
- const { p, q, repoScope } = req.query as RequestQueryFacade;
+ const { p, q, repoScope } = req.query as any;
if (p) {
page = parseInt(p as string, 10);
}
@@ -42,14 +44,17 @@ export function repositorySearchRoute(router: CodeServerRouter, log: Logger) {
const searchReq: RepositorySearchRequest = {
query: q as string,
page,
- repoScope: await getScope(req, repoScope),
+ repoScope: await getScope(context, repoScope),
};
try {
- const repoSearchClient = new RepositorySearchClient(new EsClientWithRequest(req), log);
- const res = await repoSearchClient.search(searchReq);
- return res;
+ const repoSearchClient = new RepositorySearchClient(
+ new EsClientWithRequest(context, req),
+ log
+ );
+ const searchRes = await repoSearchClient.search(searchReq);
+ return res.ok({ body: searchRes });
} catch (error) {
- return Boom.internal(`Search Exception`);
+ return res.internalError({ body: 'Search Exception' });
}
},
});
@@ -57,9 +62,13 @@ export function repositorySearchRoute(router: CodeServerRouter, log: Logger) {
router.route({
path: '/api/code/suggestions/repo',
method: 'GET',
- async handler(req: RequestFacade) {
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
let page = 1;
- const { p, q, repoScope } = req.query as RequestQueryFacade;
+ const { p, q, repoScope } = req.query as any;
if (p) {
page = parseInt(p as string, 10);
}
@@ -67,14 +76,17 @@ export function repositorySearchRoute(router: CodeServerRouter, log: Logger) {
const searchReq: RepositorySearchRequest = {
query: q as string,
page,
- repoScope: await getScope(req, repoScope),
+ repoScope: await getScope(context, repoScope),
};
try {
- const repoSearchClient = new RepositorySearchClient(new EsClientWithRequest(req), log);
- const res = await repoSearchClient.suggest(searchReq);
- return res;
+ const repoSearchClient = new RepositorySearchClient(
+ new EsClientWithRequest(context, req),
+ log
+ );
+ const searchRes = await repoSearchClient.suggest(searchReq);
+ return res.ok({ body: searchRes });
} catch (error) {
- return Boom.internal(`Search Exception`);
+ return res.internalError({ body: 'Search Exception' });
}
},
});
@@ -84,9 +96,13 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) {
router.route({
path: '/api/code/search/doc',
method: 'GET',
- async handler(req: RequestFacade) {
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
let page = 1;
- const { p, q, langs, repos, repoScope } = req.query as RequestQueryFacade;
+ const { p, q, langs, repos, repoScope } = req.query as any;
if (p) {
page = parseInt(p as string, 10);
}
@@ -96,14 +112,17 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) {
page,
langFilters: langs ? (langs as string).split(',') : [],
repoFilters: repos ? decodeURIComponent(repos as string).split(',') : [],
- repoScope: await getScope(req, repoScope),
+ repoScope: await getScope(context, repoScope),
};
try {
- const docSearchClient = new DocumentSearchClient(new EsClientWithRequest(req), log);
- const res = await docSearchClient.search(searchReq);
- return res;
+ const docSearchClient = new DocumentSearchClient(
+ new EsClientWithRequest(context, req),
+ log
+ );
+ const searchRes = await docSearchClient.search(searchReq);
+ return res.ok({ body: searchRes });
} catch (error) {
- return Boom.internal(`Search Exception`);
+ return res.internalError({ body: 'Search Exception' });
}
},
});
@@ -111,9 +130,13 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) {
router.route({
path: '/api/code/suggestions/doc',
method: 'GET',
- async handler(req: RequestFacade) {
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
let page = 1;
- const { p, q, repoScope } = req.query as RequestQueryFacade;
+ const { p, q, repoScope } = req.query as any;
if (p) {
page = parseInt(p as string, 10);
}
@@ -121,14 +144,17 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) {
const searchReq: DocumentSearchRequest = {
query: q as string,
page,
- repoScope: await getScope(req, repoScope),
+ repoScope: await getScope(context, repoScope),
};
try {
- const docSearchClient = new DocumentSearchClient(new EsClientWithRequest(req), log);
- const res = await docSearchClient.suggest(searchReq);
- return res;
+ const docSearchClient = new DocumentSearchClient(
+ new EsClientWithRequest(context, req),
+ log
+ );
+ const searchRes = await docSearchClient.suggest(searchReq);
+ return res.ok({ body: searchRes });
} catch (error) {
- return Boom.internal(`Search Exception`);
+ return res.internalError({ body: 'Search Exception' });
}
},
});
@@ -143,14 +169,21 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) {
router.route({
path: '/api/code/integration/snippets',
method: 'POST',
- async handler(req: RequestFacade) {
- const reqs: StackTraceSnippetsRequest[] = (req.payload as any).requests;
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
const scopes = new Set(
- await getReferenceHelper(req.getSavedObjectsClient()).findReferences()
+ await getReferenceHelper(context.core.savedObjects.client).findReferences()
);
- return await Promise.all(
+ const reqs: StackTraceSnippetsRequest[] = (req.body as any).requests;
+ const searchRes = await Promise.all(
reqs.map((stacktraceReq: StackTraceSnippetsRequest) => {
- const integClient = new IntegrationsSearchClient(new EsClientWithRequest(req), log);
+ const integClient = new IntegrationsSearchClient(
+ new EsClientWithRequest(context, req),
+ log
+ );
return Promise.all(
stacktraceReq.stacktraceItems.map((stacktrace: StackTraceItem) => {
const repoUris = stacktraceReq.repoUris.filter(uri => scopes.has(uri));
@@ -166,14 +199,19 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) {
);
})
);
+ return res.ok({ body: searchRes });
},
});
}
export function symbolSearchRoute(router: CodeServerRouter, log: Logger) {
- const symbolSearchHandler = async (req: RequestFacade) => {
+ const symbolSearchHandler = async (
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) => {
let page = 1;
- const { p, q, repoScope } = req.query as RequestQueryFacade;
+ const { p, q, repoScope } = req.query as any;
if (p) {
page = parseInt(p as string, 10);
}
@@ -181,14 +219,14 @@ export function symbolSearchRoute(router: CodeServerRouter, log: Logger) {
const searchReq: SymbolSearchRequest = {
query: q as string,
page,
- repoScope: await getScope(req, repoScope),
+ repoScope: await getScope(context, repoScope),
};
try {
- const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(req), log);
- const res = await symbolSearchClient.suggest(searchReq);
- return res;
+ const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(context, req), log);
+ const searchRes = await symbolSearchClient.suggest(searchReq);
+ return res.ok({ body: searchRes });
} catch (error) {
- return Boom.internal(`Search Exception`);
+ return res.internalError({ body: 'Search Exception' });
}
};
@@ -196,12 +234,12 @@ export function symbolSearchRoute(router: CodeServerRouter, log: Logger) {
router.route({
path: '/api/code/suggestions/symbol',
method: 'GET',
- handler: symbolSearchHandler,
+ npHandler: symbolSearchHandler,
});
router.route({
path: '/api/code/search/symbol',
method: 'GET',
- handler: symbolSearchHandler,
+ npHandler: symbolSearchHandler,
});
}
@@ -209,9 +247,13 @@ export function commitSearchRoute(router: CodeServerRouter, log: Logger) {
router.route({
path: '/api/code/search/commit',
method: 'GET',
- async handler(req: RequestFacade) {
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
let page = 1;
- const { p, q, repos, repoScope } = req.query as RequestQueryFacade;
+ const { p, q, repos, repoScope } = req.query as any;
if (p) {
page = parseInt(p as string, 10);
}
@@ -220,21 +262,27 @@ export function commitSearchRoute(router: CodeServerRouter, log: Logger) {
query: q as string,
page,
repoFilters: repos ? decodeURIComponent(repos as string).split(',') : [],
- repoScope: await getScope(req, repoScope),
+ repoScope: await getScope(context, repoScope),
};
try {
- const commitSearchClient = new CommitSearchClient(new EsClientWithRequest(req), log);
- const res = await commitSearchClient.search(searchReq);
- return res;
+ const commitSearchClient = new CommitSearchClient(
+ new EsClientWithRequest(context, req),
+ log
+ );
+ const searchRes = await commitSearchClient.search(searchReq);
+ return res.ok({ body: searchRes });
} catch (error) {
- return Boom.internal(`Search Exception`);
+ return res.internalError({ body: 'Search Exception' });
}
},
});
}
-async function getScope(req: RequestFacade, repoScope: string | string[]): Promise {
- let scope: string[] = await getReferenceHelper(req.getSavedObjectsClient()).findReferences();
+async function getScope(
+ context: RequestHandlerContext,
+ repoScope: string | string[]
+): Promise {
+ let scope: string[] = await getReferenceHelper(context.core.savedObjects.client).findReferences();
if (typeof repoScope === 'string') {
const uriSet = new Set(repoScope.split(','));
scope = scope.filter(uri => uriSet.has(uri));
diff --git a/x-pack/legacy/plugins/code/server/routes/setup.ts b/x-pack/legacy/plugins/code/server/routes/setup.ts
index 58db84fd80aaf..6f89ebf35441f 100644
--- a/x-pack/legacy/plugins/code/server/routes/setup.ts
+++ b/x-pack/legacy/plugins/code/server/routes/setup.ts
@@ -4,7 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { RequestFacade } from '../..';
+import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server';
+
import { CodeServerRouter } from '../security';
import { CodeServices } from '../distributed/code_services';
import { SetupDefinition } from '../distributed/apis';
@@ -14,9 +15,14 @@ export function setupRoute(router: CodeServerRouter, codeServices: CodeServices)
router.route({
method: 'get',
path: '/api/code/setup',
- async handler(req: RequestFacade) {
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
const endpoint = await codeServices.locate(req, '');
- return await setupService.setup(endpoint, {});
+ const setup = await setupService.setup(endpoint, {});
+ return res.ok({ body: setup });
},
});
}
diff --git a/x-pack/legacy/plugins/code/server/routes/status.ts b/x-pack/legacy/plugins/code/server/routes/status.ts
index 56b2972bd4147..e2723342b49d2 100644
--- a/x-pack/legacy/plugins/code/server/routes/status.ts
+++ b/x-pack/legacy/plugins/code/server/routes/status.ts
@@ -4,10 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import Boom from 'boom';
+import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server';
import { CodeServerRouter } from '../security';
-import { RequestFacade } from '../../';
import { LangServerType, RepoFileStatus, StatusReport } from '../../common/repo_file_status';
import { CTAGS, LanguageServerDefinition } from '../lsp/language_servers';
import { LanguageServerStatus } from '../../common/language_server';
@@ -108,18 +107,22 @@ export function statusRoute(router: CodeServerRouter, codeServices: CodeServices
router.route({
path: '/api/code/repo/{uri*3}/status/{ref}/{path*}',
method: 'GET',
- async handler(req: RequestFacade) {
- const { uri, path, ref } = req.params;
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const { uri, path, ref } = req.params as any;
const report: StatusReport = {};
- const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req));
+ const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req));
const endpoint = await codeServices.locate(req, uri);
try {
// Check if the repository already exists
const repo = await repoObjectClient.getRepository(uri);
- await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repo.uri);
+ await getReferenceHelper(context.core.savedObjects.client).ensureReference(repo.uri);
} catch (e) {
- return Boom.notFound(`repo ${uri} not found`);
+ return res.notFound({ body: `repo ${uri} not found` });
}
await handleRepoStatus(endpoint, report, uri, ref, repoObjectClient);
if (path) {
@@ -141,10 +144,10 @@ export function statusRoute(router: CodeServerRouter, codeServices: CodeServices
// not a file? The path may be a dir.
}
} catch (e) {
- return Boom.internal(e.message || e.name);
+ return res.internalError({ body: e.message || e.name });
}
}
- return report;
+ return res.ok({ body: report });
},
});
}
diff --git a/x-pack/legacy/plugins/code/server/routes/workspace.ts b/x-pack/legacy/plugins/code/server/routes/workspace.ts
index 8a112af297245..4dfafda7369c1 100644
--- a/x-pack/legacy/plugins/code/server/routes/workspace.ts
+++ b/x-pack/legacy/plugins/code/server/routes/workspace.ts
@@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import Boom from 'boom';
+import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server';
-import { RequestFacade, RequestQueryFacade } from '../../';
+import { RequestQueryFacade } from '../../';
import { ServerOptions } from '../server_options';
import { CodeServerRouter } from '../security';
import { CodeServices } from '../distributed/code_services';
@@ -23,8 +23,12 @@ export function workspaceRoute(
router.route({
path: '/api/code/workspace',
method: 'GET',
- async handler() {
- return serverOptions.repoConfigs;
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ return res.ok({ body: serverOptions.repoConfigs });
},
});
@@ -32,23 +36,35 @@ export function workspaceRoute(
path: '/api/code/workspace/{uri*3}/{revision}',
requireAdmin: true,
method: 'POST',
- async handler(req: RequestFacade) {
- const repoUri = req.params.uri as string;
- getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri);
- const revision = req.params.revision as string;
+ async npHandler(
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) {
+ const { uri: repoUri, revision } = req.params as any;
+ getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri);
const repoConfig = serverOptions.repoConfigs[repoUri];
const force = !!(req.query as RequestQueryFacade).force;
if (repoConfig) {
const endpoint = await codeServices.locate(req, repoUri);
try {
await workspaceService.initCmd(endpoint, { repoUri, revision, force, repoConfig });
+ return res.ok();
} catch (e) {
if (e.isBoom) {
- return e;
+ return res.customError({
+ body: e.error,
+ statusCode: e.statusCode ? e.statusCode : 500,
+ });
+ } else {
+ return res.customError({
+ body: e.error,
+ statusCode: 500,
+ });
}
}
} else {
- return Boom.notFound(`repo config for ${repoUri} not found.`);
+ return res.notFound({ body: `repo config for ${repoUri} not found.` });
}
},
});
diff --git a/x-pack/legacy/plugins/code/server/security.ts b/x-pack/legacy/plugins/code/server/security.ts
index c548b51940599..b511fba5af4d8 100644
--- a/x-pack/legacy/plugins/code/server/security.ts
+++ b/x-pack/legacy/plugins/code/server/security.ts
@@ -4,27 +4,100 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { ServerFacade, ServerRouteFacade, RouteOptionsFacade } from '..';
+import { schema } from '@kbn/config-schema';
+
+import { IRouter, RequestHandler } from 'src/core/server';
+import { ServerRouteFacade, RouteOptionsFacade } from '..';
export class CodeServerRouter {
- constructor(readonly server: ServerFacade) {}
+ constructor(readonly router: IRouter) {}
route(route: CodeRoute) {
const routeOptions: RouteOptionsFacade = (route.options || {}) as RouteOptionsFacade;
- routeOptions.tags = [
+ const tags = [
...(routeOptions.tags || []),
`access:code_${route.requireAdmin ? 'admin' : 'user'}`,
];
- this.server.route({
- handler: route.handler,
- method: route.method,
- options: routeOptions,
- path: route.path,
- });
+ const routeHandler = route.npHandler!;
+
+ switch ((route.method as string).toLowerCase()) {
+ case 'get': {
+ this.router.get(
+ {
+ path: route.path,
+ validate: {
+ query: schema.object({}, { allowUnknowns: true }),
+ params: schema.object({}, { allowUnknowns: true }),
+ },
+ options: {
+ tags,
+ },
+ },
+ routeHandler
+ );
+ break;
+ }
+ case 'put': {
+ this.router.put(
+ {
+ path: route.path,
+ validate: {
+ query: schema.object({}, { allowUnknowns: true }),
+ params: schema.object({}, { allowUnknowns: true }),
+ body: schema.object({}, { allowUnknowns: true }),
+ },
+ options: {
+ tags,
+ },
+ },
+ routeHandler
+ );
+ break;
+ }
+ case 'delete': {
+ this.router.delete(
+ {
+ path: route.path,
+ validate: {
+ query: schema.object({}, { allowUnknowns: true }),
+ params: schema.object({}, { allowUnknowns: true }),
+ },
+ options: {
+ tags,
+ },
+ },
+ routeHandler
+ );
+ break;
+ }
+ case 'patch':
+ case 'post': {
+ this.router.post(
+ {
+ path: route.path,
+ validate: {
+ query: schema.object({}, { allowUnknowns: true }),
+ params: schema.object({}, { allowUnknowns: true }),
+ body: schema.object({}, { allowUnknowns: true }),
+ },
+ options: {
+ tags,
+ },
+ },
+ routeHandler
+ );
+ break;
+ }
+ default: {
+ throw new Error(`Unknown HTTP method: ${route.method}`);
+ }
+ }
}
}
export interface CodeRoute extends ServerRouteFacade {
requireAdmin?: boolean;
+ // New Platform Route Handler API
+ npHandler?: RequestHandler;
}
diff --git a/x-pack/legacy/plugins/code/server/utils/es_index_client.ts b/x-pack/legacy/plugins/code/server/utils/es_index_client.ts
index 49e27cdde62b6..9dcfb543e8306 100644
--- a/x-pack/legacy/plugins/code/server/utils/es_index_client.ts
+++ b/x-pack/legacy/plugins/code/server/utils/es_index_client.ts
@@ -4,50 +4,62 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { AnyObject } from '../lib/esqueue';
+import {
+ IndicesCreateParams,
+ IndicesDeleteParams,
+ IndicesExistsParams,
+ IndicesExistsAliasParams,
+ IndicesDeleteAliasParams,
+ IndicesGetAliasParams,
+ IndicesGetMappingParams,
+ IndicesPutAliasParams,
+ IndicesUpdateAliasesParams,
+ IndicesRefreshParams,
+} from 'elasticsearch';
+
import { WithRequest } from './with_request';
import { WithInternalRequest } from './with_internal_request';
export class EsIndexClient {
constructor(readonly self: WithRequest | WithInternalRequest) {}
- public exists(params: AnyObject): Promise {
+ public exists(params: IndicesExistsParams): Promise {
return this.self.callCluster('indices.exists', params);
}
- public create(params: AnyObject): Promise {
+ public create(params: IndicesCreateParams): Promise {
return this.self.callCluster('indices.create', params);
}
- public refresh(params: AnyObject): Promise {
+ public refresh(params: IndicesRefreshParams): Promise {
return this.self.callCluster('indices.refresh', params);
}
- public delete(params: AnyObject): Promise {
+ public delete(params: IndicesDeleteParams): Promise {
return this.self.callCluster('indices.delete', params);
}
- public existsAlias(params: AnyObject): Promise {
+ public existsAlias(params: IndicesExistsAliasParams): Promise {
return this.self.callCluster('indices.existsAlias', params);
}
- public getAlias(params: AnyObject): Promise {
+ public getAlias(params: IndicesGetAliasParams): Promise {
return this.self.callCluster('indices.getAlias', params);
}
- public putAlias(params: AnyObject): Promise {
+ public putAlias(params: IndicesPutAliasParams): Promise {
return this.self.callCluster('indices.putAlias', params);
}
- public deleteAlias(params: AnyObject): Promise {
+ public deleteAlias(params: IndicesDeleteAliasParams): Promise {
return this.self.callCluster('indices.deleteAlias', params);
}
- public updateAliases(params: AnyObject): Promise {
+ public updateAliases(params: IndicesUpdateAliasesParams): Promise {
return this.self.callCluster('indices.updateAliases', params);
}
- public getMapping(params: AnyObject): Promise {
+ public getMapping(params: IndicesGetMappingParams): Promise {
return this.self.callCluster('indices.getMapping', params);
}
}
diff --git a/x-pack/legacy/plugins/code/server/utils/esclient_with_internal_request.ts b/x-pack/legacy/plugins/code/server/utils/esclient_with_internal_request.ts
index 5a2cb0952e4b6..60a57f4dd26ea 100644
--- a/x-pack/legacy/plugins/code/server/utils/esclient_with_internal_request.ts
+++ b/x-pack/legacy/plugins/code/server/utils/esclient_with_internal_request.ts
@@ -4,35 +4,46 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { ServerFacade } from '../..';
-import { AnyObject, EsClient } from '../lib/esqueue';
+import {
+ BulkIndexDocumentsParams,
+ DeleteDocumentByQueryParams,
+ DeleteDocumentParams,
+ GetParams,
+ IndexDocumentParams,
+ ReindexParams,
+ SearchParams,
+ UpdateDocumentParams,
+ UpdateDocumentByQueryParams,
+} from 'elasticsearch';
+import { IClusterClient } from 'src/core/server';
+import { EsClient } from '../lib/esqueue';
import { EsIndexClient } from './es_index_client';
import { WithInternalRequest } from './with_internal_request';
export class EsClientWithInternalRequest extends WithInternalRequest implements EsClient {
public readonly indices = new EsIndexClient(this);
- constructor(server: ServerFacade) {
- super(server);
+ constructor(cluster: IClusterClient) {
+ super(cluster);
}
- public bulk(params: AnyObject): Promise {
+ public bulk(params: BulkIndexDocumentsParams): Promise {
return this.callCluster('bulk', params);
}
- public delete(params: AnyObject): Promise {
+ public delete(params: DeleteDocumentParams): Promise {
return this.callCluster('delete', params);
}
- public deleteByQuery(params: AnyObject): Promise {
+ public deleteByQuery(params: DeleteDocumentByQueryParams): Promise {
return this.callCluster('deleteByQuery', params);
}
- public get(params: AnyObject): Promise {
+ public get(params: GetParams): Promise {
return this.callCluster('get', params);
}
- public index(params: AnyObject): Promise {
+ public index(params: IndexDocumentParams): Promise {
return this.callCluster('index', params);
}
@@ -40,19 +51,19 @@ export class EsClientWithInternalRequest extends WithInternalRequest implements
return this.callCluster('ping');
}
- public reindex(params: AnyObject): Promise {
+ public reindex(params: ReindexParams): Promise {
return this.callCluster('reindex', params);
}
- public search(params: AnyObject): Promise {
+ public search(params: SearchParams): Promise {
return this.callCluster('search', params);
}
- public update(params: AnyObject): Promise {
+ public update(params: UpdateDocumentParams): Promise {
return this.callCluster('update', params);
}
- public updateByQuery(params: AnyObject): Promise {
+ public updateByQuery(params: UpdateDocumentByQueryParams): Promise {
return this.callCluster('updateByQuery', params);
}
}
diff --git a/x-pack/legacy/plugins/code/server/utils/esclient_with_request.ts b/x-pack/legacy/plugins/code/server/utils/esclient_with_request.ts
index a1f70db0a7074..2e4a18937a232 100644
--- a/x-pack/legacy/plugins/code/server/utils/esclient_with_request.ts
+++ b/x-pack/legacy/plugins/code/server/utils/esclient_with_request.ts
@@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { RequestFacade } from '../../';
+import { KibanaRequest, RequestHandlerContext } from 'src/core/server';
import { AnyObject, EsClient } from '../lib/esqueue';
import { EsIndexClient } from './es_index_client';
import { WithRequest } from './with_request';
@@ -12,8 +12,8 @@ import { WithRequest } from './with_request';
export class EsClientWithRequest extends WithRequest implements EsClient {
public readonly indices = new EsIndexClient(this);
- constructor(readonly req: RequestFacade) {
- super(req);
+ constructor(readonly context: RequestHandlerContext, readonly req: KibanaRequest) {
+ super(context, req);
}
public bulk(params: AnyObject): Promise {
diff --git a/x-pack/legacy/plugins/code/server/utils/with_internal_request.ts b/x-pack/legacy/plugins/code/server/utils/with_internal_request.ts
index a51fa990ff10e..9f8dde129039a 100644
--- a/x-pack/legacy/plugins/code/server/utils/with_internal_request.ts
+++ b/x-pack/legacy/plugins/code/server/utils/with_internal_request.ts
@@ -4,14 +4,12 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { ServerFacade } from '../..';
-import { AnyObject } from '../lib/esqueue';
+import { APICaller, IClusterClient } from 'src/core/server';
export class WithInternalRequest {
- public readonly callCluster: (endpoint: string, clientOptions?: AnyObject) => Promise;
+ public readonly callCluster: APICaller;
- constructor(server: ServerFacade) {
- const cluster = server.plugins.elasticsearch.getCluster('admin');
- this.callCluster = cluster.callWithInternalUser;
+ constructor(cluster: IClusterClient) {
+ this.callCluster = cluster.callAsInternalUser;
}
}
diff --git a/x-pack/legacy/plugins/code/server/utils/with_request.ts b/x-pack/legacy/plugins/code/server/utils/with_request.ts
index e08b9727f375e..e2a4bfd03de66 100644
--- a/x-pack/legacy/plugins/code/server/utils/with_request.ts
+++ b/x-pack/legacy/plugins/code/server/utils/with_request.ts
@@ -4,24 +4,20 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { RequestFacade } from '../../';
-import { AnyObject } from '../lib/esqueue';
+import { APICaller, KibanaRequest, RequestHandlerContext } from 'src/core/server';
export class WithRequest {
- public readonly callCluster: (endpoint: string, clientOptions?: AnyObject) => Promise;
+ public readonly callCluster: APICaller;
- constructor(readonly req: RequestFacade) {
- const cluster = req.server.plugins.elasticsearch.getCluster('data');
-
- // @ts-ignore
- const securityPlugin = req.server.plugins.security;
- if (securityPlugin) {
- const useRbac = securityPlugin.authorization.mode.useRbacForRequest(req);
- if (useRbac) {
- this.callCluster = cluster.callWithInternalUser;
- return;
- }
- }
- this.callCluster = cluster.callWithRequest.bind(null, req);
+ constructor(readonly context: RequestHandlerContext, readonly req: KibanaRequest) {
+ const securityPlugin = context.code.legacy.securityPlugin;
+ const useRbac =
+ securityPlugin &&
+ securityPlugin.authorization &&
+ // @ts-ignore
+ securityPlugin.authorization.mode.useRbacForRequest(req);
+ this.callCluster = useRbac
+ ? context.core.elasticsearch.dataClient.callAsInternalUser
+ : context.core.elasticsearch.dataClient.callAsCurrentUser;
}
}
diff --git a/x-pack/legacy/plugins/infra/public/components/navigation/app_navigation.tsx b/x-pack/legacy/plugins/infra/public/components/navigation/app_navigation.tsx
index b1eef34001750..fe3c930f9e08e 100644
--- a/x-pack/legacy/plugins/infra/public/components/navigation/app_navigation.tsx
+++ b/x-pack/legacy/plugins/infra/public/components/navigation/app_navigation.tsx
@@ -9,11 +9,12 @@ import React from 'react';
import euiStyled from '../../../../../common/eui_styled_components';
interface AppNavigationProps {
+ 'aria-label': string;
children: React.ReactNode;
}
-export const AppNavigation = ({ children }: AppNavigationProps) => (
-
+export const AppNavigation = ({ 'aria-label': label, children }: AppNavigationProps) => (
+
{children}
@@ -25,7 +26,7 @@ const Nav = euiStyled.nav`
border-bottom: ${props => props.theme.eui.euiBorderThin};
padding: ${props =>
`${props.theme.eui.euiSize} ${props.theme.eui.euiSizeL} ${props.theme.eui.euiSize} ${props.theme.eui.euiSizeL}`}
-
+
.euiTabs {
padding-left: 3px;
margin-left: -3px;
diff --git a/x-pack/legacy/plugins/infra/public/lib/adapters/framework/kibana_framework_adapter.ts b/x-pack/legacy/plugins/infra/public/lib/adapters/framework/kibana_framework_adapter.ts
index beb912373dbff..5f763b3199a91 100644
--- a/x-pack/legacy/plugins/infra/public/lib/adapters/framework/kibana_framework_adapter.ts
+++ b/x-pack/legacy/plugins/infra/public/lib/adapters/framework/kibana_framework_adapter.ts
@@ -115,10 +115,10 @@ export class InfraKibanaFrameworkAdapter implements InfraFrameworkAdapter {
}),
scope: true,
template: `
-
+ >
`,
}));
diff --git a/x-pack/legacy/plugins/infra/public/pages/infrastructure/index.tsx b/x-pack/legacy/plugins/infra/public/pages/infrastructure/index.tsx
index f8984f630f896..6affcae1805b3 100644
--- a/x-pack/legacy/plugins/infra/public/pages/infrastructure/index.tsx
+++ b/x-pack/legacy/plugins/infra/public/pages/infrastructure/index.tsx
@@ -58,7 +58,11 @@ export const InfrastructurePage = injectUICapabilities(
readOnlyBadge={!uiCapabilities.infrastructure.save}
/>
-
+
@@ -110,9 +113,7 @@ export const LogsPage = injectUICapabilities(({ match, uiCapabilities }: LogsPag
) : (
<>
-
+
;
+ getIndexFields(
+ req: InfraFrameworkRequest,
+ indices: string,
+ timefield: string
+ ): Promise;
}
export interface IndexFieldDescriptor {
diff --git a/x-pack/legacy/plugins/infra/server/lib/adapters/fields/framework_fields_adapter.ts b/x-pack/legacy/plugins/infra/server/lib/adapters/fields/framework_fields_adapter.ts
index 2cea001d87b00..179bfef6f1bd8 100644
--- a/x-pack/legacy/plugins/infra/server/lib/adapters/fields/framework_fields_adapter.ts
+++ b/x-pack/legacy/plugins/infra/server/lib/adapters/fields/framework_fields_adapter.ts
@@ -4,22 +4,29 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { startsWith, uniq } from 'lodash';
-import { InfraBackendFrameworkAdapter, InfraFrameworkRequest } from '../framework';
+import { startsWith, uniq, first } from 'lodash';
+import { idx } from '@kbn/elastic-idx';
+import {
+ InfraBackendFrameworkAdapter,
+ InfraFrameworkRequest,
+ InfraDatabaseSearchResponse,
+} from '../framework';
import { FieldsAdapter, IndexFieldDescriptor } from './adapter_types';
import { getAllowedListForPrefix } from '../../../../common/ecs_allowed_list';
+import { getAllCompositeData } from '../../../utils/get_all_composite_data';
+import { createAfterKeyHandler } from '../../../utils/create_afterkey_handler';
interface Bucket {
- key: string;
+ key: { dataset: string };
doc_count: number;
}
interface DataSetResponse {
- modules: {
- buckets: Bucket[];
- };
- dataSets: {
+ datasets: {
buckets: Bucket[];
+ after_key: {
+ dataset: string;
+ };
};
}
@@ -32,13 +39,14 @@ export class FrameworkFieldsAdapter implements FieldsAdapter {
public async getIndexFields(
request: InfraFrameworkRequest,
- indices: string
+ indices: string,
+ timefield: string
): Promise {
const indexPatternsService = this.framework.getIndexPatternsService(request);
const response = await indexPatternsService.getFieldsForWildcard({
pattern: indices,
});
- const { dataSets, modules } = await this.getDataSetsAndModules(request, indices);
+ const { dataSets, modules } = await this.getDataSetsAndModules(request, indices, timefield);
const allowedList = modules.reduce(
(acc, name) => uniq([...acc, ...getAllowedListForPrefix(name)]),
[] as string[]
@@ -52,41 +60,68 @@ export class FrameworkFieldsAdapter implements FieldsAdapter {
private async getDataSetsAndModules(
request: InfraFrameworkRequest,
- indices: string
+ indices: string,
+ timefield: string
): Promise<{ dataSets: string[]; modules: string[] }> {
const params = {
index: indices,
allowNoIndices: true,
ignoreUnavailable: true,
body: {
- aggs: {
- modules: {
- terms: {
- field: 'event.modules',
- size: 1000,
- },
+ size: 0,
+ query: {
+ bool: {
+ filter: [
+ {
+ range: {
+ [timefield]: {
+ gte: 'now-24h',
+ lte: 'now',
+ },
+ },
+ },
+ ],
},
- dataSets: {
- terms: {
- field: 'event.dataset',
- size: 1000,
+ },
+ aggs: {
+ datasets: {
+ composite: {
+ sources: [
+ {
+ dataset: {
+ terms: {
+ field: 'event.dataset',
+ },
+ },
+ },
+ ],
},
},
},
},
};
- const response = await this.framework.callWithRequest<{}, DataSetResponse>(
+
+ const bucketSelector = (response: InfraDatabaseSearchResponse<{}, DataSetResponse>) =>
+ (response.aggregations && response.aggregations.datasets.buckets) || [];
+ const handleAfterKey = createAfterKeyHandler('body.aggs.datasets.composite.after', input =>
+ idx(input, _ => _.aggregations.datasets.after_key)
+ );
+
+ const buckets = await getAllCompositeData(
+ this.framework,
request,
- 'search',
- params
+ params,
+ bucketSelector,
+ handleAfterKey
);
- if (!response.aggregations) {
- return { dataSets: [], modules: [] };
- }
- const { modules, dataSets } = response.aggregations;
- return {
- modules: modules.buckets.map(bucket => bucket.key),
- dataSets: dataSets.buckets.map(bucket => bucket.key),
- };
+ const dataSets = buckets.map(bucket => bucket.key.dataset);
+ const modules = dataSets.reduce(
+ (acc, dataset) => {
+ const module = first(dataset.split(/\./));
+ return module ? uniq([...acc, module]) : acc;
+ },
+ [] as string[]
+ );
+ return { modules, dataSets };
}
}
diff --git a/x-pack/legacy/plugins/infra/server/lib/domains/fields_domain.ts b/x-pack/legacy/plugins/infra/server/lib/domains/fields_domain.ts
index a339fcd4f08bf..c5a3bbeb87449 100644
--- a/x-pack/legacy/plugins/infra/server/lib/domains/fields_domain.ts
+++ b/x-pack/legacy/plugins/infra/server/lib/domains/fields_domain.ts
@@ -28,7 +28,8 @@ export class InfraFieldsDomain {
request,
`${includeMetricIndices ? configuration.metricAlias : ''},${
includeLogIndices ? configuration.logAlias : ''
- }`
+ }`,
+ configuration.fields.timestamp
);
return fields;
diff --git a/x-pack/legacy/plugins/infra/server/lib/snapshot/snapshot.ts b/x-pack/legacy/plugins/infra/server/lib/snapshot/snapshot.ts
index 212a01d9e50bf..741293f61056e 100644
--- a/x-pack/legacy/plugins/infra/server/lib/snapshot/snapshot.ts
+++ b/x-pack/legacy/plugins/infra/server/lib/snapshot/snapshot.ts
@@ -4,6 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
+import { idx } from '@kbn/elastic-idx';
import {
InfraSnapshotGroupbyInput,
InfraSnapshotMetricInput,
@@ -12,7 +13,11 @@ import {
InfraNodeType,
InfraSourceConfiguration,
} from '../../graphql/types';
-import { InfraBackendFrameworkAdapter, InfraFrameworkRequest } from '../adapters/framework';
+import {
+ InfraBackendFrameworkAdapter,
+ InfraFrameworkRequest,
+ InfraDatabaseSearchResponse,
+} from '../adapters/framework';
import { InfraSources } from '../sources';
import { JsonObject } from '../../../common/typed_json';
@@ -31,6 +36,8 @@ import {
InfraSnapshotNodeMetricsBucket,
} from './response_helpers';
import { IP_FIELDS } from '../constants';
+import { getAllCompositeData } from '../../utils/get_all_composite_data';
+import { createAfterKeyHandler } from '../../utils/create_afterkey_handler';
export interface InfraSnapshotRequestOptions {
nodeType: InfraNodeType;
@@ -63,6 +70,14 @@ export class InfraSnapshot {
}
}
+const bucketSelector = (
+ response: InfraDatabaseSearchResponse<{}, InfraSnapshotAggregationResponse>
+) => (response.aggregations && response.aggregations.nodes.buckets) || [];
+
+const handleAfterKey = createAfterKeyHandler('body.aggregations.nodes.composite.after', input =>
+ idx(input, _ => _.aggregations.nodes.after_key)
+);
+
const requestGroupedNodes = async (
request: InfraFrameworkRequest,
options: InfraSnapshotRequestOptions,
@@ -112,11 +127,10 @@ const requestGroupedNodes = async (
},
};
- return await getAllCompositeAggregationData(
- framework,
- request,
- query
- );
+ return await getAllCompositeData<
+ InfraSnapshotAggregationResponse,
+ InfraSnapshotNodeGroupByBucket
+ >(framework, request, query, bucketSelector, handleAfterKey);
};
const requestNodeMetrics = async (
@@ -174,12 +188,10 @@ const requestNodeMetrics = async (
},
},
};
-
- return await getAllCompositeAggregationData(
- framework,
- request,
- query
- );
+ return await getAllCompositeData<
+ InfraSnapshotAggregationResponse,
+ InfraSnapshotNodeMetricsBucket
+ >(framework, request, query, bucketSelector, handleAfterKey);
};
// buckets can be InfraSnapshotNodeGroupByBucket[] or InfraSnapshotNodeMetricsBucket[]
@@ -191,46 +203,6 @@ interface InfraSnapshotAggregationResponse {
};
}
-const getAllCompositeAggregationData = async (
- framework: InfraBackendFrameworkAdapter,
- request: InfraFrameworkRequest,
- query: any,
- previousBuckets: BucketType[] = []
-): Promise => {
- const response = await framework.callWithRequest<{}, InfraSnapshotAggregationResponse>(
- request,
- 'search',
- query
- );
-
- // Nothing available, return the previous buckets.
- if (response.hits.total.value === 0) {
- return previousBuckets;
- }
-
- // if ES doesn't return an aggregations key, something went seriously wrong.
- if (!response.aggregations) {
- throw new Error('Whoops!, `aggregations` key must always be returned.');
- }
-
- const currentBuckets = response.aggregations.nodes.buckets;
-
- // if there are no currentBuckets then we are finished paginating through the results
- if (currentBuckets.length === 0) {
- return previousBuckets;
- }
-
- // There is possibly more data, concat previous and current buckets and call ourselves recursively.
- const newQuery = { ...query };
- newQuery.body.aggregations.nodes.composite.after = response.aggregations.nodes.after_key;
- return getAllCompositeAggregationData(
- framework,
- request,
- query,
- previousBuckets.concat(currentBuckets)
- );
-};
-
const mergeNodeBuckets = (
nodeGroupByBuckets: InfraSnapshotNodeGroupByBucket[],
nodeMetricsBuckets: InfraSnapshotNodeMetricsBucket[],
diff --git a/x-pack/legacy/plugins/infra/server/utils/create_afterkey_handler.ts b/x-pack/legacy/plugins/infra/server/utils/create_afterkey_handler.ts
new file mode 100644
index 0000000000000..559fba0799987
--- /dev/null
+++ b/x-pack/legacy/plugins/infra/server/utils/create_afterkey_handler.ts
@@ -0,0 +1,21 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { set } from 'lodash';
+import { InfraDatabaseSearchResponse } from '../lib/adapters/framework';
+
+export const createAfterKeyHandler = (
+ optionsAfterKeyPath: string | string[],
+ afterKeySelector: (input: InfraDatabaseSearchResponse) => any
+) => (options: Options, response: InfraDatabaseSearchResponse): Options => {
+ if (!response.aggregations) {
+ return options;
+ }
+ const newOptions = { ...options };
+ const afterKey = afterKeySelector(response);
+ set(newOptions, optionsAfterKeyPath, afterKey);
+ return newOptions;
+};
diff --git a/x-pack/legacy/plugins/infra/server/utils/get_all_composite_data.ts b/x-pack/legacy/plugins/infra/server/utils/get_all_composite_data.ts
new file mode 100644
index 0000000000000..a5729b6004dcf
--- /dev/null
+++ b/x-pack/legacy/plugins/infra/server/utils/get_all_composite_data.ts
@@ -0,0 +1,54 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import {
+ InfraBackendFrameworkAdapter,
+ InfraFrameworkRequest,
+ InfraDatabaseSearchResponse,
+} from '../lib/adapters/framework';
+
+export const getAllCompositeData = async <
+ Aggregation = undefined,
+ Bucket = {},
+ Options extends object = {}
+>(
+ framework: InfraBackendFrameworkAdapter,
+ request: InfraFrameworkRequest,
+ options: Options,
+ bucketSelector: (response: InfraDatabaseSearchResponse<{}, Aggregation>) => Bucket[],
+ onAfterKey: (options: Options, response: InfraDatabaseSearchResponse<{}, Aggregation>) => Options,
+ previousBuckets: Bucket[] = []
+): Promise => {
+ const response = await framework.callWithRequest<{}, Aggregation>(request, 'search', options);
+
+ // Nothing available, return the previous buckets.
+ if (response.hits.total.value === 0) {
+ return previousBuckets;
+ }
+
+ // if ES doesn't return an aggregations key, something went seriously wrong.
+ if (!response.aggregations) {
+ throw new Error('Whoops!, `aggregations` key must always be returned.');
+ }
+
+ const currentBuckets = bucketSelector(response);
+
+ // if there are no currentBuckets then we are finished paginating through the results
+ if (currentBuckets.length === 0) {
+ return previousBuckets;
+ }
+
+ // There is possibly more data, concat previous and current buckets and call ourselves recursively.
+ const newOptions = onAfterKey(options, response);
+ return getAllCompositeData(
+ framework,
+ request,
+ newOptions,
+ bucketSelector,
+ onAfterKey,
+ previousBuckets.concat(currentBuckets)
+ );
+};
diff --git a/x-pack/legacy/plugins/maps/public/kibana_services.js b/x-pack/legacy/plugins/maps/public/kibana_services.js
index 8a980764db9a3..e2500d7331db6 100644
--- a/x-pack/legacy/plugins/maps/public/kibana_services.js
+++ b/x-pack/legacy/plugins/maps/public/kibana_services.js
@@ -4,17 +4,21 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { uiModules } from 'ui/modules';
-import { SearchSourceProvider } from 'ui/courier';
import { getRequestInspectorStats, getResponseInspectorStats } from 'ui/courier/utils/courier_inspector_utils';
export { xpackInfo } from 'plugins/xpack_main/services/xpack_info';
import { start as data } from '../../../../../src/legacy/core_plugins/data/public/legacy';
+export { SearchSource } from 'ui/courier';
export const indexPatternService = data.indexPatterns.indexPatterns;
-export let SearchSource;
-
-export async function fetchSearchSourceAndRecordWithInspector({ searchSource, requestId, requestName, requestDesc, inspectorAdapters }) {
+export async function fetchSearchSourceAndRecordWithInspector({
+ searchSource,
+ requestId,
+ requestName,
+ requestDesc,
+ inspectorAdapters,
+ abortSignal,
+}) {
const inspectorRequest = inspectorAdapters.requests.start(
requestName,
{ id: requestId, description: requestDesc });
@@ -24,7 +28,7 @@ export async function fetchSearchSourceAndRecordWithInspector({ searchSource, re
searchSource.getSearchRequestBody().then(body => {
inspectorRequest.json(body);
});
- resp = await searchSource.fetch();
+ resp = await searchSource.fetch({ abortSignal });
inspectorRequest
.stats(getResponseInspectorStats(searchSource, resp))
.ok({ json: resp });
@@ -35,8 +39,3 @@ export async function fetchSearchSourceAndRecordWithInspector({ searchSource, re
return resp;
}
-
-uiModules.get('app/maps').run(($injector) => {
- const Private = $injector.get('Private');
- SearchSource = Private(SearchSourceProvider);
-});
diff --git a/x-pack/legacy/plugins/maps/public/layers/sources/es_source.js b/x-pack/legacy/plugins/maps/public/layers/sources/es_source.js
index 730f640ed3c79..0670474df89bb 100644
--- a/x-pack/legacy/plugins/maps/public/layers/sources/es_source.js
+++ b/x-pack/legacy/plugins/maps/public/layers/sources/es_source.js
@@ -133,10 +133,8 @@ export class AbstractESSource extends AbstractVectorSource {
async _runEsQuery(requestName, searchSource, registerCancelCallback, requestDescription) {
- const cancel = () => {
- searchSource.cancelQueued();
- };
- registerCancelCallback(cancel);
+ const abortController = new AbortController();
+ registerCancelCallback(() => abortController.abort());
try {
return await fetchSearchSourceAndRecordWithInspector({
@@ -144,7 +142,8 @@ export class AbstractESSource extends AbstractVectorSource {
searchSource,
requestName,
requestId: this.getId(),
- requestDesc: requestDescription
+ requestDesc: requestDescription,
+ abortSignal: abortController.signal,
});
} catch(error) {
if (error.name === 'AbortError') {
diff --git a/x-pack/legacy/plugins/ml/public/contexts/kibana/__mocks__/saved_search.ts b/x-pack/legacy/plugins/ml/public/contexts/kibana/__mocks__/saved_search.ts
index 311e6688f7aa9..07979d7c1bd11 100644
--- a/x-pack/legacy/plugins/ml/public/contexts/kibana/__mocks__/saved_search.ts
+++ b/x-pack/legacy/plugins/ml/public/contexts/kibana/__mocks__/saved_search.ts
@@ -4,10 +4,12 @@
* you may not use this file except in compliance with the Elastic License.
*/
+import { searchSourceMock } from '../../../../../../../../src/legacy/ui/public/courier/search_source/mocks';
+
export const savedSearchMock = {
id: 'the-saved-search-id',
title: 'the-saved-search-title',
- searchSource: {},
+ searchSource: searchSourceMock,
columns: [],
sort: [],
destroy: () => {},
diff --git a/x-pack/legacy/plugins/ml/public/data_frame_analytics/common/analytics.ts b/x-pack/legacy/plugins/ml/public/data_frame_analytics/common/analytics.ts
index 9f9091db02cce..f99f9661f12ef 100644
--- a/x-pack/legacy/plugins/ml/public/data_frame_analytics/common/analytics.ts
+++ b/x-pack/legacy/plugins/ml/public/data_frame_analytics/common/analytics.ts
@@ -25,6 +25,7 @@ interface RegressionAnalysis {
regression: {
dependent_variable: string;
training_percent?: number;
+ prediction_field_name?: string;
};
}
@@ -81,6 +82,15 @@ export const getDependentVar = (analysis: AnalysisConfig) => {
return depVar;
};
+export const getPredictionFieldName = (analysis: AnalysisConfig) => {
+ // If undefined will be defaulted to dependent_variable when config is created
+ let predictionFieldName;
+ if (isRegressionAnalysis(analysis) && analysis.regression.prediction_field_name !== undefined) {
+ predictionFieldName = analysis.regression.prediction_field_name;
+ }
+ return predictionFieldName;
+};
+
export const isOutlierAnalysis = (arg: any): arg is OutlierAnalysis => {
const keys = Object.keys(arg);
return keys.length === 1 && keys[0] === ANALYSIS_CONFIG_TYPE.OUTLIER_DETECTION;
@@ -189,26 +199,30 @@ export const loadEvalData = async ({
isTraining,
index,
dependentVariable,
+ resultsField,
+ predictionFieldName,
}: {
isTraining: boolean;
index: string;
dependentVariable: string;
+ resultsField: string;
+ predictionFieldName?: string;
}) => {
const results: LoadEvaluateResult = { success: false, eval: null, error: null };
+ const defaultPredictionField = `${dependentVariable}_prediction`;
+ const predictedField = `${resultsField}.${
+ predictionFieldName ? predictionFieldName : defaultPredictionField
+ }`;
+
+ const query = { term: { [`${resultsField}.is_training`]: { value: isTraining } } };
const config = {
index,
- query: {
- term: {
- 'ml.is_training': {
- value: isTraining,
- },
- },
- },
+ query,
evaluation: {
regression: {
actual_field: dependentVariable,
- predicted_field: `ml.${dependentVariable}_prediction`,
+ predicted_field: predictedField,
metrics: {
r_squared: {},
mean_squared_error: {},
diff --git a/x-pack/legacy/plugins/ml/public/data_frame_analytics/common/index.ts b/x-pack/legacy/plugins/ml/public/data_frame_analytics/common/index.ts
index 05f92548304c5..774db35f2a52b 100644
--- a/x-pack/legacy/plugins/ml/public/data_frame_analytics/common/index.ts
+++ b/x-pack/legacy/plugins/ml/public/data_frame_analytics/common/index.ts
@@ -7,6 +7,7 @@
export {
getAnalysisType,
getDependentVar,
+ getPredictionFieldName,
isOutlierAnalysis,
refreshAnalyticsList$,
useRefreshAnalyticsList,
diff --git a/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_exploration/components/regression_exploration/evaluate_panel.tsx b/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_exploration/components/regression_exploration/evaluate_panel.tsx
index b12258500e6f0..c7ea3421ac5de 100644
--- a/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_exploration/components/regression_exploration/evaluate_panel.tsx
+++ b/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_exploration/components/regression_exploration/evaluate_panel.tsx
@@ -39,8 +39,15 @@ export const EvaluatePanel: FC = ({ jobId, index, dependentVariable }) =>
const loadData = async () => {
setIsLoadingGeneralization(true);
setIsLoadingTraining(true);
-
- const genErrorEval = await loadEvalData({ isTraining: false, index, dependentVariable });
+ // TODO: resultsField and predictionFieldName will need to be properly passed to this function
+ // once the results view is in use.
+ const genErrorEval = await loadEvalData({
+ isTraining: false,
+ index,
+ dependentVariable,
+ resultsField: 'ml',
+ predictionFieldName: undefined,
+ });
if (genErrorEval.success === true && genErrorEval.eval) {
const { meanSquaredError, rSquared } = getValuesFromResponse(genErrorEval.eval);
@@ -58,8 +65,15 @@ export const EvaluatePanel: FC = ({ jobId, index, dependentVariable }) =>
error: genErrorEval.error,
});
}
-
- const trainingErrorEval = await loadEvalData({ isTraining: true, index, dependentVariable });
+ // TODO: resultsField and predictionFieldName will need to be properly passed to this function
+ // once the results view is in use.
+ const trainingErrorEval = await loadEvalData({
+ isTraining: true,
+ index,
+ dependentVariable,
+ resultsField: 'ml',
+ predictionFieldName: undefined,
+ });
if (trainingErrorEval.success === true && trainingErrorEval.eval) {
const { meanSquaredError, rSquared } = getValuesFromResponse(trainingErrorEval.eval);
diff --git a/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_management/components/analytics_list/expanded_row.tsx b/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_management/components/analytics_list/expanded_row.tsx
index 67bad23adacf6..bfa21e503d5aa 100644
--- a/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_management/components/analytics_list/expanded_row.tsx
+++ b/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_management/components/analytics_list/expanded_row.tsx
@@ -18,7 +18,13 @@ import { DataFrameAnalyticsListRow } from './common';
import { ExpandedRowDetailsPane, SectionConfig } from './expanded_row_details_pane';
import { ExpandedRowJsonPane } from './expanded_row_json_pane';
import { ProgressBar } from './progress_bar';
-import { getDependentVar, getValuesFromResponse, loadEvalData, Eval } from '../../../../common';
+import {
+ getDependentVar,
+ getPredictionFieldName,
+ getValuesFromResponse,
+ loadEvalData,
+ Eval,
+} from '../../../../common';
import { isCompletedAnalyticsJob } from './common';
import { isRegressionAnalysis } from '../../../../common/analytics';
// import { ExpandedRowMessagesPane } from './expanded_row_messages_pane';
@@ -60,6 +66,9 @@ export const ExpandedRow: FC = ({ item }) => {
const [isLoadingGeneralization, setIsLoadingGeneralization] = useState(false);
const index = idx(item, _ => _.config.dest.index) as string;
const dependentVariable = getDependentVar(item.config.analysis);
+ const predictionFieldName = getPredictionFieldName(item.config.analysis);
+ // default is 'ml'
+ const resultsField = item.config.dest.results_field;
const jobIsCompleted = isCompletedAnalyticsJob(item.stats);
const isRegressionJob = isRegressionAnalysis(item.config.analysis);
@@ -71,6 +80,8 @@ export const ExpandedRow: FC = ({ item }) => {
isTraining: false,
index,
dependentVariable,
+ resultsField,
+ predictionFieldName,
});
if (genErrorEval.success === true && genErrorEval.eval) {
@@ -94,6 +105,8 @@ export const ExpandedRow: FC = ({ item }) => {
isTraining: true,
index,
dependentVariable,
+ resultsField,
+ predictionFieldName,
});
if (trainingErrorEval.success === true && trainingErrorEval.eval) {
diff --git a/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.test.ts b/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.test.ts
index 46cf833610073..da5a27f0e12e0 100644
--- a/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.test.ts
+++ b/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.test.ts
@@ -29,6 +29,7 @@ const getMockState = (index: SourceIndex) =>
jobConfig: {
source: { index },
dest: { index: 'the-destination-index' },
+ analysis: {},
},
});
diff --git a/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.ts b/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.ts
index 605bb68346cf6..135f29adf1bae 100644
--- a/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.ts
+++ b/x-pack/legacy/plugins/ml/public/data_frame_analytics/pages/analytics_management/hooks/use_create_analytics_form/reducer.ts
@@ -12,10 +12,11 @@ import { validateIndexPattern } from 'ui/index_patterns';
import { isValidIndexName } from '../../../../../../common/util/es_utils';
import { Action, ACTION } from './actions';
-import { getInitialState, getJobConfigFromFormState, State } from './state';
+import { getInitialState, getJobConfigFromFormState, State, JOB_TYPES } from './state';
import { isJobIdValid } from '../../../../../../common/util/job_utils';
import { maxLengthValidator } from '../../../../../../common/util/validators';
import { JOB_ID_MAX_LENGTH } from '../../../../../../common/constants/validation';
+import { getDependentVar, isRegressionAnalysis } from '../../../../common/analytics';
const getSourceIndexString = (state: State) => {
const { jobConfig } = state;
@@ -34,7 +35,7 @@ const getSourceIndexString = (state: State) => {
};
export const validateAdvancedEditor = (state: State): State => {
- const { jobIdEmpty, jobIdValid, jobIdExists, createIndexPattern } = state.form;
+ const { jobIdEmpty, jobIdValid, jobIdExists, jobType, createIndexPattern } = state.form;
const { jobConfig } = state;
state.advancedEditorMessages = [];
@@ -64,6 +65,12 @@ export const validateAdvancedEditor = (state: State): State => {
name => destinationIndexName === name
);
+ let dependentVariableEmpty = false;
+ if (isRegressionAnalysis(jobConfig.analysis)) {
+ const dependentVariableName = getDependentVar(jobConfig.analysis) || '';
+ dependentVariableEmpty = jobType === JOB_TYPES.REGRESSION && dependentVariableName === '';
+ }
+
if (sourceIndexNameEmpty) {
state.advancedEditorMessages.push({
error: i18n.translate(
@@ -108,6 +115,18 @@ export const validateAdvancedEditor = (state: State): State => {
});
}
+ if (dependentVariableEmpty) {
+ state.advancedEditorMessages.push({
+ error: i18n.translate(
+ 'xpack.ml.dataframe.analytics.create.advancedEditorMessage.dependentVariableEmpty',
+ {
+ defaultMessage: 'The dependent variable field must not be empty.',
+ }
+ ),
+ message: '',
+ });
+ }
+
state.isValid =
!jobIdEmpty &&
jobIdValid &&
@@ -116,6 +135,7 @@ export const validateAdvancedEditor = (state: State): State => {
sourceIndexNameValid &&
!destinationIndexNameEmpty &&
destinationIndexNameValid &&
+ !dependentVariableEmpty &&
(!destinationIndexPatternTitleExists || !createIndexPattern);
return state;
@@ -126,14 +146,18 @@ const validateForm = (state: State): State => {
jobIdEmpty,
jobIdValid,
jobIdExists,
+ jobType,
sourceIndexNameEmpty,
sourceIndexNameValid,
destinationIndexNameEmpty,
destinationIndexNameValid,
destinationIndexPatternTitleExists,
createIndexPattern,
+ dependentVariable,
} = state.form;
+ const dependentVariableEmpty = jobType === JOB_TYPES.REGRESSION && dependentVariable === '';
+
state.isValid =
!jobIdEmpty &&
jobIdValid &&
@@ -142,6 +166,7 @@ const validateForm = (state: State): State => {
sourceIndexNameValid &&
!destinationIndexNameEmpty &&
destinationIndexNameValid &&
+ !dependentVariableEmpty &&
(!destinationIndexPatternTitleExists || !createIndexPattern);
return state;
diff --git a/x-pack/legacy/plugins/ml/public/jobs/new_job_new/pages/components/pick_fields_step/components/advanced_detector_modal/advanced_detector_modal.tsx b/x-pack/legacy/plugins/ml/public/jobs/new_job_new/pages/components/pick_fields_step/components/advanced_detector_modal/advanced_detector_modal.tsx
index 9b56c9d2cc858..97fb8c1d41fc5 100644
--- a/x-pack/legacy/plugins/ml/public/jobs/new_job_new/pages/components/pick_fields_step/components/advanced_detector_modal/advanced_detector_modal.tsx
+++ b/x-pack/legacy/plugins/ml/public/jobs/new_job_new/pages/components/pick_fields_step/components/advanced_detector_modal/advanced_detector_modal.tsx
@@ -87,6 +87,7 @@ export const AdvancedDetectorModal: FC = ({
);
const [descriptionOption, setDescriptionOption] = useState(detector.description || '');
const [fieldsEnabled, setFieldsEnabled] = useState(true);
+ const [excludeFrequentEnabled, setExcludeFrequentEnabled] = useState(true);
const [fieldOptionEnabled, setFieldOptionEnabled] = useState(true);
const { descriptionPlaceholder, setDescriptionPlaceholder } = useDetectorPlaceholder(detector);
@@ -117,6 +118,9 @@ export const AdvancedDetectorModal: FC = ({
useEffect(() => {
const agg = getAgg(aggOption.label);
let field = getField(fieldOption.label);
+ const byField = getField(byFieldOption.label);
+ const overField = getField(overFieldOption.label);
+ const partitionField = getField(partitionFieldOption.label);
if (agg !== null) {
setFieldsEnabled(true);
@@ -126,6 +130,8 @@ export const AdvancedDetectorModal: FC = ({
field = eventRateField;
} else {
setFieldOptionEnabled(true);
+ // only enable exclude frequent if there is a by or over selected
+ setExcludeFrequentEnabled(byField !== null || overField !== null);
}
} else {
setFieldsEnabled(false);
@@ -134,9 +140,9 @@ export const AdvancedDetectorModal: FC = ({
const dtr: RichDetector = {
agg,
field,
- byField: getField(byFieldOption.label),
- overField: getField(overFieldOption.label),
- partitionField: getField(partitionFieldOption.label),
+ byField,
+ overField,
+ partitionField,
excludeFrequent: excludeFrequentOption.label !== '' ? excludeFrequentOption.label : null,
description: descriptionOption !== '' ? descriptionOption : null,
};
@@ -157,9 +163,20 @@ export const AdvancedDetectorModal: FC = ({
setFieldsEnabled(aggOption.label !== '');
if (agg !== null) {
setFieldOptionEnabled(isFieldlessAgg(agg) === false);
+
+ const byField = getField(byFieldOption.label);
+ const overField = getField(overFieldOption.label);
+ setExcludeFrequentEnabled(byField !== null || overField !== null);
}
}, []);
+ useEffect(() => {
+ // wipe the exclude frequent choice if the select has been disabled
+ if (excludeFrequentEnabled === false) {
+ setExcludeFrequentOption(emptyOption);
+ }
+ }, [excludeFrequentEnabled]);
+
function onCreateClick() {
detectorChangeHandler(detector, payload.index);
}
@@ -245,7 +262,7 @@ export const AdvancedDetectorModal: FC = ({
selectedOptions={[excludeFrequentOption]}
onChange={onOptionChange(setExcludeFrequentOption)}
isClearable={true}
- isDisabled={fieldsEnabled === false}
+ isDisabled={fieldsEnabled === false || excludeFrequentEnabled === false}
/>
@@ -256,6 +273,7 @@ export const AdvancedDetectorModal: FC = ({
setDescriptionOption(e.target.value)}
diff --git a/x-pack/legacy/plugins/ml/public/jobs/new_job_new/pages/components/pick_fields_step/components/advanced_detector_modal/descriptions.tsx b/x-pack/legacy/plugins/ml/public/jobs/new_job_new/pages/components/pick_fields_step/components/advanced_detector_modal/descriptions.tsx
index 6bef006b83ef7..e7a306decfbef 100644
--- a/x-pack/legacy/plugins/ml/public/jobs/new_job_new/pages/components/pick_fields_step/components/advanced_detector_modal/descriptions.tsx
+++ b/x-pack/legacy/plugins/ml/public/jobs/new_job_new/pages/components/pick_fields_step/components/advanced_detector_modal/descriptions.tsx
@@ -168,6 +168,7 @@ export const DescriptionDescription: FC = memo(({ children }) => {
);
return (
{title}}
description={
@@ -177,7 +178,7 @@ export const DescriptionDescription: FC = memo(({ children }) => {
/>
}
>
-
+
<>{children}>
diff --git a/x-pack/legacy/plugins/ml/public/jobs/new_job_new/pages/components/summary_step/components/job_details/job_details.tsx b/x-pack/legacy/plugins/ml/public/jobs/new_job_new/pages/components/summary_step/components/job_details/job_details.tsx
index 885ba92e5e0de..dc0311e552bda 100644
--- a/x-pack/legacy/plugins/ml/public/jobs/new_job_new/pages/components/summary_step/components/job_details/job_details.tsx
+++ b/x-pack/legacy/plugins/ml/public/jobs/new_job_new/pages/components/summary_step/components/job_details/job_details.tsx
@@ -6,6 +6,7 @@
import React, { FC, useContext } from 'react';
import { i18n } from '@kbn/i18n';
+import moment from 'moment';
import { FormattedMessage } from '@kbn/i18n/react';
import { EuiFlexGroup, EuiFlexItem, EuiDescriptionList } from '@elastic/eui';
import { JobCreatorContext } from '../../../job_creator_context';
@@ -16,11 +17,16 @@ import {
} from '../../../../../common/job_creator';
import { newJobDefaults } from '../../../../../utils/new_job_defaults';
import { ListItems, falseLabel, trueLabel, defaultLabel, Italic } from '../common';
+import { useKibanaContext } from '../../../../../../../contexts/kibana';
export const JobDetails: FC = () => {
const { jobCreator } = useContext(JobCreatorContext);
+ const kibanaContext = useKibanaContext();
+ const dateFormat: string = kibanaContext.kibanaConfig.get('dateFormat');
const { anomaly_detectors: anomalyDetectors } = newJobDefaults();
+ const isAdvanced = isAdvancedJobCreator(jobCreator);
+
const modelMemoryLimitDefault = anomalyDetectors.model_memory_limit || '';
const modelMemoryLimit =
jobCreator.modelMemoryLimit !== null ? (
@@ -117,7 +123,7 @@ export const JobDetails: FC = () => {
});
}
- if (isAdvancedJobCreator(jobCreator) && jobCreator.categorizationFieldName !== null) {
+ if (isAdvanced && jobCreator.categorizationFieldName !== null) {
detectorDetails.push({
title: i18n.translate(
'xpack.ml.newJob.wizard.summaryStep.jobDetails.categorizationField.title',
@@ -129,7 +135,7 @@ export const JobDetails: FC = () => {
});
}
- if (isAdvancedJobCreator(jobCreator) && jobCreator.summaryCountFieldName !== null) {
+ if (isAdvanced && jobCreator.summaryCountFieldName !== null) {
detectorDetails.push({
title: i18n.translate(
'xpack.ml.newJob.wizard.summaryStep.jobDetails.summaryCountField.title',
@@ -185,6 +191,21 @@ export const JobDetails: FC = () => {
},
];
+ const timeRangeDetails: ListItems[] = [
+ {
+ title: i18n.translate('xpack.ml.newJob.wizard.summaryStep.timeRange.start.title', {
+ defaultMessage: 'Start',
+ }),
+ description: moment(jobCreator.start).format(dateFormat),
+ },
+ {
+ title: i18n.translate('xpack.ml.newJob.wizard.summaryStep.timeRange.end.title', {
+ defaultMessage: 'End',
+ }),
+ description: moment(jobCreator.end).format(dateFormat),
+ },
+ ];
+
return (
@@ -196,6 +217,11 @@ export const JobDetails: FC = () => {
+ {isAdvanced === false && (
+
+
+
+ )}
);
};
diff --git a/x-pack/legacy/plugins/ml/public/overview/components/anomaly_detection_panel/anomaly_detection_panel.tsx b/x-pack/legacy/plugins/ml/public/overview/components/anomaly_detection_panel/anomaly_detection_panel.tsx
index 7a466be3b3c16..72e17c4d090f4 100644
--- a/x-pack/legacy/plugins/ml/public/overview/components/anomaly_detection_panel/anomaly_detection_panel.tsx
+++ b/x-pack/legacy/plugins/ml/public/overview/components/anomaly_detection_panel/anomaly_detection_panel.tsx
@@ -31,7 +31,7 @@ export interface Group {
docs_processed: number;
earliest_timestamp: number;
latest_timestamp: number;
- max_anomaly_score: number | null;
+ max_anomaly_score: number | undefined | null;
}
type MaxScoresByGroup = Dictionary<{
@@ -103,8 +103,9 @@ export const AnomalyDetectionPanel: FC = () => {
// Check results for each group's promise index and update state
Object.keys(scores).forEach(groupId => {
const resultsIndex = scores[groupId] && scores[groupId].index;
+ // maxScore will be null if it was not loaded correctly
const { maxScore } = resultsIndex !== undefined && results[resultsIndex];
- tempGroups[groupId].max_anomaly_score = maxScore || undefined;
+ tempGroups[groupId].max_anomaly_score = maxScore;
});
setGroups(tempGroups);
diff --git a/x-pack/legacy/plugins/ml/public/overview/components/anomaly_detection_panel/table.tsx b/x-pack/legacy/plugins/ml/public/overview/components/anomaly_detection_panel/table.tsx
index 853b33618181f..883726980fa94 100644
--- a/x-pack/legacy/plugins/ml/public/overview/components/anomaly_detection_panel/table.tsx
+++ b/x-pack/legacy/plugins/ml/public/overview/components/anomaly_detection_panel/table.tsx
@@ -90,11 +90,11 @@ export const AnomalyDetectionTable: FC = ({ items, jobsList, statsBarData
),
sortable: true,
render: (score: Group['max_anomaly_score']) => {
- if (score === null) {
+ if (score === undefined) {
// score is not loaded yet
return ;
- } else if (score === undefined) {
- // an error occurred for this group's score
+ } else if (score === null) {
+ // an error occurred loading this group's score
return (
= ({ items, jobsList, statsBarData
);
+ } else if (score === 0) {
+ return (
+ // @ts-ignore
+
+ {score}
+
+ );
} else {
const color: string = getSeverityColor(score);
return (
diff --git a/x-pack/legacy/plugins/ml/public/services/ml_api_service/index.d.ts b/x-pack/legacy/plugins/ml/public/services/ml_api_service/index.d.ts
index ec09b610f2518..2e7da6fb9cc69 100644
--- a/x-pack/legacy/plugins/ml/public/services/ml_api_service/index.d.ts
+++ b/x-pack/legacy/plugins/ml/public/services/ml_api_service/index.d.ts
@@ -103,7 +103,7 @@ declare interface Ml {
getVisualizerOverallStats(obj: object): Promise;
results: {
- getMaxAnomalyScore: (jobIds: string[], earliestMs: number, latestMs: number) => Promise; // THIS ONE IS RIGHT
+ getMaxAnomalyScore: (jobIds: string[], earliestMs: number, latestMs: number) => Promise;
};
jobs: {
diff --git a/x-pack/legacy/plugins/rollup/public/search/rollup_search_strategy.js b/x-pack/legacy/plugins/rollup/public/search/rollup_search_strategy.js
index abc0bc620b81a..ab24a37a2ecec 100644
--- a/x-pack/legacy/plugins/rollup/public/search/rollup_search_strategy.js
+++ b/x-pack/legacy/plugins/rollup/public/search/rollup_search_strategy.js
@@ -7,43 +7,8 @@
import { kfetch } from 'ui/kfetch';
import { SearchError, getSearchErrorType } from 'ui/courier';
-function getAllFetchParams(searchRequests, Promise) {
- return Promise.map(searchRequests, (searchRequest) => {
- return Promise.try(searchRequest.getFetchParams, void 0, searchRequest)
- .then((fetchParams) => {
- return (searchRequest.fetchParams = fetchParams);
- })
- .then(value => ({ resolved: value }))
- .catch(error => ({ rejected: error }));
- });
-}
-
-function serializeAllFetchParams(fetchParams, searchRequests) {
- const searchRequestsWithFetchParams = [];
- const failedSearchRequests = [];
-
- // Gather the fetch param responses from all the successful requests.
- fetchParams.forEach((result, index) => {
- if (result.resolved) {
- searchRequestsWithFetchParams.push(result.resolved);
- } else {
- const searchRequest = searchRequests[index];
-
- searchRequest.handleFailure(result.rejected);
- failedSearchRequests.push(searchRequest);
- }
- });
-
- const serializedFetchParams = serializeFetchParams(searchRequestsWithFetchParams);
-
- return {
- serializedFetchParams,
- failedSearchRequests,
- };
-}
-
-function serializeFetchParams(searchRequestsWithFetchParams) {
- return JSON.stringify(searchRequestsWithFetchParams.map(searchRequestWithFetchParams => {
+function serializeFetchParams(searchRequests) {
+ return JSON.stringify(searchRequests.map(searchRequestWithFetchParams => {
const indexPattern = searchRequestWithFetchParams.index.title || searchRequestWithFetchParams.index;
const {
body: {
@@ -84,16 +49,9 @@ function shimHitsInFetchResponse(response) {
export const rollupSearchStrategy = {
id: 'rollup',
- search: async ({ searchRequests, Promise }) => {
- // Flatten the searchSource within each searchRequest to get the fetch params,
- // e.g. body, filters, index pattern, query.
- const allFetchParams = await getAllFetchParams(searchRequests, Promise);
-
+ search: ({ searchRequests, Promise }) => {
// Serialize the fetch params into a format suitable for the body of an ES query.
- const {
- serializedFetchParams,
- failedSearchRequests,
- } = await serializeAllFetchParams(allFetchParams, searchRequests);
+ const serializedFetchParams = serializeFetchParams(searchRequests);
const controller = new AbortController();
const promise = kfetch({
@@ -124,7 +82,6 @@ export const rollupSearchStrategy = {
return Promise.reject(searchError);
}),
abort: () => controller.abort(),
- failedSearchRequests,
};
},
diff --git a/x-pack/legacy/plugins/siem/public/components/link_to/index.ts b/x-pack/legacy/plugins/siem/public/components/link_to/index.ts
index cbd01b720295f..7eb39de3d96b4 100644
--- a/x-pack/legacy/plugins/siem/public/components/link_to/index.ts
+++ b/x-pack/legacy/plugins/siem/public/components/link_to/index.ts
@@ -6,6 +6,6 @@
export { LinkToPage } from './link_to';
export { getOverviewUrl, RedirectToOverviewPage } from './redirect_to_overview';
-export { getHostsUrl } from './redirect_to_hosts';
-export { getNetworkUrl, RedirectToNetworkPage } from './redirect_to_network';
+export { getHostsUrl, getHostDetailsUrl } from './redirect_to_hosts';
+export { getNetworkUrl, getIPDetailsUrl, RedirectToNetworkPage } from './redirect_to_network';
export { getTimelinesUrl, RedirectToTimelinesPage } from './redirect_to_timelines';
diff --git a/x-pack/legacy/plugins/siem/public/components/link_to/link_to.tsx b/x-pack/legacy/plugins/siem/public/components/link_to/link_to.tsx
index 1fdae4ba9b301..0360c1004f151 100644
--- a/x-pack/legacy/plugins/siem/public/components/link_to/link_to.tsx
+++ b/x-pack/legacy/plugins/siem/public/components/link_to/link_to.tsx
@@ -13,33 +13,49 @@ import { RedirectToNetworkPage } from './redirect_to_network';
import { RedirectToOverviewPage } from './redirect_to_overview';
import { RedirectToTimelinesPage } from './redirect_to_timelines';
import { HostsTableType } from '../../store/hosts/model';
+import { SiemPageName } from '../../pages/home/types';
+
interface LinkToPageProps {
match: RouteMatch<{}>;
}
export const LinkToPage = pure(({ match }) => (
-
-
+
+
-
-
+
+
));
diff --git a/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_hosts.tsx b/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_hosts.tsx
index ee4ff75595c66..05139320b171d 100644
--- a/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_hosts.tsx
+++ b/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_hosts.tsx
@@ -9,6 +9,7 @@ import { RouteComponentProps } from 'react-router-dom';
import { RedirectWrapper } from './redirect_wrapper';
import { HostsTableType } from '../../store/hosts/model';
+import { SiemPageName } from '../../pages/home/types';
export type HostComponentProps = RouteComponentProps<{
detailName: string;
@@ -24,7 +25,7 @@ export const RedirectToHostsPage = ({
}: HostComponentProps) => {
const defaultSelectedTab = HostsTableType.hosts;
const selectedTab = tabName ? tabName : defaultSelectedTab;
- const to = `/hosts/${selectedTab}${search}`;
+ const to = `/${SiemPageName.hosts}/${selectedTab}${search}`;
return ;
};
@@ -37,16 +38,18 @@ export const RedirectToHostDetailsPage = ({
}: HostComponentProps) => {
const defaultSelectedTab = HostsTableType.authentications;
const selectedTab = tabName ? tabName : defaultSelectedTab;
- const to = `/hosts/${detailName}/${selectedTab}${search}`;
+ const to = `/${SiemPageName.hosts}/${detailName}/${selectedTab}${search}`;
return ;
};
-export const getHostsUrl = () => '#/link-to/hosts';
+const baseHostsUrl = `#/link-to/${SiemPageName.hosts}`;
-export const getTabsOnHostsUrl = (tabName: HostsTableType) => `#/link-to/hosts/${tabName}`;
+export const getHostsUrl = () => baseHostsUrl;
-export const getHostDetailsUrl = (detailName: string) => `#/link-to/hosts/${detailName}`;
+export const getTabsOnHostsUrl = (tabName: HostsTableType) => `${baseHostsUrl}/${tabName}`;
+
+export const getHostDetailsUrl = (detailName: string) => `${baseHostsUrl}/${detailName}`;
export const getTabsOnHostDetailsUrl = (detailName: string, tabName: HostsTableType) => {
- return `#/link-to/hosts/${detailName}/${tabName}`;
+ return `${baseHostsUrl}/${detailName}/${tabName}`;
};
diff --git a/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_network.tsx b/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_network.tsx
index 50b23486d42a2..4a1fa3b6d3b38 100644
--- a/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_network.tsx
+++ b/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_network.tsx
@@ -8,6 +8,7 @@ import React from 'react';
import { RouteComponentProps } from 'react-router-dom';
import { RedirectWrapper } from './redirect_wrapper';
+import { SiemPageName } from '../../pages/home/types';
export type NetworkComponentProps = RouteComponentProps<{
detailName: string;
@@ -20,7 +21,15 @@ export const RedirectToNetworkPage = ({
},
location: { search },
}: NetworkComponentProps) => (
-
+
);
-export const getNetworkUrl = () => '#/link-to/network';
+const baseNetworkUrl = `#/link-to/${SiemPageName.network}`;
+export const getNetworkUrl = () => baseNetworkUrl;
+export const getIPDetailsUrl = (detailName: string) => `${baseNetworkUrl}/ip/${detailName}`;
diff --git a/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_overview.tsx b/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_overview.tsx
index 0dd706aafaddc..c1168a44aa6be 100644
--- a/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_overview.tsx
+++ b/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_overview.tsx
@@ -7,13 +7,14 @@
import React from 'react';
import { RouteComponentProps } from 'react-router';
import { RedirectWrapper } from './redirect_wrapper';
+import { SiemPageName } from '../../pages/home/types';
export type OverviewComponentProps = RouteComponentProps<{
search: string;
}>;
export const RedirectToOverviewPage = ({ location: { search } }: OverviewComponentProps) => (
-
+
);
-export const getOverviewUrl = () => '#/link-to/overview';
+export const getOverviewUrl = () => `#/link-to/${SiemPageName.overview}`;
diff --git a/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_timelines.tsx b/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_timelines.tsx
index fc44b71570476..153166f00a0c0 100644
--- a/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_timelines.tsx
+++ b/x-pack/legacy/plugins/siem/public/components/link_to/redirect_to_timelines.tsx
@@ -7,15 +7,14 @@
import React from 'react';
import { RouteComponentProps } from 'react-router';
import { RedirectWrapper } from './redirect_wrapper';
+import { SiemPageName } from '../../pages/home/types';
export type TimelineComponentProps = RouteComponentProps<{
search: string;
}>;
-export const TIMELINES_PAGE_NAME = 'timelines';
-
export const RedirectToTimelinesPage = ({ location: { search } }: TimelineComponentProps) => (
-
+
);
-export const getTimelinesUrl = () => `#/link-to/${TIMELINES_PAGE_NAME}`;
+export const getTimelinesUrl = () => `#/link-to/${SiemPageName.timelines}`;
diff --git a/x-pack/legacy/plugins/siem/public/components/links/index.tsx b/x-pack/legacy/plugins/siem/public/components/links/index.tsx
index d9e98b4f11662..e260e40c41ac5 100644
--- a/x-pack/legacy/plugins/siem/public/components/links/index.tsx
+++ b/x-pack/legacy/plugins/siem/public/components/links/index.tsx
@@ -9,11 +9,12 @@ import * as React from 'react';
import { pure } from 'recompose';
import { encodeIpv6 } from '../../lib/helpers';
+import { getHostDetailsUrl, getIPDetailsUrl } from '../link_to';
// Internal Links
export const HostDetailsLink = pure<{ children?: React.ReactNode; hostName: string }>(
({ children, hostName }) => (
-
+
{children ? children : hostName}
)
@@ -23,7 +24,7 @@ HostDetailsLink.displayName = 'HostDetailsLink';
export const IPDetailsLink = pure<{ children?: React.ReactNode; ip: string }>(
({ children, ip }) => (
-
+
{children ? children : ip}
)
diff --git a/x-pack/legacy/plugins/siem/public/components/ml/anomaly/get_size_from_anomalies.test.ts b/x-pack/legacy/plugins/siem/public/components/ml/anomaly/get_size_from_anomalies.test.ts
deleted file mode 100644
index f9d648368daf6..0000000000000
--- a/x-pack/legacy/plugins/siem/public/components/ml/anomaly/get_size_from_anomalies.test.ts
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { cloneDeep } from 'lodash/fp';
-import { getSizeFromAnomalies } from './get_size_from_anomalies';
-import { mockAnomalies } from '../mock';
-
-describe('get_size_from_anomalies', () => {
- let anomalies = cloneDeep(mockAnomalies);
-
- beforeEach(() => {
- anomalies = cloneDeep(mockAnomalies);
- });
-
- test('returns 0 if anomalies is null', () => {
- const size = getSizeFromAnomalies(null);
- expect(size).toEqual(0);
- });
-
- test('returns anomalies length', () => {
- const size = getSizeFromAnomalies(anomalies);
- expect(size).toEqual(2);
- });
-});
diff --git a/x-pack/legacy/plugins/siem/public/components/ml/anomaly/get_size_from_anomalies.ts b/x-pack/legacy/plugins/siem/public/components/ml/anomaly/get_size_from_anomalies.ts
deleted file mode 100644
index e77a16c7a7f76..0000000000000
--- a/x-pack/legacy/plugins/siem/public/components/ml/anomaly/get_size_from_anomalies.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { Anomalies } from '../types';
-
-export const getSizeFromAnomalies = (anomalies: Anomalies | null): number => {
- if (anomalies == null) {
- return 0;
- } else {
- return anomalies.anomalies.length;
- }
-};
diff --git a/x-pack/legacy/plugins/siem/public/components/ml/conditional_links/ml_host_conditional_container.tsx b/x-pack/legacy/plugins/siem/public/components/ml/conditional_links/ml_host_conditional_container.tsx
index 1ff89d7ffe625..8bd97304a7e21 100644
--- a/x-pack/legacy/plugins/siem/public/components/ml/conditional_links/ml_host_conditional_container.tsx
+++ b/x-pack/legacy/plugins/siem/public/components/ml/conditional_links/ml_host_conditional_container.tsx
@@ -11,6 +11,8 @@ import { QueryString } from 'ui/utils/query_string';
import { addEntitiesToKql } from './add_entities_to_kql';
import { replaceKQLParts } from './replace_kql_parts';
import { emptyEntity, multipleEntities, getMultipleEntities } from './entity_helpers';
+import { SiemPageName } from '../../../pages/home/types';
+import { HostsTableType } from '../../../store/hosts/model';
interface QueryStringType {
'?_g': string;
@@ -34,7 +36,7 @@ export const MlHostConditionalContainer = React.memo(({
queryStringDecoded.query = replaceKQLParts(queryStringDecoded.query);
}
const reEncoded = QueryString.encode(queryStringDecoded);
- return ;
+ return ;
}}
/>
(({
}
if (emptyEntity(hostName)) {
const reEncoded = QueryString.encode(queryStringDecoded);
- return ;
+ return (
+
+ );
} else if (multipleEntities(hostName)) {
const hosts: string[] = getMultipleEntities(hostName);
queryStringDecoded.query = addEntitiesToKql(
@@ -62,10 +66,16 @@ export const MlHostConditionalContainer = React.memo(({
queryStringDecoded.query || ''
);
const reEncoded = QueryString.encode(queryStringDecoded);
- return ;
+ return (
+
+ );
} else {
const reEncoded = QueryString.encode(queryStringDecoded);
- return ;
+ return (
+
+ );
}
}}
/>
diff --git a/x-pack/legacy/plugins/siem/public/components/ml/conditional_links/ml_network_conditional_container.tsx b/x-pack/legacy/plugins/siem/public/components/ml/conditional_links/ml_network_conditional_container.tsx
index cf9abfe5c46b6..8778818829cf7 100644
--- a/x-pack/legacy/plugins/siem/public/components/ml/conditional_links/ml_network_conditional_container.tsx
+++ b/x-pack/legacy/plugins/siem/public/components/ml/conditional_links/ml_network_conditional_container.tsx
@@ -11,6 +11,7 @@ import { QueryString } from 'ui/utils/query_string';
import { addEntitiesToKql } from './add_entities_to_kql';
import { replaceKQLParts } from './replace_kql_parts';
import { emptyEntity, getMultipleEntities, multipleEntities } from './entity_helpers';
+import { SiemPageName } from '../../../pages/home/types';
interface QueryStringType {
'?_g': string;
@@ -34,7 +35,7 @@ export const MlNetworkConditionalContainer = React.memo ;
+ return ;
}}
/>
;
+ return ;
} else if (multipleEntities(ip)) {
const ips: string[] = getMultipleEntities(ip);
queryStringDecoded.query = addEntitiesToKql(
@@ -62,10 +63,10 @@ export const MlNetworkConditionalContainer = React.memo ;
+ return ;
} else {
const reEncoded = QueryString.encode(queryStringDecoded);
- return ;
+ return ;
}
}}
/>
diff --git a/x-pack/legacy/plugins/siem/public/components/ml/tables/anomalies_host_table.tsx b/x-pack/legacy/plugins/siem/public/components/ml/tables/anomalies_host_table.tsx
index 29554aff3027c..327967e8fed82 100644
--- a/x-pack/legacy/plugins/siem/public/components/ml/tables/anomalies_host_table.tsx
+++ b/x-pack/legacy/plugins/siem/public/components/ml/tables/anomalies_host_table.tsx
@@ -14,7 +14,6 @@ import { getAnomaliesHostTableColumnsCurated } from './get_anomalies_host_table_
import { convertAnomaliesToHosts } from './convert_anomalies_to_hosts';
import { Loader } from '../../loader';
import { getIntervalFromAnomalies } from '../anomaly/get_interval_from_anomalies';
-import { getSizeFromAnomalies } from '../anomaly/get_size_from_anomalies';
import { AnomaliesHostTableProps } from '../types';
import { hasMlUserPermissions } from '../permissions/has_ml_user_permissions';
import { MlCapabilitiesContext } from '../permissions/ml_capabilities_provider';
@@ -51,9 +50,9 @@ export const AnomaliesHostTable = React.memo(
narrowDateRange
);
const pagination = {
- pageIndex: 0,
- pageSize: 10,
- totalItemCount: getSizeFromAnomalies(tableData),
+ initialPageIndex: 0,
+ initialPageSize: 10,
+ totalItemCount: hosts.length,
pageSizeOptions: [5, 10, 20, 50],
hidePerPageOptions: false,
};
diff --git a/x-pack/legacy/plugins/siem/public/components/ml/tables/anomalies_network_table.tsx b/x-pack/legacy/plugins/siem/public/components/ml/tables/anomalies_network_table.tsx
index 109019fbde280..f5f2e81cee201 100644
--- a/x-pack/legacy/plugins/siem/public/components/ml/tables/anomalies_network_table.tsx
+++ b/x-pack/legacy/plugins/siem/public/components/ml/tables/anomalies_network_table.tsx
@@ -14,7 +14,6 @@ import { Loader } from '../../loader';
import { AnomaliesNetworkTableProps } from '../types';
import { getAnomaliesNetworkTableColumnsCurated } from './get_anomalies_network_table_columns';
import { getIntervalFromAnomalies } from '../anomaly/get_interval_from_anomalies';
-import { getSizeFromAnomalies } from '../anomaly/get_size_from_anomalies';
import { hasMlUserPermissions } from '../permissions/has_ml_user_permissions';
import { MlCapabilitiesContext } from '../permissions/ml_capabilities_provider';
import { BasicTable } from './basic_table';
@@ -49,9 +48,9 @@ export const AnomaliesNetworkTable = React.memo(
narrowDateRange
);
const pagination = {
- pageIndex: 0,
- pageSize: 10,
- totalItemCount: getSizeFromAnomalies(tableData),
+ initialPageIndex: 0,
+ initialPageSize: 10,
+ totalItemCount: networks.length,
pageSizeOptions: [5, 10, 20, 50],
hidePerPageOptions: false,
};
diff --git a/x-pack/legacy/plugins/siem/public/components/navigation/breadcrumbs/index.ts b/x-pack/legacy/plugins/siem/public/components/navigation/breadcrumbs/index.ts
index be185de965f5e..8d9ebb964ce63 100644
--- a/x-pack/legacy/plugins/siem/public/components/navigation/breadcrumbs/index.ts
+++ b/x-pack/legacy/plugins/siem/public/components/navigation/breadcrumbs/index.ts
@@ -10,7 +10,7 @@ import { getOr } from 'lodash/fp';
import { APP_NAME } from '../../../../common/constants';
import { getBreadcrumbs as getHostDetailsBreadcrumbs } from '../../../pages/hosts/details/utils';
import { getBreadcrumbs as getIPDetailsBreadcrumbs } from '../../../pages/network/ip_details';
-import { SiemPageName } from '../../../pages/home/home_navigations';
+import { SiemPageName } from '../../../pages/home/types';
import { RouteSpyState } from '../../../utils/route/types';
import { getOverviewUrl } from '../../link_to';
diff --git a/x-pack/legacy/plugins/siem/public/components/navigation/tab_navigation/index.test.tsx b/x-pack/legacy/plugins/siem/public/components/navigation/tab_navigation/index.test.tsx
index ac4b78c5b61f5..f58519dc0e4c5 100644
--- a/x-pack/legacy/plugins/siem/public/components/navigation/tab_navigation/index.test.tsx
+++ b/x-pack/legacy/plugins/siem/public/components/navigation/tab_navigation/index.test.tsx
@@ -9,7 +9,8 @@ import * as React from 'react';
import { TabNavigation } from './';
import { TabNavigationProps } from './types';
-import { navTabs, SiemPageName } from '../../../pages/home/home_navigations';
+import { navTabs } from '../../../pages/home/home_navigations';
+import { SiemPageName } from '../../../pages/home/types';
import { HostsTableType } from '../../../store/hosts/model';
import { navTabsHostDetails } from '../../../pages/hosts/details/nav_tabs';
import { CONSTANTS } from '../../url_state/constants';
diff --git a/x-pack/legacy/plugins/siem/public/components/page/network/network_top_countries_table/columns.tsx b/x-pack/legacy/plugins/siem/public/components/page/network/network_top_countries_table/columns.tsx
index 290891def3da8..abb57c7d7f55e 100644
--- a/x-pack/legacy/plugins/siem/public/components/page/network/network_top_countries_table/columns.tsx
+++ b/x-pack/legacy/plugins/siem/public/components/page/network/network_top_countries_table/columns.tsx
@@ -141,7 +141,10 @@ export const getNetworkTopCountriesColumns = (
{
align: 'right',
field: `node.${flowTarget}.${getOppositeField(flowTarget)}_ips`,
- name: flowTarget === FlowTargetSourceDest.source ? i18n.SOURCE_IPS : i18n.DESTINATION_IPS,
+ name:
+ getOppositeField(flowTarget) === FlowTargetSourceDest.source
+ ? i18n.SOURCE_IPS
+ : i18n.DESTINATION_IPS,
sortable: true,
render: ips => {
if (ips != null) {
diff --git a/x-pack/legacy/plugins/siem/public/components/page/network/network_top_countries_table/translations.ts b/x-pack/legacy/plugins/siem/public/components/page/network/network_top_countries_table/translations.ts
index 2fad2687daf1e..70450f08a7d92 100644
--- a/x-pack/legacy/plugins/siem/public/components/page/network/network_top_countries_table/translations.ts
+++ b/x-pack/legacy/plugins/siem/public/components/page/network/network_top_countries_table/translations.ts
@@ -34,14 +34,14 @@ export const FLOWS = i18n.translate('xpack.siem.networkTopCountriesTable.column.
export const DESTINATION_COUNTRIES = i18n.translate(
'xpack.siem.networkTopCountriesTable.heading.destinationCountries',
{
- defaultMessage: 'Top Destination Countries',
+ defaultMessage: 'Destination Countries',
}
);
export const SOURCE_COUNTRIES = i18n.translate(
'xpack.siem.networkTopCountriesTable.heading.sourceCountries',
{
- defaultMessage: 'Top Source Countries',
+ defaultMessage: 'Source Countries',
}
);
diff --git a/x-pack/legacy/plugins/siem/public/components/page/overview/overview_host/index.tsx b/x-pack/legacy/plugins/siem/public/components/page/overview/overview_host/index.tsx
index bb96353c862c8..3d4a2bc31f2fc 100644
--- a/x-pack/legacy/plugins/siem/public/components/page/overview/overview_host/index.tsx
+++ b/x-pack/legacy/plugins/siem/public/components/page/overview/overview_host/index.tsx
@@ -17,6 +17,7 @@ import {
} from '../../../../containers/overview/overview_host';
import { inputsModel } from '../../../../store/inputs';
import { OverviewHostStats } from '../overview_host_stats';
+import { getHostsUrl } from '../../../link_to';
export interface OwnProps {
startDate: number;
@@ -55,7 +56,7 @@ export const OverviewHost = pure(({ endDate, startDate, setQu
}
>
-
+
diff --git a/x-pack/legacy/plugins/siem/public/components/page/overview/overview_network/index.tsx b/x-pack/legacy/plugins/siem/public/components/page/overview/overview_network/index.tsx
index c27395d157ebf..c1629a50341db 100644
--- a/x-pack/legacy/plugins/siem/public/components/page/overview/overview_network/index.tsx
+++ b/x-pack/legacy/plugins/siem/public/components/page/overview/overview_network/index.tsx
@@ -17,6 +17,7 @@ import {
} from '../../../../containers/overview/overview_network';
import { inputsModel } from '../../../../store/inputs';
import { OverviewNetworkStats } from '../overview_network_stats';
+import { getNetworkUrl } from '../../../link_to';
export interface OwnProps {
startDate: number;
@@ -58,7 +59,7 @@ export const OverviewNetwork = pure(({ endDate, startDate, setQuery })
/>
}
>
-
+
;
export const navTabs: SiemNavTab = {
[SiemPageName.overview]: {
diff --git a/x-pack/legacy/plugins/siem/public/pages/home/index.tsx b/x-pack/legacy/plugins/siem/public/pages/home/index.tsx
index 94a1fcd203bfc..037c900568299 100644
--- a/x-pack/legacy/plugins/siem/public/pages/home/index.tsx
+++ b/x-pack/legacy/plugins/siem/public/pages/home/index.tsx
@@ -10,6 +10,7 @@ import * as React from 'react';
import { Redirect, Route, Switch } from 'react-router-dom';
import { pure } from 'recompose';
import styled from 'styled-components';
+import { i18n } from '@kbn/i18n';
import { AutoSizer } from '../../components/auto_sizer';
import { DragDropContextWrapper } from '../../components/drag_and_drop/drag_drop_context_wrapper';
@@ -29,6 +30,7 @@ import { MlPopover } from '../../components/ml_popover/ml_popover';
import { MlHostConditionalContainer } from '../../components/ml/conditional_links/ml_host_conditional_container';
import { MlNetworkConditionalContainer } from '../../components/ml/conditional_links/ml_network_conditional_container';
import { navTabs } from './home_navigations';
+import { SiemPageName } from './types';
import { UseUrlState } from '../../components/url_state';
import { SpyRoute } from '../../utils/route/spy_routes';
@@ -103,7 +105,11 @@ export const HomePage = pure(() => (
-
+
@@ -137,21 +143,27 @@ export const HomePage = pure(() => (
-
- } />
+
+ }
+ />
(
)}
/>
(
)}
/>
- } />
+ }
+ />
;
diff --git a/x-pack/legacy/plugins/siem/public/pages/hosts/details/nav_tabs.tsx b/x-pack/legacy/plugins/siem/public/pages/hosts/details/nav_tabs.tsx
index 6072b51254fb1..9867e745c3190 100644
--- a/x-pack/legacy/plugins/siem/public/pages/hosts/details/nav_tabs.tsx
+++ b/x-pack/legacy/plugins/siem/public/pages/hosts/details/nav_tabs.tsx
@@ -8,9 +8,10 @@ import { omit } from 'lodash/fp';
import * as i18n from './../translations';
import { HostDetailsNavTab } from './types';
import { HostsTableType } from '../../../store/hosts/model';
+import { SiemPageName } from '../../home/types';
const getTabsOnHostDetailsUrl = (hostName: string, tabName: HostsTableType) =>
- `#/hosts/${hostName}/${tabName}`;
+ `#/${SiemPageName.hosts}/${hostName}/${tabName}`;
export const navTabsHostDetails = (
hostName: string,
diff --git a/x-pack/legacy/plugins/siem/public/pages/hosts/index.tsx b/x-pack/legacy/plugins/siem/public/pages/hosts/index.tsx
index e578809131405..6596d4c65c00e 100644
--- a/x-pack/legacy/plugins/siem/public/pages/hosts/index.tsx
+++ b/x-pack/legacy/plugins/siem/public/pages/hosts/index.tsx
@@ -18,9 +18,10 @@ import {
import { HostsBody } from './hosts_body';
import { HostsTableType } from '../../store/hosts/model';
import { GlobalTime } from '../../containers/global_time';
+import { SiemPageName } from '../home/types';
import { Hosts } from './hosts';
-const hostsPagePath = `/:pageName(hosts)`;
+const hostsPagePath = `/:pageName(${SiemPageName.hosts})`;
const getHostsTabPath = (pagePath: string) =>
`${pagePath}/:tabName(` +
@@ -236,9 +237,12 @@ export const HostsContainer = React.memo(({ url }) => (
)}
/>
(
-
+
)}
/>
diff --git a/x-pack/legacy/plugins/siem/public/pages/hosts/nav_tabs.tsx b/x-pack/legacy/plugins/siem/public/pages/hosts/nav_tabs.tsx
index 8ba33f796a577..0756efe1f9b6e 100644
--- a/x-pack/legacy/plugins/siem/public/pages/hosts/nav_tabs.tsx
+++ b/x-pack/legacy/plugins/siem/public/pages/hosts/nav_tabs.tsx
@@ -8,8 +8,9 @@ import { omit } from 'lodash/fp';
import * as i18n from './translations';
import { HostsTableType } from '../../store/hosts/model';
import { HostsNavTab } from './navigation/types';
+import { SiemPageName } from '../home/types';
-const getTabsOnHostsUrl = (tabName: HostsTableType) => `#/hosts/${tabName}`;
+const getTabsOnHostsUrl = (tabName: HostsTableType) => `#/${SiemPageName.hosts}/${tabName}`;
export const navTabsHosts = (hasMlUserPermissions: boolean): HostsNavTab => {
const hostsNavTabs = {
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/index.tsx b/x-pack/legacy/plugins/siem/public/pages/network/index.tsx
index 558e2d89d04e5..009a89bb3889f 100644
--- a/x-pack/legacy/plugins/siem/public/pages/network/index.tsx
+++ b/x-pack/legacy/plugins/siem/public/pages/network/index.tsx
@@ -13,12 +13,13 @@ import { hasMlUserPermissions } from '../../components/ml/permissions/has_ml_use
import { IPDetails } from './ip_details';
import { Network } from './network';
import { GlobalTime } from '../../containers/global_time';
+import { SiemPageName } from '../home/types';
import { getNetworkRoutePath } from './navigation';
import { NetworkRouteType } from './navigation/types';
type Props = Partial> & { url: string };
-const networkPagePath = `/:pageName(network)`;
+const networkPagePath = `/:pageName(${SiemPageName.network})`;
const ipDetailsPagePath = `${networkPagePath}/ip/:detailName`;
export const NetworkContainer = React.memo(() => {
@@ -66,9 +67,9 @@ export const NetworkContainer = React.memo(() => {
)}
/>
(
-
+
)}
/>
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/ip_details.tsx b/x-pack/legacy/plugins/siem/public/pages/network/ip_details.tsx
deleted file mode 100644
index ef3af241a2267..0000000000000
--- a/x-pack/legacy/plugins/siem/public/pages/network/ip_details.tsx
+++ /dev/null
@@ -1,468 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { EuiHorizontalRule, EuiSpacer, EuiFlexItem } from '@elastic/eui';
-import { getOr } from 'lodash/fp';
-import React, { useEffect } from 'react';
-import { connect } from 'react-redux';
-import { StickyContainer } from 'react-sticky';
-import { Breadcrumb } from 'ui/chrome';
-
-import { FiltersGlobal } from '../../components/filters_global';
-import { HeaderPage } from '../../components/header_page';
-import { LastEventTime } from '../../components/last_event_time';
-import { getNetworkUrl } from '../../components/link_to/redirect_to_network';
-import { AnomalyTableProvider } from '../../components/ml/anomaly/anomaly_table_provider';
-import { networkToCriteria } from '../../components/ml/criteria/network_to_criteria';
-import { scoreIntervalToDateTime } from '../../components/ml/score/score_interval_to_datetime';
-import { AnomaliesNetworkTable } from '../../components/ml/tables/anomalies_network_table';
-import { manageQuery } from '../../components/page/manage_query';
-import { FlowTargetSelectConnected } from '../../components/page/network/flow_target_select_connected';
-import { IpOverview } from '../../components/page/network/ip_overview';
-import { NetworkTopNFlowTable } from '../../components/page/network/network_top_n_flow_table';
-import { TlsTable } from '../../components/page/network/tls_table';
-import { UsersTable } from '../../components/page/network/users_table';
-import { SiemSearchBar } from '../../components/search_bar';
-import { IpOverviewQuery } from '../../containers/ip_overview';
-import { NetworkTopNFlowQuery } from '../../containers/network_top_n_flow';
-import { indicesExistOrDataTemporarilyUnavailable, WithSource } from '../../containers/source';
-import { TlsQuery } from '../../containers/tls';
-import { UsersQuery } from '../../containers/users';
-import { FlowTargetSourceDest, LastEventIndexKey } from '../../graphql/types';
-import { decodeIpv6 } from '../../lib/helpers';
-import { convertToBuildEsQuery } from '../../lib/keury';
-import { ConditionalFlexGroup } from '../../pages/network/navigation/conditional_flex_group';
-import { networkModel, networkSelectors, State, inputsSelectors } from '../../store';
-import { setAbsoluteRangeDatePicker as dispatchAbsoluteRangeDatePicker } from '../../store/inputs/actions';
-import { setIpDetailsTablesActivePageToZero as dispatchIpDetailsTablesActivePageToZero } from '../../store/network/actions';
-import { SpyRoute } from '../../utils/route/spy_routes';
-import { NetworkEmptyPage } from './network_empty_page';
-import { NetworkTopCountriesQuery } from '../../containers/network_top_countries';
-import { NetworkTopCountriesTable } from '../../components/page/network/network_top_countries_table';
-import * as i18n from './translations';
-import { IPDetailsComponentProps } from './types';
-
-const TlsTableManage = manageQuery(TlsTable);
-const UsersTableManage = manageQuery(UsersTable);
-const IpOverviewManage = manageQuery(IpOverview);
-const NetworkTopNFlowTableManage = manageQuery(NetworkTopNFlowTable);
-const NetworkTopCountriesTableManage = manageQuery(NetworkTopCountriesTable);
-
-export const IPDetailsComponent = React.memo(
- ({
- detailName,
- filters,
- flowTarget,
- from,
- isInitializing,
- query,
- setAbsoluteRangeDatePicker,
- setIpDetailsTablesActivePageToZero,
- setQuery,
- to,
- }) => {
- useEffect(() => {
- setIpDetailsTablesActivePageToZero(null);
- }, [detailName]);
- return (
- <>
-
- {({ indicesExist, indexPattern }) => {
- const ip = decodeIpv6(detailName);
- const filterQuery = convertToBuildEsQuery({
- indexPattern,
- queries: [query],
- filters,
- });
- return indicesExistOrDataTemporarilyUnavailable(indicesExist) ? (
-
-
-
-
-
- }
- title={ip}
- draggableArguments={{ field: `${flowTarget}.ip`, value: ip }}
- >
-
-
-
-
- {({ id, inspect, ipOverviewData, loading, refetch }) => (
-
- {({ isLoadingAnomaliesData, anomaliesData }) => (
- {
- const fromTo = scoreIntervalToDateTime(score, interval);
- setAbsoluteRangeDatePicker({
- id: 'global',
- from: fromTo.from,
- to: fromTo.to,
- });
- }}
- />
- )}
-
- )}
-
-
-
-
-
-
-
- {({
- id,
- inspect,
- isInspected,
- loading,
- loadPage,
- networkTopNFlow,
- pageInfo,
- refetch,
- totalCount,
- }) => (
-
- )}
-
-
-
-
-
- {({
- id,
- inspect,
- isInspected,
- loading,
- loadPage,
- networkTopNFlow,
- pageInfo,
- refetch,
- totalCount,
- }) => (
-
- )}
-
-
-
-
-
-
-
-
-
- {({
- id,
- inspect,
- isInspected,
- loading,
- loadPage,
- networkTopCountries,
- pageInfo,
- refetch,
- totalCount,
- }) => (
-
- )}
-
-
-
-
-
- {({
- id,
- inspect,
- isInspected,
- loading,
- loadPage,
- networkTopCountries,
- pageInfo,
- refetch,
- totalCount,
- }) => (
-
- )}
-
-
-
-
-
-
-
- {({
- id,
- inspect,
- isInspected,
- users,
- totalCount,
- pageInfo,
- loading,
- loadPage,
- refetch,
- }) => (
-
- )}
-
-
-
-
-
- {({
- id,
- inspect,
- isInspected,
- tls,
- totalCount,
- pageInfo,
- loading,
- loadPage,
- refetch,
- }) => (
-
- )}
-
-
-
-
- {
- const fromTo = scoreIntervalToDateTime(score, interval);
- setAbsoluteRangeDatePicker({
- id: 'global',
- from: fromTo.from,
- to: fromTo.to,
- });
- }}
- />
-
- ) : (
- <>
-
-
-
- >
- );
- }}
-
-
- >
- );
- }
-);
-
-IPDetailsComponent.displayName = 'IPDetailsComponent';
-
-const makeMapStateToProps = () => {
- const getGlobalQuerySelector = inputsSelectors.globalQuerySelector();
- const getGlobalFiltersQuerySelector = inputsSelectors.globalFiltersQuerySelector();
- const getIpDetailsFlowTargetSelector = networkSelectors.ipDetailsFlowTargetSelector();
- return (state: State) => ({
- query: getGlobalQuerySelector(state),
- filters: getGlobalFiltersQuerySelector(state),
- flowTarget: getIpDetailsFlowTargetSelector(state),
- });
-};
-
-export const IPDetails = connect(
- makeMapStateToProps,
- {
- setAbsoluteRangeDatePicker: dispatchAbsoluteRangeDatePicker,
- setIpDetailsTablesActivePageToZero: dispatchIpDetailsTablesActivePageToZero,
- }
-)(IPDetailsComponent);
-
-export const getBreadcrumbs = (ip: string | undefined, search: string[]): Breadcrumb[] => {
- const breadcrumbs = [
- {
- text: i18n.PAGE_TITLE,
- href: `${getNetworkUrl()}${search && search[0] ? search[0] : ''}`,
- },
- ];
- if (ip) {
- return [
- ...breadcrumbs,
- {
- text: decodeIpv6(ip),
- href: '',
- },
- ];
- } else {
- return breadcrumbs;
- }
-};
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/__snapshots__/ip_details.test.tsx.snap b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/__snapshots__/index.test.tsx.snap
similarity index 100%
rename from x-pack/legacy/plugins/siem/public/pages/network/__snapshots__/ip_details.test.tsx.snap
rename to x-pack/legacy/plugins/siem/public/pages/network/ip_details/__snapshots__/index.test.tsx.snap
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/ip_details.test.tsx b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/index.test.tsx
similarity index 89%
rename from x-pack/legacy/plugins/siem/public/pages/network/ip_details.test.tsx
rename to x-pack/legacy/plugins/siem/public/pages/network/ip_details/index.test.tsx
index 5cee83cfaeb7a..b4e37d34f1574 100644
--- a/x-pack/legacy/plugins/siem/public/pages/network/ip_details.test.tsx
+++ b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/index.test.tsx
@@ -13,18 +13,18 @@ import { MockedProvider } from 'react-apollo/test-utils';
import { ActionCreator } from 'typescript-fsa';
import { npSetup } from 'ui/new_platform';
-import '../../mock/match_media';
+import '../../../mock/match_media';
-import { mocksSource } from '../../containers/source/mock';
-import { FlowTarget } from '../../graphql/types';
-import { apolloClientObservable, mockGlobalState, TestProviders } from '../../mock';
-import { MockNpSetUp, mockUiSettings } from '../../mock/ui_settings';
-import { createStore, State } from '../../store';
-import { InputsModelId } from '../../store/inputs/constants';
+import { mocksSource } from '../../../containers/source/mock';
+import { FlowTarget } from '../../../graphql/types';
+import { apolloClientObservable, mockGlobalState, TestProviders } from '../../../mock';
+import { MockNpSetUp, mockUiSettings } from '../../../mock/ui_settings';
+import { createStore, State } from '../../../store';
+import { InputsModelId } from '../../../store/inputs/constants';
-import { IPDetailsComponent, IPDetails } from './ip_details';
+import { IPDetailsComponent, IPDetails } from './index';
-jest.mock('../../lib/settings/use_kibana_ui_setting');
+jest.mock('../../../lib/settings/use_kibana_ui_setting');
type Action = 'PUSH' | 'POP' | 'REPLACE';
const pop: Action = 'POP';
@@ -37,7 +37,7 @@ mockNpSetup.core.uiSettings = mockUiSettings;
// Test will fail because we will to need to mock some core services to make the test work
// For now let's forget about SiemSearchBar
-jest.mock('../../components/search_bar', () => ({
+jest.mock('../../../components/search_bar', () => ({
SiemSearchBar: () => null,
}));
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/ip_details/index.tsx b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/index.tsx
new file mode 100644
index 0000000000000..9e7ef2975e2b5
--- /dev/null
+++ b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/index.tsx
@@ -0,0 +1,277 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { EuiHorizontalRule, EuiSpacer, EuiFlexItem } from '@elastic/eui';
+import React, { useCallback, useEffect } from 'react';
+import { connect } from 'react-redux';
+import { StickyContainer } from 'react-sticky';
+
+import { FiltersGlobal } from '../../../components/filters_global';
+import { HeaderPage } from '../../../components/header_page';
+import { LastEventTime } from '../../../components/last_event_time';
+import { AnomalyTableProvider } from '../../../components/ml/anomaly/anomaly_table_provider';
+import { networkToCriteria } from '../../../components/ml/criteria/network_to_criteria';
+import { scoreIntervalToDateTime } from '../../../components/ml/score/score_interval_to_datetime';
+import { AnomaliesNetworkTable } from '../../../components/ml/tables/anomalies_network_table';
+import { manageQuery } from '../../../components/page/manage_query';
+import { FlowTargetSelectConnected } from '../../../components/page/network/flow_target_select_connected';
+import { IpOverview } from '../../../components/page/network/ip_overview';
+import { SiemSearchBar } from '../../../components/search_bar';
+import { IpOverviewQuery } from '../../../containers/ip_overview';
+import { indicesExistOrDataTemporarilyUnavailable, WithSource } from '../../../containers/source';
+import { FlowTargetSourceDest, LastEventIndexKey } from '../../../graphql/types';
+import { decodeIpv6 } from '../../../lib/helpers';
+import { convertToBuildEsQuery } from '../../../lib/keury';
+import { ConditionalFlexGroup } from '../../../pages/network/navigation/conditional_flex_group';
+import { networkModel, networkSelectors, State, inputsSelectors } from '../../../store';
+import { setAbsoluteRangeDatePicker as dispatchAbsoluteRangeDatePicker } from '../../../store/inputs/actions';
+import { setIpDetailsTablesActivePageToZero as dispatchIpDetailsTablesActivePageToZero } from '../../../store/network/actions';
+import { SpyRoute } from '../../../utils/route/spy_routes';
+import { NetworkEmptyPage } from '../network_empty_page';
+
+import { IPDetailsComponentProps } from './types';
+export { getBreadcrumbs } from './utils';
+import { TlsQueryTable } from './tls_query_table';
+import { UsersQueryTable } from './users_query_table';
+import { NetworkTopNFlowQueryTable } from './network_top_n_flow_query_table';
+import { NetworkTopCountriesQueryTable } from './network_top_countries_query_table';
+
+const IpOverviewManage = manageQuery(IpOverview);
+
+export const IPDetailsComponent = React.memo(
+ ({
+ detailName,
+ filters,
+ flowTarget,
+ from,
+ isInitializing,
+ query,
+ setAbsoluteRangeDatePicker,
+ setIpDetailsTablesActivePageToZero,
+ setQuery,
+ to,
+ }) => {
+ const narrowDateRange = useCallback(
+ (score, interval) => {
+ const fromTo = scoreIntervalToDateTime(score, interval);
+ setAbsoluteRangeDatePicker({
+ id: 'global',
+ from: fromTo.from,
+ to: fromTo.to,
+ });
+ },
+ [scoreIntervalToDateTime, setAbsoluteRangeDatePicker]
+ );
+
+ useEffect(() => {
+ setIpDetailsTablesActivePageToZero(null);
+ }, [detailName]);
+
+ return (
+ <>
+
+ {({ indicesExist, indexPattern }) => {
+ const ip = decodeIpv6(detailName);
+ const filterQuery = convertToBuildEsQuery({
+ indexPattern,
+ queries: [query],
+ filters,
+ });
+ return indicesExistOrDataTemporarilyUnavailable(indicesExist) ? (
+
+
+
+
+
+ }
+ title={ip}
+ draggableArguments={{ field: `${flowTarget}.ip`, value: ip }}
+ >
+
+
+
+
+ {({ id, inspect, ipOverviewData, loading, refetch }) => (
+
+ {({ isLoadingAnomaliesData, anomaliesData }) => (
+ {
+ const fromTo = scoreIntervalToDateTime(score, interval);
+ setAbsoluteRangeDatePicker({
+ id: 'global',
+ from: fromTo.from,
+ to: fromTo.to,
+ });
+ }}
+ />
+ )}
+
+ )}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ) : (
+ <>
+
+
+
+ >
+ );
+ }}
+
+
+ >
+ );
+ }
+);
+
+IPDetailsComponent.displayName = 'IPDetailsComponent';
+
+const makeMapStateToProps = () => {
+ const getGlobalQuerySelector = inputsSelectors.globalQuerySelector();
+ const getGlobalFiltersQuerySelector = inputsSelectors.globalFiltersQuerySelector();
+ const getIpDetailsFlowTargetSelector = networkSelectors.ipDetailsFlowTargetSelector();
+ return (state: State) => ({
+ query: getGlobalQuerySelector(state),
+ filters: getGlobalFiltersQuerySelector(state),
+ flowTarget: getIpDetailsFlowTargetSelector(state),
+ });
+};
+
+export const IPDetails = connect(
+ makeMapStateToProps,
+ {
+ setAbsoluteRangeDatePicker: dispatchAbsoluteRangeDatePicker,
+ setIpDetailsTablesActivePageToZero: dispatchIpDetailsTablesActivePageToZero,
+ }
+)(IPDetailsComponent);
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/ip_details/network_top_countries_query_table.tsx b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/network_top_countries_query_table.tsx
new file mode 100644
index 0000000000000..8f3505009b9a5
--- /dev/null
+++ b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/network_top_countries_query_table.tsx
@@ -0,0 +1,68 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import React from 'react';
+import { getOr } from 'lodash/fp';
+import { manageQuery } from '../../../components/page/manage_query';
+import { NetworkWithIndexComponentsQueryTableProps } from './types';
+import { NetworkTopCountriesQuery } from '../../../containers/network_top_countries';
+import { NetworkTopCountriesTable } from '../../../components/page/network/network_top_countries_table';
+
+const NetworkTopCountriesTableManage = manageQuery(NetworkTopCountriesTable);
+
+export const NetworkTopCountriesQueryTable = ({
+ endDate,
+ filterQuery,
+ flowTarget,
+ ip,
+ setQuery,
+ skip,
+ startDate,
+ type,
+ indexPattern,
+}: NetworkWithIndexComponentsQueryTableProps) => (
+
+ {({
+ id,
+ inspect,
+ isInspected,
+ loading,
+ loadPage,
+ networkTopCountries,
+ pageInfo,
+ refetch,
+ totalCount,
+ }) => (
+
+ )}
+
+);
+
+NetworkTopCountriesQueryTable.displayName = 'NetworkTopCountriesQueryTable';
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/ip_details/network_top_n_flow_query_table.tsx b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/network_top_n_flow_query_table.tsx
new file mode 100644
index 0000000000000..47d68471fb69b
--- /dev/null
+++ b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/network_top_n_flow_query_table.tsx
@@ -0,0 +1,68 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { getOr } from 'lodash/fp';
+import React from 'react';
+import { manageQuery } from '../../../components/page/manage_query';
+import { NetworkTopNFlowTable } from '../../../components/page/network/network_top_n_flow_table';
+import { NetworkTopNFlowQuery } from '../../../containers/network_top_n_flow';
+import { NetworkWithIndexComponentsQueryTableProps } from './types';
+
+const NetworkTopNFlowTableManage = manageQuery(NetworkTopNFlowTable);
+
+export const NetworkTopNFlowQueryTable = ({
+ endDate,
+ filterQuery,
+ flowTarget,
+ ip,
+ setQuery,
+ skip,
+ startDate,
+ type,
+ indexPattern,
+}: NetworkWithIndexComponentsQueryTableProps) => (
+
+ {({
+ id,
+ inspect,
+ isInspected,
+ loading,
+ loadPage,
+ networkTopNFlow,
+ pageInfo,
+ refetch,
+ totalCount,
+ }) => (
+
+ )}
+
+);
+
+NetworkTopNFlowQueryTable.displayName = 'NetworkTopNFlowQueryTable';
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/ip_details/tls_query_table.tsx b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/tls_query_table.tsx
new file mode 100644
index 0000000000000..ad3ffb8cb0a57
--- /dev/null
+++ b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/tls_query_table.tsx
@@ -0,0 +1,55 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { getOr } from 'lodash/fp';
+import React from 'react';
+import { manageQuery } from '../../../components/page/manage_query';
+import { TlsTable } from '../../../components/page/network/tls_table';
+import { TlsQuery } from '../../../containers/tls';
+import { TlsQueryTableComponentProps } from './types';
+
+const TlsTableManage = manageQuery(TlsTable);
+
+export const TlsQueryTable = ({
+ endDate,
+ filterQuery,
+ flowTarget,
+ ip,
+ setQuery,
+ skip,
+ startDate,
+ type,
+}: TlsQueryTableComponentProps) => (
+
+ {({ id, inspect, isInspected, tls, totalCount, pageInfo, loading, loadPage, refetch }) => (
+
+ )}
+
+);
+
+TlsQueryTable.displayName = 'TlsQueryTable';
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/ip_details/types.ts b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/types.ts
new file mode 100644
index 0000000000000..008409197f77d
--- /dev/null
+++ b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/types.ts
@@ -0,0 +1,73 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { Filter } from '@kbn/es-query';
+import { StaticIndexPattern } from 'ui/index_patterns';
+import { ActionCreator } from 'typescript-fsa';
+import { Query } from 'src/plugins/data/common';
+
+import { NetworkType } from '../../../store/network/model';
+import { ESTermQuery } from '../../../../common/typed_json';
+import { InspectQuery, Refetch } from '../../../store/inputs/model';
+import { FlowTarget, FlowTargetSourceDest } from '../../../graphql/types';
+import { InputsModelId } from '../../../store/inputs/constants';
+import { GlobalTimeArgs } from '../../../containers/global_time';
+
+export const type = NetworkType.details;
+
+type SetAbsoluteRangeDatePicker = ActionCreator<{
+ id: InputsModelId;
+ from: number;
+ to: number;
+}>;
+
+interface IPDetailsComponentReduxProps {
+ filters: Filter[];
+ flowTarget: FlowTarget;
+ query: Query;
+}
+
+interface IPDetailsComponentDispatchProps {
+ setAbsoluteRangeDatePicker: SetAbsoluteRangeDatePicker;
+ setIpDetailsTablesActivePageToZero: ActionCreator;
+}
+
+export type IPDetailsComponentProps = IPDetailsComponentReduxProps &
+ IPDetailsComponentDispatchProps &
+ GlobalTimeArgs & { detailName: string };
+
+interface OwnProps {
+ type: NetworkType;
+ startDate: number;
+ endDate: number;
+ filterQuery: string | ESTermQuery;
+ ip: string;
+ skip: boolean;
+ setQuery: ({
+ id,
+ inspect,
+ loading,
+ refetch,
+ }: {
+ id: string;
+ inspect: InspectQuery | null;
+ loading: boolean;
+ refetch: Refetch;
+ }) => void;
+}
+
+export type NetworkComponentsQueryProps = OwnProps & {
+ flowTarget: FlowTarget;
+};
+
+export type TlsQueryTableComponentProps = OwnProps & {
+ flowTarget: FlowTargetSourceDest;
+};
+
+export type NetworkWithIndexComponentsQueryTableProps = OwnProps & {
+ flowTarget: FlowTargetSourceDest;
+ indexPattern: StaticIndexPattern;
+};
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/ip_details/users_query_table.tsx b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/users_query_table.tsx
new file mode 100644
index 0000000000000..d2f6102e86595
--- /dev/null
+++ b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/users_query_table.tsx
@@ -0,0 +1,56 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import React from 'react';
+import { getOr } from 'lodash/fp';
+import { manageQuery } from '../../../components/page/manage_query';
+import { UsersQuery } from '../../../containers/users';
+import { NetworkComponentsQueryProps } from './types';
+import { UsersTable } from '../../../components/page/network/users_table';
+
+const UsersTableManage = manageQuery(UsersTable);
+
+export const UsersQueryTable = ({
+ endDate,
+ filterQuery,
+ flowTarget,
+ ip,
+ setQuery,
+ skip,
+ startDate,
+ type,
+}: NetworkComponentsQueryProps) => (
+
+ {({ id, inspect, isInspected, users, totalCount, pageInfo, loading, loadPage, refetch }) => (
+
+ )}
+
+);
+
+UsersQueryTable.displayName = 'UsersQueryTable';
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/ip_details/utils.ts b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/utils.ts
new file mode 100644
index 0000000000000..222bf108b4fad
--- /dev/null
+++ b/x-pack/legacy/plugins/siem/public/pages/network/ip_details/utils.ts
@@ -0,0 +1,35 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { Breadcrumb } from 'ui/chrome';
+
+import { decodeIpv6 } from '../../../lib/helpers';
+import { getNetworkUrl } from '../../../components/link_to/redirect_to_network';
+import { networkModel } from '../../../store/network';
+import * as i18n from '../translations';
+
+export const type = networkModel.NetworkType.details;
+
+export const getBreadcrumbs = (ip: string | undefined, search: string[]): Breadcrumb[] => {
+ const breadcrumbs = [
+ {
+ text: i18n.PAGE_TITLE,
+ href: `${getNetworkUrl()}${search && search[0] ? search[0] : ''}`,
+ },
+ ];
+
+ if (ip) {
+ return [
+ ...breadcrumbs,
+ {
+ text: decodeIpv6(ip),
+ href: '',
+ },
+ ];
+ } else {
+ return breadcrumbs;
+ }
+};
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/navigation/nav_tabs.tsx b/x-pack/legacy/plugins/siem/public/pages/network/navigation/nav_tabs.tsx
index 037697e2ee57a..ef097373b3ae7 100644
--- a/x-pack/legacy/plugins/siem/public/pages/network/navigation/nav_tabs.tsx
+++ b/x-pack/legacy/plugins/siem/public/pages/network/navigation/nav_tabs.tsx
@@ -12,17 +12,10 @@ const getTabsOnNetworkUrl = (tabName: NetworkRouteType) => `#/network/${tabName}
export const navTabsNetwork = (hasMlUserPermissions: boolean): NetworkNavTab => {
const networkNavTabs = {
- [NetworkRouteType.ips]: {
- id: NetworkRouteType.ips,
- name: i18n.NAVIGATION_IPS_TITLE,
- href: getTabsOnNetworkUrl(NetworkRouteType.ips),
- disabled: false,
- urlKey: 'network',
- },
- [NetworkRouteType.countries]: {
- id: NetworkRouteType.countries,
- name: i18n.NAVIGATION_COUNTRIES_TITLE,
- href: getTabsOnNetworkUrl(NetworkRouteType.countries),
+ [NetworkRouteType.flows]: {
+ id: NetworkRouteType.flows,
+ name: i18n.NAVIGATION_FLOWS_TITLE,
+ href: getTabsOnNetworkUrl(NetworkRouteType.flows),
disabled: false,
urlKey: 'network',
},
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/navigation/network_routes.tsx b/x-pack/legacy/plugins/siem/public/pages/network/navigation/network_routes.tsx
index 136a4f29359f5..fff9b1e7a0968 100644
--- a/x-pack/legacy/plugins/siem/public/pages/network/navigation/network_routes.tsx
+++ b/x-pack/legacy/plugins/siem/public/pages/network/navigation/network_routes.tsx
@@ -6,7 +6,7 @@
import React, { useCallback } from 'react';
import { Route, Switch } from 'react-router-dom';
-import { EuiFlexItem } from '@elastic/eui';
+import { EuiFlexItem, EuiSpacer } from '@elastic/eui';
import { FlowTargetSourceDest } from '../../../graphql/types';
import { scoreIntervalToDateTime } from '../../../components/ml/score/score_interval_to_datetime';
@@ -68,37 +68,38 @@ export const NetworkRoutes = ({
render={() => }
/>
(
-
-
-
-
+ <>
+
+
+
+
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ >
)}
/>
}
/>
- (
-
-
-
-
-
-
-
-
-
- )}
- />
}
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/navigation/types.ts b/x-pack/legacy/plugins/siem/public/pages/network/navigation/types.ts
index 3e5d00c51151a..a3639da3cc132 100644
--- a/x-pack/legacy/plugins/siem/public/pages/network/navigation/types.ts
+++ b/x-pack/legacy/plugins/siem/public/pages/network/navigation/types.ts
@@ -47,9 +47,8 @@ export type NetworkRoutesProps = GlobalTimeArgs & {
setAbsoluteRangeDatePicker: SetAbsoluteRangeDatePicker;
};
-export type KeyNetworkNavTabWithoutMlPermission = NetworkRouteType.countries &
- NetworkRouteType.dns &
- NetworkRouteType.ips &
+export type KeyNetworkNavTabWithoutMlPermission = NetworkRouteType.dns &
+ NetworkRouteType.flows &
NetworkRouteType.tls;
type KeyNetworkNavTabWithMlPermission = KeyNetworkNavTabWithoutMlPermission &
@@ -60,8 +59,7 @@ type KeyNetworkNavTab = KeyNetworkNavTabWithoutMlPermission | KeyNetworkNavTabWi
export type NetworkNavTab = Record;
export enum NetworkRouteType {
- ips = 'ips',
- countries = 'countries',
+ flows = 'flows',
dns = 'dns',
anomalies = 'anomalies',
tls = 'tls',
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/navigation/utils.ts b/x-pack/legacy/plugins/siem/public/pages/network/navigation/utils.ts
index a46925c6b4504..a1cb9d61b9c47 100644
--- a/x-pack/legacy/plugins/siem/public/pages/network/navigation/utils.ts
+++ b/x-pack/legacy/plugins/siem/public/pages/network/navigation/utils.ts
@@ -12,14 +12,13 @@ export const getNetworkRoutePath: GetNetworkRoutePath = (
hasMlUserPermission
) => {
if (capabilitiesFetched && !hasMlUserPermission) {
- return `${pagePath}/:tabName(${NetworkRouteType.ips}|${NetworkRouteType.dns}|${NetworkRouteType.countries}|${NetworkRouteType.tls})`;
+ return `${pagePath}/:tabName(${NetworkRouteType.flows}|${NetworkRouteType.dns}|${NetworkRouteType.tls})`;
}
return (
`${pagePath}/:tabName(` +
- `${NetworkRouteType.ips}|` +
+ `${NetworkRouteType.flows}|` +
`${NetworkRouteType.dns}|` +
- `${NetworkRouteType.countries}|` +
`${NetworkRouteType.anomalies}|` +
`${NetworkRouteType.tls})`
);
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/translations.ts b/x-pack/legacy/plugins/siem/public/pages/network/translations.ts
index d3945c3aca59c..25c117319248a 100644
--- a/x-pack/legacy/plugins/siem/public/pages/network/translations.ts
+++ b/x-pack/legacy/plugins/siem/public/pages/network/translations.ts
@@ -27,17 +27,10 @@ export const EMPTY_ACTION_SECONDARY = i18n.translate('xpack.siem.network.emptyAc
defaultMessage: 'Go to documentation',
});
-export const NAVIGATION_IPS_TITLE = i18n.translate('xpack.siem.network.navigation.ipsTitle', {
- defaultMessage: 'IPs',
+export const NAVIGATION_FLOWS_TITLE = i18n.translate('xpack.siem.network.navigation.flowsTitle', {
+ defaultMessage: 'Flows',
});
-export const NAVIGATION_COUNTRIES_TITLE = i18n.translate(
- 'xpack.siem.network.navigation.countriesTitle',
- {
- defaultMessage: 'Top Countries',
- }
-);
-
export const NAVIGATION_DNS_TITLE = i18n.translate('xpack.siem.network.navigation.dnsTitle', {
defaultMessage: 'DNS',
});
diff --git a/x-pack/legacy/plugins/siem/public/pages/network/types.ts b/x-pack/legacy/plugins/siem/public/pages/network/types.ts
index be58572cb6621..46c868729b832 100644
--- a/x-pack/legacy/plugins/siem/public/pages/network/types.ts
+++ b/x-pack/legacy/plugins/siem/public/pages/network/types.ts
@@ -9,7 +9,6 @@ import { RouteComponentProps } from 'react-router-dom';
import { ActionCreator } from 'typescript-fsa';
import { Query } from 'src/plugins/data/common';
-import { FlowTarget } from '../../graphql/types';
import { GlobalTimeArgs } from '../../containers/global_time';
import { InputsModelId } from '../../store/inputs/constants';
@@ -32,18 +31,3 @@ export type NetworkComponentProps = NetworkComponentReduxProps &
hasMlUserPermissions: boolean;
capabilitiesFetched: boolean;
};
-
-interface IPDetailsComponentReduxProps {
- filters: Filter[];
- flowTarget: FlowTarget;
- query: Query;
-}
-
-interface IPDetailsComponentDispatchProps {
- setAbsoluteRangeDatePicker: SetAbsoluteRangeDatePicker;
- setIpDetailsTablesActivePageToZero: ActionCreator;
-}
-
-export type IPDetailsComponentProps = IPDetailsComponentReduxProps &
- IPDetailsComponentDispatchProps &
- GlobalTimeArgs & { detailName: string };
diff --git a/x-pack/legacy/plugins/siem/scripts/convert_saved_search_to_signals.js b/x-pack/legacy/plugins/siem/scripts/convert_saved_search_to_signals.js
index feadc0b667916..78281efa09e52 100644
--- a/x-pack/legacy/plugins/siem/scripts/convert_saved_search_to_signals.js
+++ b/x-pack/legacy/plugins/siem/scripts/convert_saved_search_to_signals.js
@@ -29,7 +29,7 @@ const path = require('path');
// doing a search to KQL conversion before posting it as a signal or checking it
// into another repository.
const INTERVAL = '24h';
-const SEVERITY = 1;
+const SEVERITY = 'low';
const TYPE = 'kql';
const FROM = 'now-24h';
const TO = 'now';
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/build_events_reindex.ts b/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/build_events_reindex.ts
index b84657b52769c..c7bdc0263e4ce 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/build_events_reindex.ts
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/build_events_reindex.ts
@@ -20,9 +20,9 @@ interface BuildEventsReIndexParams {
maxDocs: string;
filter: Record | undefined;
kql: string | undefined;
- severity: number;
+ severity: string;
name: string;
- timeDetected: number;
+ timeDetected: string;
ruleRevision: number;
id: string;
type: string;
@@ -131,6 +131,7 @@ export const buildEventsReIndex = ({
def parent = [
"id": ctx._id,
"type": "event",
+ "index": ctx._index,
"depth": 1
];
@@ -140,14 +141,15 @@ export const buildEventsReIndex = ({
"rule_type": "${type}",
"parent": parent,
"name": "${name}",
- "severity": ${severity},
+ "severity": "${severity}",
"description": "${description}",
- "time_detected": "${timeDetected}",
+ "original_time": ctx._source['@timestamp'],
"index_patterns": indexPatterns,
"references": references
];
ctx._source.signal = signal;
+ ctx._source['@timestamp'] = "${timeDetected}";
`,
lang: 'painless',
},
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/signals_alert_type.ts b/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/signals_alert_type.ts
index 3ce3161a442cd..aad2b2b19d67a 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/signals_alert_type.ts
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/signals_alert_type.ts
@@ -34,7 +34,7 @@ export const signalsAlertType = ({ logger }: { logger: Logger }): SignalAlertTyp
kql: schema.nullable(schema.string()),
maxSignals: schema.number({ defaultValue: 100 }),
name: schema.string(),
- severity: schema.number(),
+ severity: schema.string(),
to: schema.string(),
type: schema.string(),
references: schema.arrayOf(schema.string(), { defaultValue: [] }),
@@ -87,7 +87,7 @@ export const signalsAlertType = ({ logger }: { logger: Logger }): SignalAlertTyp
severity,
description,
name,
- timeDetected: Date.now(),
+ timeDetected: new Date().toISOString(),
filter,
maxDocs: maxSignals,
ruleRevision: 1,
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/types.ts b/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/types.ts
index c2307e366a886..2d7b9dbb98d76 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/types.ts
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/types.ts
@@ -16,6 +16,7 @@ import {
} from '../../../../../alerting/server/types';
import { AlertsClient } from '../../../../../alerting/server/alerts_client';
import { ActionsClient } from '../../../../../actions/server/actions_client';
+import { SearchResponse } from '../../types';
export interface SignalAlertParams {
description: string;
@@ -28,7 +29,7 @@ export interface SignalAlertParams {
kql: string | undefined;
maxSignals: string;
name: string;
- severity: number;
+ severity: string;
type: 'filter' | 'kql';
to: string;
references: string[];
@@ -78,26 +79,30 @@ export interface SignalsRequest extends Hapi.Request {
}
export type SignalExecutorOptions = Omit & {
- params: {
- description: string;
- from: string;
- id: string;
- index: string[];
- interval: string;
- enabled: boolean;
- filter: Record | undefined;
- kql: string | undefined;
- maxSignals: string;
- name: string;
- severity: number;
- type: 'filter' | 'kql';
- to: string;
- references: string[];
+ params: SignalAlertParams & {
scrollSize: number;
scrollLock: string;
};
};
+export type SearchTypes =
+ | string
+ | string[]
+ | number
+ | number[]
+ | boolean
+ | boolean[]
+ | object
+ | object[];
+
+export interface SignalSource {
+ [key: string]: SearchTypes;
+ '@timestamp': string;
+}
+
+export type SignalSearchResponse = SearchResponse;
+export type SignalSourceHit = SignalSearchResponse['hits']['hits'][0];
+
// This returns true because by default a SignalAlertTypeDefinition is an AlertType
// since we are only increasing the strictness of params.
export const isAlertExecutor = (obj: SignalAlertTypeDefinition): obj is AlertType => {
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/utils.ts b/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/utils.ts
index 11cd86934545a..312f484b96e00 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/utils.ts
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/alerts/utils.ts
@@ -4,40 +4,40 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { SearchResponse, SearchHit, SignalHit } from '../../types';
+import { SignalHit } from '../../types';
import { Logger } from '../../../../../../../../src/core/server';
import { AlertServices } from '../../../../../alerting/server/types';
+import { SignalSourceHit, SignalSearchResponse, SignalAlertParams } from './types';
// format scroll search result for signals index.
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-export const buildBulkBody = (doc: SearchHit, signalParams: Record): SignalHit => {
- const indexPatterns = signalParams.index.map((element: string) => `"${element}"`).join(',');
- const refs = signalParams.references.map((element: string) => `"${element}"`).join(',');
+export const buildBulkBody = (doc: SignalSourceHit, signalParams: SignalAlertParams): SignalHit => {
return {
...doc._source,
signal: {
+ '@timestamp': new Date().toISOString(),
rule_revision: 1,
rule_id: signalParams.id,
rule_type: signalParams.type,
parent: {
id: doc._id,
type: 'event',
+ index: doc._index,
depth: 1,
},
name: signalParams.name,
severity: signalParams.severity,
description: signalParams.description,
- time_detected: Date.now(),
- index_patterns: indexPatterns,
- references: refs,
+ original_time: doc._source['@timestamp'],
+ index_patterns: signalParams.index,
+ references: signalParams.references,
},
};
};
// Bulk Index documents.
export const singleBulkIndex = async (
- sr: SearchResponse,
- params: Record, // eslint-disable-line @typescript-eslint/no-explicit-any
+ sr: SignalSearchResponse,
+ params: SignalAlertParams,
service: AlertServices,
logger: Logger
): Promise => {
@@ -45,7 +45,7 @@ export const singleBulkIndex = async (
logger.warn('First search result yielded 0 documents');
return false;
}
- const bulkBody = sr.hits.hits.flatMap((doc: SearchHit) => [
+ const bulkBody = sr.hits.hits.flatMap(doc => [
{
index: {
_index: process.env.SIGNALS_INDEX || '.siem-signals-10-01-2019',
@@ -68,10 +68,10 @@ export const singleBulkIndex = async (
// Given a scroll id, grab the next set of documents
export const singleScroll = async (
scrollId: string | undefined,
- params: Record, // eslint-disable-line @typescript-eslint/no-explicit-any
+ params: SignalAlertParams & { scrollLock?: number }, // TODO: Finish plumbing the scrollLock all the way to the REST endpoint if this algorithm continues to use it.
service: AlertServices,
logger: Logger
-): Promise> => {
+): Promise => {
const scroll = params.scrollLock ? params.scrollLock : '1m';
try {
const nextScrollResult = await service.callCluster('scroll', {
@@ -87,8 +87,8 @@ export const singleScroll = async (
// scroll through documents and re-index using bulk endpoint.
export const scrollAndBulkIndex = async (
- someResult: SearchResponse,
- params: Record, // eslint-disable-line @typescript-eslint/no-explicit-any
+ someResult: SignalSearchResponse,
+ params: SignalAlertParams,
service: AlertServices,
logger: Logger
): Promise => {
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/__mocks__/request_responses.ts b/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/__mocks__/request_responses.ts
index e4b3ccccbdbaf..1b6f529a51418 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/__mocks__/request_responses.ts
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/__mocks__/request_responses.ts
@@ -16,7 +16,7 @@ export const getUpdateRequest = (): ServerInjectOptions => ({
index: ['auditbeat-*', 'filebeat-*', 'packetbeat-*', 'winlogbeat-*'],
interval: '5m',
name: 'Detect Root/Admin Users',
- severity: 1,
+ severity: 'high',
type: 'kql',
from: 'now-6m',
to: 'now',
@@ -55,7 +55,7 @@ export const getCreateRequest = (): ServerInjectOptions => ({
index: ['auditbeat-*', 'filebeat-*', 'packetbeat-*', 'winlogbeat-*'],
interval: '5m',
name: 'Detect Root/Admin Users',
- severity: 1,
+ severity: 'high',
type: 'kql',
from: 'now-6m',
to: 'now',
@@ -82,7 +82,7 @@ export const createAlertResult = () => ({
kql: 'user.name: root or user.name: admin',
maxSignals: 100,
name: 'Detect Root/Admin Users',
- severity: 1,
+ severity: 'high',
to: 'now',
type: 'kql',
references: [],
@@ -136,7 +136,7 @@ export const updateAlertResult = () => ({
kql: 'user.name: root or user.name: admin',
maxSignals: 100,
name: 'Detect Root/Admin Users',
- severity: 1,
+ severity: 'high',
to: 'now',
type: 'kql',
references: [],
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/create_signals.route.test.ts b/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/create_signals.route.test.ts
index 1575ab5b16a3c..ca9e50b348220 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/create_signals.route.test.ts
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/create_signals.route.test.ts
@@ -78,7 +78,7 @@ describe('create_signals', () => {
index: ['auditbeat-*', 'filebeat-*', 'packetbeat-*', 'winlogbeat-*'],
interval: '5m',
name: 'Detect Root/Admin Users',
- severity: 1,
+ severity: 'high',
type: 'kql',
from: 'now-6m',
to: 'now',
@@ -103,7 +103,7 @@ describe('create_signals', () => {
index: ['auditbeat-*', 'filebeat-*', 'packetbeat-*', 'winlogbeat-*'],
interval: '5m',
name: 'Detect Root/Admin Users',
- severity: 1,
+ severity: 'high',
type: 'kql',
from: 'now-6m',
to: 'now',
@@ -128,7 +128,7 @@ describe('create_signals', () => {
index: ['auditbeat-*', 'filebeat-*', 'packetbeat-*', 'winlogbeat-*'],
interval: '5m',
name: 'Detect Root/Admin Users',
- severity: 1,
+ severity: 'high',
type: 'filter',
from: 'now-6m',
to: 'now',
@@ -153,7 +153,7 @@ describe('create_signals', () => {
index: ['auditbeat-*', 'filebeat-*', 'packetbeat-*', 'winlogbeat-*'],
interval: '5m',
name: 'Detect Root/Admin Users',
- severity: 1,
+ severity: 'high',
type: 'something-made-up', // This is a made up type that causes the 400
from: 'now-6m',
to: 'now',
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/create_signals_route.ts b/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/create_signals_route.ts
index 597f064b2c796..709b4fa55a991 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/create_signals_route.ts
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/create_signals_route.ts
@@ -30,7 +30,7 @@ export const createCreateSignalsRoute: Hapi.ServerRoute = {
kql: Joi.string(),
max_signals: Joi.number().default(100),
name: Joi.string().required(),
- severity: Joi.number().required(),
+ severity: Joi.string().required(),
to: Joi.string().required(),
type: Joi.string()
.valid('filter', 'kql')
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/update_signals_route.test.ts b/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/update_signals_route.test.ts
index eff6923b3fbda..05c6528337608 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/update_signals_route.test.ts
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/update_signals_route.test.ts
@@ -77,7 +77,7 @@ describe('update_signals', () => {
index: ['auditbeat-*', 'filebeat-*', 'packetbeat-*', 'winlogbeat-*'],
interval: '5m',
name: 'Detect Root/Admin Users',
- severity: 1,
+ severity: 'high',
type: 'kql',
from: 'now-6m',
to: 'now',
@@ -102,7 +102,7 @@ describe('update_signals', () => {
index: ['auditbeat-*', 'filebeat-*', 'packetbeat-*', 'winlogbeat-*'],
interval: '5m',
name: 'Detect Root/Admin Users',
- severity: 1,
+ severity: 'high',
type: 'kql',
from: 'now-6m',
to: 'now',
@@ -127,7 +127,7 @@ describe('update_signals', () => {
index: ['auditbeat-*', 'filebeat-*', 'packetbeat-*', 'winlogbeat-*'],
interval: '5m',
name: 'Detect Root/Admin Users',
- severity: 1,
+ severity: 'high',
type: 'filter',
from: 'now-6m',
to: 'now',
@@ -152,7 +152,7 @@ describe('update_signals', () => {
index: ['auditbeat-*', 'filebeat-*', 'packetbeat-*', 'winlogbeat-*'],
interval: '5m',
name: 'Detect Root/Admin Users',
- severity: 1,
+ severity: 'high',
type: 'something-made-up', // This is a made up type that causes the 400
from: 'now-6m',
to: 'now',
@@ -177,7 +177,7 @@ describe('update_signals', () => {
index: ['auditbeat-*', 'filebeat-*', 'packetbeat-*', 'winlogbeat-*'],
interval: '5m',
name: 'Detect Root/Admin Users',
- severity: 1,
+ severity: 'high',
type: 'kql',
from: 'now-6m',
to: 'now',
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/update_signals_route.ts b/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/update_signals_route.ts
index 7a437f04e7f2d..df4f860e19561 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/update_signals_route.ts
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/routes/update_signals_route.ts
@@ -37,7 +37,7 @@ export const createUpdateSignalsRoute: Hapi.ServerRoute = {
kql: Joi.string(),
max_signals: Joi.number().default(100),
name: Joi.string(),
- severity: Joi.number(),
+ severity: Joi.string(),
to: Joi.string(),
type: Joi.string().valid('filter', 'kql'),
references: Joi.array().default([]),
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/post_x_signals.sh b/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/post_x_signals.sh
index 1aaecca37ba6f..27f6a40a16df7 100755
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/post_x_signals.sh
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/post_x_signals.sh
@@ -27,7 +27,7 @@ do {
\"index\": [\"auditbeat-*\", \"filebeat-*\", \"packetbeat-*\", \"winlogbeat-*\"],
\"interval\": \"24h\",
\"name\": \"Detect Root/Admin Users\",
- \"severity\": 1,
+ \"severity\": \"high\",
\"type\": \"kql\",
\"from\": \"now-6m\",
\"to\": \"now\",
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_1.json b/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_1.json
index 8bf96b6da47d0..7bbe7ab163460 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_1.json
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_1.json
@@ -4,9 +4,10 @@
"index": ["auditbeat-*", "filebeat-*", "packetbeat-*", "winlogbeat-*"],
"interval": "5m",
"name": "Detect Root/Admin Users",
- "severity": 1,
+ "severity": "high",
"type": "kql",
"from": "now-6m",
"to": "now",
- "kql": "user.name: root or user.name: admin"
+ "kql": "user.name: root or user.name: admin",
+ "references": ["http://www.example.com", "https://ww.example.com"]
}
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_2.json b/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_2.json
index 93686f527264e..efbba4e0e46f5 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_2.json
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_2.json
@@ -4,7 +4,7 @@
"index": ["auditbeat-*", "filebeat-*", "packetbeat-*", "winlogbeat-*"],
"interval": "24h",
"name": "Detect Root/Admin Users over a long period of time",
- "severity": 1,
+ "severity": "high",
"type": "kql",
"from": "now-1y",
"to": "now",
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_3.json b/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_3.json
index fc2f390ced790..db4e5174bc757 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_3.json
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_3.json
@@ -4,7 +4,7 @@
"index": ["auditbeat-*", "filebeat-*", "packetbeat-*", "winlogbeat-*"],
"interval": "5m",
"name": "Detect Root/Admin Users",
- "severity": 1,
+ "severity": "high",
"type": "kql",
"from": "now-16y",
"to": "now-15y",
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_filter_9999.json b/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_filter_9999.json
index f4d6b154c4ca7..9ab529ad4d9ce 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_filter_9999.json
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_filter_9999.json
@@ -4,7 +4,7 @@
"index": ["auditbeat-*", "filebeat-*", "packetbeat-*", "winlogbeat-*"],
"interval": "5m",
"name": "Detect Root/Admin Users",
- "severity": 1,
+ "severity": "high",
"type": "filter",
"from": "now-6m",
"to": "now",
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_update_1.json b/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_update_1.json
index 660c9585369bc..cad577eeefdc2 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_update_1.json
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/root_or_admin_update_1.json
@@ -4,7 +4,7 @@
"index": ["winlogbeat-*"],
"interval": "9m",
"name": "Just watch other winlogbeat users",
- "severity": 500,
+ "severity": "low",
"enabled": false,
"type": "filter",
"from": "now-5d",
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/watch_longmont_3.json b/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/watch_longmont_3.json
index 94090b7158aee..1ae71b0a4aa17 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/watch_longmont_3.json
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/scripts/signals/watch_longmont_3.json
@@ -4,7 +4,7 @@
"index": ["auditbeat-*", "filebeat-*", "packetbeat-*", "winlogbeat-*"],
"interval": "24h",
"name": "Detect Longmont activity",
- "severity": 2,
+ "severity": "high",
"type": "kql",
"from": "now-1y",
"to": "now",
diff --git a/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals_mapping.json b/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals_mapping.json
index 4f1e07e2f5d76..df4ea9bc3a0b2 100644
--- a/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals_mapping.json
+++ b/x-pack/legacy/plugins/siem/server/lib/detection_engine/signals_mapping.json
@@ -9,6 +9,9 @@
"properties": {
"parent": {
"properties": {
+ "index": {
+ "type": "keyword"
+ },
"id": {
"type": "keyword"
},
@@ -20,7 +23,7 @@
}
}
},
- "time_detected": {
+ "original_time": {
"type": "date"
},
"rule_revision": {
@@ -45,106 +48,112 @@
"type": "text"
},
"severity": {
- "type": "long"
+ "type": "keyword"
},
"references": {
"type": "text"
- },
- "error": {
- "properties": {
- "code": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "id": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "message": {
- "type": "text",
- "norms": false
- },
- "type": {
- "type": "keyword",
- "ignore_above": 1024
- }
- }
}
}
},
"agent": {
"properties": {
"ephemeral_id": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "hostname": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"type": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"version": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
+ },
+ "as": {
+ "properties": {
+ "number": {
+ "type": "long"
+ },
+ "organization": {
+ "properties": {
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
}
}
},
"client": {
"properties": {
"address": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "as": {
+ "properties": {
+ "number": {
+ "type": "long"
+ },
+ "organization": {
+ "properties": {
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
+ }
+ }
},
"bytes": {
"type": "long"
},
"domain": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"geo": {
"properties": {
"city_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"continent_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"country_iso_code": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"country_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"location": {
"type": "geo_point"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"region_iso_code": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"region_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
@@ -152,8 +161,18 @@
"type": "ip"
},
"mac": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "nat": {
+ "properties": {
+ "ip": {
+ "type": "ip"
+ },
+ "port": {
+ "type": "long"
+ }
+ }
},
"packets": {
"type": "long"
@@ -161,39 +180,55 @@
"port": {
"type": "long"
},
+ "registered_domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "top_level_domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"user": {
"properties": {
+ "domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"email": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"full_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"group": {
"properties": {
+ "domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"hash": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
}
@@ -204,68 +239,60 @@
"account": {
"properties": {
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"availability_zone": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"instance": {
"properties": {
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"machine": {
"properties": {
"type": {
- "type": "keyword",
- "ignore_above": 1024
- }
- }
- },
- "project": {
- "properties": {
- "id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"provider": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"region": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"container": {
"properties": {
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"image": {
"properties": {
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"tag": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
@@ -273,60 +300,75 @@
"type": "object"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"runtime": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"destination": {
"properties": {
"address": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "as": {
+ "properties": {
+ "number": {
+ "type": "long"
+ },
+ "organization": {
+ "properties": {
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
+ }
+ }
},
"bytes": {
"type": "long"
},
"domain": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"geo": {
"properties": {
"city_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"continent_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"country_iso_code": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"country_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"location": {
"type": "geo_point"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"region_iso_code": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"region_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
@@ -334,101 +376,211 @@
"type": "ip"
},
"mac": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "nat": {
+ "properties": {
+ "ip": {
+ "type": "ip"
+ },
+ "port": {
+ "type": "long"
+ }
+ }
},
"packets": {
"type": "long"
},
- "path": {
- "type": "keyword",
- "ignore_above": 1024
- },
"port": {
"type": "long"
},
+ "registered_domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "top_level_domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"user": {
"properties": {
+ "domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"email": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"full_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"group": {
"properties": {
+ "domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"hash": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
+ }
+ }
+ },
+ "dns": {
+ "properties": {
+ "answers": {
+ "properties": {
+ "class": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "data": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "ttl": {
+ "type": "long"
+ },
+ "type": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
+ },
+ "header_flags": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "id": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "op_code": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "question": {
+ "properties": {
+ "class": {
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "registered_domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "subdomain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "top_level_domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "type": {
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
+ },
+ "resolved_ip": {
+ "type": "ip"
+ },
+ "response_code": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "type": {
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"ecs": {
"properties": {
"version": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"error": {
"properties": {
"code": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"message": {
- "type": "text",
- "norms": false
+ "norms": false,
+ "type": "text"
+ },
+ "stack_trace": {
+ "doc_values": false,
+ "ignore_above": 1024,
+ "index": false,
+ "type": "keyword"
},
"type": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"event": {
"properties": {
"action": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"category": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "code": {
+ "ignore_above": 1024,
+ "type": "keyword"
},
"created": {
"type": "date"
},
"dataset": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"duration": {
"type": "long"
@@ -437,32 +589,34 @@
"type": "date"
},
"hash": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"kind": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"module": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "origin": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"original": {
- "type": "keyword",
- "ignore_above": 1024
+ "doc_values": false,
+ "ignore_above": 1024,
+ "index": false,
+ "type": "keyword"
},
"outcome": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "provider": {
+ "ignore_above": 1024,
+ "type": "keyword"
},
"risk_score": {
"type": "float"
@@ -470,6 +624,9 @@
"risk_score_norm": {
"type": "float"
},
+ "sequence": {
+ "type": "long"
+ },
"severity": {
"type": "long"
},
@@ -477,359 +634,313 @@
"type": "date"
},
"timezone": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"type": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"file": {
"properties": {
+ "accessed": {
+ "type": "date"
+ },
+ "created": {
+ "type": "date"
+ },
"ctime": {
"type": "date"
},
"device": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "directory": {
+ "ignore_above": 1024,
+ "type": "keyword"
},
"extension": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"gid": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"group": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "hash": {
+ "properties": {
+ "md5": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "sha1": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "sha256": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "sha512": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
},
"inode": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"mode": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"mtime": {
"type": "date"
},
- "origin": {
- "type": "keyword",
- "fields": {
- "raw": {
- "type": "keyword",
- "ignore_above": 1024
- }
- },
- "ignore_above": 1024
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
},
"owner": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"path": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "selinux": {
- "properties": {
- "domain": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "level": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "role": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "user": {
- "type": "keyword",
- "ignore_above": 1024
- }
- }
- },
- "setgid": {
- "type": "boolean"
- },
- "setuid": {
- "type": "boolean"
+ "ignore_above": 1024,
+ "type": "keyword"
},
"size": {
"type": "long"
},
"target_path": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"type": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"uid": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"geo": {
"properties": {
"city_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"continent_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"country_iso_code": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"country_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"location": {
"type": "geo_point"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"region_iso_code": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"region_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"group": {
"properties": {
+ "domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"hash": {
"properties": {
- "blake2b_256": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "blake2b_384": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "blake2b_512": {
- "type": "keyword",
- "ignore_above": 1024
- },
"md5": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"sha1": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "sha224": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"sha256": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "sha384": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "sha3_224": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "sha3_256": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "sha3_384": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "sha3_512": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"sha512": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "sha512_224": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "sha512_256": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "xxh64": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"host": {
"properties": {
"architecture": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "containerized": {
- "type": "boolean"
+ "ignore_above": 1024,
+ "type": "keyword"
},
"geo": {
"properties": {
"city_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"continent_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"country_iso_code": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"country_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"location": {
"type": "geo_point"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"region_iso_code": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"region_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"hostname": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"ip": {
"type": "ip"
},
"mac": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"os": {
"properties": {
- "build": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "codename": {
- "type": "keyword",
- "ignore_above": 1024
- },
"family": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"full": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"kernel": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"platform": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"version": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"type": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "uptime": {
+ "type": "long"
},
"user": {
"properties": {
+ "domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"email": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"full_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"group": {
"properties": {
+ "domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"hash": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
}
@@ -845,8 +956,8 @@
"type": "long"
},
"content": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
@@ -854,12 +965,12 @@
"type": "long"
},
"method": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"referrer": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
@@ -871,8 +982,8 @@
"type": "long"
},
"content": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
@@ -885,8 +996,8 @@
}
},
"version": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
@@ -896,61 +1007,115 @@
"log": {
"properties": {
"level": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "logger": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "origin": {
+ "properties": {
+ "file": {
+ "properties": {
+ "line": {
+ "type": "integer"
+ },
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
+ },
+ "function": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
},
"original": {
- "type": "keyword",
- "ignore_above": 1024
+ "doc_values": false,
+ "ignore_above": 1024,
+ "index": false,
+ "type": "keyword"
+ },
+ "syslog": {
+ "properties": {
+ "facility": {
+ "properties": {
+ "code": {
+ "type": "long"
+ },
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
+ },
+ "priority": {
+ "type": "long"
+ },
+ "severity": {
+ "properties": {
+ "code": {
+ "type": "long"
+ },
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
+ }
+ }
}
}
},
"message": {
- "type": "text",
- "norms": false
+ "norms": false,
+ "type": "text"
},
"network": {
"properties": {
"application": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"bytes": {
"type": "long"
},
"community_id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"direction": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"forwarded_ip": {
"type": "ip"
},
"iana_number": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"packets": {
"type": "long"
},
"protocol": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"transport": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"type": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
@@ -959,164 +1124,221 @@
"geo": {
"properties": {
"city_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"continent_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"country_iso_code": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"country_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"location": {
"type": "geo_point"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"region_iso_code": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"region_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"hostname": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"ip": {
"type": "ip"
},
"mac": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
},
"os": {
"properties": {
"family": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"full": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"kernel": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"platform": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"version": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
+ "product": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"serial_number": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"type": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"vendor": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"version": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"organization": {
"properties": {
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"os": {
"properties": {
"family": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"full": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"kernel": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"platform": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"version": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
- "process": {
+ "package": {
"properties": {
- "args": {
- "type": "keyword",
- "ignore_above": 1024
+ "architecture": {
+ "ignore_above": 1024,
+ "type": "keyword"
},
- "created": {
- "type": "keyword",
- "ignore_above": 1024
+ "checksum": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "description": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "install_scope": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "installed": {
+ "type": "date"
+ },
+ "license": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "path": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "size": {
+ "type": "long"
},
- "entity_id": {
- "type": "keyword",
- "ignore_above": 1024
+ "version": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
+ },
+ "process": {
+ "properties": {
+ "args": {
+ "ignore_above": 1024,
+ "type": "keyword"
},
"executable": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"hash": {
"properties": {
+ "md5": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"sha1": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "sha256": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "sha512": {
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "pgid": {
+ "type": "long"
},
"pid": {
"type": "long"
@@ -1131,16 +1353,23 @@
"properties": {
"id": {
"type": "long"
+ },
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"title": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "uptime": {
+ "type": "long"
},
"working_directory": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
@@ -1154,48 +1383,63 @@
"server": {
"properties": {
"address": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "as": {
+ "properties": {
+ "number": {
+ "type": "long"
+ },
+ "organization": {
+ "properties": {
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
+ }
+ }
},
"bytes": {
"type": "long"
},
"domain": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"geo": {
"properties": {
"city_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"continent_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"country_iso_code": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"country_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"location": {
"type": "geo_point"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"region_iso_code": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"region_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
@@ -1203,8 +1447,18 @@
"type": "ip"
},
"mac": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "nat": {
+ "properties": {
+ "ip": {
+ "type": "ip"
+ },
+ "port": {
+ "type": "long"
+ }
+ }
},
"packets": {
"type": "long"
@@ -1212,39 +1466,55 @@
"port": {
"type": "long"
},
+ "registered_domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "top_level_domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"user": {
"properties": {
+ "domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"email": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"full_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"group": {
"properties": {
+ "domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"hash": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
}
@@ -1253,80 +1523,99 @@
"service": {
"properties": {
"ephemeral_id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "node": {
+ "properties": {
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
},
"state": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"type": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"version": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"source": {
"properties": {
"address": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "as": {
+ "properties": {
+ "number": {
+ "type": "long"
+ },
+ "organization": {
+ "properties": {
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
+ }
+ }
},
"bytes": {
"type": "long"
},
"domain": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"geo": {
"properties": {
"city_name": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "continent": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"continent_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"country_iso_code": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"country_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"location": {
"type": "geo_point"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"region_iso_code": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"region_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
@@ -1334,256 +1623,235 @@
"type": "ip"
},
"mac": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "nat": {
+ "properties": {
+ "ip": {
+ "type": "ip"
+ },
+ "port": {
+ "type": "long"
+ }
+ }
},
"packets": {
"type": "long"
},
- "path": {
- "type": "keyword",
- "ignore_above": 1024
- },
"port": {
"type": "long"
},
+ "registered_domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "top_level_domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"user": {
"properties": {
+ "domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"email": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"full_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"group": {
"properties": {
+ "domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"hash": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
}
}
},
"tags": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "threat": {
+ "properties": {
+ "framework": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "tactic": {
+ "properties": {
+ "id": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "reference": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
+ },
+ "technique": {
+ "properties": {
+ "id": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "name": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "reference": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
+ }
+ }
+ },
+ "trace": {
+ "properties": {
+ "id": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
+ },
+ "transaction": {
+ "properties": {
+ "id": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ }
+ }
},
"url": {
"properties": {
"domain": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "extension": {
+ "ignore_above": 1024,
+ "type": "keyword"
},
"fragment": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"full": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"original": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"password": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"path": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"port": {
"type": "long"
},
"query": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "registered_domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
},
"scheme": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
+ "top_level_domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
},
"username": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"user": {
"properties": {
- "audit": {
- "properties": {
- "id": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "name": {
- "type": "keyword",
- "ignore_above": 1024
- }
- }
- },
- "effective": {
- "properties": {
- "group": {
- "properties": {
- "id": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "name": {
- "type": "keyword",
- "ignore_above": 1024
- }
- }
- },
- "id": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "name": {
- "type": "keyword",
- "ignore_above": 1024
- }
- }
+ "domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
},
"email": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "entity_id": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "filesystem": {
- "properties": {
- "group": {
- "properties": {
- "id": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "name": {
- "type": "keyword",
- "ignore_above": 1024
- }
- }
- },
- "id": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "name": {
- "type": "keyword",
- "ignore_above": 1024
- }
- }
+ "ignore_above": 1024,
+ "type": "keyword"
},
"full_name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"group": {
"properties": {
+ "domain": {
+ "ignore_above": 1024,
+ "type": "keyword"
+ },
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"hash": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"id": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "name_map": {
- "type": "object"
- },
- "saved": {
- "properties": {
- "group": {
- "properties": {
- "id": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "name": {
- "type": "keyword",
- "ignore_above": 1024
- }
- }
- },
- "id": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "name": {
- "type": "keyword",
- "ignore_above": 1024
- }
- }
- },
- "selinux": {
- "properties": {
- "category": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "domain": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "level": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "role": {
- "type": "keyword",
- "ignore_above": 1024
- },
- "user": {
- "type": "keyword",
- "ignore_above": 1024
- }
- }
- },
- "terminal": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
@@ -1592,50 +1860,50 @@
"device": {
"properties": {
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"original": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"os": {
"properties": {
"family": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"full": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"kernel": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"name": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"platform": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
},
"version": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
},
"version": {
- "type": "keyword",
- "ignore_above": 1024
+ "ignore_above": 1024,
+ "type": "keyword"
}
}
}
diff --git a/x-pack/legacy/plugins/siem/server/lib/types.ts b/x-pack/legacy/plugins/siem/server/lib/types.ts
index b3e44f7882070..d690954f88057 100644
--- a/x-pack/legacy/plugins/siem/server/lib/types.ts
+++ b/x-pack/legacy/plugins/siem/server/lib/types.ts
@@ -64,18 +64,20 @@ export interface SiemContext {
export interface SignalHit {
signal: {
+ '@timestamp': string;
rule_revision: number;
- rule_id: number;
+ rule_id: string;
rule_type: string;
parent: {
id: string;
type: string;
+ index: string;
depth: number;
};
name: string;
- severity: number;
+ severity: string;
description: string;
- time_detected: number;
+ original_time: string;
index_patterns: string[];
references: string[];
};
diff --git a/x-pack/legacy/plugins/uptime/public/uptime_app.tsx b/x-pack/legacy/plugins/uptime/public/uptime_app.tsx
index cbebb2defcabf..172b31a50ffbc 100644
--- a/x-pack/legacy/plugins/uptime/public/uptime_app.tsx
+++ b/x-pack/legacy/plugins/uptime/public/uptime_app.tsx
@@ -154,7 +154,7 @@ const Application = (props: UptimeAppProps) => {
-
+
{
/>
-
+
diff --git a/x-pack/package.json b/x-pack/package.json
index 8f685675bd923..5248344883809 100644
--- a/x-pack/package.json
+++ b/x-pack/package.json
@@ -179,7 +179,7 @@
"supertest": "^3.1.0",
"supertest-as-promised": "^4.0.2",
"tmp": "0.1.0",
- "tree-kill": "^1.1.0",
+ "tree-kill": "^1.2.1",
"ts-loader": "^6.0.4",
"typescript": "3.5.3",
"vinyl-fs": "^3.0.3",
diff --git a/x-pack/plugins/code/server/plugin.ts b/x-pack/plugins/code/server/plugin.ts
index 208ee6de014e0..40f4bc8d4749e 100644
--- a/x-pack/plugins/code/server/plugin.ts
+++ b/x-pack/plugins/code/server/plugin.ts
@@ -9,11 +9,12 @@ import { first } from 'rxjs/operators';
import { TypeOf } from '@kbn/config-schema';
import {
CoreSetup,
+ IClusterClient,
LoggerFactory,
PluginInitializerContext,
RecursiveReadonly,
} from 'src/core/server';
-import { deepFreeze } from '../../../../src/core/utils';
+// import { deepFreeze } from '../../../../src/core/utils';
import { PluginSetupContract as FeaturesSetupContract } from '../../features/server';
import { CodeConfigSchema } from './config';
import { SAVED_OBJ_REPO } from '../../../legacy/plugins/code/common/constants';
@@ -26,6 +27,10 @@ export interface PluginSetupContract {
legacy: {
config: TypeOf;
logger: LoggerFactory;
+ http: any;
+ elasticsearch: {
+ adminClient$: IClusterClient;
+ };
};
}
@@ -74,13 +79,17 @@ export class CodePlugin {
},
});
- return deepFreeze({
+ return {
/** @deprecated */
legacy: {
config,
logger: this.initializerContext.logger,
+ http: coreSetup.http,
+ elasticsearch: {
+ adminClient$: await coreSetup.elasticsearch.adminClient$.pipe(first()).toPromise(),
+ },
},
- });
+ };
}
public start() {
diff --git a/x-pack/plugins/translations/translations/ja-JP.json b/x-pack/plugins/translations/translations/ja-JP.json
index 429e8e391a457..e12d4405aa745 100644
--- a/x-pack/plugins/translations/translations/ja-JP.json
+++ b/x-pack/plugins/translations/translations/ja-JP.json
@@ -261,13 +261,8 @@
"common.ui.aggTypes.timeInterval.scaledHelpText": "現在 {bucketDescription} にスケーリングされています",
"common.ui.aggTypes.timeInterval.selectIntervalPlaceholder": "間隔を選択",
"common.ui.aggTypes.timeInterval.selectOptionHelpText": "オプションを選択するかカスタム値を作成します。例30s、20m、24h、2d、1w、1M",
- "common.ui.courier.fetch.failedToClearRequestErrorMessage": "返答から未完全または重複のリクエストを消去できませんでした。",
"common.ui.courier.fetch.requestTimedOutNotificationMessage": "リクエストがタイムアウトしたため、データが不完全な可能性があります",
- "common.ui.courier.fetch.requestWasAbortedTwiceErrorMessage": "リクエストが 2 度中断されましたか?",
- "common.ui.courier.fetch.requireErrorHandlerErrorMessage": "{errorHandler} が必要です",
"common.ui.courier.fetch.shardsFailedNotificationMessage": "{shardsTotal} 件中 {shardsFailed} 件のシャードでエラーが発生しました",
- "common.ui.courier.fetch.unableContinueRequestErrorMessage": "{type} リクエストを続行できません",
- "common.ui.courier.fetch.unableStartRequestErrorMessage": "既に開始済みのためリクエストは開始できません",
"common.ui.courier.hitsDescription": "クエリにより返されたドキュメントの数です。",
"common.ui.courier.hitsLabel": "ヒット数",
"common.ui.courier.hitsTotalDescription": "クエリに一致するドキュメントの数です。",
@@ -298,14 +293,6 @@
"common.ui.dualRangeControl.mustSetBothErrorMessage": "下と上の値の両方を設定する必要があります",
"common.ui.dualRangeControl.outsideOfRangeErrorMessage": "値は {min} と {max} の間でなければなりません",
"common.ui.dualRangeControl.upperValidErrorMessage": "上の値は下の値以上でなければなりません",
- "common.ui.errorAllowExplicitIndex.breadcrumbs.errorText": "エラー",
- "common.ui.errorAllowExplicitIndex.errorDescription": "ご使用の Elasticsearch クラスターの {allowExplicitIndexConfig} 設定が {allowExplicitIndexValue} に設定されているようです。これにより Kibana が検索リクエストを行うことができません。この機能は、ダッシュボードに多数のパネルがある際に素早く一貫して読み込まれるように、Elasticsearch に複数インデックスを検索する単独のリクエストを送るのに使用します。",
- "common.ui.errorAllowExplicitIndex.errorDisclaimer": "申し訳ございませんが、この問題が解決されるまでディスカバリ、可視化、ダッシュボードなどの Kibana の特定のアプリはご利用いただけません。",
- "common.ui.errorAllowExplicitIndex.errorTitle": "おっと!",
- "common.ui.errorAllowExplicitIndex.howToFix.goBackText": "ブラウザの戻るボタンで前の画面に戻ります。",
- "common.ui.errorAllowExplicitIndex.howToFix.removeConfigItemText": "Elasticsearch の構成ファイルから {allowExplicitIndexConfig} を削除します。",
- "common.ui.errorAllowExplicitIndex.howToFix.restartText": "Elasticsearch を再起動します。",
- "common.ui.errorAllowExplicitIndex.howToFixErrorTitle": "どうすれば良いのでしょう?",
"common.ui.errorAutoCreateIndex.breadcrumbs.errorText": "エラー",
"common.ui.errorAutoCreateIndex.errorDescription": "Elasticsearch クラスターの {autoCreateIndexActionConfig} 設定が原因で、Kibana が保存されたオブジェクトを格納するインデックスを自動的に作成できないようです。Kibana は、保存されたオブジェクトインデックスが適切なマッピング/スキーマを使用し Kibana から Elasticsearch へのポーリングの回数を減らすための最適な手段であるため、この Elasticsearch の機能を使用します。",
"common.ui.errorAutoCreateIndex.errorDisclaimer": "申し訳ございませんが、この問題が解決されるまで Kibana で何も保存することができません。",
diff --git a/x-pack/plugins/translations/translations/zh-CN.json b/x-pack/plugins/translations/translations/zh-CN.json
index 1180539e4f263..ee4a617d50e75 100644
--- a/x-pack/plugins/translations/translations/zh-CN.json
+++ b/x-pack/plugins/translations/translations/zh-CN.json
@@ -261,13 +261,8 @@
"common.ui.aggTypes.timeInterval.scaledHelpText": "当前缩放至 {bucketDescription}",
"common.ui.aggTypes.timeInterval.selectIntervalPlaceholder": "选择时间间隔",
"common.ui.aggTypes.timeInterval.selectOptionHelpText": "选择选项或创建定制值示例:30s、20m、24h、2d、1w、1M",
- "common.ui.courier.fetch.failedToClearRequestErrorMessage": "无法从响应中清除不完整或重复的请求。",
"common.ui.courier.fetch.requestTimedOutNotificationMessage": "由于您的请求超时,因此数据可能不完整",
- "common.ui.courier.fetch.requestWasAbortedTwiceErrorMessage": "请求已中止两次?",
- "common.ui.courier.fetch.requireErrorHandlerErrorMessage": "“{errorHandler}” 必填",
"common.ui.courier.fetch.shardsFailedNotificationMessage": "{shardsTotal} 个分片有 {shardsFailed} 个失败",
- "common.ui.courier.fetch.unableContinueRequestErrorMessage": "无法继续 {type} 请求",
- "common.ui.courier.fetch.unableStartRequestErrorMessage": "无法启动请求,因此其已启动",
"common.ui.courier.hitsDescription": "查询返回的文档数目。",
"common.ui.courier.hitsLabel": "命中",
"common.ui.courier.hitsTotalDescription": "匹配查询的文档数目。",
@@ -298,14 +293,6 @@
"common.ui.dualRangeControl.mustSetBothErrorMessage": "下限值和上限值都须设置",
"common.ui.dualRangeControl.outsideOfRangeErrorMessage": "值必须是在 {min} 到 {max} 的范围内",
"common.ui.dualRangeControl.upperValidErrorMessage": "上限值必须大于或等于下限值",
- "common.ui.errorAllowExplicitIndex.breadcrumbs.errorText": "错误",
- "common.ui.errorAllowExplicitIndex.errorDescription": "似乎您的 Elasticsearch 集群已将设置 {allowExplicitIndexConfig} 设置为 {allowExplicitIndexValue},这使 Kibana 无法执行搜索请求。使用此功能,我们可以向 Elasticsearch 发送单个请求来搜索多个索引,这样,当仪表板上有多个面板时,面板可快速且一致地加载。",
- "common.ui.errorAllowExplicitIndex.errorDisclaimer": "但是,只有解决了此问题后,您才能使用 Kibana 中的某些应用,如 Discover、Visualize 和仪表板。",
- "common.ui.errorAllowExplicitIndex.errorTitle": "糟糕!",
- "common.ui.errorAllowExplicitIndex.howToFix.goBackText": "使用浏览器的后退按钮返回您之前正做的工作。",
- "common.ui.errorAllowExplicitIndex.howToFix.removeConfigItemText": "从 Elasticsearch 配置文件中删除 {allowExplicitIndexConfig}",
- "common.ui.errorAllowExplicitIndex.howToFix.restartText": "重新启动 Elasticsearch。",
- "common.ui.errorAllowExplicitIndex.howToFixErrorTitle": "那么,我如何解决此问题?",
"common.ui.errorAutoCreateIndex.breadcrumbs.errorText": "错误",
"common.ui.errorAutoCreateIndex.errorDescription": "似乎 Elasticsearch 集群的 {autoCreateIndexActionConfig} 设置使 Kibana 无法自动创建用于存储已保存对象的索引。Kibana 将使用此 Elasticsearch 功能,因为这是确保已保存对象索引使用正确映射/架构的最好方式,而且其允许 Kibana 较少地轮询 Elasticsearch。",
"common.ui.errorAutoCreateIndex.errorDisclaimer": "但是,只有解决了此问题后,您才能在 Kibana 保存内容。",
diff --git a/x-pack/test/functional/apps/code/file_tree.ts b/x-pack/test/functional/apps/code/file_tree.ts
index 669d9eabcf1e9..6fa56610b77f6 100644
--- a/x-pack/test/functional/apps/code/file_tree.ts
+++ b/x-pack/test/functional/apps/code/file_tree.ts
@@ -22,7 +22,8 @@ export default function exploreRepositoryFunctionalTests({
const exists = async (selector: string) =>
await testSubjects.exists(selector, { allowHidden: true });
- describe('File Tree', function() {
+ // FLAKY: https://github.com/elastic/kibana/issues/48048
+ describe.skip('File Tree', function() {
this.tags('smoke');
const repositoryListSelector = 'codeRepositoryList > codeRepositoryItem';
diff --git a/yarn.lock b/yarn.lock
index b73aa8b51852a..7da14519d6e5f 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -14686,10 +14686,10 @@ history@^4.7.2:
value-equal "^0.4.0"
warning "^3.0.0"
-hjson@3.1.2:
- version "3.1.2"
- resolved "https://registry.yarnpkg.com/hjson/-/hjson-3.1.2.tgz#1ae8a3a897a1fab8d45180f98e9abf9b56f95b55"
- integrity sha512-2ILrho8eRl2Bniy61mDFiXRAloYqH2T6OwWkoF/8y55DPFgG2RcqQGNXIfBLp432dnAbLOpBJ4pJs63W3X27EA==
+hjson@3.2.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/hjson/-/hjson-3.2.0.tgz#76203ea69bc1c7c88422b48402cc34df8ff8de0e"
+ integrity sha512-XntQJHUg9QRKZYlADJKVoBMvuDbZve1X4Nd6ZIXCVPMwMvLhGjU9pGqW1XX8yPqBJSBAQwpcBDGTspURotVxbA==
hmac-drbg@^1.0.0:
version "1.0.1"
@@ -27294,10 +27294,10 @@ traverse@0.6.6, traverse@~0.6.6:
resolved "https://registry.yarnpkg.com/traverse/-/traverse-0.6.6.tgz#cbdf560fd7b9af632502fed40f918c157ea97137"
integrity sha1-y99WD9e5r2MlAv7UD5GMFX6pcTc=
-tree-kill@^1.1.0, tree-kill@^1.2.0:
- version "1.2.0"
- resolved "https://registry.yarnpkg.com/tree-kill/-/tree-kill-1.2.0.tgz#5846786237b4239014f05db156b643212d4c6f36"
- integrity sha512-DlX6dR0lOIRDFxI0mjL9IYg6OTncLm/Zt+JiBhE5OlFcAR8yc9S7FFXU9so0oda47frdM/JFsk7UjNt9vscKcg==
+tree-kill@^1.2.1:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/tree-kill/-/tree-kill-1.2.1.tgz#5398f374e2f292b9dcc7b2e71e30a5c3bb6c743a"
+ integrity sha512-4hjqbObwlh2dLyW4tcz0Ymw0ggoaVDMveUB9w8kFSQScdRLo0gxO9J7WFcUBo+W3C1TLdFIEwNOWebgZZ0RH9Q==
treeify@^1.0.1, treeify@^1.1.0:
version "1.1.0"