diff --git a/docs/development/core/public/kibana-plugin-public.httperrorresponse.body.md b/docs/development/core/public/kibana-plugin-public.httperrorresponse.body.md deleted file mode 100644 index 01f8d4c951465..0000000000000 --- a/docs/development/core/public/kibana-plugin-public.httperrorresponse.body.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpErrorResponse](./kibana-plugin-public.httperrorresponse.md) > [body](./kibana-plugin-public.httperrorresponse.body.md) - -## HttpErrorResponse.body property - -Signature: - -```typescript -body?: HttpBody; -``` diff --git a/docs/development/core/public/kibana-plugin-public.httperrorresponse.md b/docs/development/core/public/kibana-plugin-public.httperrorresponse.md index 1955bb57c50bf..aa669df796a09 100644 --- a/docs/development/core/public/kibana-plugin-public.httperrorresponse.md +++ b/docs/development/core/public/kibana-plugin-public.httperrorresponse.md @@ -8,15 +8,12 @@ Signature: ```typescript -export interface HttpErrorResponse +export interface HttpErrorResponse extends HttpResponse ``` ## Properties | Property | Type | Description | | --- | --- | --- | -| [body](./kibana-plugin-public.httperrorresponse.body.md) | HttpBody | | | [error](./kibana-plugin-public.httperrorresponse.error.md) | Error | IHttpFetchError | | -| [request](./kibana-plugin-public.httperrorresponse.request.md) | Request | | -| [response](./kibana-plugin-public.httperrorresponse.response.md) | Response | | diff --git a/docs/development/core/public/kibana-plugin-public.httperrorresponse.request.md b/docs/development/core/public/kibana-plugin-public.httperrorresponse.request.md deleted file mode 100644 index fcb33fc12fbf4..0000000000000 --- a/docs/development/core/public/kibana-plugin-public.httperrorresponse.request.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpErrorResponse](./kibana-plugin-public.httperrorresponse.md) > [request](./kibana-plugin-public.httperrorresponse.request.md) - -## HttpErrorResponse.request property - -Signature: - -```typescript -request?: Request; -``` diff --git a/docs/development/core/public/kibana-plugin-public.httperrorresponse.response.md b/docs/development/core/public/kibana-plugin-public.httperrorresponse.response.md deleted file mode 100644 index e6c7f9675a1d7..0000000000000 --- a/docs/development/core/public/kibana-plugin-public.httperrorresponse.response.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpErrorResponse](./kibana-plugin-public.httperrorresponse.md) > [response](./kibana-plugin-public.httperrorresponse.response.md) - -## HttpErrorResponse.response property - -Signature: - -```typescript -response?: Response; -``` diff --git a/docs/development/core/public/kibana-plugin-public.httpinterceptor.response.md b/docs/development/core/public/kibana-plugin-public.httpinterceptor.response.md index 6f4205f3362fe..ca43ea31f0e2e 100644 --- a/docs/development/core/public/kibana-plugin-public.httpinterceptor.response.md +++ b/docs/development/core/public/kibana-plugin-public.httpinterceptor.response.md @@ -9,7 +9,7 @@ Define an interceptor to be executed after a response is received. Signature: ```typescript -response?(httpResponse: HttpResponse, controller: IHttpInterceptController): Promise | HttpResponse | void; +response?(httpResponse: HttpResponse, controller: IHttpInterceptController): Promise | InterceptedHttpResponse | void; ``` ## Parameters @@ -21,5 +21,5 @@ response?(httpResponse: HttpResponse, controller: IHttpInterceptController): Pro Returns: -`Promise | HttpResponse | void` +`Promise | InterceptedHttpResponse | void` diff --git a/docs/development/core/public/kibana-plugin-public.httpinterceptor.responseerror.md b/docs/development/core/public/kibana-plugin-public.httpinterceptor.responseerror.md index 1e7cd5e61186e..b8abd50e45461 100644 --- a/docs/development/core/public/kibana-plugin-public.httpinterceptor.responseerror.md +++ b/docs/development/core/public/kibana-plugin-public.httpinterceptor.responseerror.md @@ -9,7 +9,7 @@ Define an interceptor to be executed if a response interceptor throws an error o Signature: ```typescript -responseError?(httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptController): Promise | HttpResponse | void; +responseError?(httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptController): Promise | InterceptedHttpResponse | void; ``` ## Parameters @@ -21,5 +21,5 @@ responseError?(httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptC Returns: -`Promise | HttpResponse | void` +`Promise | InterceptedHttpResponse | void` diff --git a/docs/development/core/public/kibana-plugin-public.httpresponse.body.md b/docs/development/core/public/kibana-plugin-public.httpresponse.body.md deleted file mode 100644 index c590c9ec49d1b..0000000000000 --- a/docs/development/core/public/kibana-plugin-public.httpresponse.body.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpResponse](./kibana-plugin-public.httpresponse.md) > [body](./kibana-plugin-public.httpresponse.body.md) - -## HttpResponse.body property - -Signature: - -```typescript -body?: HttpBody; -``` diff --git a/docs/development/core/public/kibana-plugin-public.httpresponse.md b/docs/development/core/public/kibana-plugin-public.httpresponse.md index b2ec48fd4d6b5..e44515cc8a1e0 100644 --- a/docs/development/core/public/kibana-plugin-public.httpresponse.md +++ b/docs/development/core/public/kibana-plugin-public.httpresponse.md @@ -8,14 +8,12 @@ Signature: ```typescript -export interface HttpResponse +export interface HttpResponse extends InterceptedHttpResponse ``` ## Properties | Property | Type | Description | | --- | --- | --- | -| [body](./kibana-plugin-public.httpresponse.body.md) | HttpBody | | -| [request](./kibana-plugin-public.httpresponse.request.md) | Request | | -| [response](./kibana-plugin-public.httpresponse.response.md) | Response | | +| [request](./kibana-plugin-public.httpresponse.request.md) | Readonly<Request> | | diff --git a/docs/development/core/public/kibana-plugin-public.httpresponse.request.md b/docs/development/core/public/kibana-plugin-public.httpresponse.request.md index 4cb1ded29152e..84ab1bc7af853 100644 --- a/docs/development/core/public/kibana-plugin-public.httpresponse.request.md +++ b/docs/development/core/public/kibana-plugin-public.httpresponse.request.md @@ -7,5 +7,5 @@ Signature: ```typescript -request?: Request; +request: Readonly; ``` diff --git a/docs/development/core/public/kibana-plugin-public.httpresponse.response.md b/docs/development/core/public/kibana-plugin-public.httpresponse.response.md deleted file mode 100644 index 44c8eb4295f1c..0000000000000 --- a/docs/development/core/public/kibana-plugin-public.httpresponse.response.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpResponse](./kibana-plugin-public.httpresponse.md) > [response](./kibana-plugin-public.httpresponse.response.md) - -## HttpResponse.response property - -Signature: - -```typescript -response?: Response; -``` diff --git a/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.body.md b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.body.md new file mode 100644 index 0000000000000..fc6d34c0b74f2 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.body.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [InterceptedHttpResponse](./kibana-plugin-public.interceptedhttpresponse.md) > [body](./kibana-plugin-public.interceptedhttpresponse.body.md) + +## InterceptedHttpResponse.body property + +Signature: + +```typescript +body?: HttpBody; +``` diff --git a/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.md b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.md new file mode 100644 index 0000000000000..c4a7f4d6b2afa --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.md @@ -0,0 +1,20 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [InterceptedHttpResponse](./kibana-plugin-public.interceptedhttpresponse.md) + +## InterceptedHttpResponse interface + + +Signature: + +```typescript +export interface InterceptedHttpResponse +``` + +## Properties + +| Property | Type | Description | +| --- | --- | --- | +| [body](./kibana-plugin-public.interceptedhttpresponse.body.md) | HttpBody | | +| [response](./kibana-plugin-public.interceptedhttpresponse.response.md) | Response | | + diff --git a/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.response.md b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.response.md new file mode 100644 index 0000000000000..dceb55113ee78 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.interceptedhttpresponse.response.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [InterceptedHttpResponse](./kibana-plugin-public.interceptedhttpresponse.md) > [response](./kibana-plugin-public.interceptedhttpresponse.response.md) + +## InterceptedHttpResponse.response property + +Signature: + +```typescript +response?: Response; +``` diff --git a/docs/development/core/public/kibana-plugin-public.md b/docs/development/core/public/kibana-plugin-public.md index eeea889e262b3..e787621c3aaf9 100644 --- a/docs/development/core/public/kibana-plugin-public.md +++ b/docs/development/core/public/kibana-plugin-public.md @@ -61,6 +61,7 @@ The plugin integrates with the core system via lifecycle events: `setup` | [IContextContainer](./kibana-plugin-public.icontextcontainer.md) | An object that handles registration of context providers and configuring handlers with context. | | [IHttpFetchError](./kibana-plugin-public.ihttpfetcherror.md) | | | [IHttpInterceptController](./kibana-plugin-public.ihttpinterceptcontroller.md) | Used to halt a request Promise chain in a [HttpInterceptor](./kibana-plugin-public.httpinterceptor.md). | +| [InterceptedHttpResponse](./kibana-plugin-public.interceptedhttpresponse.md) | | | [LegacyCoreSetup](./kibana-plugin-public.legacycoresetup.md) | Setup interface exposed to the legacy platform via the ui/new_platform module. | | [LegacyCoreStart](./kibana-plugin-public.legacycorestart.md) | Start interface exposed to the legacy platform via the ui/new_platform module. | | [LegacyNavLink](./kibana-plugin-public.legacynavlink.md) | | diff --git a/package.json b/package.json index 674092c95161a..9bbe9a48a6c53 100644 --- a/package.json +++ b/package.json @@ -171,7 +171,7 @@ "hapi": "^17.5.3", "hapi-auth-cookie": "^9.0.0", "history": "^4.9.0", - "hjson": "3.1.2", + "hjson": "3.2.0", "hoek": "^5.0.4", "http-proxy-agent": "^2.1.0", "https-proxy-agent": "^2.2.2", @@ -443,7 +443,7 @@ "strip-ansi": "^3.0.1", "supertest": "^3.1.0", "supertest-as-promised": "^4.0.2", - "tree-kill": "^1.1.0", + "tree-kill": "^1.2.1", "typescript": "3.5.3", "typings-tester": "^0.3.2", "vinyl-fs": "^3.0.3", diff --git a/packages/kbn-dev-utils/package.json b/packages/kbn-dev-utils/package.json index 2e7e4c5500a3c..5c64be294f707 100644 --- a/packages/kbn-dev-utils/package.json +++ b/packages/kbn-dev-utils/package.json @@ -17,7 +17,7 @@ "getopts": "^2.2.5", "moment": "^2.20.1", "rxjs": "^6.2.1", - "tree-kill": "^1.2.0", + "tree-kill": "^1.2.1", "tslib": "^1.9.3" }, "devDependencies": { diff --git a/packages/kbn-dev-utils/src/proc_runner/proc.ts b/packages/kbn-dev-utils/src/proc_runner/proc.ts index 3b7d595e4b8cf..f29fb5f4b17f6 100644 --- a/packages/kbn-dev-utils/src/proc_runner/proc.ts +++ b/packages/kbn-dev-utils/src/proc_runner/proc.ts @@ -26,7 +26,7 @@ import chalk from 'chalk'; import treeKill from 'tree-kill'; import { promisify } from 'util'; -const treeKillAsync = promisify(treeKill); +const treeKillAsync = promisify((...args: [number, string, any]) => treeKill(...args)); import { ToolingLog } from '../tooling_log'; import { observeLines } from './observe_lines'; diff --git a/packages/kbn-es-query/src/es_query/__tests__/from_filters.js b/packages/kbn-es-query/src/es_query/__tests__/from_filters.js index 59e5f4d6faf8a..676992e4dddc8 100644 --- a/packages/kbn-es-query/src/es_query/__tests__/from_filters.js +++ b/packages/kbn-es-query/src/es_query/__tests__/from_filters.js @@ -55,6 +55,32 @@ describe('build query', function () { expect(result.filter).to.eql(expectedESQueries); }); + it('should remove disabled filters', function () { + const filters = [ + { + match_all: {}, + meta: { type: 'match_all', negate: true, disabled: true }, + }, + ]; + + const expectedESQueries = []; + + const result = buildQueryFromFilters(filters); + + expect(result.must_not).to.eql(expectedESQueries); + }); + + it('should remove falsy filters', function () { + const filters = [null, undefined]; + + const expectedESQueries = []; + + const result = buildQueryFromFilters(filters); + + expect(result.must_not).to.eql(expectedESQueries); + expect(result.must).to.eql(expectedESQueries); + }); + it('should place negated filters in the must_not clause', function () { const filters = [ { diff --git a/packages/kbn-es-query/src/es_query/from_filters.js b/packages/kbn-es-query/src/es_query/from_filters.js index b8193b7469a20..10f9cf82fc972 100644 --- a/packages/kbn-es-query/src/es_query/from_filters.js +++ b/packages/kbn-es-query/src/es_query/from_filters.js @@ -60,6 +60,7 @@ const cleanFilter = function (filter) { }; export function buildQueryFromFilters(filters = [], indexPattern, ignoreFilterIfFieldNotInIndex) { + filters = filters.filter(filter => filter && !_.get(filter, ['meta', 'disabled'])); return { must: [], filter: filters diff --git a/packages/kbn-es/package.json b/packages/kbn-es/package.json index 5521d57c22e86..5280c671450fa 100644 --- a/packages/kbn-es/package.json +++ b/packages/kbn-es/package.json @@ -17,7 +17,7 @@ "node-fetch": "^2.6.0", "simple-git": "^1.91.0", "tar-fs": "^1.16.3", - "tree-kill": "^1.1.0", + "tree-kill": "^1.2.1", "yauzl": "^2.10.0" } } diff --git a/src/core/public/chrome/ui/header/header.tsx b/src/core/public/chrome/ui/header/header.tsx index f24b0ed1681aa..4e73f49527856 100644 --- a/src/core/public/chrome/ui/header/header.tsx +++ b/src/core/public/chrome/ui/header/header.tsx @@ -406,12 +406,26 @@ class HeaderUI extends Component { data-test-subj="navDrawer" isLocked={isLocked} onIsLockedUpdate={onIsLockedUpdate} + aria-label={i18n.translate('core.ui.primaryNav.screenReaderLabel', { + defaultMessage: 'Primary', + })} > - + + ); diff --git a/src/core/public/http/http_service.test.ts b/src/core/public/http/http_service.test.ts index dddd2cc5ec36f..13906b91ed8df 100644 --- a/src/core/public/http/http_service.test.ts +++ b/src/core/public/http/http_service.test.ts @@ -24,6 +24,7 @@ import fetchMock from 'fetch-mock/es5/client'; import { readFileSync } from 'fs'; import { join } from 'path'; import { setup, SetupTap } from '../../../test_utils/public/http_test_setup'; +import { HttpResponse } from './types'; function delay(duration: number) { return new Promise(r => setTimeout(r, duration)); @@ -394,12 +395,12 @@ describe('interception', () => { const unusedSpy = jest.fn(); - http.intercept({ response: unusedSpy }); http.intercept({ responseError(response, controller) { controller.halt(); }, }); + http.intercept({ response: unusedSpy, responseError: unusedSpy }); http.post('/my/path').then(unusedSpy, unusedSpy); await delay(1000); @@ -416,21 +417,21 @@ describe('interception', () => { request: unusedSpy, requestError: usedSpy, response: unusedSpy, - responseError: usedSpy, + responseError: unusedSpy, }); http.intercept({ request() { throw new Error('Interception Error'); }, response: unusedSpy, - responseError: usedSpy, + responseError: unusedSpy, }); - http.intercept({ request: usedSpy, response: unusedSpy, responseError: usedSpy }); + http.intercept({ request: usedSpy, response: unusedSpy, responseError: unusedSpy }); await expect(http.fetch('/my/path')).rejects.toThrow(/Interception Error/); expect(fetchMock.called()).toBe(false); expect(unusedSpy).toHaveBeenCalledTimes(0); - expect(usedSpy).toHaveBeenCalledTimes(5); + expect(usedSpy).toHaveBeenCalledTimes(2); }); it('should succeed if request throws but caught by interceptor', async () => { @@ -458,26 +459,76 @@ describe('interception', () => { expect(usedSpy).toHaveBeenCalledTimes(4); }); - describe('request availability during interception', () => { - it('should not be available to responseError when request throws', async () => { - expect.assertions(3); + it('should accumulate request information', async () => { + const routes = ['alpha', 'beta', 'gamma']; + const createRequest = jest.fn( + (request: Request) => new Request(`/api/${routes.shift()}`, request) + ); - let spiedRequest: Request | undefined; + http.intercept({ + request: createRequest, + }); + http.intercept({ + requestError(httpErrorRequest) { + return httpErrorRequest.request; + }, + }); + http.intercept({ + request(request) { + throw new Error('Invalid'); + }, + }); + http.intercept({ + request: createRequest, + }); + http.intercept({ + request: createRequest, + }); - http.intercept({ - request() { - throw new Error('Internal Server Error'); - }, - responseError({ request }) { - spiedRequest = request; - }, - }); + await expect(http.fetch('/my/route')).resolves.toEqual({ foo: 'bar' }); + expect(fetchMock.called()).toBe(true); + expect(routes.length).toBe(0); + expect(createRequest.mock.calls[0][0].url).toContain('/my/route'); + expect(createRequest.mock.calls[1][0].url).toContain('/api/alpha'); + expect(createRequest.mock.calls[2][0].url).toContain('/api/beta'); + expect(fetchMock.lastCall()!.request.url).toContain('/api/gamma'); + }); - await expect(http.fetch('/my/path')).rejects.toThrow(); - expect(fetchMock.called()).toBe(false); - expect(spiedRequest).toBeUndefined(); + it('should accumulate response information', async () => { + const bodies = ['alpha', 'beta', 'gamma']; + const createResponse = jest.fn((httpResponse: HttpResponse) => ({ + body: bodies.shift(), + })); + + http.intercept({ + response: createResponse, + }); + http.intercept({ + response: createResponse, }); + http.intercept({ + response(httpResponse) { + throw new Error('Invalid'); + }, + }); + http.intercept({ + responseError({ error, ...httpResponse }) { + return httpResponse; + }, + }); + http.intercept({ + response: createResponse, + }); + + await expect(http.fetch('/my/route')).resolves.toEqual('gamma'); + expect(fetchMock.called()).toBe(true); + expect(bodies.length).toBe(0); + expect(createResponse.mock.calls[0][0].body).toEqual({ foo: 'bar' }); + expect(createResponse.mock.calls[1][0].body).toBe('alpha'); + expect(createResponse.mock.calls[2][0].body).toBe('beta'); + }); + describe('request availability during interception', () => { it('should be available to responseError when response throws', async () => { let spiedRequest: Request | undefined; @@ -514,22 +565,6 @@ describe('interception', () => { await expect(http.fetch('/my/path')).rejects.toThrow(); expect(spiedResponse).toBeDefined(); }); - - it('should not be available to responseError when request throws', async () => { - let spiedResponse: Response | undefined; - - http.intercept({ - request() { - throw new Error('Internal Server Error'); - }, - responseError({ response }) { - spiedResponse = response; - }, - }); - - await expect(http.fetch('/my/path')).rejects.toThrow(); - expect(spiedResponse).toBeUndefined(); - }); }); it('should actually halt request interceptors in reverse order', async () => { diff --git a/src/core/public/http/http_setup.ts b/src/core/public/http/http_setup.ts index 5ca3b23c5a69c..a10358926de1f 100644 --- a/src/core/public/http/http_setup.ts +++ b/src/core/public/http/http_setup.ts @@ -110,15 +110,14 @@ export const setup = ( (promise, interceptor) => promise.then( async (current: Request) => { + next = current; checkHalt(controller); if (!interceptor.request) { return current; } - next = (await interceptor.request(current, controller)) || current; - - return next; + return (await interceptor.request(current, controller)) || current; }, async error => { checkHalt(controller, error); @@ -155,17 +154,21 @@ export const setup = ( (promise, interceptor) => promise.then( async httpResponse => { + current = httpResponse; checkHalt(controller); if (!interceptor.response) { return httpResponse; } - current = (await interceptor.response(httpResponse, controller)) || httpResponse; - - return current; + return { + ...httpResponse, + ...((await interceptor.response(httpResponse, controller)) || {}), + }; }, async error => { + const request = error.request || (current && current.request); + checkHalt(controller, error); if (!interceptor.responseError) { @@ -176,7 +179,7 @@ export const setup = ( const next = await interceptor.responseError( { error, - request: error.request || (current && current.request), + request, response: error.response || (current && current.response), body: error.body || (current && current.body), }, @@ -189,17 +192,14 @@ export const setup = ( throw error; } - return next; + return { ...next, request }; } catch (err) { checkHalt(controller, err); throw err; } } ), - responsePromise.then(httpResponse => { - current = httpResponse; - return httpResponse; - }) + responsePromise ); return finalHttpResponse.body; @@ -249,18 +249,23 @@ export const setup = ( // We wrap the interception in a separate promise to ensure that when // a halt is called we do not resolve or reject, halting handling of the promise. return new Promise(async (resolve, reject) => { - try { - const value = await interceptResponse( - interceptRequest(initialRequest, controller).then(fetcher), - controller - ); - - resolve(value); - } catch (err) { + function rejectIfNotHalted(err: any) { if (!(err instanceof HttpInterceptHaltError)) { reject(err); } } + + try { + const request = await interceptRequest(initialRequest, controller); + + try { + resolve(await interceptResponse(fetcher(request), controller)); + } catch (err) { + rejectIfNotHalted(err); + } + } catch (err) { + rejectIfNotHalted(err); + } }); } diff --git a/src/core/public/http/types.ts b/src/core/public/http/types.ts index 793d03c6fde80..96500d566b3e5 100644 --- a/src/core/public/http/types.ts +++ b/src/core/public/http/types.ts @@ -226,12 +226,16 @@ export type HttpHandler = (path: string, options?: HttpFetchOptions) => Promise< export type HttpBody = BodyInit | null | any; /** @public */ -export interface HttpResponse { - request?: Request; +export interface InterceptedHttpResponse { response?: Response; body?: HttpBody; } +/** @public */ +export interface HttpResponse extends InterceptedHttpResponse { + request: Readonly; +} + /** @public */ export interface IHttpFetchError extends Error { readonly request: Request; @@ -248,11 +252,8 @@ export interface IHttpFetchError extends Error { } /** @public */ -export interface HttpErrorResponse { +export interface HttpErrorResponse extends HttpResponse { error: Error | IHttpFetchError; - request?: Request; - response?: Response; - body?: HttpBody; } /** @public */ export interface HttpErrorRequest { @@ -295,7 +296,7 @@ export interface HttpInterceptor { response?( httpResponse: HttpResponse, controller: IHttpInterceptController - ): Promise | HttpResponse | void; + ): Promise | InterceptedHttpResponse | void; /** * Define an interceptor to be executed if a response interceptor throws an error or returns a rejected Promise. @@ -305,7 +306,7 @@ export interface HttpInterceptor { responseError?( httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptController - ): Promise | HttpResponse | void; + ): Promise | InterceptedHttpResponse | void; } /** diff --git a/src/core/public/index.ts b/src/core/public/index.ts index 054012fb82761..f1085b86395b8 100644 --- a/src/core/public/index.ts +++ b/src/core/public/index.ts @@ -113,6 +113,7 @@ export { IBasePath, IHttpInterceptController, IHttpFetchError, + InterceptedHttpResponse, } from './http'; export { diff --git a/src/core/public/public.api.md b/src/core/public/public.api.md index 2eae59b709867..ec8a22fe5953c 100644 --- a/src/core/public/public.api.md +++ b/src/core/public/public.api.md @@ -426,15 +426,9 @@ export interface HttpErrorRequest { } // @public (undocumented) -export interface HttpErrorResponse { - // (undocumented) - body?: HttpBody; +export interface HttpErrorResponse extends HttpResponse { // (undocumented) error: Error | IHttpFetchError; - // (undocumented) - request?: Request; - // (undocumented) - response?: Response; } // @public @@ -463,8 +457,8 @@ export interface HttpHeadersInit { export interface HttpInterceptor { request?(request: Request, controller: IHttpInterceptController): Promise | Request | void; requestError?(httpErrorRequest: HttpErrorRequest, controller: IHttpInterceptController): Promise | Request | void; - response?(httpResponse: HttpResponse, controller: IHttpInterceptController): Promise | HttpResponse | void; - responseError?(httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptController): Promise | HttpResponse | void; + response?(httpResponse: HttpResponse, controller: IHttpInterceptController): Promise | InterceptedHttpResponse | void; + responseError?(httpErrorResponse: HttpErrorResponse, controller: IHttpInterceptController): Promise | InterceptedHttpResponse | void; } // @public @@ -486,13 +480,9 @@ export interface HttpRequestInit { } // @public (undocumented) -export interface HttpResponse { - // (undocumented) - body?: HttpBody; - // (undocumented) - request?: Request; +export interface HttpResponse extends InterceptedHttpResponse { // (undocumented) - response?: Response; + request: Readonly; } // @public (undocumented) @@ -563,6 +553,14 @@ export interface IHttpInterceptController { halted: boolean; } +// @public (undocumented) +export interface InterceptedHttpResponse { + // (undocumented) + body?: HttpBody; + // (undocumented) + response?: Response; +} + // @public export type IToasts = Pick; diff --git a/src/legacy/core_plugins/input_control_vis/public/control/list_control_factory.js b/src/legacy/core_plugins/input_control_vis/public/control/list_control_factory.js index 7d20d07ba05f6..cd4eac04df0e2 100644 --- a/src/legacy/core_plugins/input_control_vis/public/control/list_control_factory.js +++ b/src/legacy/core_plugins/input_control_vis/public/control/list_control_factory.js @@ -123,12 +123,12 @@ class ListControl extends Control { this.useTimeFilter, ancestorFilters ); - this.abortController.signal.addEventListener('abort', () => searchSource.cancelQueued()); + const abortSignal = this.abortController.signal; this.lastQuery = query; let resp; try { - resp = await searchSource.fetch(); + resp = await searchSource.fetch({ abortSignal }); } catch(error) { // If the fetch was aborted then no need to surface this error in the UI if (error.name === 'AbortError') return; diff --git a/src/legacy/core_plugins/input_control_vis/public/control/range_control_factory.js b/src/legacy/core_plugins/input_control_vis/public/control/range_control_factory.js index cb1c3111addf5..7febe228d614c 100644 --- a/src/legacy/core_plugins/input_control_vis/public/control/range_control_factory.js +++ b/src/legacy/core_plugins/input_control_vis/public/control/range_control_factory.js @@ -66,11 +66,11 @@ class RangeControl extends Control { const aggs = minMaxAgg(indexPattern.fields.getByName(fieldName)); const searchSource = createSearchSource(this.kbnApi, null, indexPattern, aggs, this.useTimeFilter); - this.abortController.signal.addEventListener('abort', () => searchSource.cancelQueued()); + const abortSignal = this.abortController.signal; let resp; try { - resp = await searchSource.fetch(); + resp = await searchSource.fetch({ abortSignal }); } catch(error) { // If the fetch was aborted then no need to surface this error in the UI if (error.name === 'AbortError') return; diff --git a/src/legacy/core_plugins/interpreter/public/functions/esaggs.ts b/src/legacy/core_plugins/interpreter/public/functions/esaggs.ts index 071861548a055..6fcfde0a5b06b 100644 --- a/src/legacy/core_plugins/interpreter/public/functions/esaggs.ts +++ b/src/legacy/core_plugins/interpreter/public/functions/esaggs.ts @@ -29,7 +29,6 @@ import chrome from 'ui/chrome'; import { TimeRange } from 'src/plugins/data/public'; import { SearchSource } from '../../../../ui/public/courier/search_source'; // @ts-ignore -import { SearchSourceProvider } from '../../../../ui/public/courier/search_source'; import { FilterBarQueryFilterProvider } from '../../../../ui/public/filter_manager/query_filter'; import { buildTabularInspectorData } from '../../../../ui/public/inspector/build_tabular_inspector_data'; @@ -100,8 +99,8 @@ const handleCourierRequest = async ({ return aggs.toDsl(metricsAtAllLevels); }); - requestSearchSource.onRequestStart((paramSearchSource: SearchSource, searchRequest: unknown) => { - return aggs.onSearchRequestStart(paramSearchSource, searchRequest); + requestSearchSource.onRequestStart((paramSearchSource: SearchSource, options: any) => { + return aggs.onSearchRequestStart(paramSearchSource, options); }); if (timeRange) { @@ -118,7 +117,7 @@ const handleCourierRequest = async ({ const queryHash = calculateObjectHash(reqBody); // We only need to reexecute the query, if forceFetch was true or the hash of the request body has changed // since the last request - const shouldQuery = forceFetch || searchSource.lastQuery !== queryHash; + const shouldQuery = forceFetch || (searchSource as any).lastQuery !== queryHash; if (shouldQuery) { inspectorAdapters.requests.reset(); @@ -139,18 +138,13 @@ const handleCourierRequest = async ({ request.stats(getRequestInspectorStats(requestSearchSource)); try { - // Abort any in-progress requests before fetching again - if (abortSignal) { - abortSignal.addEventListener('abort', () => requestSearchSource.cancelQueued()); - } - - const response = await requestSearchSource.fetch(); + const response = await requestSearchSource.fetch({ abortSignal }); - searchSource.lastQuery = queryHash; + (searchSource as any).lastQuery = queryHash; request.stats(getResponseInspectorStats(searchSource, response)).ok({ json: response }); - searchSource.rawResponse = response; + (searchSource as any).rawResponse = response; } catch (e) { // Log any error during request to the inspector request.error({ json: e }); @@ -166,7 +160,7 @@ const handleCourierRequest = async ({ // Note that rawResponse is not deeply cloned here, so downstream applications using courier // must take care not to mutate it, or it could have unintended side effects, e.g. displaying // response data incorrectly in the inspector. - let resp = searchSource.rawResponse; + let resp = (searchSource as any).rawResponse; for (const agg of aggs.aggs) { if (has(agg, 'type.postFlightRequest')) { resp = await agg.type.postFlightRequest( @@ -180,7 +174,7 @@ const handleCourierRequest = async ({ } } - searchSource.finalResponse = resp; + (searchSource as any).finalResponse = resp; const parsedTimeRange = timeRange ? getTime(aggs.indexPattern, timeRange) : null; const tabifyParams = { @@ -191,23 +185,24 @@ const handleCourierRequest = async ({ const tabifyCacheHash = calculateObjectHash({ tabifyAggs: aggs, ...tabifyParams }); // We only need to reexecute tabify, if either we did a new request or some input params to tabify changed - const shouldCalculateNewTabify = shouldQuery || searchSource.lastTabifyHash !== tabifyCacheHash; + const shouldCalculateNewTabify = + shouldQuery || (searchSource as any).lastTabifyHash !== tabifyCacheHash; if (shouldCalculateNewTabify) { - searchSource.lastTabifyHash = tabifyCacheHash; - searchSource.tabifiedResponse = tabifyAggResponse( + (searchSource as any).lastTabifyHash = tabifyCacheHash; + (searchSource as any).tabifiedResponse = tabifyAggResponse( aggs, - searchSource.finalResponse, + (searchSource as any).finalResponse, tabifyParams ); } inspectorAdapters.data.setTabularLoader( - () => buildTabularInspectorData(searchSource.tabifiedResponse, queryFilter), + () => buildTabularInspectorData((searchSource as any).tabifiedResponse, queryFilter), { returnsFormattedValues: true } ); - return searchSource.tabifiedResponse; + return (searchSource as any).tabifiedResponse; }; export const esaggs = (): ExpressionFunction => ({ @@ -249,7 +244,6 @@ export const esaggs = (): ExpressionFunction ({ const { visData, visConfig, params } = config; const visType = config.visType || visConfig.type; const $injector = await chrome.dangerouslyGetActiveInjector(); + const $rootScope = $injector.get('$rootScope') as any; const Private = $injector.get('Private') as any; const Vis = Private(VisProvider); if (handlers.vis) { // special case in visualize, we need to render first (without executing the expression), for maps to work if (visConfig) { - handlers.vis.setCurrentState({ type: visType, params: visConfig }); + $rootScope.$apply(() => { + handlers.vis.setCurrentState({ type: visType, params: visConfig }); + }); } } else { handlers.vis = new Vis({ diff --git a/src/legacy/core_plugins/kibana/public/context/api/__tests__/_stubs.js b/src/legacy/core_plugins/kibana/public/context/api/__tests__/_stubs.js index b93cc8e936fd3..ecb22b20e4d86 100644 --- a/src/legacy/core_plugins/kibana/public/context/api/__tests__/_stubs.js +++ b/src/legacy/core_plugins/kibana/public/context/api/__tests__/_stubs.js @@ -19,6 +19,7 @@ import sinon from 'sinon'; import moment from 'moment'; +import { SearchSource } from 'ui/courier'; export function createIndexPatternsStub() { return { @@ -31,7 +32,10 @@ export function createIndexPatternsStub() { }; } -export function createSearchSourceStubProvider(hits, timeField) { +/** + * A stubbed search source with a `fetch` method that returns all of `_stubHits`. + */ +export function createSearchSourceStub(hits, timeField) { const searchSourceStub = { _stubHits: hits, _stubTimeField: timeField, @@ -41,13 +45,37 @@ export function createSearchSourceStubProvider(hits, timeField) { }), }; - searchSourceStub.setParent = sinon.stub().returns(searchSourceStub); - searchSourceStub.setField = sinon.stub().returns(searchSourceStub); - searchSourceStub.getField = sinon.spy(key => { + searchSourceStub.setParent = sinon.stub(SearchSource.prototype, 'setParent').returns(searchSourceStub); + searchSourceStub.setField = sinon.stub(SearchSource.prototype, 'setField').returns(searchSourceStub); + searchSourceStub.getField = sinon.stub(SearchSource.prototype, 'getField').callsFake(key => { const previousSetCall = searchSourceStub.setField.withArgs(key).lastCall; return previousSetCall ? previousSetCall.args[1] : null; }); - searchSourceStub.fetch = sinon.spy(() => { + searchSourceStub.fetch = sinon.stub(SearchSource.prototype, 'fetch').callsFake(() => Promise.resolve({ + hits: { + hits: searchSourceStub._stubHits, + total: searchSourceStub._stubHits.length, + }, + })); + + searchSourceStub._restore = () => { + searchSourceStub.setParent.restore(); + searchSourceStub.setField.restore(); + searchSourceStub.getField.restore(); + searchSourceStub.fetch.restore(); + }; + + return searchSourceStub; +} + +/** + * A stubbed search source with a `fetch` method that returns a filtered set of `_stubHits`. + */ +export function createContextSearchSourceStub(hits, timeField = '@timestamp') { + const searchSourceStub = createSearchSourceStub(hits, timeField); + + searchSourceStub.fetch.restore(); + searchSourceStub.fetch = sinon.stub(SearchSource.prototype, 'fetch').callsFake(() => { const timeField = searchSourceStub._stubTimeField; const lastQuery = searchSourceStub.setField.withArgs('query').lastCall.args[1]; const timeRange = lastQuery.query.constant_score.filter.range[timeField]; @@ -71,7 +99,5 @@ export function createSearchSourceStubProvider(hits, timeField) { }); }); - return function SearchSourceStubProvider() { - return searchSourceStub; - }; + return searchSourceStub; } diff --git a/src/legacy/core_plugins/kibana/public/context/api/__tests__/anchor.js b/src/legacy/core_plugins/kibana/public/context/api/__tests__/anchor.js index 582de1c8fa74c..46e66177b516a 100644 --- a/src/legacy/core_plugins/kibana/public/context/api/__tests__/anchor.js +++ b/src/legacy/core_plugins/kibana/public/context/api/__tests__/anchor.js @@ -19,55 +19,34 @@ import expect from '@kbn/expect'; import ngMock from 'ng_mock'; -import sinon from 'sinon'; -import { createIndexPatternsStub } from './_stubs'; -import { SearchSourceProvider } from 'ui/courier'; +import { createIndexPatternsStub, createSearchSourceStub } from './_stubs'; import { fetchAnchorProvider } from '../anchor'; -function createSearchSourceStubProvider(hits) { - const searchSourceStub = { - _stubHits: hits, - }; - - searchSourceStub.setParent = sinon.stub().returns(searchSourceStub); - searchSourceStub.setField = sinon.stub().returns(searchSourceStub); - searchSourceStub.fetch = sinon.spy(() => Promise.resolve({ - hits: { - hits: searchSourceStub._stubHits, - total: searchSourceStub._stubHits.length, - }, - })); - - return function SearchSourceStubProvider() { - return searchSourceStub; - }; -} - describe('context app', function () { beforeEach(ngMock.module('kibana')); describe('function fetchAnchor', function () { let fetchAnchor; - let SearchSourceStub; + let searchSourceStub; beforeEach(ngMock.module(function createServiceStubs($provide) { $provide.value('indexPatterns', createIndexPatternsStub()); })); beforeEach(ngMock.inject(function createPrivateStubs(Private) { - SearchSourceStub = createSearchSourceStubProvider([ + searchSourceStub = createSearchSourceStub([ { _id: 'hit1' }, ]); - Private.stub(SearchSourceProvider, SearchSourceStub); - fetchAnchor = Private(fetchAnchorProvider); })); - it('should use the `fetch` method of the SearchSource', function () { - const searchSourceStub = new SearchSourceStub(); + afterEach(() => { + searchSourceStub._restore(); + }); + it('should use the `fetch` method of the SearchSource', function () { return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) .then(() => { expect(searchSourceStub.fetch.calledOnce).to.be(true); @@ -75,8 +54,6 @@ describe('context app', function () { }); it('should configure the SearchSource to not inherit from the implicit root', function () { - const searchSourceStub = new SearchSourceStub(); - return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) .then(() => { const setParentSpy = searchSourceStub.setParent; @@ -86,8 +63,6 @@ describe('context app', function () { }); it('should set the SearchSource index pattern', function () { - const searchSourceStub = new SearchSourceStub(); - return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) .then(() => { const setFieldSpy = searchSourceStub.setField; @@ -96,8 +71,6 @@ describe('context app', function () { }); it('should set the SearchSource version flag to true', function () { - const searchSourceStub = new SearchSourceStub(); - return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) .then(() => { const setVersionSpy = searchSourceStub.setField.withArgs('version'); @@ -107,8 +80,6 @@ describe('context app', function () { }); it('should set the SearchSource size to 1', function () { - const searchSourceStub = new SearchSourceStub(); - return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) .then(() => { const setSizeSpy = searchSourceStub.setField.withArgs('size'); @@ -118,8 +89,6 @@ describe('context app', function () { }); it('should set the SearchSource query to an ids query', function () { - const searchSourceStub = new SearchSourceStub(); - return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) .then(() => { const setQuerySpy = searchSourceStub.setField.withArgs('query'); @@ -140,8 +109,6 @@ describe('context app', function () { }); it('should set the SearchSource sort order', function () { - const searchSourceStub = new SearchSourceStub(); - return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) .then(() => { const setSortSpy = searchSourceStub.setField.withArgs('sort'); @@ -154,7 +121,6 @@ describe('context app', function () { }); it('should reject with an error when no hits were found', function () { - const searchSourceStub = new SearchSourceStub(); searchSourceStub._stubHits = []; return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }]) @@ -169,7 +135,6 @@ describe('context app', function () { }); it('should return the first hit after adding an anchor marker', function () { - const searchSourceStub = new SearchSourceStub(); searchSourceStub._stubHits = [ { property1: 'value1' }, { property2: 'value2' }, diff --git a/src/legacy/core_plugins/kibana/public/context/api/__tests__/predecessors.js b/src/legacy/core_plugins/kibana/public/context/api/__tests__/predecessors.js index 88efc8efc5d30..2bf3da42e24e5 100644 --- a/src/legacy/core_plugins/kibana/public/context/api/__tests__/predecessors.js +++ b/src/legacy/core_plugins/kibana/public/context/api/__tests__/predecessors.js @@ -22,8 +22,7 @@ import ngMock from 'ng_mock'; import moment from 'moment'; import * as _ from 'lodash'; -import { createIndexPatternsStub, createSearchSourceStubProvider } from './_stubs'; -import { SearchSourceProvider } from 'ui/courier'; +import { createIndexPatternsStub, createContextSearchSourceStub } from './_stubs'; import { fetchContextProvider } from '../context'; @@ -38,16 +37,14 @@ describe('context app', function () { describe('function fetchPredecessors', function () { let fetchPredecessors; - let getSearchSourceStub; + let searchSourceStub; beforeEach(ngMock.module(function createServiceStubs($provide) { $provide.value('indexPatterns', createIndexPatternsStub()); })); beforeEach(ngMock.inject(function createPrivateStubs(Private) { - getSearchSourceStub = createSearchSourceStubProvider([], '@timestamp', MS_PER_DAY * 8); - Private.stub(SearchSourceProvider, getSearchSourceStub); - + searchSourceStub = createContextSearchSourceStub([], '@timestamp', MS_PER_DAY * 8); fetchPredecessors = (indexPatternId, timeField, sortDir, timeValIso, timeValNr, tieBreakerField, tieBreakerValue, size) => { const anchor = { _source: { @@ -69,8 +66,11 @@ describe('context app', function () { }; })); + afterEach(() => { + searchSourceStub._restore(); + }); + it('should perform exactly one query when enough hits are returned', function () { - const searchSourceStub = getSearchSourceStub(); searchSourceStub._stubHits = [ searchSourceStub._createStubHit(MS_PER_DAY * 3000 + 2), searchSourceStub._createStubHit(MS_PER_DAY * 3000 + 1), @@ -97,7 +97,6 @@ describe('context app', function () { }); it('should perform multiple queries with the last being unrestricted when too few hits are returned', function () { - const searchSourceStub = getSearchSourceStub(); searchSourceStub._stubHits = [ searchSourceStub._createStubHit(MS_PER_DAY * 3010), searchSourceStub._createStubHit(MS_PER_DAY * 3002), @@ -134,7 +133,6 @@ describe('context app', function () { }); it('should perform multiple queries until the expected hit count is returned', function () { - const searchSourceStub = getSearchSourceStub(); searchSourceStub._stubHits = [ searchSourceStub._createStubHit(MS_PER_DAY * 1700), searchSourceStub._createStubHit(MS_PER_DAY * 1200), @@ -185,8 +183,6 @@ describe('context app', function () { }); it('should configure the SearchSource to not inherit from the implicit root', function () { - const searchSourceStub = getSearchSourceStub(); - return fetchPredecessors( 'INDEX_PATTERN_ID', '@timestamp', @@ -206,8 +202,6 @@ describe('context app', function () { }); it('should set the tiebreaker sort order to the opposite as the time field', function () { - const searchSourceStub = getSearchSourceStub(); - return fetchPredecessors( 'INDEX_PATTERN_ID', '@timestamp', diff --git a/src/legacy/core_plugins/kibana/public/context/api/__tests__/successors.js b/src/legacy/core_plugins/kibana/public/context/api/__tests__/successors.js index 57f7673d31183..b8bec40f2859c 100644 --- a/src/legacy/core_plugins/kibana/public/context/api/__tests__/successors.js +++ b/src/legacy/core_plugins/kibana/public/context/api/__tests__/successors.js @@ -22,8 +22,7 @@ import ngMock from 'ng_mock'; import moment from 'moment'; import * as _ from 'lodash'; -import { createIndexPatternsStub, createSearchSourceStubProvider } from './_stubs'; -import { SearchSourceProvider } from 'ui/courier'; +import { createIndexPatternsStub, createContextSearchSourceStub } from './_stubs'; import { fetchContextProvider } from '../context'; @@ -37,15 +36,14 @@ describe('context app', function () { describe('function fetchSuccessors', function () { let fetchSuccessors; - let getSearchSourceStub; + let searchSourceStub; beforeEach(ngMock.module(function createServiceStubs($provide) { $provide.value('indexPatterns', createIndexPatternsStub()); })); beforeEach(ngMock.inject(function createPrivateStubs(Private) { - getSearchSourceStub = createSearchSourceStubProvider([], '@timestamp'); - Private.stub(SearchSourceProvider, getSearchSourceStub); + searchSourceStub = createContextSearchSourceStub([], '@timestamp'); fetchSuccessors = (indexPatternId, timeField, sortDir, timeValIso, timeValNr, tieBreakerField, tieBreakerValue, size) => { const anchor = { @@ -68,8 +66,11 @@ describe('context app', function () { }; })); + afterEach(() => { + searchSourceStub._restore(); + }); + it('should perform exactly one query when enough hits are returned', function () { - const searchSourceStub = getSearchSourceStub(); searchSourceStub._stubHits = [ searchSourceStub._createStubHit(MS_PER_DAY * 5000), searchSourceStub._createStubHit(MS_PER_DAY * 4000), @@ -96,7 +97,6 @@ describe('context app', function () { }); it('should perform multiple queries with the last being unrestricted when too few hits are returned', function () { - const searchSourceStub = getSearchSourceStub(); searchSourceStub._stubHits = [ searchSourceStub._createStubHit(MS_PER_DAY * 3010), searchSourceStub._createStubHit(MS_PER_DAY * 3002), @@ -133,7 +133,6 @@ describe('context app', function () { }); it('should perform multiple queries until the expected hit count is returned', function () { - const searchSourceStub = getSearchSourceStub(); searchSourceStub._stubHits = [ searchSourceStub._createStubHit(MS_PER_DAY * 3000), searchSourceStub._createStubHit(MS_PER_DAY * 3000 - 1), @@ -187,8 +186,6 @@ describe('context app', function () { }); it('should configure the SearchSource to not inherit from the implicit root', function () { - const searchSourceStub = getSearchSourceStub(); - return fetchSuccessors( 'INDEX_PATTERN_ID', '@timestamp', @@ -208,8 +205,6 @@ describe('context app', function () { }); it('should set the tiebreaker sort order to the same as the time field', function () { - const searchSourceStub = getSearchSourceStub(); - return fetchSuccessors( 'INDEX_PATTERN_ID', '@timestamp', diff --git a/src/legacy/core_plugins/kibana/public/context/api/anchor.js b/src/legacy/core_plugins/kibana/public/context/api/anchor.js index bab75e14e8ed3..02a309eaa0165 100644 --- a/src/legacy/core_plugins/kibana/public/context/api/anchor.js +++ b/src/legacy/core_plugins/kibana/public/context/api/anchor.js @@ -21,11 +21,9 @@ import _ from 'lodash'; import { i18n } from '@kbn/i18n'; -import { SearchSourceProvider } from 'ui/courier'; - -export function fetchAnchorProvider(indexPatterns, Private) { - const SearchSource = Private(SearchSourceProvider); +import { SearchSource } from 'ui/courier'; +export function fetchAnchorProvider(indexPatterns) { return async function fetchAnchor( indexPatternId, anchorId, diff --git a/src/legacy/core_plugins/kibana/public/context/api/context.ts b/src/legacy/core_plugins/kibana/public/context/api/context.ts index baecf8a673521..48ac59f1f0855 100644 --- a/src/legacy/core_plugins/kibana/public/context/api/context.ts +++ b/src/legacy/core_plugins/kibana/public/context/api/context.ts @@ -18,8 +18,7 @@ */ // @ts-ignore -import { SearchSourceProvider, SearchSource } from 'ui/courier'; -import { IPrivate } from 'ui/private'; +import { SearchSource } from 'ui/courier'; import { Filter } from '@kbn/es-query'; import { IndexPatterns, IndexPattern } from 'ui/index_patterns'; import { reverseSortDir, SortDirection } from './utils/sorting'; @@ -42,9 +41,7 @@ const DAY_MILLIS = 24 * 60 * 60 * 1000; // look from 1 day up to 10000 days into the past and future const LOOKUP_OFFSETS = [0, 1, 7, 30, 365, 10000].map(days => days * DAY_MILLIS); -function fetchContextProvider(indexPatterns: IndexPatterns, Private: IPrivate) { - const SearchSourcePrivate: any = Private(SearchSourceProvider); - +function fetchContextProvider(indexPatterns: IndexPatterns) { return { fetchSurroundingDocs, }; @@ -116,7 +113,7 @@ function fetchContextProvider(indexPatterns: IndexPatterns, Private: IPrivate) { } async function createSearchSource(indexPattern: IndexPattern, filters: Filter[]) { - return new SearchSourcePrivate() + return new SearchSource() .setParent(false) .setField('index', indexPattern) .setField('filter', filters); diff --git a/src/legacy/core_plugins/kibana/public/dashboard/__tests__/get_saved_dashboard_mock.ts b/src/legacy/core_plugins/kibana/public/dashboard/__tests__/get_saved_dashboard_mock.ts index f9f5cfe0214b2..01468eadffb84 100644 --- a/src/legacy/core_plugins/kibana/public/dashboard/__tests__/get_saved_dashboard_mock.ts +++ b/src/legacy/core_plugins/kibana/public/dashboard/__tests__/get_saved_dashboard_mock.ts @@ -17,6 +17,7 @@ * under the License. */ +import { searchSourceMock } from '../../../../../ui/public/courier/search_source/mocks'; import { SavedObjectDashboard } from '../saved_dashboard/saved_dashboard'; export function getSavedDashboardMock( @@ -26,10 +27,7 @@ export function getSavedDashboardMock( id: '123', title: 'my dashboard', panelsJSON: '[]', - searchSource: { - getOwnField: (param: any) => param, - setField: () => {}, - }, + searchSource: searchSourceMock, copyOnSave: false, timeRestore: false, timeTo: 'now', diff --git a/src/legacy/core_plugins/kibana/public/dev_tools/partials/dev_tools_app.html b/src/legacy/core_plugins/kibana/public/dev_tools/partials/dev_tools_app.html index e9424534cd9d2..6c076092c76d5 100644 --- a/src/legacy/core_plugins/kibana/public/dev_tools/partials/dev_tools_app.html +++ b/src/legacy/core_plugins/kibana/public/dev_tools/partials/dev_tools_app.html @@ -1,9 +1,9 @@ -
- +
+ -
-
+
+ diff --git a/src/legacy/core_plugins/kibana/public/discover/controllers/discover.js b/src/legacy/core_plugins/kibana/public/discover/controllers/discover.js index bd07b53e44801..735189dbd4c86 100644 --- a/src/legacy/core_plugins/kibana/public/discover/controllers/discover.js +++ b/src/legacy/core_plugins/kibana/public/discover/controllers/discover.js @@ -229,7 +229,10 @@ function discoverController( // the saved savedSearch const savedSearch = $route.current.locals.savedSearch; + + let abortController; $scope.$on('$destroy', () => { + if (abortController) abortController.abort(); savedSearch.destroy(); subscriptions.unsubscribe(); }); @@ -752,7 +755,8 @@ function discoverController( $scope.updateTime(); // Abort any in-progress requests before fetching again - $scope.searchSource.cancelQueued(); + if (abortController) abortController.abort(); + abortController = new AbortController(); $scope.updateDataSource() .then(setupVisualization) @@ -760,7 +764,9 @@ function discoverController( $state.save(); $scope.fetchStatus = fetchStatuses.LOADING; logInspectorRequest(); - return $scope.searchSource.fetch(); + return $scope.searchSource.fetch({ + abortSignal: abortController.signal + }); }) .then(onResults) .catch((error) => { @@ -1039,8 +1045,8 @@ function discoverController( ); visSavedObject.vis = $scope.vis; - $scope.searchSource.onRequestStart((searchSource, searchRequest) => { - return $scope.vis.getAggConfig().onSearchRequestStart(searchSource, searchRequest); + $scope.searchSource.onRequestStart((searchSource, options) => { + return $scope.vis.getAggConfig().onSearchRequestStart(searchSource, options); }); $scope.searchSource.setField('aggs', function () { diff --git a/src/legacy/core_plugins/kibana/public/discover/embeddable/search_embeddable.ts b/src/legacy/core_plugins/kibana/public/discover/embeddable/search_embeddable.ts index d5bf868f3bf72..eaec11ff893ed 100644 --- a/src/legacy/core_plugins/kibana/public/discover/embeddable/search_embeddable.ts +++ b/src/legacy/core_plugins/kibana/public/discover/embeddable/search_embeddable.ts @@ -102,12 +102,13 @@ export class SearchEmbeddable extends Embeddable private inspectorAdaptors: Adapters; private searchScope?: SearchScope; private panelTitle: string = ''; - private filtersSearchSource: SearchSource; + private filtersSearchSource?: SearchSource; private searchInstance?: JQLite; private autoRefreshFetchSubscription?: Subscription; private subscription?: Subscription; public readonly type = SEARCH_EMBEDDABLE_TYPE; private filterGen: FilterManager; + private abortController?: AbortController; private prevTimeRange?: TimeRange; private prevFilters?: Filter[]; @@ -193,7 +194,7 @@ export class SearchEmbeddable extends Embeddable if (this.autoRefreshFetchSubscription) { this.autoRefreshFetchSubscription.unsubscribe(); } - this.savedSearch.searchSource.cancelQueued(); + if (this.abortController) this.abortController.abort(); } private initializeSearchScope() { @@ -273,7 +274,8 @@ export class SearchEmbeddable extends Embeddable const { searchSource } = this.savedSearch; // Abort any in-progress requests - searchSource.cancelQueued(); + if (this.abortController) this.abortController.abort(); + this.abortController = new AbortController(); searchSource.setField('size', config.get('discover:sampleSize')); searchSource.setField( @@ -299,7 +301,9 @@ export class SearchEmbeddable extends Embeddable try { // Make the request - const resp = await searchSource.fetch(); + const resp = await searchSource.fetch({ + abortSignal: this.abortController.signal, + }); this.searchScope.isLoading = false; @@ -337,8 +341,8 @@ export class SearchEmbeddable extends Embeddable searchScope.sharedItemTitle = this.panelTitle; if (isFetchRequired) { - this.filtersSearchSource.setField('filter', this.input.filters); - this.filtersSearchSource.setField('query', this.input.query); + this.filtersSearchSource!.setField('filter', this.input.filters); + this.filtersSearchSource!.setField('query', this.input.query); this.fetch(); diff --git a/src/legacy/core_plugins/kibana/public/doc_viewer/doc_viewer_directive.ts b/src/legacy/core_plugins/kibana/public/doc_viewer/doc_viewer_directive.ts index 202fca6ee7b52..fa6145c45f55f 100644 --- a/src/legacy/core_plugins/kibana/public/doc_viewer/doc_viewer_directive.ts +++ b/src/legacy/core_plugins/kibana/public/doc_viewer/doc_viewer_directive.ts @@ -22,15 +22,26 @@ import { uiModules } from 'ui/modules'; import { DocViewer } from './doc_viewer'; uiModules.get('apps/discover').directive('docViewer', (reactDirective: any) => { - return reactDirective(DocViewer, undefined, { - restrict: 'E', - scope: { - hit: '=', - indexPattern: '=', - filter: '=?', - columns: '=?', - onAddColumn: '=?', - onRemoveColumn: '=?', - }, - }); + return reactDirective( + DocViewer, + [ + 'hit', + ['indexPattern', { watchDepth: 'reference' }], + ['filter', { watchDepth: 'reference' }], + ['columns', { watchDepth: 'collection' }], + ['onAddColumn', { watchDepth: 'reference' }], + ['onRemoveColumn', { watchDepth: 'reference' }], + ], + { + restrict: 'E', + scope: { + hit: '=', + indexPattern: '=', + filter: '=?', + columns: '=?', + onAddColumn: '=?', + onRemoveColumn: '=?', + }, + } + ); }); diff --git a/src/legacy/core_plugins/kibana/public/management/sections/index_patterns/edit_index_pattern/edit_index_pattern.js b/src/legacy/core_plugins/kibana/public/management/sections/index_patterns/edit_index_pattern/edit_index_pattern.js index f362347118dd8..e08f3a064da52 100644 --- a/src/legacy/core_plugins/kibana/public/management/sections/index_patterns/edit_index_pattern/edit_index_pattern.js +++ b/src/legacy/core_plugins/kibana/public/management/sections/index_patterns/edit_index_pattern/edit_index_pattern.js @@ -27,7 +27,7 @@ import { fatalError, toastNotifications } from 'ui/notify'; import uiRoutes from 'ui/routes'; import { uiModules } from 'ui/modules'; import template from './edit_index_pattern.html'; -import { FieldWildcardProvider } from 'ui/field_wildcard'; +import { fieldWildcardMatcher } from 'ui/field_wildcard'; import { IndexPatternListFactory } from 'ui/management/index_pattern_list'; import React from 'react'; import { render, unmountComponentAtNode } from 'react-dom'; @@ -173,10 +173,9 @@ uiModules.get('apps/management') .controller('managementIndexPatternsEdit', function ( $scope, $location, $route, Promise, config, indexPatterns, Private, AppState, confirmModal) { const $state = $scope.state = new AppState(); - const { fieldWildcardMatcher } = Private(FieldWildcardProvider); const indexPatternListProvider = Private(IndexPatternListFactory)(); - $scope.fieldWildcardMatcher = fieldWildcardMatcher; + $scope.fieldWildcardMatcher = (...args) => fieldWildcardMatcher(...args, config.get('metaFields')); $scope.editSectionsProvider = Private(IndicesEditSectionsProvider); $scope.kbnUrl = Private(KbnUrlProvider); $scope.indexPattern = $route.current.locals.indexPattern; diff --git a/src/legacy/ui/public/_index.scss b/src/legacy/ui/public/_index.scss index 2ce9a0a8aa06f..98675402b43cc 100644 --- a/src/legacy/ui/public/_index.scss +++ b/src/legacy/ui/public/_index.scss @@ -13,7 +13,6 @@ @import './courier/index'; @import './collapsible_sidebar/index'; @import './directives/index'; -@import './error_allow_explicit_index/index'; @import './error_auto_create_index/index'; @import './error_url_overflow/index'; @import './exit_full_screen/index'; diff --git a/src/legacy/ui/public/agg_types/__tests__/metrics/parent_pipeline.js b/src/legacy/ui/public/agg_types/__tests__/metrics/parent_pipeline.js index 3c8fde7eb7135..e4ca6075c624b 100644 --- a/src/legacy/ui/public/agg_types/__tests__/metrics/parent_pipeline.js +++ b/src/legacy/ui/public/agg_types/__tests__/metrics/parent_pipeline.js @@ -203,7 +203,6 @@ describe('parent pipeline aggs', function () { }); const searchSource = {}; - const request = {}; const customMetricSpy = sinon.spy(); const customMetric = aggConfig.params.customMetric; @@ -211,9 +210,9 @@ describe('parent pipeline aggs', function () { customMetric.type.params[0].modifyAggConfigOnSearchRequestStart = customMetricSpy; aggConfig.type.params.forEach(param => { - param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource, request); + param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource); }); - expect(customMetricSpy.calledWith(customMetric, searchSource, request)).to.be(true); + expect(customMetricSpy.calledWith(customMetric, searchSource)).to.be(true); }); }); }); diff --git a/src/legacy/ui/public/agg_types/__tests__/metrics/sibling_pipeline.js b/src/legacy/ui/public/agg_types/__tests__/metrics/sibling_pipeline.js index fef69155d2351..aba5db9cedadf 100644 --- a/src/legacy/ui/public/agg_types/__tests__/metrics/sibling_pipeline.js +++ b/src/legacy/ui/public/agg_types/__tests__/metrics/sibling_pipeline.js @@ -145,7 +145,6 @@ describe('sibling pipeline aggs', function () { init(); const searchSource = {}; - const request = {}; const customMetricSpy = sinon.spy(); const customBucketSpy = sinon.spy(); const { customMetric, customBucket } = aggConfig.params; @@ -155,10 +154,10 @@ describe('sibling pipeline aggs', function () { customBucket.type.params[0].modifyAggConfigOnSearchRequestStart = customBucketSpy; aggConfig.type.params.forEach(param => { - param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource, request); + param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource); }); - expect(customMetricSpy.calledWith(customMetric, searchSource, request)).to.be(true); - expect(customBucketSpy.calledWith(customBucket, searchSource, request)).to.be(true); + expect(customMetricSpy.calledWith(customMetric, searchSource)).to.be(true); + expect(customBucketSpy.calledWith(customBucket, searchSource)).to.be(true); }); }); diff --git a/src/legacy/ui/public/agg_types/agg_config.ts b/src/legacy/ui/public/agg_types/agg_config.ts index 9898682b5d558..a5b1aa7cf9c0b 100644 --- a/src/legacy/ui/public/agg_types/agg_config.ts +++ b/src/legacy/ui/public/agg_types/agg_config.ts @@ -238,14 +238,14 @@ export class AggConfig { * @param {Courier.SearchRequest} searchRequest * @return {Promise} */ - onSearchRequestStart(searchSource: any, searchRequest: any) { + onSearchRequestStart(searchSource: any, options: any) { if (!this.type) { return Promise.resolve(); } return Promise.all( this.type.params.map((param: any) => - param.modifyAggConfigOnSearchRequestStart(this, searchSource, searchRequest) + param.modifyAggConfigOnSearchRequestStart(this, searchSource, options) ) ); } diff --git a/src/legacy/ui/public/agg_types/agg_configs.ts b/src/legacy/ui/public/agg_types/agg_configs.ts index e90d91eb7fd7f..675d37d05c33c 100644 --- a/src/legacy/ui/public/agg_types/agg_configs.ts +++ b/src/legacy/ui/public/agg_types/agg_configs.ts @@ -307,12 +307,10 @@ export class AggConfigs { return _.find(reqAgg.getResponseAggs(), { id }); } - onSearchRequestStart(searchSource: any, searchRequest: any) { + onSearchRequestStart(searchSource: any, options: any) { return Promise.all( // @ts-ignore - this.getRequestAggs().map((agg: AggConfig) => - agg.onSearchRequestStart(searchSource, searchRequest) - ) + this.getRequestAggs().map((agg: AggConfig) => agg.onSearchRequestStart(searchSource, options)) ); } } diff --git a/src/legacy/ui/public/agg_types/buckets/histogram.ts b/src/legacy/ui/public/agg_types/buckets/histogram.ts index 516f17be0643e..23edefc67d506 100644 --- a/src/legacy/ui/public/agg_types/buckets/histogram.ts +++ b/src/legacy/ui/public/agg_types/buckets/histogram.ts @@ -92,7 +92,7 @@ export const histogramBucketAgg = new BucketAggType({ modifyAggConfigOnSearchRequestStart( aggConfig: IBucketHistogramAggConfig, searchSource: any, - searchRequest: any + options: any ) { const field = aggConfig.getField(); const aggBody = field.scripted @@ -111,10 +111,8 @@ export const histogramBucketAgg = new BucketAggType({ }, }); - searchRequest.whenAborted(() => childSearchSource.cancelQueued()); - return childSearchSource - .fetch() + .fetch(options) .then((resp: any) => { aggConfig.setAutoBounds({ min: _.get(resp, 'aggregations.minAgg.value'), diff --git a/src/legacy/ui/public/agg_types/buckets/terms.ts b/src/legacy/ui/public/agg_types/buckets/terms.ts index ad470c8f64b84..bc6dd4860561e 100644 --- a/src/legacy/ui/public/agg_types/buckets/terms.ts +++ b/src/legacy/ui/public/agg_types/buckets/terms.ts @@ -111,9 +111,6 @@ export const termsBucketAgg = new BucketAggType({ if (aggConfig.params.otherBucket) { const filterAgg = buildOtherBucketAgg(aggConfigs, aggConfig, resp); if (!filterAgg) return resp; - if (abortSignal) { - abortSignal.addEventListener('abort', () => nestedSearchSource.cancelQueued()); - } nestedSearchSource.setField('aggs', filterAgg); @@ -134,7 +131,7 @@ export const termsBucketAgg = new BucketAggType({ }); request.stats(getRequestInspectorStats(nestedSearchSource)); - const response = await nestedSearchSource.fetch(); + const response = await nestedSearchSource.fetch({ abortSignal }); request.stats(getResponseInspectorStats(nestedSearchSource, response)).ok({ json: response }); resp = mergeOtherBucketAggResponse(aggConfigs, resp, response, aggConfig, filterAgg()); } diff --git a/src/legacy/ui/public/agg_types/param_types/base.ts b/src/legacy/ui/public/agg_types/param_types/base.ts index 88fc24eeb53f5..bc8ed5d485bd4 100644 --- a/src/legacy/ui/public/agg_types/param_types/base.ts +++ b/src/legacy/ui/public/agg_types/param_types/base.ts @@ -46,18 +46,17 @@ export class BaseParamType implements AggParam { /** * A function that will be called before an aggConfig is serialized and sent to ES. - * Allows aggConfig to retrieve values needed for serialization by creating a {SearchRequest} + * Allows aggConfig to retrieve values needed for serialization * Example usage: an aggregation needs to know the min/max of a field to determine an appropriate interval * - * @param {AggConfig} aggconfig + * @param {AggConfig} aggConfig * @param {Courier.SearchSource} searchSource - * @param {Courier.SearchRequest} searchRequest * @returns {Promise|undefined} */ modifyAggConfigOnSearchRequestStart: ( - aggconfig: AggConfig, + aggConfig: AggConfig, searchSource?: SearchSource, - searchRequest?: any + options?: any ) => void; constructor(config: Record) { diff --git a/src/legacy/ui/public/chrome/directives/kbn_chrome.html b/src/legacy/ui/public/chrome/directives/kbn_chrome.html index ced89287d310f..541082e68de58 100644 --- a/src/legacy/ui/public/chrome/directives/kbn_chrome.html +++ b/src/legacy/ui/public/chrome/directives/kbn_chrome.html @@ -1,9 +1,9 @@
-
+ >
diff --git a/src/legacy/ui/public/courier/fetch/__tests__/call_client.js b/src/legacy/ui/public/courier/fetch/__tests__/call_client.js deleted file mode 100644 index 1a473446df872..0000000000000 --- a/src/legacy/ui/public/courier/fetch/__tests__/call_client.js +++ /dev/null @@ -1,349 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import sinon from 'sinon'; -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; -import NoDigestPromises from 'test_utils/no_digest_promises'; -import { delay } from 'bluebird'; - -import { CallClientProvider } from '../call_client'; -import { RequestStatus } from '../req_status'; -import { SearchRequestProvider } from '../request'; -import { addSearchStrategy } from '../../search_strategy'; - -describe('callClient', () => { - NoDigestPromises.activateForSuite(); - - const ABORTED = RequestStatus.ABORTED; - - let SearchRequest; - let callClient; - let fakeSearch; - let searchRequests; - let esRequestDelay; - let esShouldError; - let esPromiseAbortSpy; - - const createSearchRequest = (id, overrides = {}, errorHandler = () => {}) => { - const { source: overrideSource, ...rest } = overrides; - - const source = { - _flatten: () => Promise.resolve({ - index: id - }), - requestIsStopped: () => {}, - getField: () => 'indexPattern', - getPreferredSearchStrategyId: () => undefined, - ...overrideSource - }; - - const searchRequest = new SearchRequest({ source, errorHandler, ...rest }); - searchRequest.__testId__ = id; - return searchRequest; - }; - - beforeEach(ngMock.module('kibana')); - - beforeEach(ngMock.module(function stubEs($provide) { - esRequestDelay = 0; - esShouldError = false; - - $provide.service('es', (Promise) => { - fakeSearch = sinon.spy(({ index }) => { - const esPromise = new Promise((resolve, reject) => { - if (esShouldError) { - return reject('fake es error'); - } - - setTimeout(() => { - resolve(index); - }, esRequestDelay); - }); - - esPromise.abort = esPromiseAbortSpy = sinon.spy(); - return esPromise; - }); - - return { - search: fakeSearch - }; - }); - })); - - beforeEach(ngMock.inject(Private => { - callClient = Private(CallClientProvider); - SearchRequest = Private(SearchRequestProvider); - })); - - describe('basic contract', () => { - it('returns a promise', () => { - searchRequests = [ createSearchRequest() ]; - const callingClient = callClient(searchRequests); - expect(callingClient.then).to.be.a('function'); - }); - - it(`resolves the promise with the 'responses' property of the es.search() result`, () => { - searchRequests = [ createSearchRequest(1) ]; - - return callClient(searchRequests).then(results => { - expect(results).to.eql([1]); - }); - }); - - describe('for failing requests', () => { - beforeEach(() => { - addSearchStrategy({ - id: 'fail', - isViable: indexPattern => { - return indexPattern.type === 'fail'; - }, - search: () => { - return { - searching: Promise.reject(new Error('Search failed')), - failedSearchRequests: [], - abort: () => {}, - }; - }, - }); - }); - - it(`still bubbles up the failure`, () => { - const searchRequestFail1 = createSearchRequest('fail1', { - source: { - getField: () => ({ type: 'fail' }), - }, - }); - - const searchRequestFail2 = createSearchRequest('fail2', { - source: { - getField: () => ({ type: 'fail' }), - }, - }); - - searchRequests = [ searchRequestFail1, searchRequestFail2 ]; - - return callClient(searchRequests).then(results => { - expect(results).to.eql([ - { error: new Error('Search failed') }, - { error: new Error('Search failed') }, - ]); - }); - }); - }); - }); - - describe('implementation', () => { - it('calls searchRequest.whenAborted() as part of setup', async () => { - const whenAbortedSpy = sinon.spy(); - const searchRequest = createSearchRequest(); - searchRequest.whenAborted = whenAbortedSpy; - searchRequests = [ searchRequest ]; - - return callClient(searchRequests).then(() => { - expect(whenAbortedSpy.callCount).to.be(1); - }); - }); - }); - - describe('aborting at different points in the request lifecycle:', () => { - it('while the search body is being formed rejects with an AbortError', () => { - const searchRequest = createSearchRequest(1, { - source: { - _flatten: () => { - return new Promise(resolve => { - setTimeout(() => { - resolve({}); - }, 100); - }); - }, - requestIsStopped: () => {}, - }, - }); - - searchRequests = [ searchRequest ]; - const callingClient = callClient(searchRequests); - - // Abort the request while the search body is being formed. - setTimeout(() => { - searchRequest.abort(); - }, 20); - - return callingClient.catch(error => { - expect(error.name).to.be('AbortError'); - }); - }); - - it('while the search is in flight rejects with an AbortError', () => { - esRequestDelay = 100; - - const searchRequest = createSearchRequest(); - searchRequests = [ searchRequest ]; - const callingClient = callClient(searchRequests); - - // Abort the request while the search is in flight.. - setTimeout(() => { - searchRequest.abort(); - }, 80); - - return callingClient.catch(error => { - expect(error.name).to.be('AbortError'); - }); - }); - }); - - describe('aborting number of requests:', () => { - it(`aborting all searchRequests rejects with an AbortError`, () => { - const searchRequest1 = createSearchRequest(); - const searchRequest2 = createSearchRequest(); - searchRequests = [ searchRequest1, searchRequest2 ]; - const callingClient = callClient(searchRequests); - - searchRequest1.abort(); - searchRequest2.abort(); - - return callingClient.catch(error => { - expect(error.name).to.be('AbortError'); - }); - }); - - it(`aborting all searchRequests calls abort() on the promise returned by searchStrategy.search()`, () => { - esRequestDelay = 100; - - const searchRequest1 = createSearchRequest(); - const searchRequest2 = createSearchRequest(); - searchRequests = [ searchRequest1, searchRequest2 ]; - - const callingClient = callClient(searchRequests); - - return Promise.all([ - delay(70).then(() => { - // At this point we expect the request to be in flight. - expect(esPromiseAbortSpy.callCount).to.be(0); - searchRequest1.abort(); - searchRequest2.abort(); - }), - callingClient.catch(() => { - expect(esPromiseAbortSpy.callCount).to.be(1); - }), - ]); - }); - - it('aborting some searchRequests rejects with an AbortError', () => { - const searchRequest1 = createSearchRequest(1); - const searchRequest2 = createSearchRequest(2); - searchRequests = [ searchRequest1, searchRequest2 ]; - const callingClient = callClient(searchRequests); - searchRequest2.abort(); - - return callingClient.catch(error => { - expect(error.name).to.be('AbortError'); - }); - }); - }); - - describe('searchRequests with multiple searchStrategies map correctly to their responses', () => { - const search = ({ searchRequests }) => { - return { - searching: Promise.resolve(searchRequests.map(searchRequest => searchRequest.__testId__)), - failedSearchRequests: [], - abort: () => {}, - }; - }; - - const searchStrategyA = { - id: 'a', - isViable: indexPattern => { - return indexPattern.type === 'a'; - }, - search, - }; - - const searchStrategyB = { - id: 'b', - isViable: indexPattern => { - return indexPattern.type === 'b'; - }, - search, - }; - - let searchRequestA; - let searchRequestB; - let searchRequestA2; - - beforeEach(() => { - addSearchStrategy(searchStrategyA); - addSearchStrategy(searchStrategyB); - - searchRequestA = createSearchRequest('a', { - source: { - getField: () => ({ type: 'a' }), - getSearchStrategyForSearchRequest: () => {}, - getPreferredSearchStrategyId: () => {}, - }, - }); - - searchRequestB = createSearchRequest('b', { - source: { - getField: () => ({ type: 'b' }), - getSearchStrategyForSearchRequest: () => {}, - getPreferredSearchStrategyId: () => {}, - }, - }); - - searchRequestA2 = createSearchRequest('a2', { - source: { - getField: () => ({ type: 'a' }), - getSearchStrategyForSearchRequest: () => {}, - getPreferredSearchStrategyId: () => {}, - }, - }); - }); - - it('if the searchRequests are reordered by the searchStrategies', () => { - // Add requests in an order which will be reordered by the strategies. - searchRequests = [ searchRequestA, searchRequestB, searchRequestA2 ]; - const callingClient = callClient(searchRequests); - - return callingClient.then(results => { - expect(results).to.eql(['a', 'b', 'a2']); - }); - }); - - it('if one is aborted after being provided', () => { - // Add requests in an order which will be reordered by the strategies. - searchRequests = [ searchRequestA, searchRequestB, searchRequestA2 ]; - const callingClient = callClient(searchRequests); - searchRequestA2.abort(); - - return callingClient.then(results => { - expect(results).to.eql(['a', 'b', ABORTED]); - }); - }); - - it(`if one is already aborted when it's provided`, () => { - searchRequests = [ searchRequestA, searchRequestB, ABORTED, searchRequestA2 ]; - const callingClient = callClient(searchRequests); - - return callingClient.then(results => { - expect(results).to.eql(['a', 'b', ABORTED, 'a2']); - }); - }); - }); -}); diff --git a/src/legacy/ui/public/courier/fetch/__tests__/fetch_now.js b/src/legacy/ui/public/courier/fetch/__tests__/fetch_now.js deleted file mode 100644 index 19032ce1f4ca3..0000000000000 --- a/src/legacy/ui/public/courier/fetch/__tests__/fetch_now.js +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import sinon from 'sinon'; -import expect from '@kbn/expect'; -import ngMock from 'ng_mock'; - -import { CallClientProvider } from '../call_client'; -import { CallResponseHandlersProvider } from '../call_response_handlers'; -import { ContinueIncompleteProvider } from '../continue_incomplete'; -import { FetchNowProvider } from '../fetch_now'; - -function mockRequest() { - return { - strategy: 'mock', - started: true, - aborted: false, - handleFailure: sinon.spy(), - retry: sinon.spy(function () { return this; }), - continue: sinon.spy(function () { return this; }), - start: sinon.spy(function () { return this; }) - }; -} - -describe('FetchNowProvider', () => { - - let Promise; - let $rootScope; - let fetchNow; - let request; - let requests; - let fakeResponses; - - beforeEach(ngMock.module('kibana', (PrivateProvider) => { - function FakeResponsesProvider(Promise) { - fakeResponses = sinon.spy(function () { - return Promise.map(requests, mockRequest => { - return { mockRequest }; - }); - }); - return fakeResponses; - } - - PrivateProvider.swap(CallClientProvider, FakeResponsesProvider); - PrivateProvider.swap(CallResponseHandlersProvider, FakeResponsesProvider); - PrivateProvider.swap(ContinueIncompleteProvider, FakeResponsesProvider); - })); - - beforeEach(ngMock.inject((Private, $injector) => { - $rootScope = $injector.get('$rootScope'); - Promise = $injector.get('Promise'); - fetchNow = Private(FetchNowProvider); - request = mockRequest(); - requests = [ request ]; - })); - - describe('when request has not started', () => { - beforeEach(() => requests.forEach(req => req.started = false)); - - it('starts request', () => { - fetchNow(requests); - expect(request.start.called).to.be(true); - expect(request.continue.called).to.be(false); - }); - - it('waits for returned promise from start() to be fulfilled', () => { - request.start = sinon.stub().returns(Promise.resolve(request)); - fetchNow(requests); - - expect(request.start.callCount).to.be(1); - expect(fakeResponses.callCount).to.be(0); - $rootScope.$apply(); - expect(fakeResponses.callCount).to.be(3); - }); - - it('invokes request failure handler if starting fails', () => { - request.start = sinon.stub().returns(Promise.reject('some error')); - fetchNow(requests); - $rootScope.$apply(); - sinon.assert.calledWith(request.handleFailure, 'some error'); - }); - }); - - describe('when request has already started', () => { - it('continues request', () => { - fetchNow(requests); - expect(request.start.called).to.be(false); - expect(request.continue.called).to.be(true); - }); - it('waits for returned promise to be fulfilled', () => { - request.continue = sinon.stub().returns(Promise.resolve(request)); - fetchNow(requests); - - expect(request.continue.callCount).to.be(1); - expect(fakeResponses.callCount).to.be(0); - $rootScope.$apply(); - expect(fakeResponses.callCount).to.be(3); - }); - it('invokes request failure handler if continuing fails', () => { - request.continue = sinon.stub().returns(Promise.reject('some error')); - fetchNow(requests); - $rootScope.$apply(); - sinon.assert.calledWith(request.handleFailure, 'some error'); - }); - }); -}); diff --git a/src/legacy/ui/public/courier/fetch/call_client.js b/src/legacy/ui/public/courier/fetch/call_client.js index 7ba73e741c074..971ae4c49a604 100644 --- a/src/legacy/ui/public/courier/fetch/call_client.js +++ b/src/legacy/ui/public/courier/fetch/call_client.js @@ -17,187 +17,37 @@ * under the License. */ -import { ErrorAllowExplicitIndexProvider } from '../../error_allow_explicit_index'; -import { assignSearchRequestsToSearchStrategies } from '../search_strategy'; -import { IsRequestProvider } from './is_request'; -import { RequestStatus } from './req_status'; -import { SerializeFetchParamsProvider } from './request/serialize_fetch_params'; -import { i18n } from '@kbn/i18n'; -import { createDefer } from 'ui/promises'; - -export function CallClientProvider(Private, Promise, es, config, sessionId, esShardTimeout) { - const errorAllowExplicitIndex = Private(ErrorAllowExplicitIndexProvider); - const isRequest = Private(IsRequestProvider); - const serializeFetchParams = Private(SerializeFetchParamsProvider); - - const ABORTED = RequestStatus.ABORTED; - - function callClient(searchRequests) { - // get the actual list of requests that we will be fetching - const requestsToFetch = searchRequests.filter(isRequest); - let requestsToFetchCount = requestsToFetch.length; - - if (requestsToFetchCount === 0) { - return Promise.resolve([]); - } - - // This is how we'll provide the consumer with search responses. Resolved by - // respondToSearchRequests. - const defer = createDefer(Promise); - - const abortableSearches = []; - let areAllSearchRequestsAborted = false; - - // When we traverse our search requests and send out searches, some of them may fail. We'll - // store those that don't fail here. - const activeSearchRequests = []; - - // Respond to each searchRequest with the response or ABORTED. - const respondToSearchRequests = (responsesInOriginalRequestOrder = []) => { - // We map over searchRequests because if we were originally provided an ABORTED - // request then we'll return that value. - return Promise.map(searchRequests, function (searchRequest, searchRequestIndex) { - if (searchRequest.aborted) { - return ABORTED; - } - - const status = searchRequests[searchRequestIndex]; - - if (status === ABORTED) { - return ABORTED; - } - - const activeSearchRequestIndex = activeSearchRequests.indexOf(searchRequest); - const isFailedSearchRequest = activeSearchRequestIndex === -1; - - if (isFailedSearchRequest) { - return ABORTED; - } - - return responsesInOriginalRequestOrder[searchRequestIndex]; - }) - .then( - (res) => defer.resolve(res), - (err) => defer.reject(err) - ); - }; - - // handle a request being aborted while being fetched - const requestWasAborted = Promise.method(function (searchRequest, index) { - if (searchRequests[index] === ABORTED) { - defer.reject(new Error( - i18n.translate('common.ui.courier.fetch.requestWasAbortedTwiceErrorMessage', { - defaultMessage: 'Request was aborted twice?', - }) - )); - } - - requestsToFetchCount--; - - if (requestsToFetchCount !== 0) { - // We can't resolve early unless all searchRequests have been aborted. - return; - } - - abortableSearches.forEach(({ abort }) => { - abort(); - }); - - areAllSearchRequestsAborted = true; - - return respondToSearchRequests(); - }); - - // attach abort handlers, close over request index - searchRequests.forEach(function (searchRequest, index) { - if (!isRequest(searchRequest)) { - return; - } - - searchRequest.whenAborted(function () { - requestWasAborted(searchRequest, index).catch(defer.reject); - }); - }); - - const searchStrategiesWithRequests = assignSearchRequestsToSearchStrategies(requestsToFetch); - - // We're going to create a new async context here, so that the logic within it can execute - // asynchronously after we've returned a reference to defer.promise. - Promise.resolve().then(async () => { - // Execute each request using its search strategy. - for (let i = 0; i < searchStrategiesWithRequests.length; i++) { - const searchStrategyWithSearchRequests = searchStrategiesWithRequests[i]; - const { searchStrategy, searchRequests } = searchStrategyWithSearchRequests; - const { - searching, - abort, - failedSearchRequests, - } = await searchStrategy.search({ searchRequests, es, Promise, serializeFetchParams, config, sessionId, esShardTimeout }); - - // Collect searchRequests which have successfully been sent. - searchRequests.forEach(searchRequest => { - if (failedSearchRequests.includes(searchRequest)) { - return; - } - - activeSearchRequests.push(searchRequest); - }); - - abortableSearches.push({ - searching, - abort, - requestsCount: searchRequests.length, - }); - } - - try { - // The request was aborted while we were doing the above logic. - if (areAllSearchRequestsAborted) { - return; - } - - const segregatedResponses = await Promise.all(abortableSearches.map(async ({ searching, requestsCount }) => { - return searching.catch((e) => { - // Duplicate errors so that they correspond to the original requests. - return new Array(requestsCount).fill({ error: e }); - }); - })); - - // Assigning searchRequests to strategies means that the responses come back in a different - // order than the original searchRequests. So we'll put them back in order so that we can - // use the order to associate each response with the original request. - const responsesInOriginalRequestOrder = new Array(searchRequests.length); - segregatedResponses.forEach((responses, strategyIndex) => { - responses.forEach((response, responseIndex) => { - const searchRequest = searchStrategiesWithRequests[strategyIndex].searchRequests[responseIndex]; - const requestIndex = searchRequests.indexOf(searchRequest); - responsesInOriginalRequestOrder[requestIndex] = response; - }); - }); - - await respondToSearchRequests(responsesInOriginalRequestOrder); - } catch(error) { - if (errorAllowExplicitIndex.test(error)) { - return errorAllowExplicitIndex.takeover(); - } - - defer.reject(error); - } +import { groupBy } from 'lodash'; +import { getSearchStrategyForSearchRequest, getSearchStrategyById } from '../search_strategy'; +import { handleResponse } from './handle_response'; + +export function callClient(searchRequests, requestsOptions = [], { es, config, esShardTimeout } = {}) { + // Correlate the options with the request that they're associated with + const requestOptionEntries = searchRequests.map((request, i) => [request, requestsOptions[i]]); + const requestOptionsMap = new Map(requestOptionEntries); + + // Group the requests by the strategy used to search that specific request + const searchStrategyMap = groupBy(searchRequests, (request, i) => { + const searchStrategy = getSearchStrategyForSearchRequest(request, requestsOptions[i]); + return searchStrategy.id; + }); + + // Execute each search strategy with the group of requests, but return the responses in the same + // order in which they were received. We use a map to correlate the original request with its + // response. + const requestResponseMap = new Map(); + Object.keys(searchStrategyMap).forEach(searchStrategyId => { + const searchStrategy = getSearchStrategyById(searchStrategyId); + const requests = searchStrategyMap[searchStrategyId]; + const { searching, abort } = searchStrategy.search({ searchRequests: requests, es, config, esShardTimeout }); + requests.forEach((request, i) => { + const response = searching.then(results => handleResponse(request, results[i])); + const { abortSignal } = requestOptionsMap.get(request) || {}; + if (abortSignal) abortSignal.addEventListener('abort', abort); + requestResponseMap.set(request, response); }); + }, []); + return searchRequests.map(request => requestResponseMap.get(request)); +} - // Return the promise which acts as our vehicle for providing search responses to the consumer. - // However, if there are any errors, notify the searchRequests of them *instead* of bubbling - // them up to the consumer. - return defer.promise.catch((err) => { - // By returning the return value of this catch() without rethrowing the error, we delegate - // error-handling to the searchRequest instead of the consumer. - searchRequests.forEach((searchRequest, index) => { - if (searchRequests[index] !== ABORTED) { - searchRequest.handleFailure(err); - } - }); - }); - } - return callClient; -} diff --git a/src/legacy/ui/public/courier/fetch/call_client.test.js b/src/legacy/ui/public/courier/fetch/call_client.test.js new file mode 100644 index 0000000000000..463d6c59e479e --- /dev/null +++ b/src/legacy/ui/public/courier/fetch/call_client.test.js @@ -0,0 +1,128 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { callClient } from './call_client'; +import { handleResponse } from './handle_response'; + +const mockResponses = [{}, {}]; +const mockAbortFns = [jest.fn(), jest.fn()]; +const mockSearchFns = [ + jest.fn(({ searchRequests }) => ({ + searching: Promise.resolve(Array(searchRequests.length).fill(mockResponses[0])), + abort: mockAbortFns[0] + })), + jest.fn(({ searchRequests }) => ({ + searching: Promise.resolve(Array(searchRequests.length).fill(mockResponses[1])), + abort: mockAbortFns[1] + })) +]; +const mockSearchStrategies = mockSearchFns.map((search, i) => ({ search, id: i })); + +jest.mock('./handle_response', () => ({ + handleResponse: jest.fn((request, response) => response) +})); + +jest.mock('../search_strategy', () => ({ + getSearchStrategyForSearchRequest: request => mockSearchStrategies[request._searchStrategyId], + getSearchStrategyById: id => mockSearchStrategies[id] +})); + +describe('callClient', () => { + beforeEach(() => { + handleResponse.mockClear(); + mockAbortFns.forEach(fn => fn.mockClear()); + mockSearchFns.forEach(fn => fn.mockClear()); + }); + + test('Executes each search strategy with its group of matching requests', () => { + const searchRequests = [{ + _searchStrategyId: 0 + }, { + _searchStrategyId: 1 + }, { + _searchStrategyId: 0 + }, { + _searchStrategyId: 1 + }]; + + callClient(searchRequests); + + expect(mockSearchFns[0]).toBeCalled(); + expect(mockSearchFns[0].mock.calls[0][0].searchRequests).toEqual([searchRequests[0], searchRequests[2]]); + expect(mockSearchFns[1]).toBeCalled(); + expect(mockSearchFns[1].mock.calls[0][0].searchRequests).toEqual([searchRequests[1], searchRequests[3]]); + }); + + test('Passes the additional arguments it is given to the search strategy', () => { + const searchRequests = [{ + _searchStrategyId: 0 + }]; + const args = { es: {}, config: {}, esShardTimeout: 0 }; + + callClient(searchRequests, [], args); + + expect(mockSearchFns[0]).toBeCalled(); + expect(mockSearchFns[0].mock.calls[0][0]).toEqual({ searchRequests, ...args }); + }); + + test('Returns the responses in the original order', async () => { + const searchRequests = [{ + _searchStrategyId: 1 + }, { + _searchStrategyId: 0 + }]; + + const responses = await Promise.all(callClient(searchRequests)); + + expect(responses).toEqual([mockResponses[1], mockResponses[0]]); + }); + + test('Calls handleResponse with each request and response', async () => { + const searchRequests = [{ + _searchStrategyId: 0 + }, { + _searchStrategyId: 1 + }]; + + const responses = callClient(searchRequests); + await Promise.all(responses); + + expect(handleResponse).toBeCalledTimes(2); + expect(handleResponse).toBeCalledWith(searchRequests[0], mockResponses[0]); + expect(handleResponse).toBeCalledWith(searchRequests[1], mockResponses[1]); + }); + + test('If passed an abortSignal, calls abort on the strategy if the signal is aborted', () => { + const searchRequests = [{ + _searchStrategyId: 0 + }, { + _searchStrategyId: 1 + }]; + const abortController = new AbortController(); + const requestOptions = [{ + abortSignal: abortController.signal + }]; + + callClient(searchRequests, requestOptions); + abortController.abort(); + + expect(mockAbortFns[0]).toBeCalled(); + expect(mockAbortFns[1]).not.toBeCalled(); + }); +}); diff --git a/src/legacy/ui/public/courier/fetch/call_response_handlers.js b/src/legacy/ui/public/courier/fetch/call_response_handlers.js deleted file mode 100644 index aaf82168e385f..0000000000000 --- a/src/legacy/ui/public/courier/fetch/call_response_handlers.js +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -import React from 'react'; -import { i18n } from '@kbn/i18n'; -import { EuiSpacer } from '@elastic/eui'; -import { toastNotifications } from '../../notify'; -import { RequestFailure } from './errors'; -import { RequestStatus } from './req_status'; -import { SearchError } from '../search_strategy/search_error'; -import { ShardFailureOpenModalButton } from './components/shard_failure_open_modal_button'; - -export function CallResponseHandlersProvider(Promise) { - const ABORTED = RequestStatus.ABORTED; - const INCOMPLETE = RequestStatus.INCOMPLETE; - - function callResponseHandlers(searchRequests, responses) { - return Promise.map(searchRequests, function (searchRequest, index) { - if (searchRequest === ABORTED || searchRequest.aborted) { - return ABORTED; - } - - const response = responses[index]; - - if (response.timed_out) { - toastNotifications.addWarning({ - title: i18n.translate('common.ui.courier.fetch.requestTimedOutNotificationMessage', { - defaultMessage: 'Data might be incomplete because your request timed out', - }), - }); - } - - if (response._shards && response._shards.failed) { - const title = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationMessage', { - defaultMessage: '{shardsFailed} of {shardsTotal} shards failed', - values: { - shardsFailed: response._shards.failed, - shardsTotal: response._shards.total, - }, - }); - const description = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationDescription', { - defaultMessage: 'The data you are seeing might be incomplete or wrong.', - }); - - const text = ( - <> - {description} - - - - ); - - toastNotifications.addWarning({ - title, - text, - }); - } - - function progress() { - if (searchRequest.isIncomplete()) { - return INCOMPLETE; - } - - searchRequest.complete(); - return response; - } - - if (response.error) { - if (searchRequest.filterError(response)) { - return progress(); - } else { - return searchRequest.handleFailure( - response.error instanceof SearchError - ? response.error - : new RequestFailure(null, response) - ); - } - } - - return Promise.try(() => searchRequest.handleResponse(response)).then(progress); - }); - } - - return callResponseHandlers; -} diff --git a/src/legacy/ui/public/courier/fetch/continue_incomplete.js b/src/legacy/ui/public/courier/fetch/continue_incomplete.js deleted file mode 100644 index b40ebdb886748..0000000000000 --- a/src/legacy/ui/public/courier/fetch/continue_incomplete.js +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { RequestStatus } from './req_status'; - -export function ContinueIncompleteProvider() { - const INCOMPLETE = RequestStatus.INCOMPLETE; - - function continueIncompleteRequests(searchRequests, responses, fetchSearchResults) { - const incompleteSearchRequests = []; - - responses.forEach(function (response, index) { - if (response === INCOMPLETE) { - incompleteSearchRequests.push(searchRequests[index]); - } - }); - - if (!incompleteSearchRequests.length) { - return responses; - } - - return fetchSearchResults(incompleteSearchRequests) - .then(function (completedResponses) { - return responses.map(function (prevResponse) { - if (prevResponse !== INCOMPLETE) { - return prevResponse; - } - - return completedResponses.shift(); - }); - }); - } - - return continueIncompleteRequests; -} diff --git a/src/legacy/ui/public/courier/fetch/fetch_now.js b/src/legacy/ui/public/courier/fetch/fetch_now.js deleted file mode 100644 index de5704d4380f4..0000000000000 --- a/src/legacy/ui/public/courier/fetch/fetch_now.js +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { fatalError } from '../../notify'; -import { CallClientProvider } from './call_client'; -import { CallResponseHandlersProvider } from './call_response_handlers'; -import { ContinueIncompleteProvider } from './continue_incomplete'; -import { RequestStatus } from './req_status'; -import { i18n } from '@kbn/i18n'; - -/** - * Fetch now provider should be used if you want the results searched and returned immediately. - * This can be slightly inefficient if a large number of requests are queued up, we can batch these - * by using fetchSoon. This introduces a slight delay which allows other requests to queue up before - * sending out requests in a batch. - * - * @param Private - * @param Promise - * @return {fetchNow} - * @constructor - */ -export function FetchNowProvider(Private, Promise) { - // core tasks - const callClient = Private(CallClientProvider); - const callResponseHandlers = Private(CallResponseHandlersProvider); - const continueIncomplete = Private(ContinueIncompleteProvider); - - const ABORTED = RequestStatus.ABORTED; - const INCOMPLETE = RequestStatus.INCOMPLETE; - - function fetchNow(searchRequests) { - return fetchSearchResults(searchRequests.map(function (searchRequest) { - if (!searchRequest.started) { - return searchRequest; - } - - return searchRequest.retry(); - })) - .catch(error => { - // If any errors occur after the search requests have resolved, then we kill Kibana. - fatalError(error, 'Courier fetch'); - }); - } - - function fetchSearchResults(searchRequests) { - function replaceAbortedRequests() { - searchRequests = searchRequests.map(searchRequest => { - if (searchRequest.aborted) { - return ABORTED; - } - - return searchRequest; - }); - } - - replaceAbortedRequests(); - return startRequests(searchRequests) - .then(function () { - replaceAbortedRequests(); - return callClient(searchRequests) - .catch(() => { - // Silently swallow errors that result from search requests so the consumer can surface - // them as notifications instead of courier forcing fatal errors. - }); - }) - .then(function (responses) { - replaceAbortedRequests(); - return callResponseHandlers(searchRequests, responses); - }) - .then(function (responses) { - replaceAbortedRequests(); - return continueIncomplete(searchRequests, responses, fetchSearchResults); - }) - .then(function (responses) { - replaceAbortedRequests(); - return responses.map(function (resp) { - switch (resp) { - case ABORTED: - return null; - case INCOMPLETE: - throw new Error( - i18n.translate('common.ui.courier.fetch.failedToClearRequestErrorMessage', { - defaultMessage: 'Failed to clear incomplete or duplicate request from responses.', - }) - ); - default: - return resp; - } - }); - }); - } - - function startRequests(searchRequests) { - return Promise.map(searchRequests, function (searchRequest) { - if (searchRequest === ABORTED) { - return searchRequest; - } - - return new Promise(function (resolve) { - const action = searchRequest.started ? searchRequest.continue : searchRequest.start; - resolve(action.call(searchRequest)); - }) - .catch(err => searchRequest.handleFailure(err)); - }); - } - - return fetchNow; -} diff --git a/src/legacy/ui/public/courier/fetch/fetch_soon.js b/src/legacy/ui/public/courier/fetch/fetch_soon.js index 266d4a6d3c9e6..ef02beddcb59a 100644 --- a/src/legacy/ui/public/courier/fetch/fetch_soon.js +++ b/src/legacy/ui/public/courier/fetch/fetch_soon.js @@ -17,41 +17,54 @@ * under the License. */ -import _ from 'lodash'; -import { searchRequestQueue } from '../search_request_queue'; -import { FetchNowProvider } from './fetch_now'; +import { callClient } from './call_client'; /** - * This is usually the right fetch provider to use, rather than FetchNowProvider, as this class introduces - * a slight delay in the request process to allow multiple requests to queue up (e.g. when a dashboard - * is loading). + * This function introduces a slight delay in the request process to allow multiple requests to queue + * up (e.g. when a dashboard is loading). */ -export function FetchSoonProvider(Private, Promise, config) { - - const fetchNow = Private(FetchNowProvider); +export async function fetchSoon(request, options, { es, config, esShardTimeout }) { + const delay = config.get('courier:batchSearches') ? 50 : 0; + return delayedFetch(request, options, { es, config, esShardTimeout }, delay); +} - const fetch = () => fetchNow(searchRequestQueue.getPending()); - const debouncedFetch = _.debounce(fetch, { - wait: 10, - maxWait: 50 +/** + * Delays executing a function for a given amount of time, and returns a promise that resolves + * with the result. + * @param fn The function to invoke + * @param ms The number of milliseconds to wait + * @return Promise A promise that resolves with the result of executing the function + */ +function delay(fn, ms) { + return new Promise(resolve => { + setTimeout(() => resolve(fn()), ms); }); +} - /** - * Fetch a list of requests - * @param {array} requests - the requests to fetch - * @async - */ - this.fetchSearchRequests = (requests) => { - requests.forEach(req => req._setFetchRequested()); - config.get('courier:batchSearches') ? debouncedFetch() : fetch(); - return Promise.all(requests.map(req => req.getCompletePromise())); - }; +// The current batch/queue of requests to fetch +let requestsToFetch = []; +let requestOptions = []; - /** - * Return a promise that resembles the success of the fetch completing so we can execute - * logic based on this state change. Individual errors are routed to their respective requests. +// The in-progress fetch (if there is one) +let fetchInProgress = null; + +/** + * Delay fetching for a given amount of time, while batching up the requests to be fetched. + * Returns a promise that resolves with the response for the given request. + * @param request The request to fetch + * @param ms The number of milliseconds to wait (and batch requests) + * @return Promise The response for the given request */ - this.fetchQueued = () => { - return this.fetchSearchRequests(searchRequestQueue.getStartable()); - }; +async function delayedFetch(request, options, { es, config, esShardTimeout }, ms) { + const i = requestsToFetch.length; + requestsToFetch = [...requestsToFetch, request]; + requestOptions = [...requestOptions, options]; + const responses = await (fetchInProgress = fetchInProgress || delay(() => { + const response = callClient(requestsToFetch, requestOptions, { es, config, esShardTimeout }); + requestsToFetch = []; + requestOptions = []; + fetchInProgress = null; + return response; + }, ms)); + return responses[i]; } diff --git a/src/legacy/ui/public/courier/fetch/fetch_soon.test.js b/src/legacy/ui/public/courier/fetch/fetch_soon.test.js new file mode 100644 index 0000000000000..824a4ab7e12e3 --- /dev/null +++ b/src/legacy/ui/public/courier/fetch/fetch_soon.test.js @@ -0,0 +1,140 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { fetchSoon } from './fetch_soon'; +import { callClient } from './call_client'; + +function getMockConfig(config) { + const entries = Object.entries(config); + return new Map(entries); +} + +const mockResponses = { + 'foo': {}, + 'bar': {}, + 'baz': {}, +}; + +jest.useFakeTimers(); + +jest.mock('./call_client', () => ({ + callClient: jest.fn(requests => { + // Allow a request object to specify which mockResponse it wants to receive (_mockResponseId) + // in addition to how long to simulate waiting before returning a response (_waitMs) + const responses = requests.map(request => { + const waitMs = requests.reduce((total, request) => request._waitMs || 0, 0); + return new Promise(resolve => { + resolve(mockResponses[request._mockResponseId]); + }, waitMs); + }); + return Promise.resolve(responses); + }) +})); + +describe('fetchSoon', () => { + beforeEach(() => { + callClient.mockClear(); + }); + + test('should delay by 0ms if config is set to not batch searches', () => { + const config = getMockConfig({ + 'courier:batchSearches': false + }); + const request = {}; + const options = {}; + + fetchSoon(request, options, { config }); + + expect(callClient).not.toBeCalled(); + jest.advanceTimersByTime(0); + expect(callClient).toBeCalled(); + }); + + test('should delay by 50ms if config is set to batch searches', () => { + const config = getMockConfig({ + 'courier:batchSearches': true + }); + const request = {}; + const options = {}; + + fetchSoon(request, options, { config }); + + expect(callClient).not.toBeCalled(); + jest.advanceTimersByTime(0); + expect(callClient).not.toBeCalled(); + jest.advanceTimersByTime(50); + expect(callClient).toBeCalled(); + }); + + test('should send a batch of requests to callClient', () => { + const config = getMockConfig({ + 'courier:batchSearches': true + }); + const requests = [{ foo: 1 }, { foo: 2 }]; + const options = [{ bar: 1 }, { bar: 2 }]; + + requests.forEach((request, i) => { + fetchSoon(request, options[i], { config }); + }); + + jest.advanceTimersByTime(50); + expect(callClient).toBeCalledTimes(1); + expect(callClient.mock.calls[0][0]).toEqual(requests); + expect(callClient.mock.calls[0][1]).toEqual(options); + }); + + test('should return the response to the corresponding call for multiple batched requests', async () => { + const config = getMockConfig({ + 'courier:batchSearches': true + }); + const requests = [{ _mockResponseId: 'foo' }, { _mockResponseId: 'bar' }]; + + const promises = requests.map(request => { + return fetchSoon(request, {}, { config }); + }); + jest.advanceTimersByTime(50); + const results = await Promise.all(promises); + + expect(results).toEqual([mockResponses.foo, mockResponses.bar]); + }); + + test('should wait for the previous batch to start before starting a new batch', () => { + const config = getMockConfig({ + 'courier:batchSearches': true + }); + const firstBatch = [{ foo: 1 }, { foo: 2 }]; + const secondBatch = [{ bar: 1 }, { bar: 2 }]; + + firstBatch.forEach(request => { + fetchSoon(request, {}, { config }); + }); + jest.advanceTimersByTime(50); + secondBatch.forEach(request => { + fetchSoon(request, {}, { config }); + }); + + expect(callClient).toBeCalledTimes(1); + expect(callClient.mock.calls[0][0]).toEqual(firstBatch); + + jest.advanceTimersByTime(50); + + expect(callClient).toBeCalledTimes(2); + expect(callClient.mock.calls[1][0]).toEqual(secondBatch); + }); +}); diff --git a/src/legacy/ui/public/courier/fetch/get_search_params.js b/src/legacy/ui/public/courier/fetch/get_search_params.js index 7561661d321fa..dd55201ba5540 100644 --- a/src/legacy/ui/public/courier/fetch/get_search_params.js +++ b/src/legacy/ui/public/courier/fetch/get_search_params.js @@ -17,6 +17,8 @@ * under the License. */ +const sessionId = Date.now(); + export function getMSearchParams(config) { return { rest_total_hits_as_int: true, @@ -25,13 +27,13 @@ export function getMSearchParams(config) { }; } -export function getSearchParams(config, sessionId, esShardTimeout) { +export function getSearchParams(config, esShardTimeout) { return { rest_total_hits_as_int: true, ignore_unavailable: true, ignore_throttled: getIgnoreThrottled(config), max_concurrent_shard_requests: getMaxConcurrentShardRequests(config), - preference: getPreference(config, sessionId), + preference: getPreference(config), timeout: getTimeout(esShardTimeout), }; } @@ -45,7 +47,7 @@ export function getMaxConcurrentShardRequests(config) { return maxConcurrentShardRequests > 0 ? maxConcurrentShardRequests : undefined; } -export function getPreference(config, sessionId) { +export function getPreference(config) { const setRequestPreference = config.get('courier:setRequestPreference'); if (setRequestPreference === 'sessionId') return sessionId; return setRequestPreference === 'custom' ? config.get('courier:customRequestPreference') : undefined; diff --git a/src/legacy/ui/public/courier/fetch/get_search_params.test.js b/src/legacy/ui/public/courier/fetch/get_search_params.test.js index 9129aea05f428..380d1da963ddf 100644 --- a/src/legacy/ui/public/courier/fetch/get_search_params.test.js +++ b/src/legacy/ui/public/courier/fetch/get_search_params.test.js @@ -99,10 +99,10 @@ describe('getSearchParams', () => { test('includes timeout according to esShardTimeout if greater than 0', () => { const config = getConfigStub(); - let searchParams = getSearchParams(config, null, 0); + let searchParams = getSearchParams(config, 0); expect(searchParams.timeout).toBe(undefined); - searchParams = getSearchParams(config, null, 100); + searchParams = getSearchParams(config, 100); expect(searchParams.timeout).toBe('100ms'); }); }); diff --git a/src/legacy/ui/public/courier/fetch/handle_response.js b/src/legacy/ui/public/courier/fetch/handle_response.js new file mode 100644 index 0000000000000..fb2797369d78f --- /dev/null +++ b/src/legacy/ui/public/courier/fetch/handle_response.js @@ -0,0 +1,67 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + + +import React from 'react'; +import { toastNotifications } from '../../notify/toasts'; +import { i18n } from '@kbn/i18n'; +import { EuiSpacer } from '@elastic/eui'; +import { ShardFailureOpenModalButton } from './components/shard_failure_open_modal_button'; + +export function handleResponse(request, response) { + if (response.timed_out) { + toastNotifications.addWarning({ + title: i18n.translate('common.ui.courier.fetch.requestTimedOutNotificationMessage', { + defaultMessage: 'Data might be incomplete because your request timed out', + }), + }); + } + + if (response._shards && response._shards.failed) { + const title = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationMessage', { + defaultMessage: '{shardsFailed} of {shardsTotal} shards failed', + values: { + shardsFailed: response._shards.failed, + shardsTotal: response._shards.total, + }, + }); + const description = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationDescription', { + defaultMessage: 'The data you are seeing might be incomplete or wrong.', + }); + + const text = ( + <> + {description} + + + + ); + + toastNotifications.addWarning({ + title, + text, + }); + } + + return response; +} diff --git a/src/legacy/ui/public/courier/fetch/handle_response.test.js b/src/legacy/ui/public/courier/fetch/handle_response.test.js new file mode 100644 index 0000000000000..0836832e6c05a --- /dev/null +++ b/src/legacy/ui/public/courier/fetch/handle_response.test.js @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { handleResponse } from './handle_response'; +import { toastNotifications } from '../../notify/toasts'; + +jest.mock('../../notify/toasts', () => { + return { + toastNotifications: { + addWarning: jest.fn() + } + }; +}); + +jest.mock('@kbn/i18n', () => { + return { + i18n: { + translate: (id, { defaultMessage }) => defaultMessage + } + }; +}); + +describe('handleResponse', () => { + beforeEach(() => { + toastNotifications.addWarning.mockReset(); + }); + + test('should notify if timed out', () => { + const request = { body: {} }; + const response = { + timed_out: true + }; + const result = handleResponse(request, response); + expect(result).toBe(response); + expect(toastNotifications.addWarning).toBeCalled(); + expect(toastNotifications.addWarning.mock.calls[0][0].title).toMatch('request timed out'); + }); + + test('should notify if shards failed', () => { + const request = { body: {} }; + const response = { + _shards: { + failed: true + } + }; + const result = handleResponse(request, response); + expect(result).toBe(response); + expect(toastNotifications.addWarning).toBeCalled(); + expect(toastNotifications.addWarning.mock.calls[0][0].title).toMatch('shards failed'); + }); + + test('returns the response', () => { + const request = {}; + const response = {}; + const result = handleResponse(request, response); + expect(result).toBe(response); + }); +}); diff --git a/src/legacy/ui/public/courier/fetch/index.js b/src/legacy/ui/public/courier/fetch/index.js index a5daaca5cb2c3..7b89dea1a110c 100644 --- a/src/legacy/ui/public/courier/fetch/index.js +++ b/src/legacy/ui/public/courier/fetch/index.js @@ -17,5 +17,5 @@ * under the License. */ -export { FetchSoonProvider } from './fetch_soon'; +export * from './fetch_soon'; export * from './get_search_params'; diff --git a/src/legacy/ui/public/courier/fetch/is_request.js b/src/legacy/ui/public/courier/fetch/is_request.js deleted file mode 100644 index 73c54d6f4bca1..0000000000000 --- a/src/legacy/ui/public/courier/fetch/is_request.js +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { SearchRequestProvider } from './request'; - -export function IsRequestProvider(Private) { - const SearchRequest = Private(SearchRequestProvider); - - return function isRequest(obj) { - return obj instanceof SearchRequest; - }; -} diff --git a/src/legacy/ui/public/courier/fetch/req_status.js b/src/legacy/ui/public/courier/fetch/req_status.js deleted file mode 100644 index d56bc6d3ad360..0000000000000 --- a/src/legacy/ui/public/courier/fetch/req_status.js +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export const RequestStatus = { - ABORTED: 'aborted', - INCOMPLETE: 'incomplete', -}; diff --git a/src/legacy/ui/public/courier/fetch/request/index.js b/src/legacy/ui/public/courier/fetch/request/index.js deleted file mode 100644 index 6647d0e5b2e10..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { SearchRequestProvider } from './search_request'; diff --git a/src/legacy/ui/public/courier/fetch/request/search_request/__tests__/search_request.js b/src/legacy/ui/public/courier/fetch/request/search_request/__tests__/search_request.js deleted file mode 100644 index ecac8cd474098..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/search_request/__tests__/search_request.js +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import ngMock from 'ng_mock'; -import sinon from 'sinon'; -import expect from '@kbn/expect'; - -import { SearchRequestProvider } from '../search_request'; -import { searchRequestQueue } from '../../../../search_request_queue'; - -describe('ui/courier/fetch search request', () => { - beforeEach(ngMock.module('kibana')); - - afterEach(() => { - searchRequestQueue.removeAll(); - }); - - it('throws exception when created without errorHandler', ngMock.inject((Private) => { - const SearchReq = Private(SearchRequestProvider); - - let caughtError = false; - try { - new SearchReq({ source: {} }); - } catch(error) { - caughtError = true; - } - expect(caughtError).to.be(true); - })); - - describe('start', () => { - it('calls this.source.requestIsStarting(request)', ngMock.inject((Private) => { - const SearchReq = Private(SearchRequestProvider); - - const spy = sinon.spy(() => Promise.resolve()); - const source = { requestIsStarting: spy }; - - const req = new SearchReq({ source, errorHandler: () => {} }); - expect(req.start()).to.have.property('then').a('function'); - sinon.assert.calledOnce(spy); - sinon.assert.calledWithExactly(spy, req); - })); - }); - - describe('clone', () => { - it('returns a search request with identical constructor arguments', ngMock.inject((Private) => { - const SearchRequest = Private(SearchRequestProvider); - - const source = {}; - const errorHandler = () => {}; - const defer = {}; - - const originalRequest = new SearchRequest({ source, errorHandler, defer }); - const clonedRequest = originalRequest.clone(); - - expect(clonedRequest).not.to.be(originalRequest); - expect(clonedRequest.source).to.be(source); - expect(clonedRequest.errorHandler).to.be(errorHandler); - expect(clonedRequest.defer).to.be(defer); - })); - - }); -}); diff --git a/src/legacy/ui/public/courier/fetch/request/search_request/index.js b/src/legacy/ui/public/courier/fetch/request/search_request/index.js deleted file mode 100644 index 6647d0e5b2e10..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/search_request/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { SearchRequestProvider } from './search_request'; diff --git a/src/legacy/ui/public/courier/fetch/request/search_request/search_request.js b/src/legacy/ui/public/courier/fetch/request/search_request/search_request.js deleted file mode 100644 index a6ce562e462d8..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/search_request/search_request.js +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import moment from 'moment'; - -import { searchRequestQueue } from '../../../search_request_queue'; - -import { createDefer } from 'ui/promises'; -import { i18n } from '@kbn/i18n'; - -export function SearchRequestProvider(Promise) { - class SearchRequest { - constructor({ source, defer, errorHandler }) { - if (!errorHandler) { - throw new Error( - i18n.translate('common.ui.courier.fetch.requireErrorHandlerErrorMessage', { - defaultMessage: '{errorHandler} is required', - values: { errorHandler: 'errorHandler' } - }) - ); - } - - this.errorHandler = errorHandler; - this.source = source; - this.defer = defer || createDefer(Promise); - this.abortedDefer = createDefer(Promise); - this.type = 'search'; - - // Track execution time. - this.moment = undefined; - this.ms = undefined; - - // Lifecycle state. - this.started = false; - this.stopped = false; - this._isFetchRequested = false; - - searchRequestQueue.add(this); - } - - /** - * Called by the searchPoll to find requests that should be sent to the - * fetchSoon module. When a module is sent to fetchSoon its _isFetchRequested flag - * is set, and this consults that flag so requests are not send to fetchSoon - * multiple times. - * - * @return {Boolean} - */ - canStart() { - if (this.source._fetchDisabled) { - return false; - } - - if (this.stopped) { - return false; - } - - if (this._isFetchRequested) { - return false; - } - - return true; - } - - /** - * Used to find requests that were previously sent to the fetchSoon module but - * have not been started yet, so they can be started. - * - * @return {Boolean} - */ - isFetchRequestedAndPending() { - if (this.started) { - return false; - } - - return this._isFetchRequested; - } - - /** - * Called by the fetchSoon module when this request has been sent to - * be fetched. At that point the request is somewhere between `ready-to-start` - * and `started`. The fetch module then waits a short period of time to - * allow requests to build up in the request queue, and then immediately - * fetches all requests that return true from `isFetchRequestedAndPending()` - * - * @return {undefined} - */ - _setFetchRequested() { - this._isFetchRequested = true; - } - - start() { - if (this.started) { - throw new TypeError( - i18n.translate('common.ui.courier.fetch.unableStartRequestErrorMessage', { - defaultMessage: 'Unable to start request because it has already started', - }) - ); - } - - this.started = true; - this.moment = moment(); - - return this.source.requestIsStarting(this); - } - - getFetchParams() { - return this.source._flatten(); - } - - filterError() { - return false; - } - - handleResponse(resp) { - this.success = true; - this.resp = resp; - } - - handleFailure(error) { - this.success = false; - this.resp = error; - this.resp = (error && error.resp) || error; - return this.errorHandler(this, error); - } - - isIncomplete() { - return false; - } - - continue() { - throw new Error( - i18n.translate('common.ui.courier.fetch.unableContinueRequestErrorMessage', { - defaultMessage: 'Unable to continue {type} request', - values: { type: this.type } - }) - ); - } - - retry() { - const clone = this.clone(); - this.abort(); - return clone; - } - - _markStopped() { - if (this.stopped) return; - this.stopped = true; - this.source.requestIsStopped(this); - searchRequestQueue.remove(this); - } - - abort() { - this._markStopped(); - this.aborted = true; - const error = new Error('The request was aborted.'); - error.name = 'AbortError'; - this.abortedDefer.resolve(error); - this.abortedDefer = null; - this.defer.reject(error); - this.defer = null; - } - - whenAborted(cb) { - this.abortedDefer.promise.then(cb); - } - - complete() { - this._markStopped(); - this.ms = this.moment.diff() * -1; - this.defer.resolve(this.resp); - } - - getCompletePromise() { - return this.defer.promise; - } - - getCompleteOrAbortedPromise() { - return Promise.race([ this.defer.promise, this.abortedDefer.promise ]); - } - - clone = () => { - const { source, defer, errorHandler } = this; - return new SearchRequest({ source, defer, errorHandler }); - }; - } - - return SearchRequest; -} diff --git a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/index.js b/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/index.js deleted file mode 100644 index 807d53086e106..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { SerializeFetchParamsProvider } from './serialize_fetch_params_provider'; diff --git a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.js b/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.js deleted file mode 100644 index ba8912c966e3e..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.js +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { getPreference, getTimeout } from '../../get_search_params'; - -/** - * - * @param requestsFetchParams {Array.} - * @param Promise - * @param sessionId - * @return {Promise.} - */ -export function serializeFetchParams( - requestsFetchParams, - Promise, - sessionId, - config, - esShardTimeout) { - const promises = requestsFetchParams.map(function (fetchParams) { - return Promise.resolve(fetchParams.index) - .then(function (indexPattern) { - const body = { - timeout: getTimeout(esShardTimeout), - ...fetchParams.body || {}, - }; - - const index = (indexPattern && indexPattern.getIndex) ? indexPattern.getIndex() : indexPattern; - - const header = { - index, - search_type: fetchParams.search_type, - ignore_unavailable: true, - preference: getPreference(config, sessionId) - }; - - return `${JSON.stringify(header)}\n${JSON.stringify(body)}`; - }); - }); - - return Promise.all(promises).then(function (requests) { - return requests.join('\n') + '\n'; - }); -} - diff --git a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.test.js b/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.test.js deleted file mode 100644 index 5f4c5bf9ef45a..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params.test.js +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { serializeFetchParams } from './serialize_fetch_params'; -import _ from 'lodash'; - -const DEFAULT_SESSION_ID = '1'; - -function serializeFetchParamsWithDefaults(paramOverrides) { - const paramDefaults = { - requestFetchParams: [], - Promise, - sessionId: DEFAULT_SESSION_ID, - config: { - get: () => { - return 'sessionId'; - } - }, - timeout: 100, - }; - const params = { ...paramDefaults, ...paramOverrides }; - - return serializeFetchParams( - params.requestFetchParams, - Promise, - params.sessionId, - params.config, - params.timeout, - ); -} - -describe('when indexList is not empty', () => { - test('includes the index', () => { - const requestFetchParams = [ - { - index: ['logstash-123'], - type: 'blah', - search_type: 'blah2', - body: { foo: 'bar', $foo: 'bar' } - } - ]; - return serializeFetchParamsWithDefaults({ requestFetchParams }).then(value => { - expect(_.includes(value, '"index":["logstash-123"]')).toBe(true); - }); - }); -}); - -describe('headers', () => { - - const requestFetchParams = [ - { - index: ['logstash-123'], - type: 'blah', - search_type: 'blah2', - body: { foo: 'bar' } - } - ]; - - const getHeader = async (paramOverrides) => { - const request = await serializeFetchParamsWithDefaults(paramOverrides); - const requestParts = request.split('\n'); - if (requestParts.length < 2) { - throw new Error('fetch Body does not contain expected format header newline body.'); - } - return JSON.parse(requestParts[0]); - }; - - describe('search request preference', () => { - test('should be set to sessionId when courier:setRequestPreference is "sessionId"', async () => { - const config = { - get: () => { - return 'sessionId'; - } - }; - const header = await getHeader({ requestFetchParams, config }); - expect(header.preference).toBe(DEFAULT_SESSION_ID); - }); - - test('should be set to custom string when courier:setRequestPreference is "custom"', async () => { - const CUSTOM_PREFERENCE = '_local'; - const config = { - get: (key) => { - if (key === 'courier:setRequestPreference') { - return 'custom'; - } else if (key === 'courier:customRequestPreference') { - return CUSTOM_PREFERENCE; - } - } - }; - const header = await getHeader({ requestFetchParams, config }); - expect(header.preference).toBe(CUSTOM_PREFERENCE); - }); - - test('should not be set when courier:setRequestPreference is "none"', async () => { - const config = { - get: () => { - return 'none'; - } - }; - const header = await getHeader({ requestFetchParams, config }); - expect(header.preference).toBe(undefined); - }); - }); -}); - -describe('body', () => { - const requestFetchParams = [ - { - index: ['logstash-123'], - type: 'blah', - search_type: 'blah2', - body: { foo: 'bar' } - } - ]; - - const getBody = async (paramOverrides) => { - const request = await serializeFetchParamsWithDefaults(paramOverrides); - const requestParts = request.split('\n'); - if (requestParts.length < 2) { - throw new Error('fetch Body does not contain expected format: header newline body.'); - } - return JSON.parse(requestParts[1]); - }; - - describe('timeout', () => { - test('should set a timeout as specified', async () => { - const request = await getBody({ requestFetchParams, timeout: 200 }); - expect(request).toHaveProperty('timeout', '200ms'); - }); - - test('should not set a timeout when timeout is 0', async () => { - const request = await getBody({ requestFetchParams, timeout: 0 }); - expect(request.timeout).toBe(undefined); - }); - }); -}); diff --git a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params_provider.js b/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params_provider.js deleted file mode 100644 index 4ddcc05b927ff..0000000000000 --- a/src/legacy/ui/public/courier/fetch/request/serialize_fetch_params/serialize_fetch_params_provider.js +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { serializeFetchParams } from './serialize_fetch_params'; - -export function SerializeFetchParamsProvider(Promise, sessionId, config, esShardTimeout) { - return (fetchParams) => ( - serializeFetchParams( - fetchParams, - Promise, - sessionId, - config, - esShardTimeout) - ); -} diff --git a/src/legacy/ui/public/courier/index.js b/src/legacy/ui/public/courier/index.js index cb14298a9a3b4..5647af3d0d645 100644 --- a/src/legacy/ui/public/courier/index.js +++ b/src/legacy/ui/public/courier/index.js @@ -17,7 +17,7 @@ * under the License. */ -export { SearchSourceProvider } from './search_source'; +export { SearchSource } from './search_source'; export { addSearchStrategy, diff --git a/src/legacy/ui/public/courier/search_poll/search_poll.js b/src/legacy/ui/public/courier/search_poll/search_poll.js index 91c866c14aa49..f00c2a32e0ec6 100644 --- a/src/legacy/ui/public/courier/search_poll/search_poll.js +++ b/src/legacy/ui/public/courier/search_poll/search_poll.js @@ -19,98 +19,50 @@ import _ from 'lodash'; -import { fatalError } from '../../notify'; -import '../../promises'; -import { searchRequestQueue } from '../search_request_queue'; -import { FetchSoonProvider } from '../fetch'; import { timefilter } from 'ui/timefilter'; -export function SearchPollProvider(Private, Promise) { - const fetchSoon = Private(FetchSoonProvider); - - class SearchPoll { - constructor() { - this._isPolling = false; - this._intervalInMs = undefined; - this._timerId = null; - this._searchPromise = null; - this._isIntervalFasterThanSearch = false; - } - - setIntervalInMs = intervalInMs => { - this._intervalInMs = _.parseInt(intervalInMs); - }; - - resume = () => { - this._isPolling = true; - this.resetTimer(); - }; - - pause = () => { - this._isPolling = false; - this.clearTimer(); - }; - - resetTimer = () => { - // Cancel the pending search and schedule a new one. - this.clearTimer(); - - if (this._isPolling) { - this._timerId = setTimeout(this._search, this._intervalInMs); - } - }; +export class SearchPoll { + constructor() { + this._isPolling = false; + this._intervalInMs = undefined; + this._timerId = null; + } - clearTimer = () => { - // Cancel the pending search, if there is one. - if (this._timerId) { - clearTimeout(this._timerId); - this._timerId = null; - } - }; + setIntervalInMs = intervalInMs => { + this._intervalInMs = _.parseInt(intervalInMs); + }; - _search = () => { - // If our interval is faster than the rate at which searches return results, then trigger - // a new search as soon as the results come back. - if (this._searchPromise) { - this._isIntervalFasterThanSearch = true; - return; - } + resume = () => { + this._isPolling = true; + this.resetTimer(); + }; - // Schedule another search. - this.resetTimer(); + pause = () => { + this._isPolling = false; + this.clearTimer(); + }; - // We use resolve() here instead of try() because the latter won't trigger a $digest - // when the promise resolves. - this._searchPromise = Promise.resolve().then(() => { - timefilter.notifyShouldFetch(); - const requests = searchRequestQueue.getInactive(); + resetTimer = () => { + // Cancel the pending search and schedule a new one. + this.clearTimer(); - // The promise returned from fetchSearchRequests() only resolves when the requests complete. - // We want to continue even if the requests abort so we return a different promise. - fetchSoon.fetchSearchRequests(requests); + if (this._isPolling) { + this._timerId = setTimeout(this._search, this._intervalInMs); + } + }; - return Promise.all( - requests.map(request => request.getCompleteOrAbortedPromise()) - ); - }) - .then(() => { - this._searchPromise = null; + clearTimer = () => { + // Cancel the pending search, if there is one. + if (this._timerId) { + clearTimeout(this._timerId); + this._timerId = null; + } + }; - // If the search response comes back before the interval fires, then we'll wait - // for the interval and let it kick off the next search. But if the interval fires before - // the search returns results, then we'll need to wait for the search to return results - // and then kick off another search again. A new search will also reset the interval. - if (this._isIntervalFasterThanSearch) { - this._isIntervalFasterThanSearch = false; - this._search(); - } - }) - .catch(err => { - // If there was a problem, then kill Kibana. - fatalError(err); - }); - }; - } + _search = () => { + // Schedule another search. + this.resetTimer(); - return new SearchPoll(); + timefilter.notifyShouldFetch(); + }; } diff --git a/src/legacy/ui/public/courier/search_request_queue/__tests__/search_request_queue.js b/src/legacy/ui/public/courier/search_request_queue/__tests__/search_request_queue.js deleted file mode 100644 index f6b4e4bef20c2..0000000000000 --- a/src/legacy/ui/public/courier/search_request_queue/__tests__/search_request_queue.js +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import ngMock from 'ng_mock'; -import expect from '@kbn/expect'; -import sinon from 'sinon'; - -import { searchRequestQueue } from '../search_request_queue'; - -describe('Courier Request Queue', function () { - beforeEach(ngMock.module('kibana')); - beforeEach(() => searchRequestQueue.removeAll()); - after(() => searchRequestQueue.removeAll()); - - class MockReq { - constructor(startable = true) { - this.source = {}; - this.canStart = sinon.stub().returns(startable); - } - } - - describe('#getStartable()', function () { - it('returns only startable requests', function () { - searchRequestQueue.add(new MockReq(false)); - searchRequestQueue.add(new MockReq(true)); - expect(searchRequestQueue.getStartable()).to.have.length(1); - }); - }); - - // Note: I'm not convinced this discrepancy between how we calculate startable vs inactive requests makes any sense. - // I'm only testing here that the current, (very old) code continues to behave how it always did, but it may turn out - // that we can clean this up, or remove this. - describe('#getInactive()', function () { - it('returns only requests with started = false', function () { - searchRequestQueue.add({ started: true }); - searchRequestQueue.add({ started: false }); - searchRequestQueue.add({ started: true }); - expect(searchRequestQueue.getInactive()).to.have.length(1); - }); - }); -}); diff --git a/src/legacy/ui/public/courier/search_request_queue/index.js b/src/legacy/ui/public/courier/search_request_queue/index.js deleted file mode 100644 index 785a59fce73d5..0000000000000 --- a/src/legacy/ui/public/courier/search_request_queue/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { searchRequestQueue } from './search_request_queue'; diff --git a/src/legacy/ui/public/courier/search_request_queue/search_request_queue.js b/src/legacy/ui/public/courier/search_request_queue/search_request_queue.js deleted file mode 100644 index 80d74cdad94fe..0000000000000 --- a/src/legacy/ui/public/courier/search_request_queue/search_request_queue.js +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -class SearchRequestQueue { - constructor() { - // Queue of pending requests, requests are removed as they are processed by fetch.[sourceType](). - this._searchRequests = []; - } - - getCount() { - return this._searchRequests.length; - } - - add(searchRequest) { - this._searchRequests.push(searchRequest); - } - - remove(searchRequest) { - // Remove all matching search requests. - this._searchRequests = this._searchRequests.filter( - existingSearchRequest => existingSearchRequest !== searchRequest - ); - } - - removeAll() { - this._searchRequests.length = 0; - } - - abortAll() { - this._searchRequests.forEach(searchRequest => searchRequest.abort()); - } - - getAll() { - return this._searchRequests; - } - - getSearchRequestAt(index) { - return this._searchRequests[index]; - } - - getInactive() { - return this._searchRequests.filter(searchRequest => !searchRequest.started); - } - - getStartable() { - return this._searchRequests.filter(searchRequest => searchRequest.canStart()); - } - - getPending() { - return this._searchRequests.filter(searchRequest => searchRequest.isFetchRequestedAndPending()); - } -} - -export const searchRequestQueue = new SearchRequestQueue(); diff --git a/src/legacy/ui/public/courier/search_source/__tests__/normalize_sort_request.js b/src/legacy/ui/public/courier/search_source/__tests__/normalize_sort_request.js index ca3d21a330ce1..279e389dec114 100644 --- a/src/legacy/ui/public/courier/search_source/__tests__/normalize_sort_request.js +++ b/src/legacy/ui/public/courier/search_source/__tests__/normalize_sort_request.js @@ -20,18 +20,17 @@ import '../../../private'; import ngMock from 'ng_mock'; import expect from '@kbn/expect'; -import { NormalizeSortRequestProvider } from '../_normalize_sort_request'; +import { normalizeSortRequest } from '../_normalize_sort_request'; import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern'; import _ from 'lodash'; describe('SearchSource#normalizeSortRequest', function () { - let normalizeSortRequest; let indexPattern; let normalizedSort; + const defaultSortOptions = { unmapped_type: 'boolean' }; beforeEach(ngMock.module('kibana')); beforeEach(ngMock.inject(function (Private) { - normalizeSortRequest = Private(NormalizeSortRequestProvider); indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider); normalizedSort = [{ @@ -44,7 +43,7 @@ describe('SearchSource#normalizeSortRequest', function () { it('should return an array', function () { const sortable = { someField: 'desc' }; - const result = normalizeSortRequest(sortable, indexPattern); + const result = normalizeSortRequest(sortable, indexPattern, defaultSortOptions); expect(result).to.be.an(Array); expect(result).to.eql(normalizedSort); // ensure object passed in is not mutated @@ -53,7 +52,7 @@ describe('SearchSource#normalizeSortRequest', function () { }); it('should make plain string sort into the more verbose format', function () { - const result = normalizeSortRequest([{ someField: 'desc' }], indexPattern); + const result = normalizeSortRequest([{ someField: 'desc' }], indexPattern, defaultSortOptions); expect(result).to.eql(normalizedSort); }); @@ -64,7 +63,7 @@ describe('SearchSource#normalizeSortRequest', function () { unmapped_type: 'boolean' } }]; - const result = normalizeSortRequest(sortState, indexPattern); + const result = normalizeSortRequest(sortState, indexPattern, defaultSortOptions); expect(result).to.eql(normalizedSort); }); @@ -86,11 +85,11 @@ describe('SearchSource#normalizeSortRequest', function () { } }; - let result = normalizeSortRequest(sortState, indexPattern); + let result = normalizeSortRequest(sortState, indexPattern, defaultSortOptions); expect(result).to.eql([normalizedSort]); sortState[fieldName] = { order: direction }; - result = normalizeSortRequest([sortState], indexPattern); + result = normalizeSortRequest([sortState], indexPattern, defaultSortOptions); expect(result).to.eql([normalizedSort]); }); @@ -105,7 +104,7 @@ describe('SearchSource#normalizeSortRequest', function () { order: direction, unmapped_type: 'boolean' }; - const result = normalizeSortRequest([sortState], indexPattern); + const result = normalizeSortRequest([sortState], indexPattern, defaultSortOptions); expect(result).to.eql([normalizedSort]); }); @@ -118,7 +117,7 @@ describe('SearchSource#normalizeSortRequest', function () { } }]; - const result = normalizeSortRequest(sortable, indexPattern); + const result = normalizeSortRequest(sortable, indexPattern, defaultSortOptions); expect(_.isEqual(result, expected)).to.be.ok(); }); diff --git a/src/legacy/ui/public/courier/search_source/__tests__/search_source.js b/src/legacy/ui/public/courier/search_source/__tests__/search_source.js deleted file mode 100644 index ccb3c55b7a381..0000000000000 --- a/src/legacy/ui/public/courier/search_source/__tests__/search_source.js +++ /dev/null @@ -1,351 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import ngMock from 'ng_mock'; -import expect from '@kbn/expect'; -import sinon from 'sinon'; - -import { searchRequestQueue } from '../../search_request_queue'; -import { SearchSourceProvider } from '../search_source'; -import StubIndexPattern from 'test_utils/stub_index_pattern'; - -function timeout() { - return new Promise(resolve => { - setTimeout(resolve); - }); -} - -describe('SearchSource', function () { - require('test_utils/no_digest_promises').activateForSuite(); - - let config; - let SearchSource; - let indexPattern; - let indexPattern2; - - beforeEach(ngMock.module('kibana')); - beforeEach(ngMock.inject(function (Private, _config_) { - config = _config_; - SearchSource = Private(SearchSourceProvider); - - indexPattern = new StubIndexPattern('test-*', cfg => cfg, null, []); - indexPattern2 = new StubIndexPattern('test2-*', cfg => cfg, null, []); - expect(indexPattern).to.not.be(indexPattern2); - })); - beforeEach(() => searchRequestQueue.removeAll()); - after(() => searchRequestQueue.removeAll()); - - describe('#onResults()', function () { - it('adds a request to the searchRequestQueue', function () { - const searchSource = new SearchSource(); - - expect(searchRequestQueue.getCount()).to.be(0); - searchSource.onResults(); - expect(searchRequestQueue.getCount()).to.be(1); - }); - - it('returns a promise that is resolved with the results', function () { - const searchSource = new SearchSource(); - const fakeResults = {}; - - const promise = searchSource.onResults().then((results) => { - expect(results).to.be(fakeResults); - }); - - const searchRequest = searchRequestQueue.getSearchRequestAt(0); - searchRequest.defer.resolve(fakeResults); - return promise; - }); - }); - - describe('#destroy()', function () { - it('aborts all startable requests', function () { - const searchSource = new SearchSource(); - searchSource.onResults(); - const searchRequest = searchRequestQueue.getSearchRequestAt(0); - sinon.stub(searchRequest, 'canStart').returns(true); - searchSource.destroy(); - expect(searchRequestQueue.getCount()).to.be(0); - }); - - it('aborts all non-startable requests', function () { - const searchSource = new SearchSource(); - searchSource.onResults(); - const searchRequest = searchRequestQueue.getSearchRequestAt(0); - sinon.stub(searchRequest, 'canStart').returns(false); - searchSource.destroy(); - expect(searchRequestQueue.getCount()).to.be(0); - }); - }); - - describe('#setField()', function () { - it('sets the value for the property', function () { - const searchSource = new SearchSource(); - searchSource.setField('aggs', 5); - expect(searchSource.getField('aggs')).to.be(5); - }); - - it('throws an error if the property is not accepted', function () { - const searchSource = new SearchSource(); - expect(() => searchSource.setField('index', 5)).to.throwError(); - }); - }); - - describe('#getField()', function () { - it('gets the value for the property', function () { - const searchSource = new SearchSource(); - searchSource.setField('aggs', 5); - expect(searchSource.getField('aggs')).to.be(5); - }); - - it('throws an error if the property is not accepted', function () { - const searchSource = new SearchSource(); - expect(() => searchSource.getField('unacceptablePropName')).to.throwError(); - }); - }); - - describe(`#setField('index')`, function () { - describe('auto-sourceFiltering', function () { - describe('new index pattern assigned', function () { - it('generates a searchSource filter', function () { - const searchSource = new SearchSource(); - expect(searchSource.getField('index')).to.be(undefined); - expect(searchSource.getField('source')).to.be(undefined); - searchSource.setField('index', indexPattern); - expect(searchSource.getField('index')).to.be(indexPattern); - expect(searchSource.getField('source')).to.be.a('function'); - }); - - it('removes created searchSource filter on removal', function () { - const searchSource = new SearchSource(); - searchSource.setField('index', indexPattern); - searchSource.setField('index', null); - expect(searchSource.getField('index')).to.be(undefined); - expect(searchSource.getField('source')).to.be(undefined); - }); - }); - - describe('new index pattern assigned over another', function () { - it('replaces searchSource filter with new', function () { - const searchSource = new SearchSource(); - searchSource.setField('index', indexPattern); - const searchSourceFilter1 = searchSource.getField('source'); - searchSource.setField('index', indexPattern2); - expect(searchSource.getField('index')).to.be(indexPattern2); - expect(searchSource.getField('source')).to.be.a('function'); - expect(searchSource.getField('source')).to.not.be(searchSourceFilter1); - }); - - it('removes created searchSource filter on removal', function () { - const searchSource = new SearchSource(); - searchSource.setField('index', indexPattern); - searchSource.setField('index', indexPattern2); - searchSource.setField('index', null); - expect(searchSource.getField('index')).to.be(undefined); - expect(searchSource.getField('source')).to.be(undefined); - }); - }); - - describe('ip assigned before custom searchSource filter', function () { - it('custom searchSource filter becomes new searchSource', function () { - const searchSource = new SearchSource(); - const football = {}; - searchSource.setField('index', indexPattern); - expect(searchSource.getField('source')).to.be.a('function'); - searchSource.setField('source', football); - expect(searchSource.getField('index')).to.be(indexPattern); - expect(searchSource.getField('source')).to.be(football); - }); - - it('custom searchSource stays after removal', function () { - const searchSource = new SearchSource(); - const football = {}; - searchSource.setField('index', indexPattern); - searchSource.setField('source', football); - searchSource.setField('index', null); - expect(searchSource.getField('index')).to.be(undefined); - expect(searchSource.getField('source')).to.be(football); - }); - }); - - describe('ip assigned after custom searchSource filter', function () { - it('leaves the custom filter in place', function () { - const searchSource = new SearchSource(); - const football = {}; - searchSource.setField('source', football); - searchSource.setField('index', indexPattern); - expect(searchSource.getField('index')).to.be(indexPattern); - expect(searchSource.getField('source')).to.be(football); - }); - - it('custom searchSource stays after removal', function () { - const searchSource = new SearchSource(); - const football = {}; - searchSource.setField('source', football); - searchSource.setField('index', indexPattern); - searchSource.setField('index', null); - expect(searchSource.getField('index')).to.be(undefined); - expect(searchSource.getField('source')).to.be(football); - }); - }); - }); - }); - - describe('#onRequestStart()', () => { - it('should be called when starting a request', async () => { - const searchSource = new SearchSource(); - const fn = sinon.spy(); - searchSource.onRequestStart(fn); - const request = {}; - searchSource.requestIsStarting(request); - await timeout(); - expect(fn.calledWith(searchSource, request)).to.be(true); - }); - - it('should not be called on parent searchSource', async () => { - const parent = new SearchSource(); - const searchSource = new SearchSource().setParent(parent); - - const fn = sinon.spy(); - searchSource.onRequestStart(fn); - const parentFn = sinon.spy(); - parent.onRequestStart(parentFn); - const request = {}; - searchSource.requestIsStarting(request); - await timeout(); - expect(fn.calledWith(searchSource, request)).to.be(true); - expect(parentFn.notCalled).to.be(true); - }); - - it('should be called on parent searchSource if callParentStartHandlers is true', async () => { - const parent = new SearchSource(); - const searchSource = new SearchSource().setParent(parent, { callParentStartHandlers: true }); - - const fn = sinon.spy(); - searchSource.onRequestStart(fn); - const parentFn = sinon.spy(); - parent.onRequestStart(parentFn); - const request = {}; - searchSource.requestIsStarting(request); - await timeout(); - expect(fn.calledWith(searchSource, request)).to.be(true); - expect(parentFn.calledWith(searchSource, request)).to.be(true); - }); - }); - - describe('#_mergeProp', function () { - describe('filter', function () { - let searchSource; - let state; - - beforeEach(function () { - searchSource = new SearchSource(); - state = {}; - }); - - [null, undefined].forEach(falsyValue => { - it(`ignores ${falsyValue} filter`, function () { - searchSource._mergeProp(state, falsyValue, 'filter'); - expect(state.filters).to.be(undefined); - }); - }); - - [false, 0, '', NaN].forEach(falsyValue => { - it(`doesn't add ${falsyValue} filter`, function () { - searchSource._mergeProp(state, falsyValue, 'filter'); - expect(state.filters).to.be.empty(); - }); - }); - - it('adds "meta.disabled: undefined" filter', function () { - const filter = { - meta: {} - }; - searchSource._mergeProp(state, filter, 'filter'); - expect(state.filters).to.eql([filter]); - }); - - it('adds "meta.disabled: false" filter', function () { - const filter = { - meta: { - disabled: false - } - }; - searchSource._mergeProp(state, filter, 'filter'); - expect(state.filters).to.eql([filter]); - }); - - it(`doesn't add "meta.disabled: true" filter`, function () { - const filter = { - meta: { - disabled: true - } - }; - searchSource._mergeProp(state, filter, 'filter'); - expect(state.filters).to.be.empty(); - }); - - describe('when courier:ignoreFilterIfFieldNotInIndex is false', function () { - it('adds filter for non-existent field', function () { - config.set('courier:ignoreFilterIfFieldNotInIndex', false); - const filter = { - meta: { - key: 'bar' - } - }; - state.index = { - fields: [] - }; - searchSource._mergeProp(state, filter, 'filter'); - expect(state.filters).to.eql([ filter ]); - }); - }); - - describe('when courier:ignoreFilterIfFieldNotInIndex is true', function () { - it(`doesn't add filter for non-existent field`, function () { - config.set('courier:ignoreFilterIfFieldNotInIndex', true); - const filter = { - meta: { - key: 'bar' - } - }; - state.index = { - fields: [] - }; - searchSource._mergeProp(state, filter, 'filter'); - expect(state.filters).to.be.empty(); - }); - - it(`adds filter for existent field`, function () { - config.set('courier:ignoreFilterIfFieldNotInIndex', true); - const filter = { - meta: { - key: 'bar' - } - }; - state.index = { - fields: [{ name: 'bar' }] - }; - searchSource._mergeProp(state, filter, 'filter'); - expect(state.filters).to.eql([ filter ]); - }); - }); - }); - }); -}); diff --git a/src/legacy/ui/public/courier/search_source/_normalize_sort_request.js b/src/legacy/ui/public/courier/search_source/_normalize_sort_request.js index 2b5025f14fef7..3e5d7a1374115 100644 --- a/src/legacy/ui/public/courier/search_source/_normalize_sort_request.js +++ b/src/legacy/ui/public/courier/search_source/_normalize_sort_request.js @@ -19,59 +19,55 @@ import _ from 'lodash'; -export function NormalizeSortRequestProvider(config) { - const defaultSortOptions = config.get('sort:options'); - - /** +/** * Decorate queries with default parameters * @param {query} query object * @returns {object} */ - return function (sortObject, indexPattern) { - // [].concat({}) -> [{}], [].concat([{}]) -> [{}] - return [].concat(sortObject).map(function (sortable) { - return normalize(sortable, indexPattern); - }); - }; +export function normalizeSortRequest(sortObject, indexPattern, defaultSortOptions) { + // [].concat({}) -> [{}], [].concat([{}]) -> [{}] + return [].concat(sortObject).map(function (sortable) { + return normalize(sortable, indexPattern, defaultSortOptions); + }); +} - /* +/* Normalize the sort description to the more verbose format: { someField: "desc" } into { someField: { "order": "desc"}} */ - function normalize(sortable, indexPattern) { - const normalized = {}; - let sortField = _.keys(sortable)[0]; - let sortValue = sortable[sortField]; - const indexField = indexPattern.fields.getByName(sortField); +function normalize(sortable, indexPattern, defaultSortOptions) { + const normalized = {}; + let sortField = _.keys(sortable)[0]; + let sortValue = sortable[sortField]; + const indexField = indexPattern.fields.getByName(sortField); - if (indexField && indexField.scripted && indexField.sortable) { - let direction; - if (_.isString(sortValue)) direction = sortValue; - if (_.isObject(sortValue) && sortValue.order) direction = sortValue.order; + if (indexField && indexField.scripted && indexField.sortable) { + let direction; + if (_.isString(sortValue)) direction = sortValue; + if (_.isObject(sortValue) && sortValue.order) direction = sortValue.order; - sortField = '_script'; - sortValue = { - script: { - source: indexField.script, - lang: indexField.lang - }, - type: castSortType(indexField.type), - order: direction - }; - } else { - if (_.isString(sortValue)) { - sortValue = { order: sortValue }; - } - sortValue = _.defaults({}, sortValue, defaultSortOptions); - - if (sortField === '_score') { - delete sortValue.unmapped_type; - } + sortField = '_script'; + sortValue = { + script: { + source: indexField.script, + lang: indexField.lang + }, + type: castSortType(indexField.type), + order: direction + }; + } else { + if (_.isString(sortValue)) { + sortValue = { order: sortValue }; } + sortValue = _.defaults({}, sortValue, defaultSortOptions); - normalized[sortField] = sortValue; - return normalized; + if (sortField === '_score') { + delete sortValue.unmapped_type; + } } + + normalized[sortField] = sortValue; + return normalized; } // The ES API only supports sort scripts of type 'number' and 'string' diff --git a/src/legacy/ui/public/courier/search_source/index.js b/src/legacy/ui/public/courier/search_source/index.js index 5ec7cc315db1c..dcae7b3d2ff05 100644 --- a/src/legacy/ui/public/courier/search_source/index.js +++ b/src/legacy/ui/public/courier/search_source/index.js @@ -17,4 +17,4 @@ * under the License. */ -export { SearchSourceProvider } from './search_source'; +export { SearchSource } from './search_source'; diff --git a/src/legacy/ui/public/courier/search_source/mocks.ts b/src/legacy/ui/public/courier/search_source/mocks.ts new file mode 100644 index 0000000000000..bf546c1b9e7c2 --- /dev/null +++ b/src/legacy/ui/public/courier/search_source/mocks.ts @@ -0,0 +1,58 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"), you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export const searchSourceMock = { + setPreferredSearchStrategyId: jest.fn(), + getPreferredSearchStrategyId: jest.fn(), + setFields: jest.fn(), + setField: jest.fn(), + getId: jest.fn(), + getFields: jest.fn(), + getField: jest.fn(), + getOwnField: jest.fn(), + create: jest.fn(), + createCopy: jest.fn(), + createChild: jest.fn(), + setParent: jest.fn(), + getParent: jest.fn(), + fetch: jest.fn(), + onRequestStart: jest.fn(), + getSearchRequestBody: jest.fn(), + destroy: jest.fn(), + history: [], +}; diff --git a/src/legacy/ui/public/courier/search_source/search_source.d.ts b/src/legacy/ui/public/courier/search_source/search_source.d.ts index 11406ff3da824..674e7ace0594c 100644 --- a/src/legacy/ui/public/courier/search_source/search_source.d.ts +++ b/src/legacy/ui/public/courier/search_source/search_source.d.ts @@ -17,4 +17,23 @@ * under the License. */ -export type SearchSource = any; +export declare class SearchSource { + setPreferredSearchStrategyId: (searchStrategyId: string) => void; + getPreferredSearchStrategyId: () => string; + setFields: (newFields: any) => SearchSource; + setField: (field: string, value: any) => SearchSource; + getId: () => string; + getFields: () => any; + getField: (field: string) => any; + getOwnField: () => any; + create: () => SearchSource; + createCopy: () => SearchSource; + createChild: (options?: any) => SearchSource; + setParent: (parent: SearchSource | boolean) => SearchSource; + getParent: () => SearchSource | undefined; + fetch: (options?: any) => Promise; + onRequestStart: (handler: (searchSource: SearchSource, options: any) => void) => void; + getSearchRequestBody: () => any; + destroy: () => void; + history: any[]; +} diff --git a/src/legacy/ui/public/courier/search_source/search_source.js b/src/legacy/ui/public/courier/search_source/search_source.js index 2ff4b6d574ca3..16efb1230e50e 100644 --- a/src/legacy/ui/public/courier/search_source/search_source.js +++ b/src/legacy/ui/public/courier/search_source/search_source.js @@ -71,16 +71,16 @@ import _ from 'lodash'; import angular from 'angular'; -import { buildEsQuery, getEsQueryConfig, filterMatchesIndex } from '@kbn/es-query'; +import { buildEsQuery, getEsQueryConfig } from '@kbn/es-query'; -import { createDefer } from 'ui/promises'; -import { NormalizeSortRequestProvider } from './_normalize_sort_request'; -import { SearchRequestProvider } from '../fetch/request'; +import { normalizeSortRequest } from './_normalize_sort_request'; -import { searchRequestQueue } from '../search_request_queue'; -import { FetchSoonProvider } from '../fetch'; -import { FieldWildcardProvider } from '../../field_wildcard'; +import { fetchSoon } from '../fetch'; +import { fieldWildcardFilter } from '../../field_wildcard'; import { getHighlightRequest } from '../../../../../plugins/data/common/field_formats'; +import { npSetup } from 'ui/new_platform'; +import chrome from '../../chrome'; +import { RequestFailure } from '../fetch/errors'; import { filterDocvalueFields } from './filter_docvalue_fields'; const FIELDS = [ @@ -114,327 +114,242 @@ function isIndexPattern(val) { return Boolean(val && typeof val.getIndex === 'function'); } -export function SearchSourceProvider(Promise, Private, config) { - const SearchRequest = Private(SearchRequestProvider); - const normalizeSortRequest = Private(NormalizeSortRequestProvider); - const fetchSoon = Private(FetchSoonProvider); - const { fieldWildcardFilter } = Private(FieldWildcardProvider); - const getConfig = (...args) => config.get(...args); +const esShardTimeout = npSetup.core.injectedMetadata.getInjectedVar('esShardTimeout'); +const config = npSetup.core.uiSettings; +const getConfig = (...args) => config.get(...args); +const forIp = Symbol('for which index pattern?'); - const forIp = Symbol('for which index pattern?'); +export class SearchSource { + constructor(initialFields) { + this._id = _.uniqueId('data_source'); - class SearchSource { - constructor(initialFields) { - this._id = _.uniqueId('data_source'); + this._searchStrategyId = undefined; + this._fields = parseInitialFields(initialFields); + this._parent = undefined; - this._searchStrategyId = undefined; - this._fields = parseInitialFields(initialFields); - this._parent = undefined; - - this.history = []; - this._requestStartHandlers = []; - this._inheritOptions = {}; - - this._filterPredicates = [ - (filter) => { - // remove null/undefined filters - return filter; - }, - (filter) => { - const disabled = _.get(filter, 'meta.disabled'); - return disabled === undefined || disabled === false; - }, - (filter, data) => { - const index = data.index || this.getField('index'); - return !config.get('courier:ignoreFilterIfFieldNotInIndex') || filterMatchesIndex(filter, index); - } - ]; - } + this.history = []; + this._requestStartHandlers = []; + this._inheritOptions = {}; + } - /***** + /***** * PUBLIC API *****/ - setPreferredSearchStrategyId(searchStrategyId) { - this._searchStrategyId = searchStrategyId; - } - - getPreferredSearchStrategyId() { - return this._searchStrategyId; - } - - setFields(newFields) { - this._fields = newFields; - return this; - } - - setField = (field, value) => { - if (!FIELDS.includes(field)) { - throw new Error(`Can't set field '${field}' on SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`); - } + setPreferredSearchStrategyId(searchStrategyId) { + this._searchStrategyId = searchStrategyId; + } - if (field === 'index') { - const fields = this._fields; + getPreferredSearchStrategyId() { + return this._searchStrategyId; + } - const hasSource = fields.source; - const sourceCameFromIp = hasSource && fields.source.hasOwnProperty(forIp); - const sourceIsForOurIp = sourceCameFromIp && fields.source[forIp] === fields.index; - if (sourceIsForOurIp) { - delete fields.source; - } + setFields(newFields) { + this._fields = newFields; + return this; + } - if (value === null || value === undefined) { - delete fields.index; - return this; - } + setField(field, value) { + if (!FIELDS.includes(field)) { + throw new Error(`Can't set field '${field}' on SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`); + } - if (!isIndexPattern(value)) { - throw new TypeError('expected indexPattern to be an IndexPattern duck.'); - } + if (field === 'index') { + const fields = this._fields; - fields[field] = value; - if (!fields.source) { - // imply source filtering based on the index pattern, but allow overriding - // it by simply setting another field for "source". When index is changed - fields.source = function () { - return value.getSourceFiltering(); - }; - fields.source[forIp] = value; - } + const hasSource = fields.source; + const sourceCameFromIp = hasSource && fields.source.hasOwnProperty(forIp); + const sourceIsForOurIp = sourceCameFromIp && fields.source[forIp] === fields.index; + if (sourceIsForOurIp) { + delete fields.source; + } + if (value === null || value === undefined) { + delete fields.index; return this; } - if (value == null) { - delete this._fields[field]; - return this; + if (!isIndexPattern(value)) { + throw new TypeError('expected indexPattern to be an IndexPattern duck.'); } - this._fields[field] = value; - return this; - }; + fields[field] = value; + if (!fields.source) { + // imply source filtering based on the index pattern, but allow overriding + // it by simply setting another field for "source". When index is changed + fields.source = function () { + return value.getSourceFiltering(); + }; + fields.source[forIp] = value; + } - getId() { - return this._id; + return this; } - getFields() { - return _.clone(this._fields); + if (value == null) { + delete this._fields[field]; + return this; } - /** - * Get fields from the fields - */ - getField = field => { - if (!FIELDS.includes(field)) { - throw new Error(`Can't get field '${field}' from SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`); - } + this._fields[field] = value; + return this; + } - let searchSource = this; + getId() { + return this._id; + } - while (searchSource) { - const value = searchSource._fields[field]; - if (value !== void 0) { - return value; - } + getFields() { + return _.clone(this._fields); + } - searchSource = searchSource.getParent(); - } - }; + /** + * Get fields from the fields + */ + getField(field) { + if (!FIELDS.includes(field)) { + throw new Error(`Can't get field '${field}' from SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`); + } - /** - * Get the field from our own fields, don't traverse up the chain - */ - getOwnField(field) { - if (!FIELDS.includes(field)) { - throw new Error(`Can't get field '${field}' from SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`); - } + let searchSource = this; - const value = this._fields[field]; + while (searchSource) { + const value = searchSource._fields[field]; if (value !== void 0) { return value; } - } - create() { - return new SearchSource(); + searchSource = searchSource.getParent(); } + } - createCopy() { - const json = angular.toJson(this._fields); - const newSearchSource = new SearchSource(json); - // when serializing the internal fields we lose the internal classes used in the index - // pattern, so we have to set it again to workaround this behavior - newSearchSource.setField('index', this.getField('index')); - newSearchSource.setParent(this.getParent()); - return newSearchSource; + /** + * Get the field from our own fields, don't traverse up the chain + */ + getOwnField(field) { + if (!FIELDS.includes(field)) { + throw new Error(`Can't get field '${field}' from SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`); } - createChild(options = {}) { - const childSearchSource = new SearchSource(); - childSearchSource.setParent(this, options); - return childSearchSource; + const value = this._fields[field]; + if (value !== void 0) { + return value; } + } - /** + create() { + return new SearchSource(); + } + + createCopy() { + const json = angular.toJson(this._fields); + const newSearchSource = new SearchSource(json); + // when serializing the internal fields we lose the internal classes used in the index + // pattern, so we have to set it again to workaround this behavior + newSearchSource.setField('index', this.getField('index')); + newSearchSource.setParent(this.getParent()); + return newSearchSource; + } + + createChild(options = {}) { + const childSearchSource = new SearchSource(); + childSearchSource.setParent(this, options); + return childSearchSource; + } + + /** * Set a searchSource that this source should inherit from * @param {SearchSource} searchSource - the parent searchSource * @return {this} - chainable */ - setParent(parent, options = {}) { - this._parent = parent; - this._inheritOptions = options; - return this; - } + setParent(parent, options = {}) { + this._parent = parent; + this._inheritOptions = options; + return this; + } - /** + /** * Get the parent of this SearchSource * @return {undefined|searchSource} */ - getParent() { - return this._parent || undefined; - } + getParent() { + return this._parent || undefined; + } - /** + /** * Fetch this source and reject the returned Promise on error * * @async */ - fetch() { - const self = this; - let req = _.first(self._myStartableQueued()); - - if (!req) { - const errorHandler = (request, error) => { - request.defer.reject(error); - request.abort(); - }; - req = self._createRequest({ errorHandler }); - } + async fetch(options) { + const $injector = await chrome.dangerouslyGetActiveInjector(); + const es = $injector.get('es'); - fetchSoon.fetchSearchRequests([req]); - return req.getCompletePromise(); - } + await this.requestIsStarting(options); - /** - * Fetch all pending requests for this source ASAP - * @async - */ - fetchQueued() { - return fetchSoon.fetchSearchRequests(this._myStartableQueued()); - } + const searchRequest = await this._flatten(); + this.history = [searchRequest]; - /** - * Cancel all pending requests for this searchSource - * @return {undefined} - */ - cancelQueued() { - searchRequestQueue.getAll() - .filter(req => req.source === this) - .forEach(req => req.abort()); + const response = await fetchSoon(searchRequest, { + ...(this._searchStrategyId && { searchStrategyId: this._searchStrategyId }), + ...options, + }, { es, config, esShardTimeout }); + + if (response.error) { + throw new RequestFailure(null, response); } - /** + return response; + } + + /** * Add a handler that will be notified whenever requests start * @param {Function} handler * @return {undefined} */ - onRequestStart(handler) { - this._requestStartHandlers.push(handler); - } + onRequestStart(handler) { + this._requestStartHandlers.push(handler); + } - /** + /** * Called by requests of this search source when they are started * @param {Courier.Request} request + * @param options * @return {Promise} */ - requestIsStarting(request) { - this.activeFetchCount = (this.activeFetchCount || 0) + 1; - this.history = [request]; - - const handlers = [...this._requestStartHandlers]; - // If callparentStartHandlers has been set to true, we also call all - // handlers of parent search sources. - if (this._inheritOptions.callParentStartHandlers) { - let searchSource = this.getParent(); - while (searchSource) { - handlers.push(...searchSource._requestStartHandlers); - searchSource = searchSource.getParent(); - } + requestIsStarting(options) { + const handlers = [...this._requestStartHandlers]; + // If callparentStartHandlers has been set to true, we also call all + // handlers of parent search sources. + if (this._inheritOptions.callParentStartHandlers) { + let searchSource = this.getParent(); + while (searchSource) { + handlers.push(...searchSource._requestStartHandlers); + searchSource = searchSource.getParent(); } - - return Promise - .map(handlers, fn => fn(this, request)) - .then(_.noop); } - /** - * Put a request in to the courier that this Source should - * be fetched on the next run of the courier - * @return {Promise} - */ - onResults() { - const self = this; - - return new Promise(function (resolve, reject) { - const defer = createDefer(Promise); - defer.promise.then(resolve, reject); - - const errorHandler = (request, error) => { - reject(error); - request.abort(); - }; - self._createRequest({ defer, errorHandler }); - }); - } - - async getSearchRequestBody() { - const searchRequest = await this._flatten(); - return searchRequest.body; - } + return Promise.all(handlers.map(fn => fn(this, options))); + } - /** - * Called by requests of this search source when they are done - * @param {Courier.Request} request - * @return {undefined} - */ - requestIsStopped() { - this.activeFetchCount -= 1; - } + async getSearchRequestBody() { + const searchRequest = await this._flatten(); + return searchRequest.body; + } - /** + /** * Completely destroy the SearchSource. * @return {undefined} */ - destroy() { - this.cancelQueued(); - this._requestStartHandlers.length = 0; - } + destroy() { + this._requestStartHandlers.length = 0; + } - /****** + /****** * PRIVATE APIS ******/ - _myStartableQueued() { - return searchRequestQueue - .getStartable() - .filter(req => req.source === this); - } - - /** - * Create a common search request object, which should - * be put into the pending request queue, for this search - * source - * - * @param {Deferred} defer - the deferred object that should be resolved - * when the request is complete - * @return {SearchRequest} - */ - _createRequest({ defer, errorHandler }) { - return new SearchRequest({ source: this, defer, errorHandler }); - } - - /** + /** * Used to merge properties into the data within ._flatten(). * The data is passed in and modified by the function * @@ -443,192 +358,184 @@ export function SearchSourceProvider(Promise, Private, config) { * @param {*} key - The key of `val` * @return {undefined} */ - _mergeProp(data, val, key) { - if (typeof val === 'function') { - const source = this; - return Promise.cast(val(this)) - .then(function (newVal) { - return source._mergeProp(data, newVal, key); - }); - } - - if (val == null || !key || !_.isString(key)) return; - - switch (key) { - case 'filter': - let filters = Array.isArray(val) ? val : [val]; - - filters = filters.filter(filter => { - return this._filterPredicates.every(predicate => predicate(filter, data)); - }); + _mergeProp(data, val, key) { + if (typeof val === 'function') { + const source = this; + return Promise.resolve(val(this)) + .then(function (newVal) { + return source._mergeProp(data, newVal, key); + }); + } - data.filters = [...(data.filters || []), ...filters]; - return; - case 'index': - case 'type': - case 'id': - case 'highlightAll': - if (key && data[key] == null) { - data[key] = val; - } - return; - case 'searchAfter': - key = 'search_after'; - addToBody(); - break; - case 'source': - key = '_source'; - addToBody(); - break; - case 'sort': - val = normalizeSortRequest(val, this.getField('index')); - addToBody(); - break; - case 'query': - data.query = (data.query || []).concat(val); - break; - case 'fields': - data[key] = _.uniq([...(data[key] || []), ...val]); - break; - default: - addToBody(); - } + if (val == null || !key || !_.isString(key)) return; + + switch (key) { + case 'filter': + const filters = Array.isArray(val) ? val : [val]; + data.filters = [...(data.filters || []), ...filters]; + return; + case 'index': + case 'type': + case 'id': + case 'highlightAll': + if (key && data[key] == null) { + data[key] = val; + } + return; + case 'searchAfter': + key = 'search_after'; + addToBody(); + break; + case 'source': + key = '_source'; + addToBody(); + break; + case 'sort': + val = normalizeSortRequest(val, this.getField('index'), config.get('sort:options')); + addToBody(); + break; + case 'query': + data.query = (data.query || []).concat(val); + break; + case 'fields': + data[key] = _.uniq([...(data[key] || []), ...val]); + break; + default: + addToBody(); + } - /** + /** * Add the key and val to the body of the request */ - function addToBody() { - data.body = data.body || {}; - // ignore if we already have a value - if (data.body[key] == null) { - data.body[key] = val; - } + function addToBody() { + data.body = data.body || {}; + // ignore if we already have a value + if (data.body[key] == null) { + data.body[key] = val; } } + } - /** + /** * Walk the inheritance chain of a source and return it's * flat representation (taking into account merging rules) * @returns {Promise} * @resolved {Object|null} - the flat data of the SearchSource */ - _flatten() { - // the merged data of this dataSource and it's ancestors - const flatData = {}; - - // function used to write each property from each data object in the chain to flat data - const root = this; - - // start the chain at this source - let current = this; - - // call the ittr and return it's promise - return (function ittr() { - // iterate the _fields object (not array) and - // pass each key:value pair to source._mergeProp. if _mergeProp - // returns a promise, then wait for it to complete and call _mergeProp again - return Promise.all(_.map(current._fields, function ittr(value, key) { - if (Promise.is(value)) { - return value.then(function (value) { - return ittr(value, key); - }); - } - - const prom = root._mergeProp(flatData, value, key); - return Promise.is(prom) ? prom : null; - })) - .then(function () { - // move to this sources parent - const parent = current.getParent(); - // keep calling until we reach the top parent - if (parent) { - current = parent; - return ittr(); - } + _flatten() { + // the merged data of this dataSource and it's ancestors + const flatData = {}; + + // function used to write each property from each data object in the chain to flat data + const root = this; + + // start the chain at this source + let current = this; + + // call the ittr and return it's promise + return (function ittr() { + // iterate the _fields object (not array) and + // pass each key:value pair to source._mergeProp. if _mergeProp + // returns a promise, then wait for it to complete and call _mergeProp again + return Promise.all(_.map(current._fields, function ittr(value, key) { + if (value instanceof Promise) { + return value.then(function (value) { + return ittr(value, key); }); - }()) - .then(function () { - // This is down here to prevent the circular dependency - flatData.body = flatData.body || {}; - - const computedFields = flatData.index.getComputedFields(); - - flatData.body.stored_fields = computedFields.storedFields; - flatData.body.script_fields = flatData.body.script_fields || {}; - _.extend(flatData.body.script_fields, computedFields.scriptFields); - - const defaultDocValueFields = computedFields.docvalueFields ? computedFields.docvalueFields : []; - flatData.body.docvalue_fields = flatData.body.docvalue_fields || defaultDocValueFields; + } - if (flatData.body._source) { - // exclude source fields for this index pattern specified by the user - const filter = fieldWildcardFilter(flatData.body._source.excludes); - flatData.body.docvalue_fields = flatData.body.docvalue_fields.filter( - docvalueField => filter(docvalueField.field) - ); + const prom = root._mergeProp(flatData, value, key); + return prom instanceof Promise ? prom : null; + })) + .then(function () { + // move to this sources parent + const parent = current.getParent(); + // keep calling until we reach the top parent + if (parent) { + current = parent; + return ittr(); } + }); + }()) + .then(function () { + // This is down here to prevent the circular dependency + flatData.body = flatData.body || {}; + + const computedFields = flatData.index.getComputedFields(); + + flatData.body.stored_fields = computedFields.storedFields; + flatData.body.script_fields = flatData.body.script_fields || {}; + _.extend(flatData.body.script_fields, computedFields.scriptFields); + + const defaultDocValueFields = computedFields.docvalueFields ? computedFields.docvalueFields : []; + flatData.body.docvalue_fields = flatData.body.docvalue_fields || defaultDocValueFields; + + if (flatData.body._source) { + // exclude source fields for this index pattern specified by the user + const filter = fieldWildcardFilter(flatData.body._source.excludes, config.get('metaFields')); + flatData.body.docvalue_fields = flatData.body.docvalue_fields.filter( + docvalueField => filter(docvalueField.field) + ); + } - // if we only want to search for certain fields - const fields = flatData.fields; - if (fields) { - // filter out the docvalue_fields, and script_fields to only include those that we are concerned with - flatData.body.docvalue_fields = filterDocvalueFields(flatData.body.docvalue_fields, fields); - flatData.body.script_fields = _.pick(flatData.body.script_fields, fields); - - // request the remaining fields from both stored_fields and _source - const remainingFields = _.difference(fields, _.keys(flatData.body.script_fields)); - flatData.body.stored_fields = remainingFields; - _.set(flatData.body, '_source.includes', remainingFields); - } + // if we only want to search for certain fields + const fields = flatData.fields; + if (fields) { + // filter out the docvalue_fields, and script_fields to only include those that we are concerned with + flatData.body.docvalue_fields = filterDocvalueFields(flatData.body.docvalue_fields, fields); + flatData.body.script_fields = _.pick(flatData.body.script_fields, fields); + + // request the remaining fields from both stored_fields and _source + const remainingFields = _.difference(fields, _.keys(flatData.body.script_fields)); + flatData.body.stored_fields = remainingFields; + _.set(flatData.body, '_source.includes', remainingFields); + } - const esQueryConfigs = getEsQueryConfig(config); - flatData.body.query = buildEsQuery(flatData.index, flatData.query, flatData.filters, esQueryConfigs); + const esQueryConfigs = getEsQueryConfig(config); + flatData.body.query = buildEsQuery(flatData.index, flatData.query, flatData.filters, esQueryConfigs); - if (flatData.highlightAll != null) { - if (flatData.highlightAll && flatData.body.query) { - flatData.body.highlight = getHighlightRequest(flatData.body.query, getConfig); - } - delete flatData.highlightAll; + if (flatData.highlightAll != null) { + if (flatData.highlightAll && flatData.body.query) { + flatData.body.highlight = getHighlightRequest(flatData.body.query, getConfig); } + delete flatData.highlightAll; + } - /** + /** * Translate a filter into a query to support es 3+ * @param {Object} filter - The filter to translate * @return {Object} the query version of that filter */ - const translateToQuery = function (filter) { - if (!filter) return; + const translateToQuery = function (filter) { + if (!filter) return; - if (filter.query) { - return filter.query; - } + if (filter.query) { + return filter.query; + } - return filter; - }; + return filter; + }; - // re-write filters within filter aggregations - (function recurse(aggBranch) { - if (!aggBranch) return; - Object.keys(aggBranch).forEach(function (id) { - const agg = aggBranch[id]; + // re-write filters within filter aggregations + (function recurse(aggBranch) { + if (!aggBranch) return; + Object.keys(aggBranch).forEach(function (id) { + const agg = aggBranch[id]; - if (agg.filters) { - // translate filters aggregations - const filters = agg.filters.filters; + if (agg.filters) { + // translate filters aggregations + const filters = agg.filters.filters; - Object.keys(filters).forEach(function (filterId) { - filters[filterId] = translateToQuery(filters[filterId]); - }); - } + Object.keys(filters).forEach(function (filterId) { + filters[filterId] = translateToQuery(filters[filterId]); + }); + } - recurse(agg.aggs || agg.aggregations); - }); - }(flatData.body.aggs || flatData.body.aggregations)); + recurse(agg.aggs || agg.aggregations); + }); + }(flatData.body.aggs || flatData.body.aggregations)); - return flatData; - }); - } + return flatData; + }); } - - return SearchSource; } diff --git a/src/legacy/ui/public/courier/search_source/search_source.test.js b/src/legacy/ui/public/courier/search_source/search_source.test.js new file mode 100644 index 0000000000000..be08261ba9d2c --- /dev/null +++ b/src/legacy/ui/public/courier/search_source/search_source.test.js @@ -0,0 +1,193 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { SearchSource } from '../search_source'; + +jest.mock('ui/new_platform', () => ({ + npSetup: { + core: { + injectedMetadata: { + getInjectedVar: () => 0, + } + } + } +})); + +jest.mock('../fetch', () => ({ + fetchSoon: jest.fn(), +})); + +const indexPattern = { title: 'foo', getIndex: () => 'foo' }; +const indexPattern2 = { title: 'foo', getIndex: () => 'foo' }; + +describe('SearchSource', function () { + describe('#setField()', function () { + it('sets the value for the property', function () { + const searchSource = new SearchSource(); + searchSource.setField('aggs', 5); + expect(searchSource.getField('aggs')).toBe(5); + }); + + it('throws an error if the property is not accepted', function () { + const searchSource = new SearchSource(); + expect(() => searchSource.setField('index', 5)).toThrow(); + }); + }); + + describe('#getField()', function () { + it('gets the value for the property', function () { + const searchSource = new SearchSource(); + searchSource.setField('aggs', 5); + expect(searchSource.getField('aggs')).toBe(5); + }); + + it('throws an error if the property is not accepted', function () { + const searchSource = new SearchSource(); + expect(() => searchSource.getField('unacceptablePropName')).toThrow(); + }); + }); + + describe(`#setField('index')`, function () { + describe('auto-sourceFiltering', function () { + describe('new index pattern assigned', function () { + it('generates a searchSource filter', function () { + const searchSource = new SearchSource(); + expect(searchSource.getField('index')).toBe(undefined); + expect(searchSource.getField('source')).toBe(undefined); + searchSource.setField('index', indexPattern); + expect(searchSource.getField('index')).toBe(indexPattern); + expect(typeof searchSource.getField('source')).toBe('function'); + }); + + it('removes created searchSource filter on removal', function () { + const searchSource = new SearchSource(); + searchSource.setField('index', indexPattern); + searchSource.setField('index', null); + expect(searchSource.getField('index')).toBe(undefined); + expect(searchSource.getField('source')).toBe(undefined); + }); + }); + + describe('new index pattern assigned over another', function () { + it('replaces searchSource filter with new', function () { + const searchSource = new SearchSource(); + searchSource.setField('index', indexPattern); + const searchSourceFilter1 = searchSource.getField('source'); + searchSource.setField('index', indexPattern2); + expect(searchSource.getField('index')).toBe(indexPattern2); + expect(typeof searchSource.getField('source')).toBe('function'); + expect(searchSource.getField('source')).not.toBe(searchSourceFilter1); + }); + + it('removes created searchSource filter on removal', function () { + const searchSource = new SearchSource(); + searchSource.setField('index', indexPattern); + searchSource.setField('index', indexPattern2); + searchSource.setField('index', null); + expect(searchSource.getField('index')).toBe(undefined); + expect(searchSource.getField('source')).toBe(undefined); + }); + }); + + describe('ip assigned before custom searchSource filter', function () { + it('custom searchSource filter becomes new searchSource', function () { + const searchSource = new SearchSource(); + const football = {}; + searchSource.setField('index', indexPattern); + expect(typeof searchSource.getField('source')).toBe('function'); + searchSource.setField('source', football); + expect(searchSource.getField('index')).toBe(indexPattern); + expect(searchSource.getField('source')).toBe(football); + }); + + it('custom searchSource stays after removal', function () { + const searchSource = new SearchSource(); + const football = {}; + searchSource.setField('index', indexPattern); + searchSource.setField('source', football); + searchSource.setField('index', null); + expect(searchSource.getField('index')).toBe(undefined); + expect(searchSource.getField('source')).toBe(football); + }); + }); + + describe('ip assigned after custom searchSource filter', function () { + it('leaves the custom filter in place', function () { + const searchSource = new SearchSource(); + const football = {}; + searchSource.setField('source', football); + searchSource.setField('index', indexPattern); + expect(searchSource.getField('index')).toBe(indexPattern); + expect(searchSource.getField('source')).toBe(football); + }); + + it('custom searchSource stays after removal', function () { + const searchSource = new SearchSource(); + const football = {}; + searchSource.setField('source', football); + searchSource.setField('index', indexPattern); + searchSource.setField('index', null); + expect(searchSource.getField('index')).toBe(undefined); + expect(searchSource.getField('source')).toBe(football); + }); + }); + }); + }); + + describe('#onRequestStart()', () => { + it('should be called when starting a request', () => { + const searchSource = new SearchSource(); + const fn = jest.fn(); + searchSource.onRequestStart(fn); + const options = {}; + searchSource.requestIsStarting(options); + expect(fn).toBeCalledWith(searchSource, options); + }); + + it('should not be called on parent searchSource', () => { + const parent = new SearchSource(); + const searchSource = new SearchSource().setParent(parent); + + const fn = jest.fn(); + searchSource.onRequestStart(fn); + const parentFn = jest.fn(); + parent.onRequestStart(parentFn); + const options = {}; + searchSource.requestIsStarting(options); + + expect(fn).toBeCalledWith(searchSource, options); + expect(parentFn).not.toBeCalled(); + }); + + it('should be called on parent searchSource if callParentStartHandlers is true', () => { + const parent = new SearchSource(); + const searchSource = new SearchSource().setParent(parent, { callParentStartHandlers: true }); + + const fn = jest.fn(); + searchSource.onRequestStart(fn); + const parentFn = jest.fn(); + parent.onRequestStart(parentFn); + const options = {}; + searchSource.requestIsStarting(options); + + expect(fn).toBeCalledWith(searchSource, options); + expect(parentFn).toBeCalledWith(searchSource, options); + }); + }); +}); diff --git a/src/legacy/ui/public/courier/search_strategy/default_search_strategy.js b/src/legacy/ui/public/courier/search_strategy/default_search_strategy.js index 4b1f488ece128..7d9865c137e62 100644 --- a/src/legacy/ui/public/courier/search_strategy/default_search_strategy.js +++ b/src/legacy/ui/public/courier/search_strategy/default_search_strategy.js @@ -19,48 +19,13 @@ import { addSearchStrategy } from './search_strategy_registry'; import { isDefaultTypeIndexPattern } from './is_default_type_index_pattern'; -import { SearchError } from './search_error'; -import { getSearchParams, getMSearchParams } from '../fetch/get_search_params'; - -function getAllFetchParams(searchRequests, Promise) { - return Promise.map(searchRequests, (searchRequest) => { - return Promise.try(searchRequest.getFetchParams, void 0, searchRequest) - .then((fetchParams) => { - return (searchRequest.fetchParams = fetchParams); - }) - .then(value => ({ resolved: value })) - .catch(error => ({ rejected: error })); - }); -} - -async function serializeAllFetchParams(fetchParams, searchRequests, serializeFetchParams) { - const searchRequestsWithFetchParams = []; - const failedSearchRequests = []; - - // Gather the fetch param responses from all the successful requests. - fetchParams.forEach((result, index) => { - if (result.resolved) { - searchRequestsWithFetchParams.push(result.resolved); - } else { - const searchRequest = searchRequests[index]; - - searchRequest.handleFailure(result.rejected); - failedSearchRequests.push(searchRequest); - } - }); - - return { - serializedFetchParams: await serializeFetchParams(searchRequestsWithFetchParams), - failedSearchRequests, - }; -} +import { getSearchParams, getMSearchParams, getPreference, getTimeout } from '../fetch/get_search_params'; export const defaultSearchStrategy = { id: 'default', search: params => { - const { config } = params; - return config.get('courier:batchSearches') ? msearch(params) : search(params); + return params.config.get('courier:batchSearches') ? msearch(params) : search(params); }, isViable: (indexPattern) => { @@ -72,79 +37,43 @@ export const defaultSearchStrategy = { }, }; -async function msearch({ searchRequests, es, Promise, serializeFetchParams, config }) { - // Flatten the searchSource within each searchRequest to get the fetch params, - // e.g. body, filters, index pattern, query. - const allFetchParams = await getAllFetchParams(searchRequests, Promise); - - // Serialize the fetch params into a format suitable for the body of an ES query. - const { - serializedFetchParams, - failedSearchRequests, - } = await serializeAllFetchParams(allFetchParams, searchRequests, serializeFetchParams); - - if (serializedFetchParams.trim() === '') { - return { - failedSearchRequests, +function msearch({ searchRequests, es, config, esShardTimeout }) { + const inlineRequests = searchRequests.map(({ index, body, search_type: searchType }) => { + const inlineHeader = { + index: index.title || index, + search_type: searchType, + ignore_unavailable: true, + preference: getPreference(config) }; - } - const msearchParams = { - ...getMSearchParams(config), - body: serializedFetchParams, - }; - - const searching = es.msearch(msearchParams); + const inlineBody = { + ...body, + timeout: getTimeout(esShardTimeout) + }; + return `${JSON.stringify(inlineHeader)}\n${JSON.stringify(inlineBody)}`; + }); + const searching = es.msearch({ + ...getMSearchParams(config), + body: `${inlineRequests.join('\n')}\n`, + }); return { - // Munge data into shape expected by consumer. - searching: new Promise((resolve, reject) => { - // Unwrap the responses object returned by the ES client. - searching.then(({ responses }) => { - resolve(responses); - }).catch(error => { - // Format ES client error as a SearchError. - const { statusCode, displayName, message, path } = error; - - const searchError = new SearchError({ - status: statusCode, - title: displayName, - message, - path, - }); - - reject(searchError); - }); - }), - abort: searching.abort, - failedSearchRequests, + searching: searching.then(({ responses }) => responses), + abort: searching.abort }; } -function search({ searchRequests, es, Promise, config, sessionId, esShardTimeout }) { - const failedSearchRequests = []; +function search({ searchRequests, es, config, esShardTimeout }) { const abortController = new AbortController(); - const searchParams = getSearchParams(config, sessionId, esShardTimeout); - const promises = searchRequests.map(async searchRequest => { - return searchRequest.getFetchParams() - .then(fetchParams => { - const { index, body } = searchRequest.fetchParams = fetchParams; - const promise = es.search({ index: index.title || index, body, ...searchParams }); - abortController.signal.addEventListener('abort', promise.abort); - return promise; - }, error => { - searchRequest.handleFailure(error); - failedSearchRequests.push(searchRequest); - }) - .catch(({ response }) => { - // Copying the _msearch behavior where the errors for individual requests are returned - // instead of thrown - return JSON.parse(response); - }); + const searchParams = getSearchParams(config, esShardTimeout); + const promises = searchRequests.map(({ index, body }) => { + const searching = es.search({ index: index.title || index, body, ...searchParams }) + .catch(({ response }) => JSON.parse(response)); + abortController.signal.addEventListener('abort', searching.abort); + return searching; }); return { searching: Promise.all(promises), abort: () => abortController.abort(), - failedSearchRequests }; } diff --git a/src/legacy/ui/public/courier/search_strategy/default_search_strategy.test.js b/src/legacy/ui/public/courier/search_strategy/default_search_strategy.test.js index dc8732032ba22..953ca4fe800f1 100644 --- a/src/legacy/ui/public/courier/search_strategy/default_search_strategy.test.js +++ b/src/legacy/ui/public/courier/search_strategy/default_search_strategy.test.js @@ -18,7 +18,6 @@ */ import { defaultSearchStrategy } from './default_search_strategy'; -import Bluebird from 'bluebird'; const { search } = defaultSearchStrategy; @@ -29,14 +28,12 @@ function getConfigStub(config = {}) { } describe('defaultSearchStrategy', function () { - describe('search', function () { - let searchArgs; beforeEach(() => { - const msearchMock = jest.fn().mockReturnValue(Bluebird.resolve([])); - const searchMock = jest.fn().mockReturnValue(Bluebird.resolve([])); + const msearchMock = jest.fn().mockReturnValue(Promise.resolve([])); + const searchMock = jest.fn().mockReturnValue(Promise.resolve([])); searchArgs = { searchRequests: [], @@ -44,8 +41,6 @@ describe('defaultSearchStrategy', function () { msearch: msearchMock, search: searchMock, }, - Promise: Bluebird, - serializeFetchParams: () => Bluebird.resolve('pretend this is a valid request body'), }; }); @@ -78,7 +73,5 @@ describe('defaultSearchStrategy', function () { await search(searchArgs); expect(searchArgs.es.msearch.mock.calls[0][0]).toHaveProperty('ignore_throttled', false); }); - }); - }); diff --git a/src/legacy/ui/public/courier/search_strategy/index.js b/src/legacy/ui/public/courier/search_strategy/index.js index 3f6d172426d0d..229d0cbb1da5d 100644 --- a/src/legacy/ui/public/courier/search_strategy/index.js +++ b/src/legacy/ui/public/courier/search_strategy/index.js @@ -18,9 +18,10 @@ */ export { - assignSearchRequestsToSearchStrategies, addSearchStrategy, hasSearchStategyForIndexPattern, + getSearchStrategyById, + getSearchStrategyForSearchRequest, } from './search_strategy_registry'; export { isDefaultTypeIndexPattern } from './is_default_type_index_pattern'; diff --git a/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.js b/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.js index 3af93e4f16509..e67d39ea27aa6 100644 --- a/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.js +++ b/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.js @@ -19,7 +19,7 @@ import { noOpSearchStrategy } from './no_op_search_strategy'; -const searchStrategies = []; +export const searchStrategies = []; export const addSearchStrategy = searchStrategy => { if (searchStrategies.includes(searchStrategy)) { @@ -29,28 +29,26 @@ export const addSearchStrategy = searchStrategy => { searchStrategies.push(searchStrategy); }; -const getSearchStrategyByViability = indexPattern => { +export const getSearchStrategyByViability = indexPattern => { return searchStrategies.find(searchStrategy => { return searchStrategy.isViable(indexPattern); }); }; -const getSearchStrategyById = searchStrategyId => { +export const getSearchStrategyById = searchStrategyId => { return searchStrategies.find(searchStrategy => { return searchStrategy.id === searchStrategyId; }); }; -const getSearchStrategyForSearchRequest = searchRequest => { +export const getSearchStrategyForSearchRequest = (searchRequest, { searchStrategyId } = {}) => { // Allow the searchSource to declare the correct strategy with which to execute its searches. - const preferredSearchStrategyId = searchRequest.source.getPreferredSearchStrategyId(); - if (preferredSearchStrategyId != null) { - return getSearchStrategyById(preferredSearchStrategyId); + if (searchStrategyId != null) { + return getSearchStrategyById(searchStrategyId); } // Otherwise try to match it to a strategy. - const indexPattern = searchRequest.source.getField('index'); - const viableSearchStrategy = getSearchStrategyByViability(indexPattern); + const viableSearchStrategy = getSearchStrategyByViability(searchRequest.index); if (viableSearchStrategy) { return viableSearchStrategy; @@ -60,47 +58,6 @@ const getSearchStrategyForSearchRequest = searchRequest => { return noOpSearchStrategy; }; - -/** - * Build a structure like this: - * - * [{ - * searchStrategy: rollupSearchStrategy, - * searchRequests: [], - * }, { - * searchStrategy: defaultSearchStrategy, - * searchRequests: [], - * }] - * - * We use an array of objects to preserve the order of the search requests, which we use to - * deterministically associate each response with the originating request. - */ -export const assignSearchRequestsToSearchStrategies = searchRequests => { - const searchStrategiesWithRequests = []; - const searchStrategyById = {}; - - searchRequests.forEach(searchRequest => { - const matchingSearchStrategy = getSearchStrategyForSearchRequest(searchRequest); - const { id } = matchingSearchStrategy; - let searchStrategyWithRequest = searchStrategyById[id]; - - // Create the data structure if we don't already have it. - if (!searchStrategyWithRequest) { - searchStrategyWithRequest = { - searchStrategy: matchingSearchStrategy, - searchRequests: [], - }; - - searchStrategyById[id] = searchStrategyWithRequest; - searchStrategiesWithRequests.push(searchStrategyWithRequest); - } - - searchStrategyWithRequest.searchRequests.push(searchRequest); - }); - - return searchStrategiesWithRequests; -}; - export const hasSearchStategyForIndexPattern = indexPattern => { return Boolean(getSearchStrategyByViability(indexPattern)); }; diff --git a/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.test.js b/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.test.js index 5f7e14082d577..362d303eb6203 100644 --- a/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.test.js +++ b/src/legacy/ui/public/courier/search_strategy/search_strategy_registry.test.js @@ -17,79 +17,98 @@ * under the License. */ +import { noOpSearchStrategy } from './no_op_search_strategy'; import { - assignSearchRequestsToSearchStrategies, + searchStrategies, addSearchStrategy, + getSearchStrategyByViability, + getSearchStrategyById, + getSearchStrategyForSearchRequest, + hasSearchStategyForIndexPattern } from './search_strategy_registry'; -import { noOpSearchStrategy } from './no_op_search_strategy'; +const mockSearchStrategies = [{ + id: 0, + isViable: index => index === 0 +}, { + id: 1, + isViable: index => index === 1 +}]; + +describe('Search strategy registry', () => { + beforeEach(() => { + searchStrategies.length = 0; + }); + + describe('addSearchStrategy', () => { + it('adds a search strategy', () => { + addSearchStrategy(mockSearchStrategies[0]); + expect(searchStrategies.length).toBe(1); + }); + + it('does not add a search strategy if it is already included', () => { + addSearchStrategy(mockSearchStrategies[0]); + addSearchStrategy(mockSearchStrategies[0]); + expect(searchStrategies.length).toBe(1); + }); + }); + + describe('getSearchStrategyByViability', () => { + beforeEach(() => { + mockSearchStrategies.forEach(addSearchStrategy); + }); + + it('returns the viable strategy', () => { + expect(getSearchStrategyByViability(0)).toBe(mockSearchStrategies[0]); + expect(getSearchStrategyByViability(1)).toBe(mockSearchStrategies[1]); + }); + + it('returns undefined if there is no viable strategy', () => { + expect(getSearchStrategyByViability(-1)).toBe(undefined); + }); + }); + + describe('getSearchStrategyById', () => { + beforeEach(() => { + mockSearchStrategies.forEach(addSearchStrategy); + }); + + it('returns the strategy by ID', () => { + expect(getSearchStrategyById(0)).toBe(mockSearchStrategies[0]); + expect(getSearchStrategyById(1)).toBe(mockSearchStrategies[1]); + }); -describe('SearchStrategyRegistry', () => { - describe('assignSearchRequestsToSearchStrategies', () => { - test('associates search requests with valid search strategies', () => { - const searchStrategyA = { - id: 'a', - isViable: indexPattern => { - return indexPattern === 'a'; - }, - }; - - addSearchStrategy(searchStrategyA); - - const searchStrategyB = { - id: 'b', - isViable: indexPattern => { - return indexPattern === 'b'; - }, - }; - - addSearchStrategy(searchStrategyB); - - const searchRequest0 = { - id: 0, - source: { getField: () => 'b', getPreferredSearchStrategyId: () => {} }, - }; - - const searchRequest1 = { - id: 1, - source: { getField: () => 'a', getPreferredSearchStrategyId: () => {} }, - }; - - const searchRequest2 = { - id: 2, - source: { getField: () => 'a', getPreferredSearchStrategyId: () => {} }, - }; - - const searchRequest3 = { - id: 3, - source: { getField: () => 'b', getPreferredSearchStrategyId: () => {} }, - }; - - const searchRequests = [ searchRequest0, searchRequest1, searchRequest2, searchRequest3]; - const searchStrategiesWithSearchRequests = assignSearchRequestsToSearchStrategies(searchRequests); - - expect(searchStrategiesWithSearchRequests).toEqual([{ - searchStrategy: searchStrategyB, - searchRequests: [ searchRequest0, searchRequest3 ], - }, { - searchStrategy: searchStrategyA, - searchRequests: [ searchRequest1, searchRequest2 ], - }]); + it('returns undefined if there is no strategy with that ID', () => { + expect(getSearchStrategyById(-1)).toBe(undefined); }); + }); - test(`associates search requests with noOpSearchStrategy when a viable one can't be found`, () => { - const searchRequest0 = { - id: 0, - source: { getField: () => {}, getPreferredSearchStrategyId: () => {} }, - }; + describe('getSearchStrategyForSearchRequest', () => { + beforeEach(() => { + mockSearchStrategies.forEach(addSearchStrategy); + }); - const searchRequests = [ searchRequest0 ]; - const searchStrategiesWithSearchRequests = assignSearchRequestsToSearchStrategies(searchRequests); + it('returns the strategy by ID if provided', () => { + expect(getSearchStrategyForSearchRequest({}, { searchStrategyId: 1 })).toBe(mockSearchStrategies[1]); + }); + + it('returns the strategy by viability if there is one', () => { + expect(getSearchStrategyForSearchRequest({ index: 1 })).toBe(mockSearchStrategies[1]); + }); + + it('returns the no op strategy if there is no viable strategy', () => { + expect(getSearchStrategyForSearchRequest({ index: 3 })).toBe(noOpSearchStrategy); + }); + }); + + describe('hasSearchStategyForIndexPattern', () => { + beforeEach(() => { + mockSearchStrategies.forEach(addSearchStrategy); + }); - expect(searchStrategiesWithSearchRequests).toEqual([{ - searchStrategy: noOpSearchStrategy, - searchRequests: [ searchRequest0 ], - }]); + it('returns whether there is a search strategy for this index pattern', () => { + expect(hasSearchStategyForIndexPattern(0)).toBe(true); + expect(hasSearchStategyForIndexPattern(-1)).toBe(false); }); }); }); diff --git a/src/legacy/ui/public/error_allow_explicit_index/_error_allow_explicit_index.scss b/src/legacy/ui/public/error_allow_explicit_index/_error_allow_explicit_index.scss deleted file mode 100644 index 769abea150199..0000000000000 --- a/src/legacy/ui/public/error_allow_explicit_index/_error_allow_explicit_index.scss +++ /dev/null @@ -1,3 +0,0 @@ -.kbnError--multi-allow-explicit-index { - padding: $euiSizeL; -} diff --git a/src/legacy/ui/public/error_allow_explicit_index/_index.scss b/src/legacy/ui/public/error_allow_explicit_index/_index.scss deleted file mode 100644 index 84cb111127679..0000000000000 --- a/src/legacy/ui/public/error_allow_explicit_index/_index.scss +++ /dev/null @@ -1 +0,0 @@ -@import './error_allow_explicit_index'; diff --git a/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.html b/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.html deleted file mode 100644 index e61383b11101a..0000000000000 --- a/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.html +++ /dev/null @@ -1,48 +0,0 @@ -
-

- - - -

- -

- -

- -

-
    -
  1. -
  2. -
  3. -
-
diff --git a/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.js b/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.js deleted file mode 100644 index 35763d8dd0385..0000000000000 --- a/src/legacy/ui/public/error_allow_explicit_index/error_allow_explicit_index.js +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { i18n } from '@kbn/i18n'; -import { get } from 'lodash'; - -import uiRoutes from '../routes'; -import { KbnUrlProvider } from '../url'; - -import template from './error_allow_explicit_index.html'; - -uiRoutes - .when('/error/multi.allow_explicit_index', { - template, - k7Breadcrumbs: () => [{ text: i18n.translate('common.ui.errorAllowExplicitIndex.breadcrumbs.errorText', { defaultMessage: 'Error' }) }], - }); - -export function ErrorAllowExplicitIndexProvider(Private, Promise) { - const kbnUrl = Private(KbnUrlProvider); - - return new (class ErrorAllowExplicitIndex { - test(error) { - if (!error || error.status !== 400) { - return false; - } - - const type = get(error, 'body.error.type'); - const reason = get(error, 'body.error.reason'); - - return ( - type === 'illegal_argument_exception' && - String(reason).includes('explicit index') - ); - } - - takeover() { - kbnUrl.change('/error/multi.allow_explicit_index'); - return Promise.halt(); - } - }); -} diff --git a/src/legacy/ui/public/error_allow_explicit_index/index.js b/src/legacy/ui/public/error_allow_explicit_index/index.js deleted file mode 100644 index a832fde31c987..0000000000000 --- a/src/legacy/ui/public/error_allow_explicit_index/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -export { ErrorAllowExplicitIndexProvider } from './error_allow_explicit_index'; diff --git a/src/legacy/ui/public/field_wildcard/__tests__/field_wildcard.js b/src/legacy/ui/public/field_wildcard/__tests__/field_wildcard.js index aeffdbc8bfa6c..a15c602b7ba83 100644 --- a/src/legacy/ui/public/field_wildcard/__tests__/field_wildcard.js +++ b/src/legacy/ui/public/field_wildcard/__tests__/field_wildcard.js @@ -20,19 +20,12 @@ import expect from '@kbn/expect'; import ngMock from 'ng_mock'; -import { FieldWildcardProvider } from '../../field_wildcard'; +import { fieldWildcardFilter, makeRegEx } from '../../field_wildcard'; describe('fieldWildcard', function () { - let fieldWildcardFilter; - let makeRegEx; + const metaFields = ['_id', '_type', '_source']; beforeEach(ngMock.module('kibana')); - beforeEach(ngMock.inject(function (config, Private) { - config.set('metaFields', ['_id', '_type', '_source']); - const fieldWildcard = Private(FieldWildcardProvider); - fieldWildcardFilter = fieldWildcard.fieldWildcardFilter; - makeRegEx = fieldWildcard.makeRegEx; - })); describe('makeRegEx', function () { it('matches * in any position', function () { @@ -70,7 +63,7 @@ describe('fieldWildcard', function () { }); it('filters nothing when given an empty array', function () { - const filter = fieldWildcardFilter([]); + const filter = fieldWildcardFilter([], metaFields); const original = [ 'foo', 'bar', @@ -82,7 +75,7 @@ describe('fieldWildcard', function () { }); it('does not filter metaFields', function () { - const filter = fieldWildcardFilter([ '_*' ]); + const filter = fieldWildcardFilter([ '_*' ], metaFields); const original = [ '_id', @@ -97,7 +90,7 @@ describe('fieldWildcard', function () { const filter = fieldWildcardFilter([ 'f*', '*4' - ]); + ], metaFields); const original = [ 'foo', @@ -114,7 +107,7 @@ describe('fieldWildcard', function () { 'f*', '*4', 'undefined' - ]); + ], metaFields); const original = [ 'foo', diff --git a/src/legacy/ui/public/field_wildcard/field_wildcard.js b/src/legacy/ui/public/field_wildcard/field_wildcard.js index f73997d40a4e4..656641b20a98c 100644 --- a/src/legacy/ui/public/field_wildcard/field_wildcard.js +++ b/src/legacy/ui/public/field_wildcard/field_wildcard.js @@ -19,31 +19,25 @@ import { escapeRegExp, memoize } from 'lodash'; -export function FieldWildcardProvider(config) { - const metaFields = config.get('metaFields'); +export const makeRegEx = memoize(function makeRegEx(glob) { + return new RegExp('^' + glob.split('*').map(escapeRegExp).join('.*') + '$'); +}); - const makeRegEx = memoize(function makeRegEx(glob) { - return new RegExp('^' + glob.split('*').map(escapeRegExp).join('.*') + '$'); - }); - - // Note that this will return an essentially noop function if globs is undefined. - function fieldWildcardMatcher(globs = []) { - return function matcher(val) { - // do not test metaFields or keyword - if (metaFields.indexOf(val) !== -1) { - return false; - } - return globs.some(p => makeRegEx(p).test(val)); - }; - } - - // Note that this will return an essentially noop function if globs is undefined. - function fieldWildcardFilter(globs = []) { - const matcher = fieldWildcardMatcher(globs); - return function filter(val) { - return !matcher(val); - }; - } +// Note that this will return an essentially noop function if globs is undefined. +export function fieldWildcardMatcher(globs = [], metaFields) { + return function matcher(val) { + // do not test metaFields or keyword + if (metaFields.indexOf(val) !== -1) { + return false; + } + return globs.some(p => makeRegEx(p).test(val)); + }; +} - return { makeRegEx, fieldWildcardMatcher, fieldWildcardFilter }; +// Note that this will return an essentially noop function if globs is undefined. +export function fieldWildcardFilter(globs = [], metaFields = []) { + const matcher = fieldWildcardMatcher(globs, metaFields); + return function filter(val) { + return !matcher(val); + }; } diff --git a/src/legacy/ui/public/field_wildcard/index.js b/src/legacy/ui/public/field_wildcard/index.js index d03643f8804d8..db9f830e450b8 100644 --- a/src/legacy/ui/public/field_wildcard/index.js +++ b/src/legacy/ui/public/field_wildcard/index.js @@ -17,4 +17,4 @@ * under the License. */ -export { FieldWildcardProvider } from './field_wildcard'; +export * from './field_wildcard'; diff --git a/src/legacy/ui/public/legacy_compat/angular_config.tsx b/src/legacy/ui/public/legacy_compat/angular_config.tsx index 28d57e9f8e8c9..8eac31e24530c 100644 --- a/src/legacy/ui/public/legacy_compat/angular_config.tsx +++ b/src/legacy/ui/public/legacy_compat/angular_config.tsx @@ -64,7 +64,6 @@ export const configureAppAngularModule = (angularModule: IModule) => { .value('buildNum', legacyMetadata.buildNum) .value('buildSha', legacyMetadata.buildSha) .value('serverName', legacyMetadata.serverName) - .value('sessionId', Date.now()) .value('esUrl', getEsUrl(newPlatform)) .value('uiCapabilities', capabilities.get()) .config(setupCompileProvider(newPlatform)) diff --git a/src/legacy/ui/public/management/components/sidebar_nav.tsx b/src/legacy/ui/public/management/components/sidebar_nav.tsx index ef232c7ef7eda..f0ac787e0ef44 100644 --- a/src/legacy/ui/public/management/components/sidebar_nav.tsx +++ b/src/legacy/ui/public/management/components/sidebar_nav.tsx @@ -19,6 +19,7 @@ import { EuiIcon, EuiSideNav, IconType } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; +import { i18n } from '@kbn/i18n'; import React from 'react'; import { IndexedArray } from 'ui/indexed_array'; @@ -73,6 +74,9 @@ export class SidebarNav extends React.Component { - if (this.searchSource) { - this.searchSource.cancelQueued(); - } - }; + this.destroy = () => {}; /** * Delete this object from Elasticsearch diff --git a/src/legacy/ui/public/vis/vis.js b/src/legacy/ui/public/vis/vis.js index c34fc1b10378e..c1fff1556e3ad 100644 --- a/src/legacy/ui/public/vis/vis.js +++ b/src/legacy/ui/public/vis/vis.js @@ -33,14 +33,13 @@ import '../render_complete/directive'; import { AggConfigs } from '../agg_types/agg_configs'; import { PersistedState } from '../persisted_state'; import { updateVisualizationConfig } from './vis_update'; -import { SearchSourceProvider } from '../courier/search_source'; +import { SearchSource } from '../courier'; import { start as visualizations } from '../../../core_plugins/visualizations/public/np_ready/public/legacy'; import '../directives/bind'; export function VisProvider(Private, getAppState) { const visTypes = visualizations.types; - const SearchSource = Private(SearchSourceProvider); class Vis extends EventEmitter { constructor(indexPattern, visState) { diff --git a/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.test.ts b/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.test.ts index 9d6b56c32f1cb..c73f787457a03 100644 --- a/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.test.ts +++ b/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.test.ts @@ -18,6 +18,7 @@ */ jest.mock('ui/new_platform'); +import { searchSourceMock } from '../../courier/search_source/mocks'; import { mockDataLoaderFetch, timefilter } from './embedded_visualize_handler.test.mocks'; import _ from 'lodash'; @@ -85,7 +86,7 @@ describe('EmbeddedVisualizeHandler', () => { inspectorAdapters: {}, query: undefined, queryFilter: null, - searchSource: undefined, + searchSource: searchSourceMock, timeRange: undefined, uiState: undefined, }; @@ -96,7 +97,7 @@ describe('EmbeddedVisualizeHandler', () => { { vis: mockVis, title: 'My Vis', - searchSource: undefined, + searchSource: searchSourceMock, destroy: () => ({}), copyOnSave: false, save: () => Promise.resolve('123'), @@ -128,7 +129,7 @@ describe('EmbeddedVisualizeHandler', () => { { vis: mockVis, title: 'My Vis', - searchSource: undefined, + searchSource: searchSourceMock, destroy: () => ({}), copyOnSave: false, save: () => Promise.resolve('123'), diff --git a/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.ts b/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.ts index 119ec8a004239..bc2152911d1ec 100644 --- a/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.ts +++ b/src/legacy/ui/public/visualize/loader/embedded_visualize_handler.ts @@ -518,9 +518,9 @@ export class EmbeddedVisualizeHandler { // If the data loader was aborted then no need to surface this error in the UI if (error && error.name === 'AbortError') return; - // TODO: come up with a general way to cancel execution of pipeline expressions. - if (this.dataLoaderParams.searchSource && this.dataLoaderParams.searchSource.cancelQueued) { - this.dataLoaderParams.searchSource.cancelQueued(); + // Cancel execution of pipeline expressions + if (this.abortController) { + this.abortController.abort(); } this.vis.requestError = error; diff --git a/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.test.ts b/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.test.ts index f644d7f52d458..0f9f04c87fc6f 100644 --- a/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.test.ts +++ b/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.test.ts @@ -28,7 +28,7 @@ import { } from './build_pipeline'; import { Vis, VisState } from 'ui/vis'; import { AggConfig } from 'ui/agg_types/agg_config'; -import { SearchSource } from 'ui/courier'; +import { searchSourceMock } from 'ui/courier/search_source/mocks'; jest.mock('ui/new_platform'); jest.mock('ui/agg_types/buckets/date_histogram', () => ({ @@ -348,10 +348,7 @@ describe('visualize loader pipeline helpers: build pipeline', () => { toExpression: () => 'testing custom expressions', }, }; - const searchSource: SearchSource = { - getField: () => null, - }; - const expression = await buildPipeline(vis, { searchSource }); + const expression = await buildPipeline(vis, { searchSource: searchSourceMock }); expect(expression).toMatchSnapshot(); }); }); diff --git a/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.ts b/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.ts index e8f1faf915eaf..a0d5b7b36d7f6 100644 --- a/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.ts +++ b/src/legacy/ui/public/visualize/loader/pipeline_helpers/build_pipeline.ts @@ -442,18 +442,9 @@ export const buildVislibDimensions = async ( } else if (xAgg.type.name === 'histogram') { const intervalParam = xAgg.type.paramByName('interval'); const output = { params: {} as any }; - const searchRequest = { - whenAborted: (fn: any) => { - if (params.abortSignal) { - params.abortSignal.addEventListener('abort', fn); - } - }, - }; - await intervalParam.modifyAggConfigOnSearchRequestStart( - xAgg, - params.searchSource, - searchRequest - ); + await intervalParam.modifyAggConfigOnSearchRequestStart(xAgg, params.searchSource, { + abortSignal: params.abortSignal, + }); intervalParam.write(xAgg, output); dimensions.x.params.interval = output.params.interval; } diff --git a/test/functional/apps/management/index.js b/test/functional/apps/management/index.js index 4d4031b4e489b..e68920f06372a 100644 --- a/test/functional/apps/management/index.js +++ b/test/functional/apps/management/index.js @@ -42,6 +42,8 @@ export default function ({ getService, loadTestFile }) { loadTestFile(require.resolve('./_kibana_settings')); loadTestFile(require.resolve('./_scripted_fields')); loadTestFile(require.resolve('./_scripted_fields_preview')); + loadTestFile(require.resolve('./_mgmt_import_saved_objects')); + }); describe('', function () { diff --git a/x-pack/legacy/plugins/apm/public/new-platform/plugin.tsx b/x-pack/legacy/plugins/apm/public/new-platform/plugin.tsx index 39912ec2ca8b4..477559784bf59 100644 --- a/x-pack/legacy/plugins/apm/public/new-platform/plugin.tsx +++ b/x-pack/legacy/plugins/apm/public/new-platform/plugin.tsx @@ -24,7 +24,7 @@ import { MatchedRouteProvider } from '../context/MatchedRouteContext'; export const REACT_APP_ROOT_ID = 'react-apm-root'; -const MainContainer = styled.div` +const MainContainer = styled.main` min-width: ${px(unit * 50)}; padding: ${px(units.plus)}; `; diff --git a/x-pack/legacy/plugins/canvas/scripts/shareable_runtime.js b/x-pack/legacy/plugins/canvas/scripts/shareable_runtime.js index d4d7276ebbc18..11723587b057d 100644 --- a/x-pack/legacy/plugins/canvas/scripts/shareable_runtime.js +++ b/x-pack/legacy/plugins/canvas/scripts/shareable_runtime.js @@ -61,7 +61,7 @@ run( 'webpack-dev-server', '--config', webpackConfig, - '--progress', + ...(process.stdout.isTTY ? ['--progress'] : []), '--hide-modules', '--display-entrypoints', 'false', diff --git a/x-pack/legacy/plugins/code/index.ts b/x-pack/legacy/plugins/code/index.ts index d0d17aa9a802b..34a2102861c91 100644 --- a/x-pack/legacy/plugins/code/index.ts +++ b/x-pack/legacy/plugins/code/index.ts @@ -78,7 +78,7 @@ export const code = (kibana: any) => // Set up with the new platform plugin lifecycle API. const plugin = codePlugin(initializerContext); - plugin.setup(coreSetup); + await plugin.setup(coreSetup, initializerContext.legacy.http); // @ts-ignore const kbnServer = this.kbnServer; diff --git a/x-pack/legacy/plugins/code/public/components/admin_page/admin.tsx b/x-pack/legacy/plugins/code/public/components/admin_page/admin.tsx index 68c96b904e98a..ca97c7a091c22 100644 --- a/x-pack/legacy/plugins/code/public/components/admin_page/admin.tsx +++ b/x-pack/legacy/plugins/code/public/components/admin_page/admin.tsx @@ -131,7 +131,7 @@ class AdminPage extends React.PureComponent { public render() { return ( -
+
{
- + ); } } diff --git a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_adapter.ts b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_adapter.ts index 6d70c8386c31d..9d168e604c1b3 100644 --- a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_adapter.ts +++ b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_adapter.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Request } from 'hapi'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; import util from 'util'; import Boom from 'boom'; import { ServiceHandlerAdapter, ServiceRegisterOptions } from '../service_handler_adapter'; @@ -48,7 +48,7 @@ export class ClusterNodeAdapter implements ServiceHandlerAdapter { private readonly nonCodeAdapter: NonCodeNodeAdapter = new NonCodeNodeAdapter('', this.log); constructor( - private readonly server: CodeServerRouter, + private readonly router: CodeServerRouter, private readonly log: Logger, serverOptions: ServerOptions, esClient: EsClient @@ -113,17 +113,25 @@ export class ClusterNodeAdapter implements ServiceHandlerAdapter { const d = serviceDefinition[method]; const path = `${options.routePrefix}/${d.routePath || method}`; - this.server.route({ + this.router.route({ method: 'post', path, - handler: async (req: Request) => { - const { context, params } = req.payload as RequestPayload; + npHandler: async ( + ctx: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) => { + const { context, params } = req.body as RequestPayload; this.log.debug(`Receiving RPC call ${req.url.path} ${util.inspect(params)}`); try { const data = await localHandler(params, context); - return { data }; + return res.ok({ body: { data } }); } catch (e) { - throw Boom.boomify(e); + if (Boom.isBoom(e)) { + throw e; + } else { + throw Boom.boomify(e, { statusCode: 500 }); + } } }, }); diff --git a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_endpoint.ts b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_endpoint.ts index adb7e9b93fbad..e23b5a9027e75 100644 --- a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_endpoint.ts +++ b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_node_endpoint.ts @@ -4,13 +4,13 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import { LocalEndpoint } from '../local_endpoint'; import { CodeNode } from './code_nodes'; export class ClusterNodeEndpoint extends LocalEndpoint { constructor( - public readonly httpRequest: Request, + public readonly httpRequest: KibanaRequest, public readonly resource: string, public readonly codeNode: CodeNode ) { diff --git a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_resource_locator.ts b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_resource_locator.ts index 27f5c57214112..6ac0b830905bb 100644 --- a/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_resource_locator.ts +++ b/x-pack/legacy/plugins/code/server/distributed/cluster/cluster_resource_locator.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import Boom from 'boom'; import { Endpoint, ResourceLocator } from '../resource_locator'; import { ClusterService } from './cluster_service'; @@ -26,7 +26,7 @@ export class ClusterResourceLocator implements ResourceLocator { return RepositoryUtils.buildRepository(url).uri; } - async locate(req: Request, resource: string): Promise { + async locate(req: KibanaRequest, resource: string): Promise { // to be compatible with if (resource.trim() === '') { return new LocalEndpoint(req, resource); @@ -58,7 +58,7 @@ export class ClusterResourceLocator implements ResourceLocator { /** * Return undefined to let NodeRepositoriesService enqueue the clone job in cluster mode. */ - async allocate(req: Request, resource: string): Promise { + async allocate(req: KibanaRequest, resource: string): Promise { // make the cluster service synchronize the meta data and allocate new resources to nodes await this.clusterService.pollClusterState(); return undefined; diff --git a/x-pack/legacy/plugins/code/server/distributed/code_services.test.ts b/x-pack/legacy/plugins/code/server/distributed/code_services.test.ts index 5f5319730c258..bcc2e7b21e672 100644 --- a/x-pack/legacy/plugins/code/server/distributed/code_services.test.ts +++ b/x-pack/legacy/plugins/code/server/distributed/code_services.test.ts @@ -4,7 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Request, Server } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; +import { httpServiceMock, httpServerMock } from 'src/core/server/mocks'; import { createTestHapiServer } from '../test_utils'; import { LocalHandlerAdapter } from './local_handler_adapter'; import { CodeServerRouter } from '../security'; @@ -17,12 +18,13 @@ import { Logger } from '../log'; import { ConsoleLoggerFactory } from '../utils/console_logger_factory'; const log: Logger = new ConsoleLoggerFactory().getLogger(['test']); -let hapiServer: Server = createTestHapiServer(); +let hapiServer = createTestHapiServer(); -let server: CodeServerRouter = new CodeServerRouter(hapiServer); +const routerMock = httpServiceMock.createRouter(); +let router: CodeServerRouter = new CodeServerRouter(routerMock); beforeEach(async () => { hapiServer = createTestHapiServer(); - server = new CodeServerRouter(hapiServer); + router = new CodeServerRouter(routerMock); }); const TestDefinition = { test1: { @@ -49,13 +51,13 @@ test('local adapter should work', async () => { const services = new CodeServices(new LocalHandlerAdapter()); services.registerHandler(TestDefinition, testServiceHandler); const testApi = services.serviceFor(TestDefinition); - const endpoint = await services.locate({} as Request, ''); + const endpoint = await services.locate(httpServerMock.createKibanaRequest(), ''); const { result } = await testApi.test1(endpoint, { name: 'tester' }); expect(result).toBe(`hello tester`); }); -test('multi-node adapter should register routes', async () => { - const services = new CodeServices(new CodeNodeAdapter(server, log)); +test.skip('multi-node adapter should register routes', async () => { + const services = new CodeServices(new CodeNodeAdapter(router, log)); services.registerHandler(TestDefinition, testServiceHandler); const prefix = DEFAULT_SERVICE_OPTION.routePrefix; @@ -70,8 +72,8 @@ test('multi-node adapter should register routes', async () => { expect(data.result).toBe(`hello tester`); }); -test('non-code-node could send request to code-node', async () => { - const codeNode = new CodeServices(new CodeNodeAdapter(server, log)); +test.skip('non-code-node could send request to code-node', async () => { + const codeNode = new CodeServices(new CodeNodeAdapter(router, log)); const codeNodeUrl = 'http://localhost:5601'; const nonCodeNodeAdapter = new NonCodeNodeAdapter(codeNodeUrl, log); const nonCodeNode = new CodeServices(nonCodeNodeAdapter); @@ -80,13 +82,13 @@ test('non-code-node could send request to code-node', async () => { baseUrl: string, path: string, payload: RequestPayload, - originRequest: Request + originRequest: KibanaRequest ) => { expect(baseUrl).toBe(codeNodeUrl); const response = await hapiServer.inject({ method: 'POST', url: path, - headers: originRequest.headers, + headers: originRequest.headers as any, payload, }); expect(response.statusCode).toBe(200); @@ -96,11 +98,13 @@ test('non-code-node could send request to code-node', async () => { nonCodeNode.registerHandler(TestDefinition, null); const testApi = nonCodeNode.serviceFor(TestDefinition); const fakeRequest = ({ - path: 'fakePath', + route: { + path: 'fakePath', + }, headers: { fakeHeader: 'fakeHeaderValue', }, - } as unknown) as Request; + } as unknown) as KibanaRequest; const fakeResource = 'fakeResource'; const endpoint = await nonCodeNode.locate(fakeRequest, fakeResource); const { result } = await testApi.test1(endpoint, { name: 'tester' }); @@ -108,5 +112,5 @@ test('non-code-node could send request to code-node', async () => { const context = await testApi.test2(endpoint, {}); expect(context.resource).toBe(fakeResource); - expect(context.path).toBe(fakeRequest.path); + expect(context.path).toBe(fakeRequest.route.path); }); diff --git a/x-pack/legacy/plugins/code/server/distributed/code_services.ts b/x-pack/legacy/plugins/code/server/distributed/code_services.ts index 480cab11ed84e..a2abe402a8e52 100644 --- a/x-pack/legacy/plugins/code/server/distributed/code_services.ts +++ b/x-pack/legacy/plugins/code/server/distributed/code_services.ts @@ -4,6 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ +import { KibanaRequest } from 'src/core/server'; import { ServiceDefinition, ServiceHandlerFor, ServiceMethodMap } from './service_definition'; import { DEFAULT_SERVICE_OPTION, @@ -11,7 +12,6 @@ import { ServiceRegisterOptions, } from './service_handler_adapter'; import { Endpoint } from './resource_locator'; -import { RequestFacade } from '../../'; export class CodeServices { constructor(private readonly adapter: ServiceHandlerAdapter) {} @@ -32,11 +32,11 @@ export class CodeServices { await this.adapter.stop(); } - public allocate(req: RequestFacade, resource: string): Promise { + public allocate(req: KibanaRequest, resource: string): Promise { return this.adapter.locator.allocate(req, resource); } - public locate(req: RequestFacade, resource: string): Promise { + public locate(req: KibanaRequest, resource: string): Promise { return this.adapter.locator.locate(req, resource); } diff --git a/x-pack/legacy/plugins/code/server/distributed/local_endpoint.ts b/x-pack/legacy/plugins/code/server/distributed/local_endpoint.ts index 689ecc7fc641b..a7da90544fed3 100644 --- a/x-pack/legacy/plugins/code/server/distributed/local_endpoint.ts +++ b/x-pack/legacy/plugins/code/server/distributed/local_endpoint.ts @@ -4,17 +4,17 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import { Endpoint } from './resource_locator'; import { RequestContext } from './service_definition'; export class LocalEndpoint implements Endpoint { - constructor(readonly httpRequest: Request, readonly resource: string) {} + constructor(readonly httpRequest: KibanaRequest, readonly resource: string) {} toContext(): RequestContext { return { resource: this.resource, - path: this.httpRequest.path, + path: this.httpRequest.route.path, } as RequestContext; } } diff --git a/x-pack/legacy/plugins/code/server/distributed/local_handler_adapter.ts b/x-pack/legacy/plugins/code/server/distributed/local_handler_adapter.ts index f4d9b6f1815a0..4f51ee2938366 100644 --- a/x-pack/legacy/plugins/code/server/distributed/local_handler_adapter.ts +++ b/x-pack/legacy/plugins/code/server/distributed/local_handler_adapter.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import { ServiceHandlerAdapter } from './service_handler_adapter'; import { ServiceDefinition, ServiceHandlerFor, ServiceMethodMap } from './service_definition'; import { Endpoint, ResourceLocator } from './resource_locator'; @@ -45,7 +45,7 @@ export class LocalHandlerAdapter implements ServiceHandlerAdapter { } locator: ResourceLocator = { - async locate(httpRequest: Request, resource: string): Promise { + async locate(httpRequest: KibanaRequest, resource: string): Promise { return Promise.resolve(new LocalEndpoint(httpRequest, resource)); }, @@ -53,7 +53,7 @@ export class LocalHandlerAdapter implements ServiceHandlerAdapter { return Promise.resolve(true); }, - async allocate(httpRequest: Request, resource: string): Promise { + async allocate(httpRequest: KibanaRequest, resource: string): Promise { return Promise.resolve(new LocalEndpoint(httpRequest, resource)); }, }; diff --git a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_adapter.ts b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_adapter.ts index 2778d29955e79..a7d2edf4b0308 100644 --- a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_adapter.ts +++ b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_adapter.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Request } from 'hapi'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; import util from 'util'; import Boom from 'boom'; import { @@ -31,10 +31,10 @@ export interface RequestPayload { export class CodeNodeAdapter implements ServiceHandlerAdapter { localAdapter: LocalHandlerAdapter = new LocalHandlerAdapter(); - constructor(private readonly server: CodeServerRouter, private readonly log: Logger) {} + constructor(private readonly router: CodeServerRouter, private readonly log: Logger) {} locator: ResourceLocator = { - async locate(httpRequest: Request, resource: string): Promise { + async locate(httpRequest: KibanaRequest, resource: string): Promise { return Promise.resolve(new LocalEndpoint(httpRequest, resource)); }, @@ -42,7 +42,7 @@ export class CodeNodeAdapter implements ServiceHandlerAdapter { return Promise.resolve(false); }, - async allocate(httpRequest: Request, resource: string): Promise { + async allocate(httpRequest: KibanaRequest, resource: string): Promise { return Promise.resolve(new LocalEndpoint(httpRequest, resource)); }, }; @@ -70,11 +70,16 @@ export class CodeNodeAdapter implements ServiceHandlerAdapter { const d = serviceDefinition[method]; const path = `${options.routePrefix}/${d.routePath || method}`; // register routes, receive requests from non-code node. - this.server.route({ + this.router.route({ method: 'post', path, - handler: async (req: Request) => { - const { context, params } = req.payload as RequestPayload; + npHandler: async ( + ctx: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) => { + // @ts-ignore + const { context, params } = req.body as RequestPayload; this.log.debug(`Receiving RPC call ${req.url.path} ${util.inspect(params)}`); const endpoint: Endpoint = { toContext(): RequestContext { @@ -83,7 +88,7 @@ export class CodeNodeAdapter implements ServiceHandlerAdapter { }; try { const data = await serviceMethodMap[method](endpoint, params); - return { data }; + return res.ok({ body: data }); } catch (e) { if (!Boom.isBoom(e)) { throw Boom.boomify(e, { statusCode: 500 }); diff --git a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_endpoint.ts b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_endpoint.ts index 048b7c81dfe6f..03c4917dfb732 100644 --- a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_endpoint.ts +++ b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_endpoint.ts @@ -4,12 +4,12 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import { LocalEndpoint } from '../local_endpoint'; export class CodeNodeEndpoint extends LocalEndpoint { constructor( - public readonly httpRequest: Request, + public readonly httpRequest: KibanaRequest, public readonly resource: string, public readonly codeNodeUrl: string ) { diff --git a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_resource_locator.ts b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_resource_locator.ts index b11ffeba394cf..e4b3d21b80ec7 100644 --- a/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_resource_locator.ts +++ b/x-pack/legacy/plugins/code/server/distributed/multinode/code_node_resource_locator.ts @@ -4,14 +4,14 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import { Endpoint, ResourceLocator } from '../resource_locator'; import { CodeNodeEndpoint } from './code_node_endpoint'; export class CodeNodeResourceLocator implements ResourceLocator { constructor(private readonly codeNodeUrl: string) {} - async locate(httpRequest: Request, resource: string): Promise { + async locate(httpRequest: KibanaRequest, resource: string): Promise { return Promise.resolve(new CodeNodeEndpoint(httpRequest, resource, this.codeNodeUrl)); } @@ -19,7 +19,7 @@ export class CodeNodeResourceLocator implements ResourceLocator { return Promise.resolve(false); } - allocate(req: Request, resource: string): Promise { + allocate(req: KibanaRequest, resource: string): Promise { return this.locate(req, resource); } } diff --git a/x-pack/legacy/plugins/code/server/distributed/multinode/non_code_node_adapter.ts b/x-pack/legacy/plugins/code/server/distributed/multinode/non_code_node_adapter.ts index 648dffd01663e..1221651bc51e2 100644 --- a/x-pack/legacy/plugins/code/server/distributed/multinode/non_code_node_adapter.ts +++ b/x-pack/legacy/plugins/code/server/distributed/multinode/non_code_node_adapter.ts @@ -7,7 +7,7 @@ import Wreck from '@hapi/wreck'; import util from 'util'; import Boom from 'boom'; -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import * as http from 'http'; import { DEFAULT_SERVICE_OPTION, @@ -23,8 +23,8 @@ import { Logger } from '../../log'; const pickHeaders = ['authorization']; -function filterHeaders(originRequest: Request) { - const result: { [name: string]: string } = {}; +function filterHeaders(originRequest: KibanaRequest) { + const result: { [name: string]: string | string[] | undefined } = {}; for (const header of pickHeaders) { if (originRequest.headers[header]) { result[header] = originRequest.headers[header]; @@ -82,7 +82,12 @@ export class NonCodeNodeAdapter implements ServiceHandlerAdapter { return dispatchedHandler as ServiceMethodMap; } - async requestFn(baseUrl: string, path: string, payload: RequestPayload, originRequest: Request) { + async requestFn( + baseUrl: string, + path: string, + payload: RequestPayload, + originRequest: KibanaRequest + ) { const opt = { baseUrl, payload: JSON.stringify(payload), diff --git a/x-pack/legacy/plugins/code/server/distributed/resource_locator.ts b/x-pack/legacy/plugins/code/server/distributed/resource_locator.ts index 9dc6300675cb6..287e36982cbfd 100644 --- a/x-pack/legacy/plugins/code/server/distributed/resource_locator.ts +++ b/x-pack/legacy/plugins/code/server/distributed/resource_locator.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Request } from 'hapi'; +import { KibanaRequest } from 'src/core/server'; import { RequestContext } from './service_definition'; export interface Endpoint { @@ -12,7 +12,7 @@ export interface Endpoint { } export interface ResourceLocator { - locate(req: Request, resource: string): Promise; + locate(req: KibanaRequest, resource: string): Promise; /** * Returns whether the resource resides on the local node. This should support both url and uri of the repository. @@ -25,5 +25,5 @@ export interface ResourceLocator { * Allocates the resource to nodes and returns the endpoint corresponds to the allocated node. * If the resource cannot be allocated to any node, it returns undefined. */ - allocate(req: Request, resource: string): Promise; + allocate(req: KibanaRequest, resource: string): Promise; } diff --git a/x-pack/legacy/plugins/code/server/init_es.ts b/x-pack/legacy/plugins/code/server/init_es.ts index 39ae05bf26877..0b12cddb73983 100644 --- a/x-pack/legacy/plugins/code/server/init_es.ts +++ b/x-pack/legacy/plugins/code/server/init_es.ts @@ -4,17 +4,15 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Server } from 'hapi'; +import { IClusterClient } from 'src/core/server'; import { RepositoryIndexInitializerFactory } from './indexer'; import { RepositoryConfigController } from './repository_config_controller'; import { EsClientWithInternalRequest } from './utils/esclient_with_internal_request'; import { EsClient } from './lib/esqueue'; import { Logger } from './log'; -export async function initEs(server: Server, log: Logger) { - // wait until elasticsearch is ready - await server.plugins.elasticsearch.waitUntilReady(); - const esClient: EsClient = new EsClientWithInternalRequest(server); +export async function initEs(cluster: IClusterClient, log: Logger) { + const esClient: EsClient = new EsClientWithInternalRequest(cluster); const repoConfigController = new RepositoryConfigController(esClient); const repoIndexInitializerFactory = new RepositoryIndexInitializerFactory(esClient, log); return { diff --git a/x-pack/legacy/plugins/code/server/init_workers.ts b/x-pack/legacy/plugins/code/server/init_workers.ts index c4385cd711c5c..f20adf375f9a3 100644 --- a/x-pack/legacy/plugins/code/server/init_workers.ts +++ b/x-pack/legacy/plugins/code/server/init_workers.ts @@ -5,7 +5,6 @@ */ import checkDiskSpace from 'check-disk-space'; -import { Server } from 'hapi'; import { IndexerType } from '../model'; import { DiskWatermarkService } from './disk_watermark'; @@ -22,7 +21,6 @@ import { CloneScheduler, IndexScheduler, UpdateScheduler } from './scheduler'; import { Logger } from './log'; export function initWorkers( - server: Server, log: Logger, esClient: EsClient, queue: Esqueue, diff --git a/x-pack/legacy/plugins/code/server/plugin.ts b/x-pack/legacy/plugins/code/server/plugin.ts index 390b0ddc1256c..737d0b5c6686b 100644 --- a/x-pack/legacy/plugins/code/server/plugin.ts +++ b/x-pack/legacy/plugins/code/server/plugin.ts @@ -6,7 +6,7 @@ import crypto from 'crypto'; import * as _ from 'lodash'; -import { CoreSetup } from 'src/core/server'; +import { CoreSetup, IRouter } from 'src/core/server'; import { RepositoryIndexInitializerFactory, tryMigrateIndices } from './indexer'; import { Esqueue } from './lib/esqueue'; @@ -55,6 +55,18 @@ import { NodeRepositoriesService } from './distributed/cluster/node_repositories import { initCodeUsageCollector } from './usage_collector'; import { PluginSetupContract } from '../../../../plugins/code/server/index'; +declare module 'src/core/server' { + interface RequestHandlerContext { + code: { + codeServices: CodeServices | null; + // @deprecated + legacy: { + securityPlugin: any; + }; + }; + } +} + export class CodePlugin { private isCodeNode = false; @@ -67,15 +79,30 @@ export class CodePlugin { private codeServices: CodeServices | null = null; private nodeService: NodeRepositoriesService | null = null; + private rndString: string | null = null; + private router: IRouter | null = null; + constructor(private readonly initContext: PluginSetupContract) { this.log = {} as Logger; this.serverOptions = {} as ServerOptions; } - public setup(core: CoreSetup) { + public async setup(core: CoreSetup, npHttp: any) { const { server } = core.http as any; this.serverOptions = new ServerOptions(this.initContext.legacy.config, server.config()); this.log = new Logger(this.initContext.legacy.logger, this.serverOptions.verbose); + + this.router = npHttp.createRouter(); + this.rndString = crypto.randomBytes(20).toString('hex'); + + npHttp.registerRouteHandlerContext('code', () => { + return { + codeServices: this.codeServices, + legacy: { + securityPlugin: server.plugins.security, + }, + }; + }); } // TODO: CodeStart will not have the register route api. @@ -83,16 +110,17 @@ export class CodePlugin { public async start(core: CoreSetup) { // called after all plugins are set up const { server } = core.http as any; - const codeServerRouter = new CodeServerRouter(server); + const codeServerRouter = new CodeServerRouter(this.router!); const codeNodeUrl = this.serverOptions.codeNodeUrl; - const rndString = crypto.randomBytes(20).toString('hex'); - checkRoute(server, rndString); + + checkRoute(this.router!, this.rndString!); + if (this.serverOptions.clusterEnabled) { this.initDevMode(server); this.codeServices = await this.initClusterNode(server, codeServerRouter); } else if (codeNodeUrl) { const checkResult = await this.retryUntilAvailable( - async () => await checkCodeNode(codeNodeUrl, this.log, rndString), + async () => await checkCodeNode(codeNodeUrl, this.log, this.rndString!), 5000 ); if (checkResult.me) { @@ -115,7 +143,7 @@ export class CodePlugin { private async initClusterNode(server: any, codeServerRouter: CodeServerRouter) { this.log.info('Initializing Code plugin as cluster-node'); const { esClient, repoConfigController, repoIndexInitializerFactory } = await initEs( - server, + this.initContext.legacy.elasticsearch.adminClient$, this.log ); const clusterNodeAdapter = new ClusterNodeAdapter( @@ -139,7 +167,6 @@ export class CodePlugin { ); this.lspService = lspService; const { indexScheduler, updateScheduler, cloneWorker } = initWorkers( - server, this.log, esClient, this.queue!, @@ -159,18 +186,18 @@ export class CodePlugin { ); await this.nodeService.start(); + this.initRoutes(server, codeServices, repoIndexInitializerFactory, repoConfigController); + // Execute index version checking and try to migrate index data if necessary. await tryMigrateIndices(esClient, this.log); - this.initRoutes(server, codeServices, repoIndexInitializerFactory, repoConfigController); - return codeServices; } private async initCodeNode(server: any, codeServices: CodeServices) { this.isCodeNode = true; const { esClient, repoConfigController, repoIndexInitializerFactory } = await initEs( - server, + this.initContext.legacy.elasticsearch.adminClient$, this.log ); @@ -186,7 +213,6 @@ export class CodePlugin { ); this.lspService = lspService; const { indexScheduler, updateScheduler } = initWorkers( - server, this.log, esClient, this.queue!, @@ -198,14 +224,14 @@ export class CodePlugin { this.indexScheduler = indexScheduler; this.updateScheduler = updateScheduler; - // Execute index version checking and try to migrate index data if necessary. - await tryMigrateIndices(esClient, this.log); - this.initRoutes(server, codeServices, repoIndexInitializerFactory, repoConfigController); // TODO: extend the usage collection to cluster mode. initCodeUsageCollector(server, esClient, lspService); + // Execute index version checking and try to migrate index data if necessary. + await tryMigrateIndices(esClient, this.log); + return codeServices; } @@ -235,7 +261,10 @@ export class CodePlugin { codeServices.registerHandler(LspServiceDefinition, null, LspServiceDefinitionOption); codeServices.registerHandler(WorkspaceDefinition, null); codeServices.registerHandler(SetupDefinition, null); - const { repoConfigController, repoIndexInitializerFactory } = await initEs(server, this.log); + const { repoConfigController, repoIndexInitializerFactory } = await initEs( + this.initContext.legacy.elasticsearch.adminClient$, + this.log + ); this.initRoutes(server, codeServices, repoIndexInitializerFactory, repoConfigController); return codeServices; } @@ -246,7 +275,7 @@ export class CodePlugin { repoIndexInitializerFactory: RepositoryIndexInitializerFactory, repoConfigController: RepositoryConfigController ) { - const codeServerRouter = new CodeServerRouter(server); + const codeServerRouter = new CodeServerRouter(this.router!); repositoryRoute( codeServerRouter, codeServices, @@ -264,7 +293,7 @@ export class CodePlugin { fileRoute(codeServerRouter, codeServices); workspaceRoute(codeServerRouter, this.serverOptions, codeServices); symbolByQnameRoute(codeServerRouter, this.log); - installRoute(codeServerRouter, codeServices, this.serverOptions); + installRoute(server, codeServerRouter, codeServices, this.serverOptions); lspRoute(codeServerRouter, codeServices, this.serverOptions, this.log); setupRoute(codeServerRouter, codeServices); statusRoute(codeServerRouter, codeServices); diff --git a/x-pack/legacy/plugins/code/server/routes/check.ts b/x-pack/legacy/plugins/code/server/routes/check.ts index ad89d6281b4ff..7e585ffc34922 100644 --- a/x-pack/legacy/plugins/code/server/routes/check.ts +++ b/x-pack/legacy/plugins/code/server/routes/check.ts @@ -4,10 +4,16 @@ * you may not use this file except in compliance with the Elastic License. */ +import { schema } from '@kbn/config-schema'; import fetch from 'node-fetch'; +import { + IRouter, + KibanaRequest, + KibanaResponseFactory, + RequestHandlerContext, +} from 'src/core/server'; import { Logger } from '../log'; -import { ServerFacade } from '../..'; export async function checkCodeNode(url: string, log: Logger, rndStr: string) { try { @@ -24,13 +30,22 @@ export async function checkCodeNode(url: string, log: Logger, rndStr: string) { return null; } -export function checkRoute(server: ServerFacade, rndStr: string) { - server.route({ - method: 'GET', - path: '/api/code/codeNode', - options: { auth: false }, - handler(req: any) { - return { me: req.query.rndStr === rndStr }; +export function checkRoute(router: IRouter, rndStr: string) { + router.get( + { + path: '/api/code/codeNode', + validate: { + query: schema.object({}, { allowUnknowns: true }), + }, + options: { + authRequired: false, + }, }, - }); + (context: RequestHandlerContext, req: KibanaRequest, res: KibanaResponseFactory) => { + return res.ok({ + // @ts-ignore + body: { me: req.query.rndStr === rndStr }, + }); + } + ); } diff --git a/x-pack/legacy/plugins/code/server/routes/file.ts b/x-pack/legacy/plugins/code/server/routes/file.ts index 10a9050fa0a90..47cc16f7a6574 100644 --- a/x-pack/legacy/plugins/code/server/routes/file.ts +++ b/x-pack/legacy/plugins/code/server/routes/file.ts @@ -4,9 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import Boom from 'boom'; - -import { RequestFacade, RequestQueryFacade, ResponseToolkitFacade } from '../../'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; import { DEFAULT_TREE_CHILDREN_LIMIT } from '../git_operations'; import { CodeServerRouter } from '../security'; import { RepositoryObjectClient } from '../search'; @@ -20,14 +18,15 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) const gitService = codeServices.serviceFor(GitServiceDefinition); async function getRepoUriFromMeta( - req: RequestFacade, + context: RequestHandlerContext, + req: KibanaRequest, repoUri: string ): Promise { - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); try { const repo = await repoObjectClient.getRepository(repoUri); - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repo.uri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repo.uri); return repo.uri; } catch (e) { return undefined; @@ -37,23 +36,27 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ path: '/api/code/repo/{uri*3}/tree/{ref}/{path*}', method: 'GET', - async handler(req: RequestFacade) { - const { uri, path, ref } = req.params; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri, path, ref } = req.params as any; const revision = decodeRevisionString(ref); - const queries = req.query as RequestQueryFacade; + const queries = req.query as any; const limit = queries.limit ? parseInt(queries.limit as string, 10) : DEFAULT_TREE_CHILDREN_LIMIT; const skip = queries.skip ? parseInt(queries.skip as string, 10) : 0; const withParents = 'parents' in queries; const flatten = 'flatten' in queries; - const repoUri = await getRepoUriFromMeta(req, uri); + const repoUri = await getRepoUriFromMeta(context, req, uri); if (!repoUri) { - return Boom.notFound(`repo ${uri} not found`); + return res.notFound({ body: `repo ${uri} not found` }); } const endpoint = await codeServices.locate(req, uri); try { - return await gitService.fileTree(endpoint, { + const filetree = await gitService.fileTree(endpoint, { uri: repoUri, path, revision, @@ -62,11 +65,15 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) withParents, flatten, }); + return res.ok({ body: filetree }); } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? e.statusCode : 500, + }); } else { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } }, @@ -75,46 +82,59 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ path: '/api/code/repo/{uri*3}/blob/{ref}/{path*}', method: 'GET', - async handler(req: RequestFacade, h: ResponseToolkitFacade) { - const { uri, path, ref } = req.params; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri, path, ref } = req.params as any; const revision = decodeRevisionString(ref); - const repoUri = await getRepoUriFromMeta(req, uri); + const repoUri = await getRepoUriFromMeta(context, req, uri); if (!repoUri) { - return Boom.notFound(`repo ${uri} not found`); + return res.notFound({ body: `repo ${uri} not found` }); } const endpoint = await codeServices.locate(req, uri); try { const blob = await gitService.blob(endpoint, { uri, path, - line: (req.query as RequestQueryFacade).line as string, + line: (req.query as any).line as string, revision: decodeURIComponent(revision), }); if (blob.imageType) { - const response = h.response(blob.content); - response.type(blob.imageType); - return response; + return res.ok({ + body: blob.content, + headers: { 'Content-Type': blob.imageType }, + }); } else if (blob.isBinary) { - return h - .response('') - .type('application/octet-stream') - .code(204); + return res.noContent({ + headers: { 'Content-Type': 'application/octet-stream' }, + }); } else { if (blob.content) { - return h - .response(blob.content) - .type('text/plain') - .header('lang', blob.lang!); + return res.ok({ + body: blob.content, + headers: { + 'Content-Type': 'text/plain', + lang: blob.lang!, + }, + }); } else { - return h.response('').type(`text/big`); + return res.ok({ + body: blob.content, + headers: { 'Content-Type': 'text/big' }, + }); } } } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? e.statusCode : 500, + }); } else { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } }, @@ -123,27 +143,40 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ path: '/app/code/repo/{uri*3}/raw/{ref}/{path*}', method: 'GET', - async handler(req: RequestFacade, h: ResponseToolkitFacade) { - const { uri, path, ref } = req.params; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri, path, ref } = req.params as any; const revision = decodeRevisionString(ref); - const repoUri = await getRepoUriFromMeta(req, uri); + const repoUri = await getRepoUriFromMeta(context, req, uri); if (!repoUri) { - return Boom.notFound(`repo ${uri} not found`); + return res.notFound({ body: `repo ${uri} not found` }); } const endpoint = await codeServices.locate(req, uri); try { const blob = await gitService.raw(endpoint, { uri: repoUri, path, revision }); if (blob.isBinary) { - return h.response(blob.content).encoding('binary'); + return res.ok({ + body: blob.content, + headers: { 'Content-Transfer-Encoding': 'binary' }, + }); } else { - return h.response(blob.content).type('text/plain'); + return res.ok({ + body: blob.content, + headers: { 'Content-Type': 'text/plain' }, + }); } } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? e.statusCode : 500, + }); } else { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } }, @@ -152,33 +185,47 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ path: '/api/code/repo/{uri*3}/history/{ref}', method: 'GET', - handler: historyHandler, + npHandler: historyHandler, }); router.route({ path: '/api/code/repo/{uri*3}/history/{ref}/{path*}', method: 'GET', - handler: historyHandler, + npHandler: historyHandler, }); - async function historyHandler(req: RequestFacade) { - const { uri, ref, path } = req.params; + async function historyHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri, ref, path } = req.params as any; const revision = decodeRevisionString(ref); - const queries = req.query as RequestQueryFacade; + const queries = req.query as any; const count = queries.count ? parseInt(queries.count as string, 10) : 10; const after = queries.after !== undefined; try { - const repoUri = await getRepoUriFromMeta(req, uri); + const repoUri = await getRepoUriFromMeta(context, req, uri); if (!repoUri) { - return Boom.notFound(`repo ${uri} not found`); + return res.notFound({ body: `repo ${uri} not found` }); } const endpoint = await codeServices.locate(req, uri); - return await gitService.history(endpoint, { uri: repoUri, path, revision, count, after }); + const history = await gitService.history(endpoint, { + uri: repoUri, + path, + revision, + count, + after, + }); + return res.ok({ body: history }); } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? e.statusCode : 500, + }); } else { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } } @@ -186,21 +233,29 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ path: '/api/code/repo/{uri*3}/references', method: 'GET', - async handler(req: RequestFacade) { - const uri = req.params.uri; - const repoUri = await getRepoUriFromMeta(req, uri); + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri } = req.params as any; + const repoUri = await getRepoUriFromMeta(context, req, uri); if (!repoUri) { - return Boom.notFound(`repo ${uri} not found`); + return res.badRequest({ body: `repo ${uri} not found` }); } const endpoint = await codeServices.locate(req, uri); try { - return await gitService.branchesAndTags(endpoint, { uri: repoUri }); + const branchesAndTags = await gitService.branchesAndTags(endpoint, { uri: repoUri }); + return res.ok({ body: branchesAndTags }); } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? e.statusCode : 500, + }); } else { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } }, @@ -209,23 +264,31 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ path: '/api/code/repo/{uri*3}/diff/{revision}', method: 'GET', - async handler(req: RequestFacade) { - const { uri, revision } = req.params; - const repoUri = await getRepoUriFromMeta(req, uri); + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri, revision } = req.params as any; + const repoUri = await getRepoUriFromMeta(context, req, uri); if (!repoUri) { - return Boom.notFound(`repo ${uri} not found`); + return res.notFound({ body: `repo ${uri} not found` }); } const endpoint = await codeServices.locate(req, uri); try { - return await gitService.commitDiff(endpoint, { + const diff = await gitService.commitDiff(endpoint, { uri: repoUri, revision: decodeRevisionString(revision), }); + return res.ok({ body: diff }); } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? e.statusCode : 500, + }); } else { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } }, @@ -234,25 +297,33 @@ export function fileRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ path: '/api/code/repo/{uri*3}/blame/{revision}/{path*}', method: 'GET', - async handler(req: RequestFacade) { - const { uri, path, revision } = req.params; - const repoUri = await getRepoUriFromMeta(req, uri); + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri, path, revision } = req.params as any; + const repoUri = await getRepoUriFromMeta(context, req, uri); if (!repoUri) { - return Boom.notFound(`repo ${uri} not found`); + return res.notFound({ body: `repo ${uri} not found` }); } const endpoint = await codeServices.locate(req, uri); try { - return await gitService.blame(endpoint, { + const blames = await gitService.blame(endpoint, { uri: repoUri, revision: decodeRevisionString(decodeURIComponent(revision)), path, }); + return res.ok({ body: blames }); } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? e.statusCode : 500, + }); } else { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } }, diff --git a/x-pack/legacy/plugins/code/server/routes/index.ts b/x-pack/legacy/plugins/code/server/routes/index.ts index 27f40de552a3e..82973ac1d2791 100644 --- a/x-pack/legacy/plugins/code/server/routes/index.ts +++ b/x-pack/legacy/plugins/code/server/routes/index.ts @@ -8,7 +8,6 @@ export * from './check'; export * from './file'; export * from './install'; export * from './lsp'; -export * from './redirect'; export * from './repository'; export * from './search'; export * from './setup'; diff --git a/x-pack/legacy/plugins/code/server/routes/install.ts b/x-pack/legacy/plugins/code/server/routes/install.ts index 338f305cba858..28ccc4012ceec 100644 --- a/x-pack/legacy/plugins/code/server/routes/install.ts +++ b/x-pack/legacy/plugins/code/server/routes/install.ts @@ -4,9 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -import * as Boom from 'boom'; - -import { RequestFacade } from '../..'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; +import { ServerFacade } from '../..'; import { enabledLanguageServers, LanguageServerDefinition } from '../lsp/language_servers'; import { CodeServerRouter } from '../security'; import { CodeServices } from '../distributed/code_services'; @@ -15,12 +14,13 @@ import { Endpoint } from '../distributed/resource_locator'; import { ServerOptions } from '../server_options'; export function installRoute( + server: ServerFacade, router: CodeServerRouter, codeServices: CodeServices, options: ServerOptions ) { const lspService = codeServices.serviceFor(LspServiceDefinition); - const kibanaVersion = router.server.config().get('pkg.version') as string; + const kibanaVersion = server.config().get('pkg.version') as string; const status = async (endpoint: Endpoint, def: LanguageServerDefinition) => ({ name: def.name, status: await lspService.languageServerStatus(endpoint, { langName: def.name }), @@ -35,23 +35,35 @@ export function installRoute( router.route({ path: '/api/code/install', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { const endpoint = await codeServices.locate(req, ''); - return await Promise.all(enabledLanguageServers(options).map(def => status(endpoint, def))); + const installRes = await Promise.all( + enabledLanguageServers(options).map(def => status(endpoint, def)) + ); + return res.ok({ body: installRes }); }, method: 'GET', }); router.route({ path: '/api/code/install/{name}', - async handler(req: RequestFacade) { - const name = req.params.name; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { name } = req.params as any; const def = enabledLanguageServers(options).find(d => d.name === name); const endpoint = await codeServices.locate(req, ''); if (def) { - return await status(endpoint, def); + const installRes = await status(endpoint, def); + return res.ok({ body: installRes }); } else { - return Boom.notFound(`language server ${name} not found.`); + return res.notFound({ body: `language server ${name} not found.` }); } }, method: 'GET', diff --git a/x-pack/legacy/plugins/code/server/routes/lsp.ts b/x-pack/legacy/plugins/code/server/routes/lsp.ts index 10acb1e3863e8..6b8af10f9f11e 100644 --- a/x-pack/legacy/plugins/code/server/routes/lsp.ts +++ b/x-pack/legacy/plugins/code/server/routes/lsp.ts @@ -4,10 +4,10 @@ * you may not use this file except in compliance with the Elastic License. */ -import Boom from 'boom'; import { ResponseError } from 'vscode-jsonrpc'; import { ResponseMessage } from 'vscode-jsonrpc/lib/messages'; import { SymbolLocator } from '@elastic/lsp-extension'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; import { LanguageServerStartFailed, @@ -22,7 +22,6 @@ import { ServerOptions } from '../server_options'; import { EsClientWithRequest } from '../utils/esclient_with_request'; import { promiseTimeout } from '../utils/timeout'; -import { RequestFacade, ResponseToolkitFacade } from '../..'; import { CodeServices } from '../distributed/code_services'; import { GitServiceDefinition, LspServiceDefinition } from '../distributed/apis'; import { findTitleFromHover, groupFiles } from '../utils/lsp_utils'; @@ -32,7 +31,7 @@ import { SymbolSearchResult } from '../../model'; const LANG_SERVER_ERROR = 'language server error'; export function lspRoute( - server: CodeServerRouter, + router: CodeServerRouter, codeServices: CodeServices, serverOptions: ServerOptions, log: Logger @@ -40,23 +39,29 @@ export function lspRoute( const lspService = codeServices.serviceFor(LspServiceDefinition); const gitService = codeServices.serviceFor(GitServiceDefinition); - server.route({ + router.route({ path: '/api/code/lsp/textDocument/{method}', - async handler(req: RequestFacade, h: ResponseToolkitFacade) { - if (typeof req.payload === 'object' && req.payload != null) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + if (typeof req.body === 'object' && req.body != null) { + // @ts-ignore const method = req.params.method; if (method) { try { - const params = (req.payload as unknown) as any; + const params = (req.body as unknown) as any; const uri = params.textDocument.uri; const { repoUri } = parseLspUrl(uri)!; - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); const endpoint = await codeServices.locate(req, repoUri); const requestPromise = lspService.sendRequest(endpoint, { method: `textDocument/${method}`, - params: req.payload, + params: req.body, }); - return await promiseTimeout(serverOptions.lsp.requestTimeoutMs, requestPromise); + const result = await promiseTimeout(serverOptions.lsp.requestTimeoutMs, requestPromise); + return res.ok({ body: result }); } catch (error) { if (error instanceof ResponseError) { // hide some errors; @@ -67,39 +72,48 @@ export function lspRoute( ) { log.debug(error); } - return h - .response({ error: { code: error.code, msg: LANG_SERVER_ERROR } }) - .type('json') - .code(500); // different code for LS errors and other internal errors. + return res.custom({ + statusCode: 500, + body: { error: { code: 500, msg: LANG_SERVER_ERROR } }, + }); } else if (error.isBoom) { - return error; + return res.customError({ + body: error.error, + statusCode: error.statusCode ? error.statusCode : 500, + }); } else { log.error(error); - return h - .response({ error: { code: error.code || 500, msg: LANG_SERVER_ERROR } }) - .type('json') - .code(500); + return res.custom({ + statusCode: 500, + body: { error: { code: 500, msg: LANG_SERVER_ERROR } }, + }); } } } else { - return h.response('missing `method` in request').code(400); + return res.badRequest({ body: 'missing `method` in request' }); } } else { - return h.response('json body required').code(400); // bad request + return res.badRequest({ body: 'json body required' }); } }, method: 'POST', }); - server.route({ + router.route({ path: '/api/code/lsp/findDefinitions', method: 'POST', - async handler(req: RequestFacade, h: ResponseToolkitFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { // @ts-ignore - const { textDocument, position } = req.payload; + const { textDocument, position } = req.body as any; + // @ts-ignore + const { qname } = req.params as any; const { uri } = textDocument; const { repoUri } = parseLspUrl(uri); - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); const endpoint = await codeServices.locate(req, repoUri); const response: ResponseMessage = await promiseTimeout( serverOptions.lsp.requestTimeoutMs, @@ -116,16 +130,16 @@ export function lspRoute( }, }); const title: string = await findTitleFromHover(hover, uri, position); - const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(req), log); + const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(context, req), log); const locators = response.result as SymbolLocator[]; const locations = []; - const repoScope = await getReferenceHelper(req.getSavedObjectsClient()).findReferences(); + const repoScope = await getReferenceHelper(context.core.savedObjects.client).findReferences(); for (const locator of locators) { if (locator.location) { locations.push(locator.location); } else if (locator.qname && repoScope.length > 0) { - const searchResults = await symbolSearchClient.findByQname(req.params.qname, repoScope); + const searchResults = await symbolSearchClient.findByQname(qname, repoScope); for (const symbol of searchResults.symbols) { locations.push(symbol.symbolInformation.location); } @@ -135,20 +149,23 @@ export function lspRoute( const ep = await codeServices.locate(req, loc.uri); return await gitService.blob(ep, loc); }); - return { title, files, uri, position }; + return res.ok({ body: { title, files, uri, position } }); }, }); - server.route({ + router.route({ path: '/api/code/lsp/findReferences', method: 'POST', - async handler(req: RequestFacade, h: ResponseToolkitFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { try { - // @ts-ignore - const { textDocument, position } = req.payload; + const { textDocument, position } = req.body as any; const { uri } = textDocument; const { repoUri } = parseLspUrl(uri); - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); const endpoint = await codeServices.locate(req, repoUri); const response: ResponseMessage = await promiseTimeout( serverOptions.lsp.requestTimeoutMs, @@ -169,21 +186,24 @@ export function lspRoute( const ep = await codeServices.locate(req, loc.uri); return await gitService.blob(ep, loc); }); - return { title, files, uri, position }; + return res.ok({ body: { title, files, uri, position } }); } catch (error) { log.error(error); if (error instanceof ResponseError) { - return h - .response({ error: { code: error.code, msg: LANG_SERVER_ERROR } }) - .type('json') - .code(500); // different code for LS errors and other internal errors. + return res.custom({ + statusCode: 500, + body: { error: { code: error.code, msg: LANG_SERVER_ERROR } }, + }); } else if (error.isBoom) { - return error; + return res.customError({ + body: error.error, + statusCode: error.statusCode ? error.statusCode : 500, + }); } else { - return h - .response({ error: { code: 500, msg: LANG_SERVER_ERROR } }) - .type('json') - .code(500); + return res.custom({ + statusCode: 500, + body: { error: { code: 500, msg: LANG_SERVER_ERROR } }, + }); } } }, @@ -194,21 +214,26 @@ export function symbolByQnameRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/lsp/symbol/{qname}', method: 'GET', - async handler(req: RequestFacade) { - try { - const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(req), log); - const repoScope = await getReferenceHelper(req.getSavedObjectsClient()).findReferences(); - if (repoScope.length === 0) { - return { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + // @ts-ignore + const { qname } = req.params as any; + const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(context, req), log); + const repoScope = await getReferenceHelper(context.core.savedObjects.client).findReferences(); + if (repoScope.length === 0) { + return res.ok({ + body: { symbols: [], total: 0, took: 0, - } as SymbolSearchResult; - } - return await symbolSearchClient.findByQname(req.params.qname, repoScope); - } catch (error) { - return Boom.internal(`Search Exception`); + } as SymbolSearchResult, + }); } + const symbol = await symbolSearchClient.findByQname(qname, repoScope); + return res.ok({ body: symbol }); }, }); } diff --git a/x-pack/legacy/plugins/code/server/routes/redirect.ts b/x-pack/legacy/plugins/code/server/routes/redirect.ts deleted file mode 100644 index 2882a37334836..0000000000000 --- a/x-pack/legacy/plugins/code/server/routes/redirect.ts +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { RequestFacade, ServerFacade } from '../../'; -import { Logger } from '../log'; - -export function redirectRoute(server: ServerFacade, redirectUrl: string, log: Logger) { - const proxyHandler = { - proxy: { - passThrough: true, - async mapUri(request: RequestFacade) { - let uri; - uri = `${redirectUrl}${request.path}`; - if (request.url.search) { - uri += request.url.search; - } - log.info(`redirect ${request.path}${request.url.search || ''} to ${uri}`); - return { - uri, - }; - }, - }, - }; - - server.route({ - path: '/api/code/{p*}', - method: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'], - handler: proxyHandler, - }); - - server.route({ - path: '/api/code/lsp/{p*}', - method: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'], - handler: proxyHandler, - }); -} diff --git a/x-pack/legacy/plugins/code/server/routes/repository.ts b/x-pack/legacy/plugins/code/server/routes/repository.ts index 862586b406de4..d9e8edb4d2f50 100644 --- a/x-pack/legacy/plugins/code/server/routes/repository.ts +++ b/x-pack/legacy/plugins/code/server/routes/repository.ts @@ -4,10 +4,9 @@ * you may not use this file except in compliance with the Elastic License. */ -import Boom from 'boom'; - import { i18n } from '@kbn/i18n'; -import { RequestFacade, ResponseToolkitFacade } from '../..'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; + import { validateGitUrl } from '../../common/git_url_utils'; import { RepositoryUtils } from '../../common/repository_utils'; import { RepositoryConfig, RepositoryUri, WorkerReservedProgress } from '../../model'; @@ -36,8 +35,12 @@ export function repositoryRoute( path: '/api/code/repo', requireAdmin: true, method: 'POST', - async handler(req: RequestFacade, h: ResponseToolkitFacade) { - const repoUrl: string = (req.payload as any).url; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const repoUrl: string = (req.body as any).url; // Reject the request if the url is an invalid git url. try { @@ -49,11 +52,11 @@ export function repositoryRoute( } catch (error) { log.error(`Validate git url ${repoUrl} error.`); log.error(error); - return Boom.badRequest(error); + return res.badRequest({ body: error }); } const repo = RepositoryUtils.buildRepository(repoUrl); - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); try { // Check if the repository already exists @@ -61,28 +64,32 @@ export function repositoryRoute( // distinguish between that the repository exists in the current space and that the repository exists in // another space, and return the default message if error happens during reference checking. try { - const hasRef = await getReferenceHelper(req.getSavedObjectsClient()).hasReference( + const hasRef = await getReferenceHelper(context.core.savedObjects.client).hasReference( repo.uri ); if (!hasRef) { - return Boom.conflict( - i18n.translate('xpack.code.repositoryManagement.repoOtherSpaceImportedMessage', { - defaultMessage: 'The repository has already been imported in another space!', - }) - ); + return res.custom({ + statusCode: 409, // conflict + body: i18n.translate( + 'xpack.code.repositoryManagement.repoOtherSpaceImportedMessage', + { + defaultMessage: 'The repository has already been imported in another space!', + } + ), + }); } } catch (e) { log.error(`Failed to check reference for ${repo.uri} in current space`); } const msg = `Repository ${repoUrl} already exists. Skip clone.`; log.info(msg); - return h.response(msg).code(304); // Not Modified + return res.custom({ statusCode: 304, body: msg }); } catch (error) { log.info(`Repository ${repoUrl} does not exist. Go ahead with clone.`); try { // create the reference first, and make the creation idempotent, to avoid potential dangling repositories // which have no references from any space, in case the writes to ES may fail independently - await getReferenceHelper(req.getSavedObjectsClient()).createReference(repo.uri); + await getReferenceHelper(context.core.savedObjects.client).createReference(repo.uri); // Create the index for the repository const initializer = (await repoIndexInitializerFactory.create( @@ -105,12 +112,12 @@ export function repositoryRoute( if (endpoint) { await repositoryService.clone(endpoint, payload); } - return repo; + return res.ok({ body: repo }); } catch (error2) { const msg = `Issue repository clone request for ${repoUrl} error`; log.error(msg); log.error(error2); - return Boom.badRequest(msg); + return res.badRequest({ body: msg }); } } }, @@ -121,12 +128,16 @@ export function repositoryRoute( path: '/api/code/repo/{uri*3}', requireAdmin: true, method: 'DELETE', - async handler(req: RequestFacade, h: ResponseToolkitFacade) { - const repoUri: string = req.params.uri as string; - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri: repoUri } = req.params as any; + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); try { // make sure the repo belongs to the current space - getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); + getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); // Check if the repository already exists. If not, an error will be thrown. await repoObjectClient.getRepository(repoUri); @@ -139,7 +150,7 @@ export function repositoryRoute( if (status.progress !== WorkerReservedProgress.ERROR) { const msg = `Repository ${repoUri} is already in delete.`; log.info(msg); - return h.response(msg).code(304); // Not Modified + return res.custom({ statusCode: 304, body: msg }); } } catch (error) { // Do nothing here since this error is expected. @@ -151,15 +162,14 @@ export function repositoryRoute( }; const endpoint = await codeServices.locate(req, repoUri); await repositoryService.delete(endpoint, payload); - // delete the reference last to avoid dangling repositories - await getReferenceHelper(req.getSavedObjectsClient()).deleteReference(repoUri); - return {}; + await getReferenceHelper(context.core.savedObjects.client).deleteReference(repoUri); + return res.ok(); } catch (error) { const msg = `Issue repository delete request for ${repoUri} error`; log.error(msg); log.error(error); - return Boom.notFound(msg); + return res.notFound({ body: msg }); } }, }); @@ -168,17 +178,22 @@ export function repositoryRoute( router.route({ path: '/api/code/repo/{uri*3}', method: 'GET', - async handler(req: RequestFacade) { - const repoUri = req.params.uri as string; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri: repoUri } = req.params as any; try { - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); - return await repoObjectClient.getRepository(repoUri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); + const repo = await repoObjectClient.getRepository(repoUri); + return res.ok({ body: repo }); } catch (error) { const msg = `Get repository ${repoUri} error`; log.error(msg); log.error(error); - return Boom.notFound(msg); + return res.notFound({ body: msg }); } }, }); @@ -186,15 +201,20 @@ export function repositoryRoute( router.route({ path: '/api/code/repo/status/{uri*3}', method: 'GET', - async handler(req: RequestFacade) { - const repoUri = req.params.uri as string; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri: repoUri } = req.params as any; try { - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); - + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); let gitStatus = null; let indexStatus = null; let deleteStatus = null; - const hasRef = await getReferenceHelper(req.getSavedObjectsClient()).hasReference(repoUri); + const hasRef = await getReferenceHelper(context.core.savedObjects.client).hasReference( + repoUri + ); if (hasRef) { try { @@ -215,16 +235,17 @@ export function repositoryRoute( log.debug(`Get repository delete status ${repoUri} error: ${error}`); } } - return { + const status = { gitStatus, indexStatus, deleteStatus, }; + return res.ok({ body: status }); } catch (error) { const msg = `Get repository status ${repoUri} error`; log.error(msg); log.error(error); - return Boom.notFound(msg); + return res.notFound({ body: msg }); } }, }); @@ -233,16 +254,21 @@ export function repositoryRoute( router.route({ path: '/api/code/repos', method: 'GET', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { try { - const uris = await getReferenceHelper(req.getSavedObjectsClient()).findReferences(); - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); - return await repoObjectClient.getRepositories(uris); + const uris = await getReferenceHelper(context.core.savedObjects.client).findReferences(); + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); + const repo = await repoObjectClient.getRepositories(uris); + return res.ok({ body: repo }); } catch (error) { const msg = `Get all repositories error`; log.error(msg); log.error(error); - return Boom.notFound(msg); + return res.notFound({ body: msg }); } }, }); @@ -254,12 +280,16 @@ export function repositoryRoute( path: '/api/code/repo/index/{uri*3}', method: 'POST', requireAdmin: true, - async handler(req: RequestFacade) { - const repoUri = req.params.uri as string; - const reindex: boolean = (req.payload as any).reindex; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri: repoUri } = req.params as any; + const reindex: boolean = (req.body as any).reindex; try { - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); const cloneStatus = await repoObjectClient.getRepositoryGitStatus(repoUri); const payload = { @@ -269,12 +299,12 @@ export function repositoryRoute( }; const endpoint = await codeServices.locate(req, repoUri); await repositoryService.index(endpoint, payload); - return {}; + return res.ok(); } catch (error) { const msg = `Index repository ${repoUri} error`; log.error(msg); log.error(error); - return Boom.notFound(msg); + return res.notFound({ body: msg }); } }, }); @@ -284,29 +314,33 @@ export function repositoryRoute( path: '/api/code/repo/config/{uri*3}', method: 'PUT', requireAdmin: true, - async handler(req: RequestFacade) { - const config: RepositoryConfig = req.payload as RepositoryConfig; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const config: RepositoryConfig = req.body as RepositoryConfig; const repoUri: RepositoryUri = config.uri; - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); try { // Check if the repository exists - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); await repoObjectClient.getRepository(repoUri); } catch (error) { - return Boom.badRequest(`Repository not existed for ${repoUri}`); + return res.badRequest({ body: `Repository not existed for ${repoUri}` }); } try { // Persist to elasticsearch await repoObjectClient.setRepositoryConfig(repoUri, config); repoConfigController.resetConfigCache(repoUri); - return {}; + return res.ok(); } catch (error) { const msg = `Update repository config for ${repoUri} error`; log.error(msg); log.error(error); - return Boom.badRequest(msg); + return res.notFound({ body: msg }); } }, }); @@ -315,14 +349,19 @@ export function repositoryRoute( router.route({ path: '/api/code/repo/config/{uri*3}', method: 'GET', - async handler(req: RequestFacade) { - const repoUri = req.params.uri as string; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri: repoUri } = req.params as any; try { - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); - return await repoObjectClient.getRepositoryConfig(repoUri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); + const config = await repoObjectClient.getRepositoryConfig(repoUri); + return res.ok({ body: config }); } catch (error) { - return Boom.notFound(`Repository config ${repoUri} not exist`); + return res.notFound({ body: `Repository config ${repoUri} not exist` }); } }, }); diff --git a/x-pack/legacy/plugins/code/server/routes/search.ts b/x-pack/legacy/plugins/code/server/routes/search.ts index 86bdc931cff7a..5c2b731b33c42 100644 --- a/x-pack/legacy/plugins/code/server/routes/search.ts +++ b/x-pack/legacy/plugins/code/server/routes/search.ts @@ -4,9 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import Boom from 'boom'; - -import { RequestFacade, RequestQueryFacade } from '../../'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; import { CommitSearchRequest, DocumentSearchRequest, @@ -32,9 +30,13 @@ export function repositorySearchRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/search/repo', method: 'GET', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { let page = 1; - const { p, q, repoScope } = req.query as RequestQueryFacade; + const { p, q, repoScope } = req.query as any; if (p) { page = parseInt(p as string, 10); } @@ -42,14 +44,17 @@ export function repositorySearchRoute(router: CodeServerRouter, log: Logger) { const searchReq: RepositorySearchRequest = { query: q as string, page, - repoScope: await getScope(req, repoScope), + repoScope: await getScope(context, repoScope), }; try { - const repoSearchClient = new RepositorySearchClient(new EsClientWithRequest(req), log); - const res = await repoSearchClient.search(searchReq); - return res; + const repoSearchClient = new RepositorySearchClient( + new EsClientWithRequest(context, req), + log + ); + const searchRes = await repoSearchClient.search(searchReq); + return res.ok({ body: searchRes }); } catch (error) { - return Boom.internal(`Search Exception`); + return res.internalError({ body: 'Search Exception' }); } }, }); @@ -57,9 +62,13 @@ export function repositorySearchRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/suggestions/repo', method: 'GET', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { let page = 1; - const { p, q, repoScope } = req.query as RequestQueryFacade; + const { p, q, repoScope } = req.query as any; if (p) { page = parseInt(p as string, 10); } @@ -67,14 +76,17 @@ export function repositorySearchRoute(router: CodeServerRouter, log: Logger) { const searchReq: RepositorySearchRequest = { query: q as string, page, - repoScope: await getScope(req, repoScope), + repoScope: await getScope(context, repoScope), }; try { - const repoSearchClient = new RepositorySearchClient(new EsClientWithRequest(req), log); - const res = await repoSearchClient.suggest(searchReq); - return res; + const repoSearchClient = new RepositorySearchClient( + new EsClientWithRequest(context, req), + log + ); + const searchRes = await repoSearchClient.suggest(searchReq); + return res.ok({ body: searchRes }); } catch (error) { - return Boom.internal(`Search Exception`); + return res.internalError({ body: 'Search Exception' }); } }, }); @@ -84,9 +96,13 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/search/doc', method: 'GET', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { let page = 1; - const { p, q, langs, repos, repoScope } = req.query as RequestQueryFacade; + const { p, q, langs, repos, repoScope } = req.query as any; if (p) { page = parseInt(p as string, 10); } @@ -96,14 +112,17 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) { page, langFilters: langs ? (langs as string).split(',') : [], repoFilters: repos ? decodeURIComponent(repos as string).split(',') : [], - repoScope: await getScope(req, repoScope), + repoScope: await getScope(context, repoScope), }; try { - const docSearchClient = new DocumentSearchClient(new EsClientWithRequest(req), log); - const res = await docSearchClient.search(searchReq); - return res; + const docSearchClient = new DocumentSearchClient( + new EsClientWithRequest(context, req), + log + ); + const searchRes = await docSearchClient.search(searchReq); + return res.ok({ body: searchRes }); } catch (error) { - return Boom.internal(`Search Exception`); + return res.internalError({ body: 'Search Exception' }); } }, }); @@ -111,9 +130,13 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/suggestions/doc', method: 'GET', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { let page = 1; - const { p, q, repoScope } = req.query as RequestQueryFacade; + const { p, q, repoScope } = req.query as any; if (p) { page = parseInt(p as string, 10); } @@ -121,14 +144,17 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) { const searchReq: DocumentSearchRequest = { query: q as string, page, - repoScope: await getScope(req, repoScope), + repoScope: await getScope(context, repoScope), }; try { - const docSearchClient = new DocumentSearchClient(new EsClientWithRequest(req), log); - const res = await docSearchClient.suggest(searchReq); - return res; + const docSearchClient = new DocumentSearchClient( + new EsClientWithRequest(context, req), + log + ); + const searchRes = await docSearchClient.suggest(searchReq); + return res.ok({ body: searchRes }); } catch (error) { - return Boom.internal(`Search Exception`); + return res.internalError({ body: 'Search Exception' }); } }, }); @@ -143,14 +169,21 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/integration/snippets', method: 'POST', - async handler(req: RequestFacade) { - const reqs: StackTraceSnippetsRequest[] = (req.payload as any).requests; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { const scopes = new Set( - await getReferenceHelper(req.getSavedObjectsClient()).findReferences() + await getReferenceHelper(context.core.savedObjects.client).findReferences() ); - return await Promise.all( + const reqs: StackTraceSnippetsRequest[] = (req.body as any).requests; + const searchRes = await Promise.all( reqs.map((stacktraceReq: StackTraceSnippetsRequest) => { - const integClient = new IntegrationsSearchClient(new EsClientWithRequest(req), log); + const integClient = new IntegrationsSearchClient( + new EsClientWithRequest(context, req), + log + ); return Promise.all( stacktraceReq.stacktraceItems.map((stacktrace: StackTraceItem) => { const repoUris = stacktraceReq.repoUris.filter(uri => scopes.has(uri)); @@ -166,14 +199,19 @@ export function documentSearchRoute(router: CodeServerRouter, log: Logger) { ); }) ); + return res.ok({ body: searchRes }); }, }); } export function symbolSearchRoute(router: CodeServerRouter, log: Logger) { - const symbolSearchHandler = async (req: RequestFacade) => { + const symbolSearchHandler = async ( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) => { let page = 1; - const { p, q, repoScope } = req.query as RequestQueryFacade; + const { p, q, repoScope } = req.query as any; if (p) { page = parseInt(p as string, 10); } @@ -181,14 +219,14 @@ export function symbolSearchRoute(router: CodeServerRouter, log: Logger) { const searchReq: SymbolSearchRequest = { query: q as string, page, - repoScope: await getScope(req, repoScope), + repoScope: await getScope(context, repoScope), }; try { - const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(req), log); - const res = await symbolSearchClient.suggest(searchReq); - return res; + const symbolSearchClient = new SymbolSearchClient(new EsClientWithRequest(context, req), log); + const searchRes = await symbolSearchClient.suggest(searchReq); + return res.ok({ body: searchRes }); } catch (error) { - return Boom.internal(`Search Exception`); + return res.internalError({ body: 'Search Exception' }); } }; @@ -196,12 +234,12 @@ export function symbolSearchRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/suggestions/symbol', method: 'GET', - handler: symbolSearchHandler, + npHandler: symbolSearchHandler, }); router.route({ path: '/api/code/search/symbol', method: 'GET', - handler: symbolSearchHandler, + npHandler: symbolSearchHandler, }); } @@ -209,9 +247,13 @@ export function commitSearchRoute(router: CodeServerRouter, log: Logger) { router.route({ path: '/api/code/search/commit', method: 'GET', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { let page = 1; - const { p, q, repos, repoScope } = req.query as RequestQueryFacade; + const { p, q, repos, repoScope } = req.query as any; if (p) { page = parseInt(p as string, 10); } @@ -220,21 +262,27 @@ export function commitSearchRoute(router: CodeServerRouter, log: Logger) { query: q as string, page, repoFilters: repos ? decodeURIComponent(repos as string).split(',') : [], - repoScope: await getScope(req, repoScope), + repoScope: await getScope(context, repoScope), }; try { - const commitSearchClient = new CommitSearchClient(new EsClientWithRequest(req), log); - const res = await commitSearchClient.search(searchReq); - return res; + const commitSearchClient = new CommitSearchClient( + new EsClientWithRequest(context, req), + log + ); + const searchRes = await commitSearchClient.search(searchReq); + return res.ok({ body: searchRes }); } catch (error) { - return Boom.internal(`Search Exception`); + return res.internalError({ body: 'Search Exception' }); } }, }); } -async function getScope(req: RequestFacade, repoScope: string | string[]): Promise { - let scope: string[] = await getReferenceHelper(req.getSavedObjectsClient()).findReferences(); +async function getScope( + context: RequestHandlerContext, + repoScope: string | string[] +): Promise { + let scope: string[] = await getReferenceHelper(context.core.savedObjects.client).findReferences(); if (typeof repoScope === 'string') { const uriSet = new Set(repoScope.split(',')); scope = scope.filter(uri => uriSet.has(uri)); diff --git a/x-pack/legacy/plugins/code/server/routes/setup.ts b/x-pack/legacy/plugins/code/server/routes/setup.ts index 58db84fd80aaf..6f89ebf35441f 100644 --- a/x-pack/legacy/plugins/code/server/routes/setup.ts +++ b/x-pack/legacy/plugins/code/server/routes/setup.ts @@ -4,7 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -import { RequestFacade } from '../..'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; + import { CodeServerRouter } from '../security'; import { CodeServices } from '../distributed/code_services'; import { SetupDefinition } from '../distributed/apis'; @@ -14,9 +15,14 @@ export function setupRoute(router: CodeServerRouter, codeServices: CodeServices) router.route({ method: 'get', path: '/api/code/setup', - async handler(req: RequestFacade) { + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { const endpoint = await codeServices.locate(req, ''); - return await setupService.setup(endpoint, {}); + const setup = await setupService.setup(endpoint, {}); + return res.ok({ body: setup }); }, }); } diff --git a/x-pack/legacy/plugins/code/server/routes/status.ts b/x-pack/legacy/plugins/code/server/routes/status.ts index 56b2972bd4147..e2723342b49d2 100644 --- a/x-pack/legacy/plugins/code/server/routes/status.ts +++ b/x-pack/legacy/plugins/code/server/routes/status.ts @@ -4,10 +4,9 @@ * you may not use this file except in compliance with the Elastic License. */ -import Boom from 'boom'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; import { CodeServerRouter } from '../security'; -import { RequestFacade } from '../../'; import { LangServerType, RepoFileStatus, StatusReport } from '../../common/repo_file_status'; import { CTAGS, LanguageServerDefinition } from '../lsp/language_servers'; import { LanguageServerStatus } from '../../common/language_server'; @@ -108,18 +107,22 @@ export function statusRoute(router: CodeServerRouter, codeServices: CodeServices router.route({ path: '/api/code/repo/{uri*3}/status/{ref}/{path*}', method: 'GET', - async handler(req: RequestFacade) { - const { uri, path, ref } = req.params; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri, path, ref } = req.params as any; const report: StatusReport = {}; - const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(req)); + const repoObjectClient = new RepositoryObjectClient(new EsClientWithRequest(context, req)); const endpoint = await codeServices.locate(req, uri); try { // Check if the repository already exists const repo = await repoObjectClient.getRepository(uri); - await getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repo.uri); + await getReferenceHelper(context.core.savedObjects.client).ensureReference(repo.uri); } catch (e) { - return Boom.notFound(`repo ${uri} not found`); + return res.notFound({ body: `repo ${uri} not found` }); } await handleRepoStatus(endpoint, report, uri, ref, repoObjectClient); if (path) { @@ -141,10 +144,10 @@ export function statusRoute(router: CodeServerRouter, codeServices: CodeServices // not a file? The path may be a dir. } } catch (e) { - return Boom.internal(e.message || e.name); + return res.internalError({ body: e.message || e.name }); } } - return report; + return res.ok({ body: report }); }, }); } diff --git a/x-pack/legacy/plugins/code/server/routes/workspace.ts b/x-pack/legacy/plugins/code/server/routes/workspace.ts index 8a112af297245..4dfafda7369c1 100644 --- a/x-pack/legacy/plugins/code/server/routes/workspace.ts +++ b/x-pack/legacy/plugins/code/server/routes/workspace.ts @@ -4,9 +4,9 @@ * you may not use this file except in compliance with the Elastic License. */ -import Boom from 'boom'; +import { KibanaRequest, KibanaResponseFactory, RequestHandlerContext } from 'src/core/server'; -import { RequestFacade, RequestQueryFacade } from '../../'; +import { RequestQueryFacade } from '../../'; import { ServerOptions } from '../server_options'; import { CodeServerRouter } from '../security'; import { CodeServices } from '../distributed/code_services'; @@ -23,8 +23,12 @@ export function workspaceRoute( router.route({ path: '/api/code/workspace', method: 'GET', - async handler() { - return serverOptions.repoConfigs; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + return res.ok({ body: serverOptions.repoConfigs }); }, }); @@ -32,23 +36,35 @@ export function workspaceRoute( path: '/api/code/workspace/{uri*3}/{revision}', requireAdmin: true, method: 'POST', - async handler(req: RequestFacade) { - const repoUri = req.params.uri as string; - getReferenceHelper(req.getSavedObjectsClient()).ensureReference(repoUri); - const revision = req.params.revision as string; + async npHandler( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) { + const { uri: repoUri, revision } = req.params as any; + getReferenceHelper(context.core.savedObjects.client).ensureReference(repoUri); const repoConfig = serverOptions.repoConfigs[repoUri]; const force = !!(req.query as RequestQueryFacade).force; if (repoConfig) { const endpoint = await codeServices.locate(req, repoUri); try { await workspaceService.initCmd(endpoint, { repoUri, revision, force, repoConfig }); + return res.ok(); } catch (e) { if (e.isBoom) { - return e; + return res.customError({ + body: e.error, + statusCode: e.statusCode ? e.statusCode : 500, + }); + } else { + return res.customError({ + body: e.error, + statusCode: 500, + }); } } } else { - return Boom.notFound(`repo config for ${repoUri} not found.`); + return res.notFound({ body: `repo config for ${repoUri} not found.` }); } }, }); diff --git a/x-pack/legacy/plugins/code/server/security.ts b/x-pack/legacy/plugins/code/server/security.ts index c548b51940599..b511fba5af4d8 100644 --- a/x-pack/legacy/plugins/code/server/security.ts +++ b/x-pack/legacy/plugins/code/server/security.ts @@ -4,27 +4,100 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ServerFacade, ServerRouteFacade, RouteOptionsFacade } from '..'; +import { schema } from '@kbn/config-schema'; + +import { IRouter, RequestHandler } from 'src/core/server'; +import { ServerRouteFacade, RouteOptionsFacade } from '..'; export class CodeServerRouter { - constructor(readonly server: ServerFacade) {} + constructor(readonly router: IRouter) {} route(route: CodeRoute) { const routeOptions: RouteOptionsFacade = (route.options || {}) as RouteOptionsFacade; - routeOptions.tags = [ + const tags = [ ...(routeOptions.tags || []), `access:code_${route.requireAdmin ? 'admin' : 'user'}`, ]; - this.server.route({ - handler: route.handler, - method: route.method, - options: routeOptions, - path: route.path, - }); + const routeHandler = route.npHandler!; + + switch ((route.method as string).toLowerCase()) { + case 'get': { + this.router.get( + { + path: route.path, + validate: { + query: schema.object({}, { allowUnknowns: true }), + params: schema.object({}, { allowUnknowns: true }), + }, + options: { + tags, + }, + }, + routeHandler + ); + break; + } + case 'put': { + this.router.put( + { + path: route.path, + validate: { + query: schema.object({}, { allowUnknowns: true }), + params: schema.object({}, { allowUnknowns: true }), + body: schema.object({}, { allowUnknowns: true }), + }, + options: { + tags, + }, + }, + routeHandler + ); + break; + } + case 'delete': { + this.router.delete( + { + path: route.path, + validate: { + query: schema.object({}, { allowUnknowns: true }), + params: schema.object({}, { allowUnknowns: true }), + }, + options: { + tags, + }, + }, + routeHandler + ); + break; + } + case 'patch': + case 'post': { + this.router.post( + { + path: route.path, + validate: { + query: schema.object({}, { allowUnknowns: true }), + params: schema.object({}, { allowUnknowns: true }), + body: schema.object({}, { allowUnknowns: true }), + }, + options: { + tags, + }, + }, + routeHandler + ); + break; + } + default: { + throw new Error(`Unknown HTTP method: ${route.method}`); + } + } } } export interface CodeRoute extends ServerRouteFacade { requireAdmin?: boolean; + // New Platform Route Handler API + npHandler?: RequestHandler; } diff --git a/x-pack/legacy/plugins/code/server/utils/es_index_client.ts b/x-pack/legacy/plugins/code/server/utils/es_index_client.ts index 49e27cdde62b6..9dcfb543e8306 100644 --- a/x-pack/legacy/plugins/code/server/utils/es_index_client.ts +++ b/x-pack/legacy/plugins/code/server/utils/es_index_client.ts @@ -4,50 +4,62 @@ * you may not use this file except in compliance with the Elastic License. */ -import { AnyObject } from '../lib/esqueue'; +import { + IndicesCreateParams, + IndicesDeleteParams, + IndicesExistsParams, + IndicesExistsAliasParams, + IndicesDeleteAliasParams, + IndicesGetAliasParams, + IndicesGetMappingParams, + IndicesPutAliasParams, + IndicesUpdateAliasesParams, + IndicesRefreshParams, +} from 'elasticsearch'; + import { WithRequest } from './with_request'; import { WithInternalRequest } from './with_internal_request'; export class EsIndexClient { constructor(readonly self: WithRequest | WithInternalRequest) {} - public exists(params: AnyObject): Promise { + public exists(params: IndicesExistsParams): Promise { return this.self.callCluster('indices.exists', params); } - public create(params: AnyObject): Promise { + public create(params: IndicesCreateParams): Promise { return this.self.callCluster('indices.create', params); } - public refresh(params: AnyObject): Promise { + public refresh(params: IndicesRefreshParams): Promise { return this.self.callCluster('indices.refresh', params); } - public delete(params: AnyObject): Promise { + public delete(params: IndicesDeleteParams): Promise { return this.self.callCluster('indices.delete', params); } - public existsAlias(params: AnyObject): Promise { + public existsAlias(params: IndicesExistsAliasParams): Promise { return this.self.callCluster('indices.existsAlias', params); } - public getAlias(params: AnyObject): Promise { + public getAlias(params: IndicesGetAliasParams): Promise { return this.self.callCluster('indices.getAlias', params); } - public putAlias(params: AnyObject): Promise { + public putAlias(params: IndicesPutAliasParams): Promise { return this.self.callCluster('indices.putAlias', params); } - public deleteAlias(params: AnyObject): Promise { + public deleteAlias(params: IndicesDeleteAliasParams): Promise { return this.self.callCluster('indices.deleteAlias', params); } - public updateAliases(params: AnyObject): Promise { + public updateAliases(params: IndicesUpdateAliasesParams): Promise { return this.self.callCluster('indices.updateAliases', params); } - public getMapping(params: AnyObject): Promise { + public getMapping(params: IndicesGetMappingParams): Promise { return this.self.callCluster('indices.getMapping', params); } } diff --git a/x-pack/legacy/plugins/code/server/utils/esclient_with_internal_request.ts b/x-pack/legacy/plugins/code/server/utils/esclient_with_internal_request.ts index 5a2cb0952e4b6..60a57f4dd26ea 100644 --- a/x-pack/legacy/plugins/code/server/utils/esclient_with_internal_request.ts +++ b/x-pack/legacy/plugins/code/server/utils/esclient_with_internal_request.ts @@ -4,35 +4,46 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ServerFacade } from '../..'; -import { AnyObject, EsClient } from '../lib/esqueue'; +import { + BulkIndexDocumentsParams, + DeleteDocumentByQueryParams, + DeleteDocumentParams, + GetParams, + IndexDocumentParams, + ReindexParams, + SearchParams, + UpdateDocumentParams, + UpdateDocumentByQueryParams, +} from 'elasticsearch'; +import { IClusterClient } from 'src/core/server'; +import { EsClient } from '../lib/esqueue'; import { EsIndexClient } from './es_index_client'; import { WithInternalRequest } from './with_internal_request'; export class EsClientWithInternalRequest extends WithInternalRequest implements EsClient { public readonly indices = new EsIndexClient(this); - constructor(server: ServerFacade) { - super(server); + constructor(cluster: IClusterClient) { + super(cluster); } - public bulk(params: AnyObject): Promise { + public bulk(params: BulkIndexDocumentsParams): Promise { return this.callCluster('bulk', params); } - public delete(params: AnyObject): Promise { + public delete(params: DeleteDocumentParams): Promise { return this.callCluster('delete', params); } - public deleteByQuery(params: AnyObject): Promise { + public deleteByQuery(params: DeleteDocumentByQueryParams): Promise { return this.callCluster('deleteByQuery', params); } - public get(params: AnyObject): Promise { + public get(params: GetParams): Promise { return this.callCluster('get', params); } - public index(params: AnyObject): Promise { + public index(params: IndexDocumentParams): Promise { return this.callCluster('index', params); } @@ -40,19 +51,19 @@ export class EsClientWithInternalRequest extends WithInternalRequest implements return this.callCluster('ping'); } - public reindex(params: AnyObject): Promise { + public reindex(params: ReindexParams): Promise { return this.callCluster('reindex', params); } - public search(params: AnyObject): Promise { + public search(params: SearchParams): Promise { return this.callCluster('search', params); } - public update(params: AnyObject): Promise { + public update(params: UpdateDocumentParams): Promise { return this.callCluster('update', params); } - public updateByQuery(params: AnyObject): Promise { + public updateByQuery(params: UpdateDocumentByQueryParams): Promise { return this.callCluster('updateByQuery', params); } } diff --git a/x-pack/legacy/plugins/code/server/utils/esclient_with_request.ts b/x-pack/legacy/plugins/code/server/utils/esclient_with_request.ts index a1f70db0a7074..2e4a18937a232 100644 --- a/x-pack/legacy/plugins/code/server/utils/esclient_with_request.ts +++ b/x-pack/legacy/plugins/code/server/utils/esclient_with_request.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { RequestFacade } from '../../'; +import { KibanaRequest, RequestHandlerContext } from 'src/core/server'; import { AnyObject, EsClient } from '../lib/esqueue'; import { EsIndexClient } from './es_index_client'; import { WithRequest } from './with_request'; @@ -12,8 +12,8 @@ import { WithRequest } from './with_request'; export class EsClientWithRequest extends WithRequest implements EsClient { public readonly indices = new EsIndexClient(this); - constructor(readonly req: RequestFacade) { - super(req); + constructor(readonly context: RequestHandlerContext, readonly req: KibanaRequest) { + super(context, req); } public bulk(params: AnyObject): Promise { diff --git a/x-pack/legacy/plugins/code/server/utils/with_internal_request.ts b/x-pack/legacy/plugins/code/server/utils/with_internal_request.ts index a51fa990ff10e..9f8dde129039a 100644 --- a/x-pack/legacy/plugins/code/server/utils/with_internal_request.ts +++ b/x-pack/legacy/plugins/code/server/utils/with_internal_request.ts @@ -4,14 +4,12 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ServerFacade } from '../..'; -import { AnyObject } from '../lib/esqueue'; +import { APICaller, IClusterClient } from 'src/core/server'; export class WithInternalRequest { - public readonly callCluster: (endpoint: string, clientOptions?: AnyObject) => Promise; + public readonly callCluster: APICaller; - constructor(server: ServerFacade) { - const cluster = server.plugins.elasticsearch.getCluster('admin'); - this.callCluster = cluster.callWithInternalUser; + constructor(cluster: IClusterClient) { + this.callCluster = cluster.callAsInternalUser; } } diff --git a/x-pack/legacy/plugins/code/server/utils/with_request.ts b/x-pack/legacy/plugins/code/server/utils/with_request.ts index e08b9727f375e..e2a4bfd03de66 100644 --- a/x-pack/legacy/plugins/code/server/utils/with_request.ts +++ b/x-pack/legacy/plugins/code/server/utils/with_request.ts @@ -4,24 +4,20 @@ * you may not use this file except in compliance with the Elastic License. */ -import { RequestFacade } from '../../'; -import { AnyObject } from '../lib/esqueue'; +import { APICaller, KibanaRequest, RequestHandlerContext } from 'src/core/server'; export class WithRequest { - public readonly callCluster: (endpoint: string, clientOptions?: AnyObject) => Promise; + public readonly callCluster: APICaller; - constructor(readonly req: RequestFacade) { - const cluster = req.server.plugins.elasticsearch.getCluster('data'); - - // @ts-ignore - const securityPlugin = req.server.plugins.security; - if (securityPlugin) { - const useRbac = securityPlugin.authorization.mode.useRbacForRequest(req); - if (useRbac) { - this.callCluster = cluster.callWithInternalUser; - return; - } - } - this.callCluster = cluster.callWithRequest.bind(null, req); + constructor(readonly context: RequestHandlerContext, readonly req: KibanaRequest) { + const securityPlugin = context.code.legacy.securityPlugin; + const useRbac = + securityPlugin && + securityPlugin.authorization && + // @ts-ignore + securityPlugin.authorization.mode.useRbacForRequest(req); + this.callCluster = useRbac + ? context.core.elasticsearch.dataClient.callAsInternalUser + : context.core.elasticsearch.dataClient.callAsCurrentUser; } } diff --git a/x-pack/legacy/plugins/infra/public/components/navigation/app_navigation.tsx b/x-pack/legacy/plugins/infra/public/components/navigation/app_navigation.tsx index b1eef34001750..fe3c930f9e08e 100644 --- a/x-pack/legacy/plugins/infra/public/components/navigation/app_navigation.tsx +++ b/x-pack/legacy/plugins/infra/public/components/navigation/app_navigation.tsx @@ -9,11 +9,12 @@ import React from 'react'; import euiStyled from '../../../../../common/eui_styled_components'; interface AppNavigationProps { + 'aria-label': string; children: React.ReactNode; } -export const AppNavigation = ({ children }: AppNavigationProps) => ( -