From 640264f614bafda69b80f2e5967f34f4dcc1c4ec Mon Sep 17 00:00:00 2001 From: Timothy Sullivan Date: Wed, 22 Jan 2020 11:18:55 -0700 Subject: [PATCH 1/7] More Typescript Fixes --- .../server/execute_job.ts | 4 +- .../generate_from_savedobject_immediate.ts | 4 +- .../plugins/reporting/server/routes/jobs.ts | 42 +++++++++++-------- .../authorized_user_pre_routing.test.js | 19 ++++----- .../server/routes/lib/get_document_payload.ts | 26 +++++++----- ...nse_handler.js => job_response_handler.ts} | 33 +++++++++++---- .../lib/reporting_feature_pre_routing.ts | 2 +- x-pack/legacy/plugins/reporting/types.d.ts | 34 +++------------ 8 files changed, 85 insertions(+), 79 deletions(-) rename x-pack/legacy/plugins/reporting/server/routes/lib/{__tests__ => }/authorized_user_pre_routing.test.js (91%) rename x-pack/legacy/plugins/reporting/server/routes/lib/{job_response_handler.js => job_response_handler.ts} (65%) diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts index 69d9a173d40b3..5e6181b8b07ef 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts @@ -9,7 +9,7 @@ import { cryptoFactory, LevelLogger } from '../../../server/lib'; import { ExecuteJobFactory, ImmediateExecuteFn, - JobDocOutputExecuted, + JobDocOutput, ServerFacade, RequestFacade, } from '../../../types'; @@ -36,7 +36,7 @@ export const executeJobFactory: ExecuteJobFactory { + ): Promise { // There will not be a jobID for "immediate" generation. // jobID is only for "queued" jobs // Use the jobID as a logging tag or "immediate" diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts index 8d1c84664cbe9..b372422952622 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts @@ -13,7 +13,7 @@ import { HeadlessChromiumDriverFactory, ReportingResponseToolkit, Logger, - JobDocOutputExecuted, + JobDocOutput, } from '../../types'; import { JobDocPayloadPanelCsv } from '../../export_types/csv_from_savedobject/types'; import { getJobParamsFromRequest } from '../../export_types/csv_from_savedobject/server/lib/get_job_params_from_request'; @@ -68,7 +68,7 @@ export function registerGenerateCsvFromSavedObjectImmediate( content_type: jobOutputContentType, content: jobOutputContent, size: jobOutputSize, - }: JobDocOutputExecuted = await executeJobFn(null, jobDocPayload, request); + }: JobDocOutput = await executeJobFn(null, jobDocPayload, request); logger.info(`Job output size: ${jobOutputSize} bytes`); diff --git a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts index 6084ca613d10e..ed2d8777a908e 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts @@ -4,8 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import Boom from 'boom'; import { Legacy } from 'kibana'; -import boom from 'boom'; +import { ResponseObject } from 'hapi'; import { API_BASE_URL } from '../../common/constants'; import { ServerFacade, @@ -28,6 +29,10 @@ import { makeRequestFacade } from './lib/make_request_facade'; const MAIN_ENTRY = `${API_BASE_URL}/jobs`; +function isResponse(response: Boom | ResponseObject): response is ResponseObject { + return !(response as Boom).isBoom; +} + export function registerJobInfoRoutes( server: ServerFacade, exportTypesRegistry: ExportTypesRegistry, @@ -84,14 +89,14 @@ export function registerJobInfoRoutes( return jobsQuery.get(request.pre.user, docId, { includeContent: true }).then( (result): JobDocOutput => { if (!result) { - throw boom.notFound(); + throw Boom.notFound(); } const { _source: { jobtype: jobType, output: jobOutput }, } = result; if (!request.pre.management.jobTypes.includes(jobType)) { - throw boom.unauthorized(`Sorry, you are not authorized to download ${jobType} reports`); + throw Boom.unauthorized(`Sorry, you are not authorized to download ${jobType} reports`); } return jobOutput; @@ -111,13 +116,13 @@ export function registerJobInfoRoutes( return jobsQuery.get(request.pre.user, docId).then((result): JobSource['_source'] => { if (!result) { - throw boom.notFound(); + throw Boom.notFound(); } const { _source: job } = result; const { jobtype: jobType, payload: jobPayload } = job; if (!request.pre.management.jobTypes.includes(jobType)) { - throw boom.unauthorized(`Sorry, you are not authorized to view ${jobType} info`); + throw Boom.unauthorized(`Sorry, you are not authorized to view ${jobType} info`); } return { @@ -147,21 +152,22 @@ export function registerJobInfoRoutes( h, { docId } ); - const { statusCode } = response; - - if (statusCode !== 200) { - if (statusCode === 500) { - logger.error(`Report ${docId} has failed: ${JSON.stringify(response.source)}`); - } else { - logger.debug( - `Report ${docId} has non-OK status: [${statusCode}] Reason: [${JSON.stringify( - response.source - )}]` - ); + + if (isResponse(response)) { + const { statusCode } = response; + + if (statusCode !== 200) { + if (statusCode === 500) { + logger.error(`Report ${docId} has failed: ${JSON.stringify(response.source)}`); + } else { + logger.debug( + `Report ${docId} has non-OK status: [${statusCode}] Reason: [${JSON.stringify( + response.source + )}]` + ); + } } - } - if (!response.isBoom) { response = response.header('accept-ranges', 'none'); } diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/__tests__/authorized_user_pre_routing.test.js b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js similarity index 91% rename from x-pack/legacy/plugins/reporting/server/routes/lib/__tests__/authorized_user_pre_routing.test.js rename to x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js index 0b2aff53793cd..9997aca0954d0 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/__tests__/authorized_user_pre_routing.test.js +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js @@ -5,8 +5,7 @@ */ import expect from '@kbn/expect'; -import sinon from 'sinon'; -import { authorizedUserPreRoutingFactory } from '../authorized_user_pre_routing'; +import { authorizedUserPreRoutingFactory } from './authorized_user_pre_routing'; describe('authorized_user_pre_routing', function() { // the getClientShield is using `once` which forces us to use a constant mock @@ -14,14 +13,14 @@ 
describe('authorized_user_pre_routing', function() { // so createMockServer reuses the same 'instance' of the server and overwrites // the properties to contain different values const createMockServer = (function() { - const getUserStub = sinon.stub(); + const getUserStub = jest.fn(); let mockConfig; const mockServer = { - expose: function() {}, - config: function() { + expose() {}, + config() { return { - get: function(key) { + get(key) { return mockConfig[key]; }, }; @@ -45,7 +44,7 @@ describe('authorized_user_pre_routing', function() { mockServer.plugins.xpack_main = { info: !xpackInfoUndefined && { isAvailable: () => xpackInfoAvailable, - feature: function(featureName) { + feature(featureName) { if (featureName === 'security') { return { isEnabled: () => securityEnabled, @@ -56,8 +55,8 @@ describe('authorized_user_pre_routing', function() { }, }; - getUserStub.resetHistory(); - getUserStub.resolves(user); + getUserStub.mockReset(); + getUserStub.mockResolvedValue(user); return mockServer; }; })(); @@ -66,7 +65,7 @@ describe('authorized_user_pre_routing', function() { const mockServer = createMockServer({ xpackInfoUndefined: true }); const authorizedUserPreRouting = authorizedUserPreRoutingFactory(mockServer); - const response = await authorizedUserPreRouting(); + const response = await authorizedUserPreRouting({}); expect(response.isBoom).to.be(true); expect(response.output.statusCode).to.be(404); }); diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts index c3a30f9dda454..1c0566100e197 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/get_document_payload.ts @@ -11,8 +11,8 @@ import { ServerFacade, ExportTypesRegistry, ExportTypeDefinition, - JobDocExecuted, - JobDocOutputExecuted, + JobDocOutput, + JobSource, } from '../../../types'; import { CSV_JOB_TYPE } from '../../../common/constants'; @@ -20,14 +20,21 @@ interface ICustomHeaders { [x: string]: any; } -const DEFAULT_TITLE = 'report'; +type ExportTypeType = ExportTypeDefinition; + +interface Payload { + statusCode: number; + content: any; + contentType: string; + headers: Record; +} -type ExportTypeType = ExportTypeDefinition; +const DEFAULT_TITLE = 'report'; const getTitle = (exportType: ExportTypeType, title?: string): string => `${title || DEFAULT_TITLE}.${exportType.jobContentExtension}`; -const getReportingHeaders = (output: JobDocOutputExecuted, exportType: ExportTypeType) => { +const getReportingHeaders = (output: JobDocOutput, exportType: ExportTypeType) => { const metaDataHeaders: ICustomHeaders = {}; if (exportType.jobType === CSV_JOB_TYPE) { @@ -54,7 +61,7 @@ export function getDocumentPayloadFactory( } } - function getCompleted(output: JobDocOutputExecuted, jobType: string, title: string) { + function getCompleted(output: JobDocOutput, jobType: string, title: string) { const exportType = exportTypesRegistry.get((item: ExportTypeType) => item.jobType === jobType); const filename = getTitle(exportType, title); const headers = getReportingHeaders(output, exportType); @@ -70,7 +77,7 @@ export function getDocumentPayloadFactory( }; } - function getFailure(output: JobDocOutputExecuted) { + function getFailure(output: JobDocOutput) { return { statusCode: 500, content: { @@ -78,6 +85,7 @@ export function getDocumentPayloadFactory( reason: output.content, }, contentType: 'application/json', + headers: {}, }; } @@ -90,9 +98,7 
@@ export function getDocumentPayloadFactory( }; } - return function getDocumentPayload(doc: { - _source: JobDocExecuted<{ output: JobDocOutputExecuted }>; - }) { + return function getDocumentPayload(doc: JobSource): Payload { const { status, jobtype: jobType, payload: { title } = { title: '' } } = doc._source; const { output } = doc._source; diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.js b/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts similarity index 65% rename from x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.js rename to x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts index e2da323546113..3ba7aa30eedcb 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.js +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts @@ -4,29 +4,48 @@ * you may not use this file except in compliance with the Elastic License. */ -import boom from 'boom'; +import Boom from 'boom'; +import { ResponseToolkit } from 'hapi'; +import { ServerFacade, ExportTypesRegistry } from '../../../types'; import { jobsQueryFactory } from '../../lib/jobs_query'; import { WHITELISTED_JOB_CONTENT_TYPES } from '../../../common/constants'; import { getDocumentPayloadFactory } from './get_document_payload'; -export function jobResponseHandlerFactory(server, exportTypesRegistry) { +interface JobResponseHandlerParams { + docId: string; +} + +interface JobResponseHandlerOpts { + excludeContent?: boolean; +} + +export function jobResponseHandlerFactory( + server: ServerFacade, + exportTypesRegistry: ExportTypesRegistry +) { const jobsQuery = jobsQueryFactory(server); const getDocumentPayload = getDocumentPayloadFactory(server, exportTypesRegistry); - return function jobResponseHandler(validJobTypes, user, h, params, opts = {}) { + return function jobResponseHandler( + validJobTypes: string[], + user: any, + h: ResponseToolkit, + params: JobResponseHandlerParams, + opts: JobResponseHandlerOpts = {} + ) { const { docId } = params; return jobsQuery.get(user, docId, { includeContent: !opts.excludeContent }).then(doc => { - if (!doc) return boom.notFound(); + if (!doc) return Boom.notFound(); const { jobtype: jobType } = doc._source; if (!validJobTypes.includes(jobType)) { - return boom.unauthorized(`Sorry, you are not authorized to download ${jobType} reports`); + return Boom.unauthorized(`Sorry, you are not authorized to download ${jobType} reports`); } const output = getDocumentPayload(doc); if (!WHITELISTED_JOB_CONTENT_TYPES.includes(output.contentType)) { - return boom.badImplementation( + return Boom.badImplementation( `Unsupported content-type of ${output.contentType} specified by job output` ); } @@ -42,7 +61,7 @@ export function jobResponseHandlerFactory(server, exportTypesRegistry) { }); } - return response; + return response; // Hapi }); }; } diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts index 6efac818981ef..f1e7786baf7a2 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts @@ -20,7 +20,7 @@ export const reportingFeaturePreRoutingFactory = function reportingFeaturePreRou return function reportingFeaturePreRouting(getReportingFeatureId: GetReportingFeatureIdFn) { return function 
licensePreRouting(request: Legacy.Request) { const licenseCheckResults = xpackMainPlugin.info.feature(pluginId).getLicenseCheckResults(); - const reportingFeatureId = getReportingFeatureId(request); + const reportingFeatureId = getReportingFeatureId(request) as string; const reportingFeature = licenseCheckResults[reportingFeatureId]; if (!reportingFeature.showLinks || !reportingFeature.enableLinks) { throw Boom.forbidden(reportingFeature.message); diff --git a/x-pack/legacy/plugins/reporting/types.d.ts b/x-pack/legacy/plugins/reporting/types.d.ts index 47f384250dd53..e600492014389 100644 --- a/x-pack/legacy/plugins/reporting/types.d.ts +++ b/x-pack/legacy/plugins/reporting/types.d.ts @@ -200,18 +200,6 @@ export interface JobDocPayload { type: string | null; } -export interface JobDocOutput { - content: string; // encoded content - contentType: string; -} - -export interface JobDocExecuted { - jobtype: string; - output: JobDocOutputExecuted; - payload: JobDocPayload; - status: string; // completed, failed, etc -} - export interface JobSource { _id: string; _source: { @@ -222,21 +210,9 @@ export interface JobSource { }; } -/* - * A snake_cased field is the only significant difference in structure of - * JobDocOutputExecuted vs JobDocOutput. - * - * JobDocOutput is the structure of the object returned by getDocumentPayload - * - * data in the _source fields of the - * Reporting index. - * - * The ESQueueWorker internals have executed job objects returned with this - * structure. See `_formatOutput` in reporting/server/lib/esqueue/worker.js - */ -export interface JobDocOutputExecuted { - content_type: string; // vs `contentType` above - content: string | null; // defaultOutput is null +export interface JobDocOutput { + content_type: string; + content: string | null; max_size_reached: boolean; size: number; } @@ -279,7 +255,7 @@ export type ImmediateExecuteFn = ( jobId: null, job: JobDocPayload, request: RequestFacade -) => Promise; +) => Promise; export interface ESQueueWorkerOptions { kibanaName: string; @@ -292,7 +268,7 @@ export interface ESQueueWorkerOptions { type GenericWorkerFn = ( jobSource: JobSource, ...workerRestArgs: any[] -) => void | Promise; +) => void | Promise; export interface ESQueueInstance { registerWorker: ( From 0e75d64f75e9ce9fb558f5bb0b47a104fd617d8c Mon Sep 17 00:00:00 2001 From: Timothy Sullivan Date: Tue, 21 Jan 2020 11:03:50 -0700 Subject: [PATCH 2/7] [Reporting/New Platform] Use the logger service from core --- .../export_types/csv/server/execute_job.ts | 13 +++---- .../server/create_job/create_job.ts | 13 +++---- .../server/execute_job.ts | 20 ++++------- .../png/server/execute_job/index.ts | 7 ++-- .../printable_pdf/server/execute_job/index.ts | 7 ++-- x-pack/legacy/plugins/reporting/index.ts | 4 +-- .../browsers/create_browser_driver_factory.ts | 9 +++-- .../reporting/server/lib/create_queue.ts | 14 ++++---- .../server/lib/create_tagged_logger.ts | 32 ++++++++--------- .../server/lib/create_worker.test.ts | 7 ++-- .../reporting/server/lib/create_worker.ts | 6 ++-- .../reporting/server/lib/enqueue_job.ts | 6 ++-- .../plugins/reporting/server/lib/get_user.ts | 6 ++-- .../reporting/server/lib/level_logger.ts | 32 ++++++++--------- .../legacy/plugins/reporting/server/plugin.ts | 22 ++++++++---- .../server/routes/generate_from_jobparams.ts | 8 +++-- .../routes/generate_from_savedobject.ts | 7 ++-- .../generate_from_savedobject_immediate.ts | 6 ++-- .../reporting/server/routes/generation.ts | 10 +++--- .../plugins/reporting/server/routes/jobs.ts | 6 ++-- 
.../routes/lib/authorized_user_pre_routing.ts | 14 ++++---- .../lib/reporting_feature_pre_routing.ts | 5 +-- .../routes/lib/route_config_factories.ts | 34 ++++++++++++------- x-pack/legacy/plugins/reporting/types.d.ts | 7 ++-- 24 files changed, 157 insertions(+), 138 deletions(-) diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts index f35ffa0e45bfe..b949be384b2ae 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts @@ -5,25 +5,26 @@ */ import { i18n } from '@kbn/i18n'; +import { CSV_JOB_TYPE } from '../../../common/constants'; +import { cryptoFactory } from '../../../server/lib'; import { - ExecuteJobFactory, ESQueueWorkerExecuteFn, + ExecuteJobFactory, FieldFormats, + Logger, ServerFacade, } from '../../../types'; -import { CSV_JOB_TYPE, PLUGIN_ID } from '../../../common/constants'; -import { cryptoFactory, LevelLogger } from '../../../server/lib'; import { JobDocPayloadDiscoverCsv } from '../types'; -import { createGenerateCsv } from './lib/generate_csv'; import { fieldFormatMapFactory } from './lib/field_format_map'; +import { createGenerateCsv } from './lib/generate_csv'; export const executeJobFactory: ExecuteJobFactory> = function executeJobFactoryFn(server: ServerFacade) { +>> = function executeJobFactoryFn(server: ServerFacade, parentLogger: Logger) { const { callWithRequest } = server.plugins.elasticsearch.getCluster('data'); const crypto = cryptoFactory(server); const config = server.config(); - const logger = LevelLogger.createForServer(server, [PLUGIN_ID, CSV_JOB_TYPE, 'execute-job']); + const logger = parentLogger.clone([CSV_JOB_TYPE, 'execute-job']); const serverBasePath = config.get('server.basePath'); return async function executeJob( diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts index 8443be2b25f4f..a270e3e0329fe 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts @@ -6,13 +6,14 @@ import { notFound, notImplemented } from 'boom'; import { get } from 'lodash'; -import { PLUGIN_ID, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../../common/constants'; -import { cryptoFactory, LevelLogger } from '../../../../server/lib'; +import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../../common/constants'; +import { cryptoFactory } from '../../../../server/lib'; import { CreateJobFactory, ImmediateCreateJobFn, ServerFacade, RequestFacade, + Logger, } from '../../../../types'; import { SavedObject, @@ -34,13 +35,9 @@ interface VisData { export const createJobFactory: CreateJobFactory> = function createJobFactoryFn(server: ServerFacade) { +>> = function createJobFactoryFn(server: ServerFacade, parentLogger: Logger) { const crypto = cryptoFactory(server); - const logger = LevelLogger.createForServer(server, [ - PLUGIN_ID, - CSV_FROM_SAVEDOBJECT_JOB_TYPE, - 'create-job', - ]); + const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'create-job']); return async function createJob( jobParams: JobParamsPanelCsv, diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts 
b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts index 5e6181b8b07ef..75e0826c3f0a5 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts @@ -5,32 +5,26 @@ */ import { i18n } from '@kbn/i18n'; -import { cryptoFactory, LevelLogger } from '../../../server/lib'; +import { CONTENT_TYPE_CSV, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants'; +import { cryptoFactory } from '../../../server/lib'; import { ExecuteJobFactory, ImmediateExecuteFn, JobDocOutput, ServerFacade, + Logger, RequestFacade, + ServerFacade, } from '../../../types'; -import { - CONTENT_TYPE_CSV, - CSV_FROM_SAVEDOBJECT_JOB_TYPE, - PLUGIN_ID, -} from '../../../common/constants'; import { CsvResultFromSearch } from '../../csv/types'; -import { JobParamsPanelCsv, SearchPanel, JobDocPayloadPanelCsv, FakeRequest } from '../types'; +import { FakeRequest, JobDocPayloadPanelCsv, JobParamsPanelCsv, SearchPanel } from '../types'; import { createGenerateCsv } from './lib'; export const executeJobFactory: ExecuteJobFactory> = function executeJobFactoryFn(server: ServerFacade) { +>> = function executeJobFactoryFn(server: ServerFacade, parentLogger: Logger) { const crypto = cryptoFactory(server); - const logger = LevelLogger.createForServer(server, [ - PLUGIN_ID, - CSV_FROM_SAVEDOBJECT_JOB_TYPE, - 'execute-job', - ]); + const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'execute-job']); return async function executeJob( jobId: string | null, diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts index c2fda05fbe3e9..7d5c69655c362 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts @@ -6,14 +6,14 @@ import * as Rx from 'rxjs'; import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators'; -import { PLUGIN_ID, PNG_JOB_TYPE } from '../../../../common/constants'; +import { PNG_JOB_TYPE } from '../../../../common/constants'; import { ServerFacade, ExecuteJobFactory, ESQueueWorkerExecuteFn, HeadlessChromiumDriverFactory, + Logger, } from '../../../../types'; -import { LevelLogger } from '../../../../server/lib'; import { decryptJobHeaders, omitBlacklistedHeaders, @@ -27,10 +27,11 @@ type QueuedPngExecutorFactory = ExecuteJobFactory { uiSettingsServiceFactory: server.uiSettingsServiceFactory, // @ts-ignore Property 'fieldFormatServiceFactory' does not exist on type 'Server'. 
fieldFormatServiceFactory: server.fieldFormatServiceFactory, - log: server.log.bind(server), }; - const plugin: ReportingPlugin = reportingPluginFactory(__LEGACY, this); + const initializerContext = server.newPlatform.coreContext; + const plugin: ReportingPlugin = reportingPluginFactory(initializerContext, __LEGACY, this); await plugin.setup(coreSetup, pluginsSetup); }, diff --git a/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts b/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts index a253988b01952..128df4d318c76 100644 --- a/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts +++ b/x-pack/legacy/plugins/reporting/server/browsers/create_browser_driver_factory.ts @@ -6,17 +6,16 @@ import { ensureBrowserDownloaded } from './download'; import { installBrowser } from './install'; -import { LevelLogger } from '../lib/level_logger'; -import { ServerFacade, CaptureConfig } from '../../types'; -import { PLUGIN_ID, BROWSER_TYPE } from '../../common/constants'; +import { ServerFacade, CaptureConfig, Logger } from '../../types'; +import { BROWSER_TYPE } from '../../common/constants'; import { chromium } from './index'; import { HeadlessChromiumDriverFactory } from './chromium/driver_factory'; export async function createBrowserDriverFactory( - server: ServerFacade + server: ServerFacade, + logger: Logger ): Promise { const config = server.config(); - const logger = LevelLogger.createForServer(server, [PLUGIN_ID, 'browser-driver']); const dataDir: string = config.get('path.data'); const captureConfig: CaptureConfig = config.get('xpack.reporting.capture'); diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts b/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts index 4a9b0c7cd2ebb..05b760c0c3bd6 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts @@ -4,17 +4,16 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { PLUGIN_ID } from '../../common/constants'; import { ServerFacade, ExportTypesRegistry, HeadlessChromiumDriverFactory, QueueConfig, + Logger, } from '../../types'; // @ts-ignore import { Esqueue } from './esqueue'; import { createWorkerFactory } from './create_worker'; -import { LevelLogger } from './level_logger'; import { createTaggedLogger } from './create_tagged_logger'; // TODO remove createTaggedLogger once esqueue is removed interface CreateQueueFactoryOpts { @@ -24,6 +23,7 @@ interface CreateQueueFactoryOpts { export function createQueueFactory( server: ServerFacade, + logger: Logger, { exportTypesRegistry, browserDriverFactory }: CreateQueueFactoryOpts ): Esqueue { const queueConfig: QueueConfig = server.config().get('xpack.reporting.queue'); @@ -34,23 +34,25 @@ export function createQueueFactory( timeout: queueConfig.timeout, dateSeparator: '.', client: server.plugins.elasticsearch.getCluster('admin'), - logger: createTaggedLogger(server, [PLUGIN_ID, 'esqueue', 'queue-worker']), + logger: createTaggedLogger(logger, ['esqueue', 'queue-worker']), }; const queue: Esqueue = new Esqueue(index, queueOptions); if (queueConfig.pollEnabled) { // create workers to poll the index for idle jobs waiting to be claimed and executed - const createWorker = createWorkerFactory(server, { exportTypesRegistry, browserDriverFactory }); + const createWorker = createWorkerFactory(server, logger, { + exportTypesRegistry, + browserDriverFactory, + }); createWorker(queue); } else { - const logger = LevelLogger.createForServer(server, [PLUGIN_ID, 'create_queue']); logger.info( 'xpack.reporting.queue.pollEnabled is set to false. This Kibana instance ' + 'will not poll for idle jobs to claim and execute. Make sure another ' + 'Kibana instance with polling enabled is running in this cluster so ' + 'reporting jobs can complete.', - ['info'] + ['create_queue'] ); } diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_tagged_logger.ts b/x-pack/legacy/plugins/reporting/server/lib/create_tagged_logger.ts index 40a1cd8203d2f..97b34dfe40830 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_tagged_logger.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_tagged_logger.ts @@ -4,23 +4,23 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ServerFacade } from '../../types'; +import { Logger } from '../../types'; -/** - * @function taggedLogger - * @param {string} message - * @param {string[]} [additionalTags] - */ - -/** - * Creates a taggedLogger function with tags, allows the consumer to optionally provide additional tags - * - * @param {Server} server - * @param {string[]} tags - tags to always be passed into the `logger` function - * @returns taggedLogger - */ -export function createTaggedLogger(server: ServerFacade, tags: string[]) { +export function createTaggedLogger(logger: Logger, tags: string[]) { return (msg: string, additionalTags = []) => { - server.log([...tags, ...additionalTags], msg); + const allTags = [...tags, ...additionalTags]; + + if (allTags.includes('info')) { + const newTags = allTags.filter(t => t !== 'info'); // Ensure 'info' is not included twice + logger.info(msg, newTags); + } else if (allTags.includes('debug')) { + const newTags = allTags.filter(t => t !== 'debug'); + logger.debug(msg, newTags); + } else if (allTags.includes('warn') || allTags.includes('warning')) { + const newTags = allTags.filter(t => t !== 'warn' && t !== 'warning'); + logger.warn(msg, newTags); + } else { + logger.error(msg, allTags); + } }; } diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts b/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts index 8f843752491ec..6a5c93db32376 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts @@ -25,10 +25,11 @@ const executeJobFactoryStub = sinon.stub(); const getMockServer = (): ServerFacade => { return ({ - log: sinon.stub(), config: () => ({ get: configGetStub }), } as unknown) as ServerFacade; }; +const getMockLogger = jest.fn(); + const getMockExportTypesRegistry = ( exportTypes: any[] = [{ executeJobFactory: executeJobFactoryStub }] ) => ({ @@ -47,7 +48,7 @@ describe('Create Worker', () => { test('Creates a single Esqueue worker for Reporting', async () => { const exportTypesRegistry = getMockExportTypesRegistry(); - const createWorker = createWorkerFactory(getMockServer(), { + const createWorker = createWorkerFactory(getMockServer(), getMockLogger(), { exportTypesRegistry: exportTypesRegistry as ExportTypesRegistry, browserDriverFactory: {} as HeadlessChromiumDriverFactory, }); @@ -81,7 +82,7 @@ Object { { executeJobFactory: executeJobFactoryStub }, { executeJobFactory: executeJobFactoryStub }, ]); - const createWorker = createWorkerFactory(getMockServer(), { + const createWorker = createWorkerFactory(getMockServer(), getMockLogger(), { exportTypesRegistry: exportTypesRegistry as ExportTypesRegistry, browserDriverFactory: {} as HeadlessChromiumDriverFactory, }); diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts b/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts index 1326e411b6c5c..67869016a250b 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts @@ -17,10 +17,10 @@ import { JobSource, RequestFacade, ServerFacade, + Logger, } from '../../types'; // @ts-ignore untyped dependency import { events as esqueueEvents } from './esqueue'; -import { LevelLogger } from './level_logger'; interface CreateWorkerFactoryOpts { exportTypesRegistry: ExportTypesRegistry; @@ -29,11 +29,11 @@ interface CreateWorkerFactoryOpts { export function createWorkerFactory( server: ServerFacade, + logger: Logger, { exportTypesRegistry, 
browserDriverFactory }: CreateWorkerFactoryOpts ) { type JobDocPayloadType = JobDocPayload; const config = server.config(); - const logger = LevelLogger.createForServer(server, [PLUGIN_ID, 'queue-worker']); const queueConfig: QueueConfig = config.get('xpack.reporting.queue'); const kibanaName: string = config.get('server.name'); const kibanaId: string = config.get('server.uuid'); @@ -50,7 +50,7 @@ export function createWorkerFactory( ExportTypeDefinition >) { // TODO: the executeJobFn should be unwrapped in the register method of the export types registry - const jobExecutor = exportType.executeJobFactory(server, { browserDriverFactory }); + const jobExecutor = exportType.executeJobFactory(server, logger, { browserDriverFactory }); jobExecutors.set(exportType.jobType, jobExecutor); } diff --git a/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts b/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts index 2d044ab31a160..14c57fa35dcf4 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts @@ -35,8 +35,10 @@ interface EnqueueJobFactoryOpts { export function enqueueJobFactory( server: ServerFacade, + parentLogger: Logger, { exportTypesRegistry, esqueue }: EnqueueJobFactoryOpts ): EnqueueJobFn { + const logger = parentLogger.clone(['queue-job']); const config = server.config(); const captureConfig: CaptureConfig = config.get('xpack.reporting.capture'); const browserType = captureConfig.browser.type; @@ -44,7 +46,6 @@ export function enqueueJobFactory( const queueConfig: QueueConfig = config.get('xpack.reporting.queue'); return async function enqueueJob( - parentLogger: Logger, exportTypeId: string, jobParams: JobParamsType, user: string, @@ -53,7 +54,6 @@ export function enqueueJobFactory( ): Promise { type CreateJobFn = ESQueueCreateJobFn | ImmediateCreateJobFn; - const logger = parentLogger.clone(['queue-job']); const exportType = exportTypesRegistry.getById(exportTypeId); if (exportType == null) { @@ -61,7 +61,7 @@ export function enqueueJobFactory( } // TODO: the createJobFn should be unwrapped in the register method of the export types registry - const createJob = exportType.createJobFactory(server) as CreateJobFn; + const createJob = exportType.createJobFactory(server, logger) as CreateJobFn; const payload = await createJob(jobParams, headers, request); const options = { diff --git a/x-pack/legacy/plugins/reporting/server/lib/get_user.ts b/x-pack/legacy/plugins/reporting/server/lib/get_user.ts index e2921de795012..fac1177afff74 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/get_user.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/get_user.ts @@ -5,9 +5,9 @@ */ import { Legacy } from 'kibana'; -import { ServerFacade } from '../../types'; +import { Logger, ServerFacade } from '../../types'; -export function getUserFactory(server: ServerFacade) { +export function getUserFactory(server: ServerFacade, logger: Logger) { /* * Legacy.Request because this is called from routing middleware */ @@ -19,7 +19,7 @@ export function getUserFactory(server: ServerFacade) { try { return await server.plugins.security.getUser(request); } catch (err) { - server.log(['reporting', 'getUser', 'error'], err); + logger.error(err); return null; } }; diff --git a/x-pack/legacy/plugins/reporting/server/lib/level_logger.ts b/x-pack/legacy/plugins/reporting/server/lib/level_logger.ts index 839fa16a716b7..d015d500363c1 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/level_logger.ts +++ 
b/x-pack/legacy/plugins/reporting/server/lib/level_logger.ts @@ -4,48 +4,46 @@ * you may not use this file except in compliance with the Elastic License. */ -import { ServerFacade } from '../../types'; +import { LoggerFactory } from 'src/core/server'; const trimStr = (toTrim: string) => { return typeof toTrim === 'string' ? toTrim.trim() : toTrim; }; export class LevelLogger { - private _logger: any; + private _logger: LoggerFactory; private _tags: string[]; + public warning: (msg: string, tags?: string[]) => void; - public warn: (msg: string, tags?: string[]) => void; - - static createForServer(server: ServerFacade, tags: string[]) { - const serverLog: ServerFacade['log'] = (tgs: string[], msg: string) => server.log(tgs, msg); - return new LevelLogger(serverLog, tags); - } - - constructor(logger: ServerFacade['log'], tags: string[]) { + constructor(logger: LoggerFactory, tags?: string[]) { this._logger = logger; - this._tags = tags; + this._tags = tags || []; /* * This shortcut provides maintenance convenience: Reporting code has been * using both .warn and .warning */ - this.warn = this.warning.bind(this); + this.warning = this.warn.bind(this); + } + + private getLogger(tags: string[]) { + return this._logger.get(...this._tags, ...tags); } public error(err: string | Error, tags: string[] = []) { - this._logger([...this._tags, ...tags, 'error'], err); + this.getLogger(tags).error(err); } - public warning(msg: string, tags: string[] = []) { - this._logger([...this._tags, ...tags, 'warning'], trimStr(msg)); + public warn(msg: string, tags: string[] = []) { + this.getLogger(tags).warn(msg); } public debug(msg: string, tags: string[] = []) { - this._logger([...this._tags, ...tags, 'debug'], trimStr(msg)); + this.getLogger(tags).debug(msg); } public info(msg: string, tags: string[] = []) { - this._logger([...this._tags, ...tags, 'info'], trimStr(msg)); + this.getLogger(tags).info(trimStr(msg)); } public clone(tags: string[]) { diff --git a/x-pack/legacy/plugins/reporting/server/plugin.ts b/x-pack/legacy/plugins/reporting/server/plugin.ts index 934a3487209c4..b0dc56dd8d8d0 100644 --- a/x-pack/legacy/plugins/reporting/server/plugin.ts +++ b/x-pack/legacy/plugins/reporting/server/plugin.ts @@ -5,7 +5,7 @@ */ import { Legacy } from 'kibana'; -import { CoreSetup, CoreStart, Plugin } from 'src/core/server'; +import { CoreSetup, CoreStart, Plugin, LoggerFactory } from 'src/core/server'; import { IUiSettingsClient } from 'src/core/server'; import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; import { XPackMainPlugin } from '../../xpack_main/server/xpack_main'; @@ -14,11 +14,15 @@ import { mirrorPluginStatus } from '../../../server/lib/mirror_plugin_status'; import { PLUGIN_ID } from '../common/constants'; import { ReportingPluginSpecOptions } from '../types.d'; import { registerRoutes } from './routes'; -import { LevelLogger, checkLicenseFactory, getExportTypesRegistry, runValidations } from './lib'; +import { checkLicenseFactory, getExportTypesRegistry, runValidations, LevelLogger } from './lib'; import { createBrowserDriverFactory } from './browsers'; import { registerReportingUsageCollector } from './usage'; import { logConfiguration } from '../log_configuration'; +export interface ReportingInitializerContext { + logger: LoggerFactory; +} + // For now there is no exposed functionality to other plugins export type ReportingSetup = object; export type ReportingStart = object; @@ -33,7 +37,6 @@ type LegacyPlugins = Legacy.Server['plugins']; export interface LegacySetup { config: 
Legacy.Server['config']; info: Legacy.Server['info']; - log: Legacy.Server['log']; plugins: { elasticsearch: LegacyPlugins['elasticsearch']; security: LegacyPlugins['security']; @@ -59,10 +62,17 @@ export type ReportingPlugin = Plugin< * into `setup`. The factory parameters take the legacy dependencies, and the * `setup` method gets it from enclosure */ export function reportingPluginFactory( + initializerContext: ReportingInitializerContext, __LEGACY: LegacySetup, legacyPlugin: ReportingPluginSpecOptions ) { return new (class ReportingPlugin implements ReportingPlugin { + private initializerContext: ReportingInitializerContext; + + constructor(context: ReportingInitializerContext) { + this.initializerContext = context; + } + public async setup(core: CoreSetup, plugins: ReportingSetupDeps): Promise { const exportTypesRegistry = getExportTypesRegistry(); @@ -76,8 +86,8 @@ export function reportingPluginFactory( exportTypesRegistry ); - const logger = LevelLogger.createForServer(__LEGACY, [PLUGIN_ID]); - const browserDriverFactory = await createBrowserDriverFactory(__LEGACY); + const logger = new LevelLogger(this.initializerContext.logger.get('reporting')); + const browserDriverFactory = await createBrowserDriverFactory(__LEGACY, logger); logConfiguration(__LEGACY, logger); runValidations(__LEGACY, logger, browserDriverFactory); @@ -103,5 +113,5 @@ export function reportingPluginFactory( public start(core: CoreStart, plugins: ReportingStartDeps): ReportingStart { return {}; } - })(); + })(initializerContext); } diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts index c9225dfee6978..d920015c4290c 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts @@ -9,7 +9,7 @@ import boom from 'boom'; import Joi from 'joi'; import rison from 'rison-node'; import { API_BASE_URL } from '../../common/constants'; -import { ServerFacade, ReportingResponseToolkit } from '../../types'; +import { ServerFacade, ReportingResponseToolkit, Logger } from '../../types'; import { getRouteConfigFactoryReportingPre, GetRouteConfigFactoryFn, @@ -23,11 +23,13 @@ const BASE_GENERATE = `${API_BASE_URL}/generate`; export function registerGenerateFromJobParams( server: ServerFacade, handler: HandlerFunction, - handleError: HandlerErrorFunction + handleError: HandlerErrorFunction, + logger: Logger ) { const getRouteConfig = () => { const getOriginalRouteConfig: GetRouteConfigFactoryFn = getRouteConfigFactoryReportingPre( - server + server, + logger ); const routeConfigFactory: RouteConfigFactory = getOriginalRouteConfig( ({ params: { exportType } }) => exportType diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts index 2c509136b1b44..0da8e40ea29c0 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts @@ -7,7 +7,7 @@ import { Legacy } from 'kibana'; import { get } from 'lodash'; import { API_BASE_GENERATE_V1, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../common/constants'; -import { ServerFacade, ReportingResponseToolkit } from '../../types'; +import { ServerFacade, ReportingResponseToolkit, Logger } from '../../types'; import { HandlerErrorFunction, HandlerFunction, QueuedJobPayload } 
from './types'; import { getRouteOptionsCsv } from './lib/route_config_factories'; import { makeRequestFacade } from './lib/make_request_facade'; @@ -25,9 +25,10 @@ import { getJobParamsFromRequest } from '../../export_types/csv_from_savedobject export function registerGenerateCsvFromSavedObject( server: ServerFacade, handleRoute: HandlerFunction, - handleRouteError: HandlerErrorFunction + handleRouteError: HandlerErrorFunction, + logger: Logger ) { - const routeOptions = getRouteOptionsCsv(server); + const routeOptions = getRouteOptionsCsv(server, logger); server.route({ path: `${API_BASE_GENERATE_V1}/csv/saved-object/{savedObjectType}:{savedObjectId}`, diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts index b372422952622..60799b20ce420 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts @@ -33,7 +33,7 @@ export function registerGenerateCsvFromSavedObjectImmediate( server: ServerFacade, parentLogger: Logger ) { - const routeOptions = getRouteOptionsCsv(server); + const routeOptions = getRouteOptionsCsv(server, parentLogger); /* * CSV export with the `immediate` option does not queue a job with Reporting's ESQueue to run the job async. Instead, this does: @@ -55,8 +55,8 @@ export function registerGenerateCsvFromSavedObjectImmediate( * * Calling an execute job factory requires passing a browserDriverFactory option, so we should not call the factory from here */ - const createJobFn = createJobFactory(server); - const executeJobFn = executeJobFactory(server, { + const createJobFn = createJobFactory(server, logger); + const executeJobFn = executeJobFactory(server, logger, { browserDriverFactory: {} as HeadlessChromiumDriverFactory, }); const jobDocPayload: JobDocPayloadPanelCsv = await createJobFn( diff --git a/x-pack/legacy/plugins/reporting/server/routes/generation.ts b/x-pack/legacy/plugins/reporting/server/routes/generation.ts index 21af54ddf11e3..2a3102d0dd159 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generation.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generation.ts @@ -31,8 +31,8 @@ export function registerJobGenerationRoutes( // @ts-ignore TODO const { errors: esErrors } = server.plugins.elasticsearch.getCluster('admin'); - const esqueue = createQueueFactory(server, { exportTypesRegistry, browserDriverFactory }); - const enqueueJob = enqueueJobFactory(server, { exportTypesRegistry, esqueue }); + const esqueue = createQueueFactory(server, logger, { exportTypesRegistry, browserDriverFactory }); + const enqueueJob = enqueueJobFactory(server, logger, { exportTypesRegistry, esqueue }); /* * Generates enqueued job details to use in responses @@ -47,7 +47,7 @@ export function registerJobGenerationRoutes( const user = request.pre.user; const headers = request.headers; - const job = await enqueueJob(logger, exportTypeId, jobParams, user, headers, request); + const job = await enqueueJob(exportTypeId, jobParams, user, headers, request); // return the queue's job information const jobJson = job.toJSON(); @@ -73,11 +73,11 @@ export function registerJobGenerationRoutes( return err; } - registerGenerateFromJobParams(server, handler, handleError); + registerGenerateFromJobParams(server, handler, handleError, logger); // Register beta panel-action download-related API's if 
(config.get('xpack.reporting.csv.enablePanelActionDownload')) { - registerGenerateCsvFromSavedObject(server, handler, handleError); + registerGenerateCsvFromSavedObject(server, handler, handleError, logger); registerGenerateCsvFromSavedObjectImmediate(server, logger); } } diff --git a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts index ed2d8777a908e..049ee0ce20ceb 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts @@ -17,9 +17,7 @@ import { JobSource, ListQuery, } from '../../types'; -// @ts-ignore import { jobsQueryFactory } from '../lib/jobs_query'; -// @ts-ignore import { jobResponseHandlerFactory } from './lib/job_response_handler'; import { getRouteConfigFactoryDownloadPre, @@ -39,8 +37,8 @@ export function registerJobInfoRoutes( logger: Logger ) { const jobsQuery = jobsQueryFactory(server); - const getRouteConfig = getRouteConfigFactoryManagementPre(server); - const getRouteConfigDownload = getRouteConfigFactoryDownloadPre(server); + const getRouteConfig = getRouteConfigFactoryManagementPre(server, logger); + const getRouteConfigDownload = getRouteConfigFactoryDownloadPre(server, logger); // list jobs in the queue, paginated server.route({ diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts index eb473e0bc76d4..906f266290a42 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts @@ -8,7 +8,7 @@ import Boom from 'boom'; import { Legacy } from 'kibana'; import { AuthenticatedUser } from '../../../../../../plugins/security/server'; import { getUserFactory } from '../../lib/get_user'; -import { ServerFacade } from '../../../types'; +import { ServerFacade, Logger } from '../../../types'; const superuserRole = 'superuser'; @@ -17,19 +17,19 @@ export type PreRoutingFunction = ( ) => Promise | AuthenticatedUser | null>; export const authorizedUserPreRoutingFactory = function authorizedUserPreRoutingFn( - server: ServerFacade + server: ServerFacade, + logger: Logger ) { - const getUser = getUserFactory(server); + const getUser = getUserFactory(server, logger); const config = server.config(); return async function authorizedUserPreRouting(request: Legacy.Request) { const xpackInfo = server.plugins.xpack_main.info; if (!xpackInfo || !xpackInfo.isAvailable()) { - server.log( - ['reporting', 'authorizedUserPreRouting', 'debug'], - 'Unable to authorize user before xpack info is available.' 
- ); + logger.warn('Unable to authorize user before xpack info is available.', [ + 'authorizedUserPreRouting', + ]); return Boom.notFound(); } diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts index f1e7786baf7a2..88c5e4edc12f8 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts @@ -6,12 +6,13 @@ import Boom from 'boom'; import { Legacy } from 'kibana'; -import { ServerFacade } from '../../../types'; +import { Logger, ServerFacade } from '../../../types'; export type GetReportingFeatureIdFn = (request: Legacy.Request) => string; export const reportingFeaturePreRoutingFactory = function reportingFeaturePreRoutingFn( - server: ServerFacade + server: ServerFacade, + logger: Logger ) { const xpackMainPlugin = server.plugins.xpack_main; const pluginId = 'reporting'; diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts index caf24bf64f602..16d0101f05acb 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts @@ -5,8 +5,9 @@ */ import Joi from 'joi'; +import { Legacy } from 'kibana'; import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants'; -import { ServerFacade } from '../../../types'; +import { ServerFacade, Logger } from '../../../types'; import { authorizedUserPreRoutingFactory } from './authorized_user_pre_routing'; import { reportingFeaturePreRoutingFactory } from './reporting_feature_pre_routing'; import { GetReportingFeatureIdFn } from './reporting_feature_pre_routing'; @@ -25,11 +26,14 @@ export type GetRouteConfigFactoryFn = ( getFeatureId?: GetReportingFeatureIdFn ) => RouteConfigFactory; -export function getRouteConfigFactoryReportingPre(server: ServerFacade): GetRouteConfigFactoryFn { - const authorizedUserPreRouting = authorizedUserPreRoutingFactory(server); - const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(server); +export function getRouteConfigFactoryReportingPre( + server: ServerFacade, + logger: Logger +): GetRouteConfigFactoryFn { + const authorizedUserPreRouting = authorizedUserPreRoutingFactory(server, logger); + const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(server, logger); - return (getFeatureId?: GetReportingFeatureIdFn): RouteConfigFactory => { + return (getFeatureId?: GetFeatureFunction): RouteConfigFactory => { const preRouting: any[] = [{ method: authorizedUserPreRouting, assign: 'user' }]; if (getFeatureId) { preRouting.push(reportingFeaturePreRouting(getFeatureId)); @@ -42,8 +46,8 @@ export function getRouteConfigFactoryReportingPre(server: ServerFacade): GetRout }; } -export function getRouteOptionsCsv(server: ServerFacade) { - const getRouteConfig = getRouteConfigFactoryReportingPre(server); +export function getRouteOptionsCsv(server: ServerFacade, logger: Logger) { + const getRouteConfig = getRouteConfigFactoryReportingPre(server, logger); return { ...getRouteConfig(() => CSV_FROM_SAVEDOBJECT_JOB_TYPE), validate: { @@ -63,9 +67,12 @@ export function getRouteOptionsCsv(server: ServerFacade) { }; } -export function getRouteConfigFactoryManagementPre(server: ServerFacade): GetRouteConfigFactoryFn { - const 
authorizedUserPreRouting = authorizedUserPreRoutingFactory(server); - const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(server); +export function getRouteConfigFactoryManagementPre( + server: ServerFacade, + logger: Logger +): GetRouteConfigFactoryFn { + const authorizedUserPreRouting = authorizedUserPreRoutingFactory(server, logger); + const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(server, logger); const managementPreRouting = reportingFeaturePreRouting(() => 'management'); return (): RouteConfigFactory => { @@ -83,8 +90,11 @@ export function getRouteConfigFactoryManagementPre(server: ServerFacade): GetRou // TOC at the end of the PDF, but it's sending multiple cookies and causing our auth to fail with a 401. // Additionally, the range-request doesn't alleviate any performance issues on the server as the entire // download is loaded into memory. -export function getRouteConfigFactoryDownloadPre(server: ServerFacade): GetRouteConfigFactoryFn { - const getManagementRouteConfig = getRouteConfigFactoryManagementPre(server); +export function getRouteConfigFactoryDownloadPre( + server: ServerFacade, + logger: Logger +): GetRouteConfigFactoryFn { + const getManagementRouteConfig = getRouteConfigFactoryManagementPre(server, logger); return (): RouteConfigFactory => ({ ...getManagementRouteConfig(), tags: [API_TAG], diff --git a/x-pack/legacy/plugins/reporting/types.d.ts b/x-pack/legacy/plugins/reporting/types.d.ts index e600492014389..6d769c0d7b717 100644 --- a/x-pack/legacy/plugins/reporting/types.d.ts +++ b/x-pack/legacy/plugins/reporting/types.d.ts @@ -74,7 +74,6 @@ export type ServerFacade = LegacySetup; export type ReportingPluginSpecOptions = Legacy.PluginSpecOptions; export type EnqueueJobFn = ( - parentLogger: LevelLogger, exportTypeId: string, jobParams: JobParamsType, user: string, @@ -278,9 +277,13 @@ export interface ESQueueInstance { ) => ESQueueWorker; } -export type CreateJobFactory = (server: ServerFacade) => CreateJobFnType; +export type CreateJobFactory = ( + server: ServerFacade, + logger: LevelLogger +) => CreateJobFnType; export type ExecuteJobFactory = ( server: ServerFacade, + logger: LevelLogger, opts: { browserDriverFactory: HeadlessChromiumDriverFactory; } From 77f91bf60d38b078d5dc70a8b2d8fafdf21431aa Mon Sep 17 00:00:00 2001 From: Timothy Sullivan Date: Wed, 22 Jan 2020 10:13:16 -0700 Subject: [PATCH 3/7] Add log tag --- x-pack/legacy/plugins/reporting/server/lib/get_user.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/legacy/plugins/reporting/server/lib/get_user.ts b/x-pack/legacy/plugins/reporting/server/lib/get_user.ts index fac1177afff74..9ee8d9a835c89 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/get_user.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/get_user.ts @@ -19,7 +19,7 @@ export function getUserFactory(server: ServerFacade, logger: Logger) { try { return await server.plugins.security.getUser(request); } catch (err) { - logger.error(err); + logger.error(err, ['getUser']); return null; } }; From 94ebeeb3d9b5b88c9a802f3a5ea41dbf00a7e81c Mon Sep 17 00:00:00 2001 From: Timothy Sullivan Date: Wed, 22 Jan 2020 10:13:39 -0700 Subject: [PATCH 4/7] fix jest tests --- .../csv_from_savedobject/server/execute_job.ts | 1 - .../png/server/execute_job/index.test.js | 8 +++++--- .../server/execute_job/index.test.js | 8 +++++--- .../lib/authorized_user_pre_routing.test.js | 15 ++++++++------- 4 files changed, 18 insertions(+), 14 deletions(-) diff --git 
a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts index 75e0826c3f0a5..03f491deaa43d 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts @@ -11,7 +11,6 @@ import { ExecuteJobFactory, ImmediateExecuteFn, JobDocOutput, - ServerFacade, Logger, RequestFacade, ServerFacade, diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js index 1be65722fa668..4f02ab5d4c077 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js @@ -57,6 +57,8 @@ beforeEach(() => { afterEach(() => generatePngObservableFactory.mockReset()); +const getMockLogger = () => new LevelLogger(); + const encryptHeaders = async headers => { const crypto = cryptoFactory(mockServer); return await crypto.encrypt(headers); @@ -68,7 +70,7 @@ test(`passes browserTimezone to generatePng`, async () => { const generatePngObservable = generatePngObservableFactory(); generatePngObservable.mockReturnValue(Rx.of(Buffer.from(''))); - const executeJob = executeJobFactory(mockServer, { browserDriverFactory: {} }); + const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} }); const browserTimezone = 'UTC'; await executeJob( 'pngJobId', @@ -86,7 +88,7 @@ test(`passes browserTimezone to generatePng`, async () => { }); test(`returns content_type of application/png`, async () => { - const executeJob = executeJobFactory(mockServer, { browserDriverFactory: {} }); + const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} }); const encryptedHeaders = await encryptHeaders({}); const generatePngObservable = generatePngObservableFactory(); @@ -106,7 +108,7 @@ test(`returns content of generatePng getBuffer base64 encoded`, async () => { const generatePngObservable = generatePngObservableFactory(); generatePngObservable.mockReturnValue(Rx.of(Buffer.from(testContent))); - const executeJob = executeJobFactory(mockServer, { browserDriverFactory: {} }); + const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} }); const encryptedHeaders = await encryptHeaders({}); const { content } = await executeJob( 'pngJobId', diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js index ddbee6e9d54a4..cb038561cd8bc 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js @@ -57,6 +57,8 @@ beforeEach(() => { afterEach(() => generatePdfObservableFactory.mockReset()); +const getMockLogger = () => new LevelLogger(); + const encryptHeaders = async headers => { const crypto = cryptoFactory(mockServer); return await crypto.encrypt(headers); @@ -68,7 +70,7 @@ test(`passes browserTimezone to generatePdf`, async () => { const generatePdfObservable = generatePdfObservableFactory(); generatePdfObservable.mockReturnValue(Rx.of(Buffer.from(''))); - const executeJob = 
executeJobFactory(mockServer, { browserDriverFactory: {} }); + const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} }); const browserTimezone = 'UTC'; await executeJob( 'pdfJobId', @@ -89,7 +91,7 @@ test(`passes browserTimezone to generatePdf`, async () => { }); test(`returns content_type of application/pdf`, async () => { - const executeJob = executeJobFactory(mockServer, { browserDriverFactory: {} }); + const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} }); const encryptedHeaders = await encryptHeaders({}); const generatePdfObservable = generatePdfObservableFactory(); @@ -109,7 +111,7 @@ test(`returns content of generatePdf getBuffer base64 encoded`, async () => { const generatePdfObservable = generatePdfObservableFactory(); generatePdfObservable.mockReturnValue(Rx.of(Buffer.from(testContent))); - const executeJob = executeJobFactory(mockServer, { browserDriverFactory: {} }); + const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} }); const encryptedHeaders = await encryptHeaders({}); const { content } = await executeJob( 'pdfJobId', diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js index 9997aca0954d0..841f753f0c09b 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.test.js @@ -60,11 +60,12 @@ describe('authorized_user_pre_routing', function() { return mockServer; }; })(); + const getMockLogger = () => ({ warn: jest.fn() }); it('should return with boom notFound when xpackInfo is undefined', async function() { const mockServer = createMockServer({ xpackInfoUndefined: true }); - const authorizedUserPreRouting = authorizedUserPreRoutingFactory(mockServer); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory(mockServer, getMockLogger()); const response = await authorizedUserPreRouting({}); expect(response.isBoom).to.be(true); expect(response.output.statusCode).to.be(404); @@ -73,7 +74,7 @@ describe('authorized_user_pre_routing', function() { it(`should return with boom notFound when xpackInfo isn't available`, async function() { const mockServer = createMockServer({ xpackInfoAvailable: false }); - const authorizedUserPreRouting = authorizedUserPreRoutingFactory(mockServer); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory(mockServer, getMockLogger()); const response = await authorizedUserPreRouting(); expect(response.isBoom).to.be(true); expect(response.output.statusCode).to.be(404); @@ -82,7 +83,7 @@ describe('authorized_user_pre_routing', function() { it('should return with null user when security is disabled in Elasticsearch', async function() { const mockServer = createMockServer({ securityEnabled: false }); - const authorizedUserPreRouting = authorizedUserPreRoutingFactory(mockServer); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory(mockServer, getMockLogger()); const response = await authorizedUserPreRouting(); expect(response).to.be(null); }); @@ -90,7 +91,7 @@ describe('authorized_user_pre_routing', function() { it('should return with boom unauthenticated when security is enabled but no authenticated user', async function() { const mockServer = createMockServer({ user: null }); - const authorizedUserPreRouting = 
authorizedUserPreRoutingFactory(mockServer); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory(mockServer, getMockLogger()); const response = await authorizedUserPreRouting(); expect(response.isBoom).to.be(true); expect(response.output.statusCode).to.be(401); @@ -102,7 +103,7 @@ describe('authorized_user_pre_routing', function() { config: { 'xpack.reporting.roles.allow': ['.reporting_user'] }, }); - const authorizedUserPreRouting = authorizedUserPreRoutingFactory(mockServer); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory(mockServer, getMockLogger()); const response = await authorizedUserPreRouting(); expect(response.isBoom).to.be(true); expect(response.output.statusCode).to.be(403); @@ -115,7 +116,7 @@ describe('authorized_user_pre_routing', function() { config: { 'xpack.reporting.roles.allow': ['.reporting_user'] }, }); - const authorizedUserPreRouting = authorizedUserPreRoutingFactory(mockServer); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory(mockServer, getMockLogger()); const response = await authorizedUserPreRouting(); expect(response).to.be(user); }); @@ -127,7 +128,7 @@ describe('authorized_user_pre_routing', function() { config: { 'xpack.reporting.roles.allow': [] }, }); - const authorizedUserPreRouting = authorizedUserPreRoutingFactory(mockServer); + const authorizedUserPreRouting = authorizedUserPreRoutingFactory(mockServer, getMockLogger()); const response = await authorizedUserPreRouting(); expect(response).to.be(user); }); From c4b807ebaab11a0b882587c11203d82d52db5721 Mon Sep 17 00:00:00 2001 From: Timothy Sullivan Date: Wed, 22 Jan 2020 13:30:19 -0700 Subject: [PATCH 5/7] ts fixes --- .../reporting/server/routes/lib/route_config_factories.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts index 16d0101f05acb..25c08261490d5 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts @@ -5,7 +5,6 @@ */ import Joi from 'joi'; -import { Legacy } from 'kibana'; import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants'; import { ServerFacade, Logger } from '../../../types'; import { authorizedUserPreRoutingFactory } from './authorized_user_pre_routing'; @@ -33,7 +32,7 @@ export function getRouteConfigFactoryReportingPre( const authorizedUserPreRouting = authorizedUserPreRoutingFactory(server, logger); const reportingFeaturePreRouting = reportingFeaturePreRoutingFactory(server, logger); - return (getFeatureId?: GetFeatureFunction): RouteConfigFactory => { + return (getFeatureId?: GetReportingFeatureIdFn): RouteConfigFactory => { const preRouting: any[] = [{ method: authorizedUserPreRouting, assign: 'user' }]; if (getFeatureId) { preRouting.push(reportingFeaturePreRouting(getFeatureId)); From 393f962ae527b719f8b7d8e39562a934cc5144d6 Mon Sep 17 00:00:00 2001 From: Timothy Sullivan Date: Wed, 22 Jan 2020 16:10:44 -0700 Subject: [PATCH 6/7] fix mocha test --- .../csv/server/__tests__/execute_job.js | 87 ++++++++++--------- 1 file changed, 46 insertions(+), 41 deletions(-) diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/__tests__/execute_job.js b/x-pack/legacy/plugins/reporting/export_types/csv/server/__tests__/execute_job.js index 83be303191bad..dd4f86c9ad7ef 100644 --- 
a/x-pack/legacy/plugins/reporting/export_types/csv/server/__tests__/execute_job.js +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/__tests__/execute_job.js @@ -8,13 +8,12 @@ import expect from '@kbn/expect'; import Puid from 'puid'; import sinon from 'sinon'; import nodeCrypto from '@elastic/node-crypto'; - import { CancellationToken } from '../../../../common/cancellation_token'; import { FieldFormatsService } from '../../../../../../../../src/legacy/ui/field_formats/mixin/field_formats_service'; // Reporting uses an unconventional directory structure so the linter marks this as a violation // eslint-disable-next-line @kbn/eslint/no-restricted-paths import { StringFormat } from '../../../../../../../../src/plugins/data/server'; - +import { LevelLogger } from '../../../../server/lib/level_logger'; import { executeJobFactory } from '../execute_job'; const delay = ms => new Promise(resolve => setTimeout(() => resolve(), ms)); @@ -41,6 +40,12 @@ describe('CSV Execute Job', function() { const headers = { sid: 'test', }; + const mockLogger = new LevelLogger({ + get: () => ({ + debug: () => {}, + warn: () => {}, + }), + }); let defaultElasticsearchResponse; let encryptedHeaders; @@ -123,7 +128,7 @@ describe('CSV Execute Job', function() { describe('calls getScopedSavedObjectsClient with request', function() { it('containing decrypted headers', async function() { - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -141,7 +146,7 @@ describe('CSV Execute Job', function() { .config() .get.withArgs('server.basePath') .returns(serverBasePath); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -159,7 +164,7 @@ describe('CSV Execute Job', function() { .config() .get.withArgs('server.basePath') .returns(serverBasePath); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobBasePath = 'foo-job/basePath/'; await executeJob( 'job789', @@ -182,7 +187,7 @@ describe('CSV Execute Job', function() { it('passed scoped SavedObjectsClient to uiSettingsServiceFactory', async function() { const returnValue = Symbol(); mockServer.savedObjects.getScopedSavedObjectsClient.returns(returnValue); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -197,7 +202,7 @@ describe('CSV Execute Job', function() { describe('basic Elasticsearch call behavior', function() { it('should decrypt encrypted headers and pass to callWithRequest', async function() { - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -213,7 +218,7 @@ describe('CSV Execute Job', function() { testBody: true, }; - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const job = { headers: encryptedHeaders, fields: [], @@ -240,7 +245,7 @@ describe('CSV Execute Job', function() { _scroll_id: scrollId, }); 
callWithRequestStub.onSecondCall().resolves(defaultElasticsearchResponse); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -254,7 +259,7 @@ describe('CSV Execute Job', function() { }); it('should not execute scroll if there are no hits from the search', async function() { - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -284,7 +289,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -319,7 +324,7 @@ describe('CSV Execute Job', function() { _scroll_id: lastScrollId, }); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -347,7 +352,7 @@ describe('CSV Execute Job', function() { _scroll_id: lastScrollId, }); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -375,7 +380,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -403,7 +408,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['=SUM(A1:A2)', 'two'], @@ -431,7 +436,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -459,7 +464,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -479,7 +484,7 @@ describe('CSV Execute Job', function() { describe('Elasticsearch call errors', function() { it('should reject Promise if search call errors out', async function() { callWithRequestStub.rejects(new Error()); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -496,7 +501,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); callWithRequestStub.onSecondCall().rejects(new Error()); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -515,7 +520,7 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = 
executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -532,7 +537,7 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -556,7 +561,7 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -580,7 +585,7 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -610,7 +615,7 @@ describe('CSV Execute Job', function() { }); it('should stop calling Elasticsearch when cancellationToken.cancel is called', async function() { - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -625,7 +630,7 @@ describe('CSV Execute Job', function() { }); it(`shouldn't call clearScroll if it never got a scrollId`, async function() { - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -639,7 +644,7 @@ describe('CSV Execute Job', function() { }); it('should call clearScroll if it got a scrollId', async function() { - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -657,7 +662,7 @@ describe('CSV Execute Job', function() { describe('csv content', function() { it('should write column headers to output, even if there are no results', async function() { - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -669,7 +674,7 @@ describe('CSV Execute Job', function() { it('should use custom uiSettings csv:separator for header', async function() { uiSettingsGetStub.withArgs('csv:separator').returns(';'); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -681,7 +686,7 @@ describe('CSV Execute Job', function() { it('should escape column headers if uiSettings csv:quoteValues is true', async function() { uiSettingsGetStub.withArgs('csv:quoteValues').returns(true); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one and a half', 'two', 'three-and-four', 'five & six'], @@ -693,7 +698,7 @@ describe('CSV Execute Job', function() { it(`shouldn't escape column headers if uiSettings csv:quoteValues is false`, async function() { uiSettingsGetStub.withArgs('csv:quoteValues').returns(false); - const executeJob = executeJobFactory(mockServer); + const 
executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one and a half', 'two', 'three-and-four', 'five & six'], @@ -704,7 +709,7 @@ describe('CSV Execute Job', function() { }); it('should write column headers to output, when there are results', async function() { - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); callWithRequestStub.onFirstCall().resolves({ hits: { hits: [{ one: '1', two: '2' }], @@ -724,7 +729,7 @@ describe('CSV Execute Job', function() { }); it('should use comma separated values of non-nested fields from _source', async function() { - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); callWithRequestStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -745,7 +750,7 @@ describe('CSV Execute Job', function() { }); it('should concatenate the hits from multiple responses', async function() { - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); callWithRequestStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -773,7 +778,7 @@ describe('CSV Execute Job', function() { }); it('should use field formatters to format fields', async function() { - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); callWithRequestStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -818,7 +823,7 @@ describe('CSV Execute Job', function() { .get.withArgs('xpack.reporting.csv.maxSizeBytes') .returns(1); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -851,7 +856,7 @@ describe('CSV Execute Job', function() { .get.withArgs('xpack.reporting.csv.maxSizeBytes') .returns(9); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -891,7 +896,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -932,7 +937,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -972,7 +977,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1001,7 +1006,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1030,7 +1035,7 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer); + const executeJob = executeJobFactory(mockServer, mockLogger); const 
jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], From 9119c9f541ca43bf2e71ee007121b3bbb47788fe Mon Sep 17 00:00:00 2001 From: Timothy Sullivan Date: Wed, 22 Jan 2020 16:12:32 -0700 Subject: [PATCH 7/7] convert to jest --- .../execute_job.js => execute_job.test.js} | 182 +++++++++--------- 1 file changed, 96 insertions(+), 86 deletions(-) rename x-pack/legacy/plugins/reporting/export_types/csv/server/{__tests__/execute_job.js => execute_job.test.js} (86%) diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/__tests__/execute_job.js b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js similarity index 86% rename from x-pack/legacy/plugins/reporting/export_types/csv/server/__tests__/execute_job.js rename to x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js index dd4f86c9ad7ef..6109db8542764 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/__tests__/execute_job.js +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js @@ -4,32 +4,19 @@ * you may not use this file except in compliance with the Elastic License. */ -import expect from '@kbn/expect'; import Puid from 'puid'; import sinon from 'sinon'; import nodeCrypto from '@elastic/node-crypto'; -import { CancellationToken } from '../../../../common/cancellation_token'; -import { FieldFormatsService } from '../../../../../../../../src/legacy/ui/field_formats/mixin/field_formats_service'; +import { CancellationToken } from '../../../common/cancellation_token'; +import { FieldFormatsService } from '../../../../../../../src/legacy/ui/field_formats/mixin/field_formats_service'; // Reporting uses an unconventional directory structure so the linter marks this as a violation // eslint-disable-next-line @kbn/eslint/no-restricted-paths -import { StringFormat } from '../../../../../../../../src/plugins/data/server'; -import { LevelLogger } from '../../../../server/lib/level_logger'; -import { executeJobFactory } from '../execute_job'; +import { StringFormat } from '../../../../../../../src/plugins/data/server'; +import { LevelLogger } from '../../../server/lib/level_logger'; +import { executeJobFactory } from './execute_job'; const delay = ms => new Promise(resolve => setTimeout(() => resolve(), ms)); -const expectRejectedPromise = async promise => { - let error = null; - try { - await promise; - } catch (err) { - error = err; - } finally { - expect(error).to.not.be(null); - expect(error).to.be.an(Error); - } -}; - const puid = new Puid(); const getRandomScrollId = () => { return puid.generate(); @@ -42,8 +29,9 @@ describe('CSV Execute Job', function() { }; const mockLogger = new LevelLogger({ get: () => ({ - debug: () => {}, - warn: () => {}, + debug: jest.fn(), + warn: jest.fn(), + error: jest.fn(), }), }); let defaultElasticsearchResponse; @@ -55,7 +43,7 @@ describe('CSV Execute Job', function() { let callWithRequestStub; let uiSettingsGetStub; - before(async function() { + beforeAll(async function() { const crypto = nodeCrypto({ encryptionKey }); encryptedHeaders = await crypto.encrypt(headers); }); @@ -134,10 +122,10 @@ describe('CSV Execute Job', function() { { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, cancellationToken ); - expect(mockServer.savedObjects.getScopedSavedObjectsClient.calledOnce).to.be(true); - expect( - mockServer.savedObjects.getScopedSavedObjectsClient.firstCall.args[0].headers - ).to.be.eql(headers); + 
expect(mockServer.savedObjects.getScopedSavedObjectsClient.calledOnce).toBe(true); + expect(mockServer.savedObjects.getScopedSavedObjectsClient.firstCall.args[0].headers).toEqual( + headers + ); }); it(`containing getBasePath() returning server's basePath if the job doesn't have one`, async function() { @@ -152,10 +140,10 @@ describe('CSV Execute Job', function() { { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, cancellationToken ); - expect(mockServer.savedObjects.getScopedSavedObjectsClient.calledOnce).to.be(true); + expect(mockServer.savedObjects.getScopedSavedObjectsClient.calledOnce).toBe(true); expect( mockServer.savedObjects.getScopedSavedObjectsClient.firstCall.args[0].getBasePath() - ).to.be.eql(serverBasePath); + ).toEqual(serverBasePath); }); it(`containing getBasePath() returning job's basePath if the job has one`, async function() { @@ -176,10 +164,10 @@ describe('CSV Execute Job', function() { }, cancellationToken ); - expect(mockServer.savedObjects.getScopedSavedObjectsClient.calledOnce).to.be(true); + expect(mockServer.savedObjects.getScopedSavedObjectsClient.calledOnce).toBe(true); expect( mockServer.savedObjects.getScopedSavedObjectsClient.firstCall.args[0].getBasePath() - ).to.be.eql(jobBasePath); + ).toEqual(jobBasePath); }); }); @@ -193,8 +181,8 @@ describe('CSV Execute Job', function() { { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, cancellationToken ); - expect(mockServer.uiSettingsServiceFactory.calledOnce).to.be(true); - expect(mockServer.uiSettingsServiceFactory.firstCall.args[0].savedObjectsClient).to.be( + expect(mockServer.uiSettingsServiceFactory.calledOnce).toBe(true); + expect(mockServer.uiSettingsServiceFactory.firstCall.args[0].savedObjectsClient).toBe( returnValue ); }); @@ -208,8 +196,8 @@ describe('CSV Execute Job', function() { { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, cancellationToken ); - expect(callWithRequestStub.called).to.be(true); - expect(callWithRequestStub.firstCall.args[0].headers).to.be.eql(headers); + expect(callWithRequestStub.called).toBe(true); + expect(callWithRequestStub.firstCall.args[0].headers).toEqual(headers); }); it('should pass the index and body to execute the initial search', async function() { @@ -231,9 +219,9 @@ describe('CSV Execute Job', function() { await executeJob('job777', job, cancellationToken); const searchCall = callWithRequestStub.firstCall; - expect(searchCall.args[1]).to.be('search'); - expect(searchCall.args[2].index).to.be(index); - expect(searchCall.args[2].body).to.be(body); + expect(searchCall.args[1]).toBe('search'); + expect(searchCall.args[2].index).toBe(index); + expect(searchCall.args[2].body).toBe(body); }); it('should pass the scrollId from the initial search to the subsequent scroll', async function() { @@ -254,8 +242,8 @@ describe('CSV Execute Job', function() { const scrollCall = callWithRequestStub.secondCall; - expect(scrollCall.args[1]).to.be('scroll'); - expect(scrollCall.args[2].scrollId).to.be(scrollId); + expect(scrollCall.args[1]).toBe('scroll'); + expect(scrollCall.args[2].scrollId).toBe(scrollId); }); it('should not execute scroll if there are no hits from the search', async function() { @@ -266,13 +254,13 @@ describe('CSV Execute Job', function() { cancellationToken ); - expect(callWithRequestStub.callCount).to.be(2); + expect(callWithRequestStub.callCount).toBe(2); const searchCall = callWithRequestStub.firstCall; - expect(searchCall.args[1]).to.be('search'); + 
expect(searchCall.args[1]).toBe('search'); const clearScrollCall = callWithRequestStub.secondCall; - expect(clearScrollCall.args[1]).to.be('clearScroll'); + expect(clearScrollCall.args[1]).toBe('clearScroll'); }); it('should stop executing scroll if there are no hits', async function() { @@ -296,16 +284,16 @@ describe('CSV Execute Job', function() { cancellationToken ); - expect(callWithRequestStub.callCount).to.be(3); + expect(callWithRequestStub.callCount).toBe(3); const searchCall = callWithRequestStub.firstCall; - expect(searchCall.args[1]).to.be('search'); + expect(searchCall.args[1]).toBe('search'); const scrollCall = callWithRequestStub.secondCall; - expect(scrollCall.args[1]).to.be('scroll'); + expect(scrollCall.args[1]).toBe('scroll'); const clearScroll = callWithRequestStub.thirdCall; - expect(clearScroll.args[1]).to.be('clearScroll'); + expect(clearScroll.args[1]).toBe('clearScroll'); }); it('should call clearScroll with scrollId when there are no more hits', async function() { @@ -332,8 +320,8 @@ describe('CSV Execute Job', function() { ); const lastCall = callWithRequestStub.getCall(callWithRequestStub.callCount - 1); - expect(lastCall.args[1]).to.be('clearScroll'); - expect(lastCall.args[2].scrollId).to.eql([lastScrollId]); + expect(lastCall.args[1]).toBe('clearScroll'); + expect(lastCall.args[2].scrollId).toEqual([lastScrollId]); }); it('calls clearScroll when there is an error iterating the hits', async function() { @@ -359,11 +347,13 @@ describe('CSV Execute Job', function() { conflictedTypesFields: undefined, searchRequest: { index: null, body: null }, }; - await expectRejectedPromise(executeJob('job123', jobParams, cancellationToken)); + await expect( + executeJob('job123', jobParams, cancellationToken) + ).rejects.toMatchInlineSnapshot(`[TypeError: Cannot read property 'indexOf' of undefined]`); const lastCall = callWithRequestStub.getCall(callWithRequestStub.callCount - 1); - expect(lastCall.args[1]).to.be('clearScroll'); - expect(lastCall.args[2].scrollId).to.eql([lastScrollId]); + expect(lastCall.args[1]).toBe('clearScroll'); + expect(lastCall.args[2].scrollId).toEqual([lastScrollId]); }); }); @@ -393,7 +383,7 @@ describe('CSV Execute Job', function() { cancellationToken ); - expect(csvContainsFormulas).to.equal(true); + expect(csvContainsFormulas).toEqual(true); }); it('returns warnings when headings contain formulas', async function() { @@ -421,7 +411,7 @@ describe('CSV Execute Job', function() { cancellationToken ); - expect(csvContainsFormulas).to.equal(true); + expect(csvContainsFormulas).toEqual(true); }); it('returns no warnings when cells have no formulas', async function() { @@ -449,7 +439,7 @@ describe('CSV Execute Job', function() { cancellationToken ); - expect(csvContainsFormulas).to.equal(false); + expect(csvContainsFormulas).toEqual(false); }); it('returns no warnings when configured not to', async () => { @@ -477,7 +467,7 @@ describe('CSV Execute Job', function() { cancellationToken ); - expect(csvContainsFormulas).to.equal(false); + expect(csvContainsFormulas).toEqual(false); }); }); @@ -490,7 +480,9 @@ describe('CSV Execute Job', function() { fields: [], searchRequest: { index: null, body: null }, }; - await expectRejectedPromise(executeJob('job123', jobParams, cancellationToken)); + await expect( + executeJob('job123', jobParams, cancellationToken) + ).rejects.toMatchInlineSnapshot(`[Error]`); }); it('should reject Promise if scroll call errors out', async function() { @@ -507,7 +499,9 @@ describe('CSV Execute Job', function() { fields: [], 
searchRequest: { index: null, body: null }, }; - await expectRejectedPromise(executeJob('job123', jobParams, cancellationToken)); + await expect( + executeJob('job123', jobParams, cancellationToken) + ).rejects.toMatchInlineSnapshot(`[Error]`); }); }); @@ -526,7 +520,11 @@ describe('CSV Execute Job', function() { fields: [], searchRequest: { index: null, body: null }, }; - await expectRejectedPromise(executeJob('job123', jobParams, cancellationToken)); + await expect( + executeJob('job123', jobParams, cancellationToken) + ).rejects.toMatchInlineSnapshot( + `[Error: Expected _scroll_id in the following Elasticsearch response: {"hits":{"hits":[{}]}}]` + ); }); it('should reject Promise if search returns no hits and no _scroll_id', async function() { @@ -543,7 +541,11 @@ describe('CSV Execute Job', function() { fields: [], searchRequest: { index: null, body: null }, }; - await expectRejectedPromise(executeJob('job123', jobParams, cancellationToken)); + await expect( + executeJob('job123', jobParams, cancellationToken) + ).rejects.toMatchInlineSnapshot( + `[Error: Expected _scroll_id in the following Elasticsearch response: {"hits":{"hits":[]}}]` + ); }); it('should reject Promise if scroll returns hits but no _scroll_id', async function() { @@ -567,7 +569,11 @@ describe('CSV Execute Job', function() { fields: [], searchRequest: { index: null, body: null }, }; - await expectRejectedPromise(executeJob('job123', jobParams, cancellationToken)); + await expect( + executeJob('job123', jobParams, cancellationToken) + ).rejects.toMatchInlineSnapshot( + `[Error: Expected _scroll_id in the following Elasticsearch response: {"hits":{"hits":[{}]}}]` + ); }); it('should reject Promise if scroll returns no hits and no _scroll_id', async function() { @@ -591,7 +597,11 @@ describe('CSV Execute Job', function() { fields: [], searchRequest: { index: null, body: null }, }; - await expectRejectedPromise(executeJob('job123', jobParams, cancellationToken)); + await expect( + executeJob('job123', jobParams, cancellationToken) + ).rejects.toMatchInlineSnapshot( + `[Error: Expected _scroll_id in the following Elasticsearch response: {"hits":{"hits":[]}}]` + ); }); }); @@ -626,7 +636,7 @@ describe('CSV Execute Job', function() { const callCount = callWithRequestStub.callCount; cancellationToken.cancel(); await delay(250); - expect(callWithRequestStub.callCount).to.be(callCount + 1); // last call is to clear the scroll + expect(callWithRequestStub.callCount).toBe(callCount + 1); // last call is to clear the scroll }); it(`shouldn't call clearScroll if it never got a scrollId`, async function() { @@ -655,8 +665,8 @@ describe('CSV Execute Job', function() { await delay(100); const lastCall = callWithRequestStub.getCall(callWithRequestStub.callCount - 1); - expect(lastCall.args[1]).to.be('clearScroll'); - expect(lastCall.args[2].scrollId).to.eql([scrollId]); + expect(lastCall.args[1]).toBe('clearScroll'); + expect(lastCall.args[2].scrollId).toEqual([scrollId]); }); }); @@ -669,7 +679,7 @@ describe('CSV Execute Job', function() { searchRequest: { index: null, body: null }, }; const { content } = await executeJob('job123', jobParams, cancellationToken); - expect(content).to.be(`one,two\n`); + expect(content).toBe(`one,two\n`); }); it('should use custom uiSettings csv:separator for header', async function() { @@ -681,7 +691,7 @@ describe('CSV Execute Job', function() { searchRequest: { index: null, body: null }, }; const { content } = await executeJob('job123', jobParams, cancellationToken); - 
expect(content).to.be(`one;two\n`); + expect(content).toBe(`one;two\n`); }); it('should escape column headers if uiSettings csv:quoteValues is true', async function() { @@ -693,7 +703,7 @@ describe('CSV Execute Job', function() { searchRequest: { index: null, body: null }, }; const { content } = await executeJob('job123', jobParams, cancellationToken); - expect(content).to.be(`"one and a half",two,"three-and-four","five & six"\n`); + expect(content).toBe(`"one and a half",two,"three-and-four","five & six"\n`); }); it(`shouldn't escape column headers if uiSettings csv:quoteValues is false`, async function() { @@ -705,7 +715,7 @@ describe('CSV Execute Job', function() { searchRequest: { index: null, body: null }, }; const { content } = await executeJob('job123', jobParams, cancellationToken); - expect(content).to.be(`one and a half,two,three-and-four,five & six\n`); + expect(content).toBe(`one and a half,two,three-and-four,five & six\n`); }); it('should write column headers to output, when there are results', async function() { @@ -725,7 +735,7 @@ describe('CSV Execute Job', function() { const { content } = await executeJob('job123', jobParams, cancellationToken); const lines = content.split('\n'); const headerLine = lines[0]; - expect(headerLine).to.be('one,two'); + expect(headerLine).toBe('one,two'); }); it('should use comma separated values of non-nested fields from _source', async function() { @@ -746,7 +756,7 @@ describe('CSV Execute Job', function() { const { content } = await executeJob('job123', jobParams, cancellationToken); const lines = content.split('\n'); const valuesLine = lines[1]; - expect(valuesLine).to.be('foo,bar'); + expect(valuesLine).toBe('foo,bar'); }); it('should concatenate the hits from multiple responses', async function() { @@ -773,8 +783,8 @@ describe('CSV Execute Job', function() { const { content } = await executeJob('job123', jobParams, cancellationToken); const lines = content.split('\n'); - expect(lines[1]).to.be('foo,bar'); - expect(lines[2]).to.be('baz,qux'); + expect(lines[1]).toBe('foo,bar'); + expect(lines[2]).toBe('baz,qux'); }); it('should use field formatters to format fields', async function() { @@ -804,7 +814,7 @@ describe('CSV Execute Job', function() { const { content } = await executeJob('job123', jobParams, cancellationToken); const lines = content.split('\n'); - expect(lines[1]).to.be('FOO,bar'); + expect(lines[1]).toBe('FOO,bar'); }); }); @@ -838,11 +848,11 @@ describe('CSV Execute Job', function() { }); it('should return max_size_reached', function() { - expect(maxSizeReached).to.be(true); + expect(maxSizeReached).toBe(true); }); it('should return empty content', function() { - expect(content).to.be(''); + expect(content).toBe(''); }); }); @@ -871,11 +881,11 @@ describe('CSV Execute Job', function() { }); it(`shouldn't return max_size_reached`, function() { - expect(maxSizeReached).to.be(false); + expect(maxSizeReached).toBe(false); }); it(`should return content`, function() { - expect(content).to.be('one,two\n'); + expect(content).toBe('one,two\n'); }); }); @@ -912,11 +922,11 @@ describe('CSV Execute Job', function() { }); it(`should return max_size_reached`, function() { - expect(maxSizeReached).to.be(true); + expect(maxSizeReached).toBe(true); }); it(`should return the headers in the content`, function() { - expect(content).to.be('one,two\n'); + expect(content).toBe('one,two\n'); }); }); @@ -953,11 +963,11 @@ describe('CSV Execute Job', function() { }); it(`shouldn't return max_size_reached`, async function() { - 
expect(maxSizeReached).to.be(false); + expect(maxSizeReached).toBe(false); }); it('should return headers and data in content', function() { - expect(content).to.be('one,two\nfoo,bar\n'); + expect(content).toBe('one,two\nfoo,bar\n'); }); }); }); @@ -988,8 +998,8 @@ describe('CSV Execute Job', function() { await executeJob('job123', jobParams, cancellationToken); const searchCall = callWithRequestStub.firstCall; - expect(searchCall.args[1]).to.be('search'); - expect(searchCall.args[2].scroll).to.be(scrollDuration); + expect(searchCall.args[1]).toBe('search'); + expect(searchCall.args[2].scroll).toBe(scrollDuration); }); it('passes scroll size to initial search call', async function() { @@ -1017,8 +1027,8 @@ describe('CSV Execute Job', function() { await executeJob('job123', jobParams, cancellationToken); const searchCall = callWithRequestStub.firstCall; - expect(searchCall.args[1]).to.be('search'); - expect(searchCall.args[2].size).to.be(scrollSize); + expect(searchCall.args[1]).toBe('search'); + expect(searchCall.args[2].size).toBe(scrollSize); }); it('passes scroll duration to subsequent scroll call', async function() { @@ -1046,8 +1056,8 @@ describe('CSV Execute Job', function() { await executeJob('job123', jobParams, cancellationToken); const scrollCall = callWithRequestStub.secondCall; - expect(scrollCall.args[1]).to.be('scroll'); - expect(scrollCall.args[2].scroll).to.be(scrollDuration); + expect(scrollCall.args[1]).toBe('scroll'); + expect(scrollCall.args[2].scroll).toBe(scrollDuration); }); }); });
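
Note on the logger plumbing these patches introduce: each converted test now builds its own mock LevelLogger inline (the getMockLogger() helpers added to the PNG and PDF execute_job tests, and the mockLogger constant in the CSV Jest conversion). If that boilerplate keeps spreading, it could be hoisted into a shared test helper. The sketch below is illustrative only and is not part of this patch series: the file name create_mock_levellogger.js, the exported function name, and the relative import path are assumptions, and it presumes a Jest environment where jest.fn() is available as a global. It simply mirrors the constructor shape used in execute_job.test.js above, where LevelLogger is handed an object whose get() returns per-level stubs.

    // create_mock_levellogger.js (hypothetical shared test helper; name and path are assumptions)
    import { LevelLogger } from '../server/lib/level_logger'; // adjust the relative path for the caller's location

    export function createMockLevelLogger() {
      // Same construction as the inline mockLogger in the converted CSV Jest test:
      // LevelLogger wraps a `get()` that hands back jest.fn() stubs for each level
      // the code under test may log at.
      return new LevelLogger({
        get: () => ({
          debug: jest.fn(),
          warn: jest.fn(),
          error: jest.fn(),
        }),
      });
    }

A test would then pass the result wherever the updated factory signatures expect a logger as the second argument, for example executeJobFactory(mockServer, createMockLevelLogger(), { browserDriverFactory: {} }), matching the executeJobFactory(mockServer, mockLogger) calls in the CSV diff above.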