From 0f653f8d479575e378dcd2114bf9b9776a15a433 Mon Sep 17 00:00:00 2001 From: Nathan Reese Date: Sun, 14 Feb 2021 14:53:38 -0700 Subject: [PATCH 01/11] [ml] move importer to file_upload plugin --- .../plugins/file_upload/common/constants.ts | 1 + .../plugins/file_upload/public/api/index.ts | 30 +++++++ .../public/components/json_import_progress.js | 4 +- .../public/get_file_upload_component.ts | 39 --------- .../public}/importer/importer.ts | 85 +++++++++++-------- .../public}/importer/importer_factory.ts | 13 +-- .../public}/importer/index.ts | 1 + .../public}/importer/message_importer.ts | 18 ++-- .../public}/importer/ndjson_importer.ts | 8 +- .../file_upload/public/importer/types.ts | 45 ++++++++++ x-pack/plugins/file_upload/public/index.ts | 6 +- .../file_upload/public/kibana_services.js | 21 ----- .../file_upload/public/kibana_services.ts | 20 +++++ .../public/lazy_load_bundle/index.ts | 46 ++++++++++ .../public/lazy_load_bundle/lazy/index.ts | 10 +++ x-pack/plugins/file_upload/public/plugin.ts | 39 ++++----- .../file_upload/public/util/http_service.js | 4 +- .../public/util/indexing_service.js | 6 +- x-pack/plugins/maps/public/plugin.ts | 4 +- .../components/import_view/import_view.js | 8 +- .../file_based/components/utils/utils.ts | 3 +- .../services/ml_api_service/datavisualizer.ts | 33 ------- .../application/util/dependency_cache.ts | 13 +++ x-pack/plugins/ml/public/plugin.ts | 3 + 24 files changed, 276 insertions(+), 184 deletions(-) create mode 100644 x-pack/plugins/file_upload/public/api/index.ts delete mode 100644 x-pack/plugins/file_upload/public/get_file_upload_component.ts rename x-pack/plugins/{ml/public/application/datavisualizer/file_based/components/import_view => file_upload/public}/importer/importer.ts (86%) rename x-pack/plugins/{ml/public/application/datavisualizer/file_based/components/import_view => file_upload/public}/importer/importer_factory.ts (67%) rename 
x-pack/plugins/{ml/public/application/datavisualizer/file_based/components/import_view => file_upload/public}/importer/index.ts (92%) rename x-pack/plugins/{ml/public/application/datavisualizer/file_based/components/import_view => file_upload/public}/importer/message_importer.ts (83%) rename x-pack/plugins/{ml/public/application/datavisualizer/file_based/components/import_view => file_upload/public}/importer/ndjson_importer.ts (83%) create mode 100644 x-pack/plugins/file_upload/public/importer/types.ts delete mode 100644 x-pack/plugins/file_upload/public/kibana_services.js create mode 100644 x-pack/plugins/file_upload/public/kibana_services.ts create mode 100644 x-pack/plugins/file_upload/public/lazy_load_bundle/index.ts create mode 100644 x-pack/plugins/file_upload/public/lazy_load_bundle/lazy/index.ts diff --git a/x-pack/plugins/file_upload/common/constants.ts b/x-pack/plugins/file_upload/common/constants.ts index 5744429c80d4d..11ad80f5c955e 100644 --- a/x-pack/plugins/file_upload/common/constants.ts +++ b/x-pack/plugins/file_upload/common/constants.ts @@ -5,6 +5,7 @@ * 2.0. */ +export const MB = Math.pow(2, 20); export const MAX_FILE_SIZE = '100MB'; export const MAX_FILE_SIZE_BYTES = 104857600; // 100MB diff --git a/x-pack/plugins/file_upload/public/api/index.ts b/x-pack/plugins/file_upload/public/api/index.ts new file mode 100644 index 0000000000000..359bc4a1687b5 --- /dev/null +++ b/x-pack/plugins/file_upload/public/api/index.ts @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; +import { FileUploadComponentProps, lazyLoadFileUploadModules } from '../lazy_load_bundle'; +import type { IImporter, ImportFactoryOptions } from '../importer'; + +export interface FileUploadStartApi { + getFileUploadComponent(): Promise>; + importerFactory(format: string, options: ImportFactoryOptions): Promise; +} + +export async function getFileUploadComponent(): Promise< + React.ComponentType +> { + const fileUploadModules = await lazyLoadFileUploadModules(); + return fileUploadModules.JsonUploadAndParse; +} + +export async function importerFactory( + format: string, + options: ImportFactoryOptions +): Promise { + const fileUploadModules = await lazyLoadFileUploadModules(); + return fileUploadModules.importerFactory(format, options); +} diff --git a/x-pack/plugins/file_upload/public/components/json_import_progress.js b/x-pack/plugins/file_upload/public/components/json_import_progress.js index 1f9293e77d33c..1adf7d9039e56 100644 --- a/x-pack/plugins/file_upload/public/components/json_import_progress.js +++ b/x-pack/plugins/file_upload/public/components/json_import_progress.js @@ -9,7 +9,7 @@ import React, { Fragment, Component } from 'react'; import { i18n } from '@kbn/i18n'; import { EuiCodeBlock, EuiSpacer, EuiText, EuiTitle, EuiProgress, EuiCallOut } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; -import { basePath } from '../kibana_services'; +import { getHttp } from '../kibana_services'; export class JsonImportProgress extends Component { state = { @@ -118,7 +118,7 @@ export class JsonImportProgress extends Component { {i18n.translate('xpack.fileUpload.jsonImport.indexMgmtLink', { defaultMessage: 'Index Management', diff --git a/x-pack/plugins/file_upload/public/get_file_upload_component.ts b/x-pack/plugins/file_upload/public/get_file_upload_component.ts deleted file mode 100644 index 0366b6c46832d..0000000000000 --- a/x-pack/plugins/file_upload/public/get_file_upload_component.ts +++ /dev/null @@ 
-1,39 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import React from 'react'; -import { FeatureCollection } from 'geojson'; - -export interface FileUploadComponentProps { - appName: string; - isIndexingTriggered: boolean; - onFileUpload: (geojsonFile: FeatureCollection, name: string) => void; - onFileRemove: () => void; - onIndexReady: (indexReady: boolean) => void; - transformDetails: string; - onIndexingComplete: (indexResponses: { - indexDataResp: unknown; - indexPatternResp: unknown; - }) => void; -} - -let lazyLoadPromise: Promise>; - -export async function getFileUploadComponent(): Promise< - React.ComponentType -> { - if (typeof lazyLoadPromise !== 'undefined') { - return lazyLoadPromise; - } - - lazyLoadPromise = new Promise(async (resolve) => { - // @ts-expect-error - const { JsonUploadAndParse } = await import('./components/json_upload_and_parse'); - resolve(JsonUploadAndParse); - }); - return lazyLoadPromise; -} diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/importer.ts b/x-pack/plugins/file_upload/public/importer/importer.ts similarity index 86% rename from x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/importer.ts rename to x-pack/plugins/file_upload/public/importer/importer.ts index 518d3808b2da2..1c85797bddebb 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/importer.ts +++ b/x-pack/plugins/file_upload/public/importer/importer.ts @@ -8,7 +8,7 @@ import { chunk } from 'lodash'; import moment from 'moment'; import { i18n } from '@kbn/i18n'; -import { ml } from '../../../../../services/ml_api_service'; +import { getHttp } from '../kibana_services'; import { ImportDoc, 
ImportFailure, @@ -16,34 +16,16 @@ import { Mappings, Settings, IngestPipeline, -} from '../../../../../../../../file_upload/common'; + MB, +} from '../../common'; +import { CreateDocsResponse, IImporter, ImportConfig, ImportResults } from './types'; const CHUNK_SIZE = 5000; const MAX_CHUNK_CHAR_COUNT = 1000000; const IMPORT_RETRIES = 5; const STRING_CHUNKS_MB = 100; -export interface ImportConfig { - settings: Settings; - mappings: Mappings; - pipeline: IngestPipeline; -} - -export interface ImportResults { - success: boolean; - failures?: any[]; - docCount?: number; - error?: any; -} - -export interface CreateDocsResponse { - success: boolean; - remainder: number; - docs: ImportDoc[]; - error?: any; -} - -export abstract class Importer { +export abstract class Importer implements IImporter { private _settings: Settings; private _mappings: Mappings; private _pipeline: IngestPipeline; @@ -58,7 +40,7 @@ export abstract class Importer { public read(data: ArrayBuffer) { const decoder = new TextDecoder(); - const size = STRING_CHUNKS_MB * Math.pow(2, 20); + const size = STRING_CHUNKS_MB * MB; // chop the data up into 100MB chunks for processing. 
// if the chop produces a partial line at the end, a character "remainder" count @@ -98,7 +80,7 @@ export abstract class Importer { } : {}; - const createIndexResp = await ml.fileDatavisualizer.import({ + const createIndexResp = await callImportRoute({ id: undefined, index, data: [], @@ -139,15 +121,6 @@ export abstract class Importer { let error; for (let i = 0; i < chunks.length; i++) { - const aggs = { - id, - index, - data: chunks[i], - settings: {}, - mappings: {}, - ingestPipeline, - }; - let retries = IMPORT_RETRIES; let resp: ImportResponse = { success: false, @@ -160,7 +133,14 @@ export abstract class Importer { while (resp.success === false && retries > 0) { try { - resp = await ml.fileDatavisualizer.import(aggs); + resp = await callImportRoute({ + id, + index, + data: chunks[i], + settings: {}, + mappings: {}, + ingestPipeline, + }); if (retries < IMPORT_RETRIES) { // eslint-disable-next-line no-console @@ -264,3 +244,38 @@ function createDocumentChunks(docArray: ImportDoc[]) { } return chunks; } + +function callImportRoute({ + id, + index, + data, + settings, + mappings, + ingestPipeline, +}: { + id: string | undefined; + index: string; + data: ImportDoc[]; + settings: Settings | unknown; + mappings: Mappings | unknown; + ingestPipeline: { + id?: string; + pipeline?: IngestPipeline; + }; +}) { + const query = id !== undefined ? 
{ id } : {}; + const body = JSON.stringify({ + index, + data, + settings, + mappings, + ingestPipeline, + }); + + return getHttp().fetch({ + path: `/api/file_upload/import`, + method: 'POST', + query, + body, + }); +} diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/importer_factory.ts b/x-pack/plugins/file_upload/public/importer/importer_factory.ts similarity index 67% rename from x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/importer_factory.ts rename to x-pack/plugins/file_upload/public/importer/importer_factory.ts index 6646f967825fb..0690b66a4582b 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/importer_factory.ts +++ b/x-pack/plugins/file_upload/public/importer/importer_factory.ts @@ -7,14 +7,9 @@ import { MessageImporter } from './message_importer'; import { NdjsonImporter } from './ndjson_importer'; -import { ImportConfig } from './importer'; -import { FindFileStructureResponse } from '../../../../../../../common/types/file_datavisualizer'; +import { ImportFactoryOptions } from './types'; -export function importerFactory( - format: string, - results: FindFileStructureResponse, - settings: ImportConfig -) { +export function importerFactory(format: string, options: ImportFactoryOptions) { switch (format) { // delimited and semi-structured text are both handled by splitting the // file into messages, then sending these to ES for further processing @@ -22,9 +17,9 @@ export function importerFactory( // field (like Filebeat does) case 'delimited': case 'semi_structured_text': - return new MessageImporter(results, settings); + return new MessageImporter(options); case 'ndjson': - return new NdjsonImporter(results, settings); + return new NdjsonImporter(options); default: return; } diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/index.ts 
b/x-pack/plugins/file_upload/public/importer/index.ts similarity index 92% rename from x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/index.ts rename to x-pack/plugins/file_upload/public/importer/index.ts index 6d33a0eeb5ab3..face822f91efb 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/index.ts +++ b/x-pack/plugins/file_upload/public/importer/index.ts @@ -6,3 +6,4 @@ */ export { importerFactory } from './importer_factory'; +export * from './types'; diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/message_importer.ts b/x-pack/plugins/file_upload/public/importer/message_importer.ts similarity index 83% rename from x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/message_importer.ts rename to x-pack/plugins/file_upload/public/importer/message_importer.ts index 8692e2b9cecd2..ae72f5f1368f7 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/message_importer.ts +++ b/x-pack/plugins/file_upload/public/importer/message_importer.ts @@ -5,25 +5,23 @@ * 2.0. 
*/ -import { Importer, ImportConfig, CreateDocsResponse } from './importer'; -import { FindFileStructureResponse } from '../../../../../../../common/types/file_datavisualizer'; -import { Doc } from '../../../../../../../../file_upload/common'; +import { Importer } from './importer'; +import { Doc } from '../../common'; +import { CreateDocsResponse, ImportFactoryOptions } from './types'; export class MessageImporter extends Importer { private _excludeLinesRegex: RegExp | null; private _multilineStartRegex: RegExp | null; - constructor(results: FindFileStructureResponse, settings: ImportConfig) { - super(settings); + constructor(options: ImportFactoryOptions) { + super(options.importConfig); this._excludeLinesRegex = - results.exclude_lines_pattern === undefined - ? null - : new RegExp(results.exclude_lines_pattern); + options.excludeLinesPattern === undefined ? null : new RegExp(options.excludeLinesPattern); this._multilineStartRegex = - results.multiline_start_pattern === undefined + options.multilineStartPattern === undefined ? null - : new RegExp(results.multiline_start_pattern); + : new RegExp(options.multilineStartPattern); } // split the text into an array of lines by looking for newlines. diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/ndjson_importer.ts b/x-pack/plugins/file_upload/public/importer/ndjson_importer.ts similarity index 83% rename from x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/ndjson_importer.ts rename to x-pack/plugins/file_upload/public/importer/ndjson_importer.ts index 661f3f9179e49..8f0b6e39157f0 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/importer/ndjson_importer.ts +++ b/x-pack/plugins/file_upload/public/importer/ndjson_importer.ts @@ -5,12 +5,12 @@ * 2.0. 
*/ -import { Importer, ImportConfig, CreateDocsResponse } from './importer'; -import { FindFileStructureResponse } from '../../../../../../../common/types/file_datavisualizer'; +import { Importer } from './importer'; +import { CreateDocsResponse, ImportFactoryOptions } from './types'; export class NdjsonImporter extends Importer { - constructor(results: FindFileStructureResponse, settings: ImportConfig) { - super(settings); + constructor(options: ImportFactoryOptions) { + super(options.importConfig); } protected _createDocs(json: string): CreateDocsResponse { diff --git a/x-pack/plugins/file_upload/public/importer/types.ts b/x-pack/plugins/file_upload/public/importer/types.ts new file mode 100644 index 0000000000000..81324f02b68cc --- /dev/null +++ b/x-pack/plugins/file_upload/public/importer/types.ts @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { ImportDoc, Mappings, Settings, IngestPipeline } from '../../common'; + +export interface ImportConfig { + settings: Settings; + mappings: Mappings; + pipeline: IngestPipeline; +} + +export interface ImportResults { + success: boolean; + failures?: any[]; + docCount?: number; + error?: any; +} + +export interface CreateDocsResponse { + success: boolean; + remainder: number; + docs: ImportDoc[]; + error?: any; +} + +export interface ImportFactoryOptions { + excludeLinesPattern?: string; + multilineStartPattern?: string; + importConfig: ImportConfig; +} + +export interface IImporter { + read(data: ArrayBuffer): { success: boolean }; + initializeImport(index: string): Promise; + import( + id: string, + index: string, + pipelineId: string, + setImportProgress: (progress: number) => void + ): Promise; +} diff --git a/x-pack/plugins/file_upload/public/index.ts b/x-pack/plugins/file_upload/public/index.ts index efabc984e0220..0c81779130d87 100644 --- a/x-pack/plugins/file_upload/public/index.ts +++ b/x-pack/plugins/file_upload/public/index.ts @@ -13,5 +13,7 @@ export function plugin() { export * from '../common'; -export { StartContract } from './plugin'; -export { FileUploadComponentProps } from './get_file_upload_component'; +export * from './importer/types'; + +export { FileUploadPluginStart } from './plugin'; +export { FileUploadComponentProps } from './lazy_load_bundle'; diff --git a/x-pack/plugins/file_upload/public/kibana_services.js b/x-pack/plugins/file_upload/public/kibana_services.js deleted file mode 100644 index 88e1b16eb062a..0000000000000 --- a/x-pack/plugins/file_upload/public/kibana_services.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -export let indexPatternService; -export let savedObjectsClient; -export let basePath; -export let kbnFetch; - -export const setupInitServicesAndConstants = ({ http }) => { - basePath = http.basePath.basePath; - kbnFetch = http.fetch; -}; - -export const startInitServicesAndConstants = ({ savedObjects }, { data }) => { - indexPatternService = data.indexPatterns; - savedObjectsClient = savedObjects.client; -}; diff --git a/x-pack/plugins/file_upload/public/kibana_services.ts b/x-pack/plugins/file_upload/public/kibana_services.ts new file mode 100644 index 0000000000000..c007c5c2273a8 --- /dev/null +++ b/x-pack/plugins/file_upload/public/kibana_services.ts @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { CoreStart } from 'kibana/public'; +import { FileUploadStartDependencies } from './plugin'; + +let coreStart: CoreStart; +let pluginsStart: FileUploadStartDependencies; +export function setStartServices(core: CoreStart, plugins: FileUploadStartDependencies) { + coreStart = core; + pluginsStart = plugins; +} + +export const getIndexPatternService = () => pluginsStart.data.indexPatterns; +export const getHttp = () => coreStart.http; +export const getSavedObjectsClient = () => coreStart.savedObjects.client; diff --git a/x-pack/plugins/file_upload/public/lazy_load_bundle/index.ts b/x-pack/plugins/file_upload/public/lazy_load_bundle/index.ts new file mode 100644 index 0000000000000..5ed0887d1375d --- /dev/null +++ b/x-pack/plugins/file_upload/public/lazy_load_bundle/index.ts @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import React from 'react'; +import { FeatureCollection } from 'geojson'; +import { IImporter, ImportFactoryOptions } from '../importer'; + +export interface FileUploadComponentProps { + appName: string; + isIndexingTriggered: boolean; + onFileUpload: (geojsonFile: FeatureCollection, name: string) => void; + onFileRemove: () => void; + onIndexReady: (indexReady: boolean) => void; + transformDetails: string; + onIndexingComplete: (indexResponses: { + indexDataResp: unknown; + indexPatternResp: unknown; + }) => void; +} + +let loadModulesPromise: Promise; + +interface LazyLoadedFileUploadModules { + JsonUploadAndParse: React.ComponentType; + importerFactory: (format: string, options: ImportFactoryOptions) => IImporter | undefined; +} + +export async function lazyLoadFileUploadModules(): Promise { + if (typeof loadModulesPromise !== 'undefined') { + return loadModulesPromise; + } + + loadModulesPromise = new Promise(async (resolve) => { + const { JsonUploadAndParse, importerFactory } = await import('./lazy'); + + resolve({ + JsonUploadAndParse, + importerFactory, + }); + }); + return loadModulesPromise; +} diff --git a/x-pack/plugins/file_upload/public/lazy_load_bundle/lazy/index.ts b/x-pack/plugins/file_upload/public/lazy_load_bundle/lazy/index.ts new file mode 100644 index 0000000000000..36df353f65d8c --- /dev/null +++ b/x-pack/plugins/file_upload/public/lazy_load_bundle/lazy/index.ts @@ -0,0 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +// @ts-expect-error +export { JsonUploadAndParse } from '../../components/json_upload_and_parse'; +export { importerFactory } from '../../importer'; diff --git a/x-pack/plugins/file_upload/public/plugin.ts b/x-pack/plugins/file_upload/public/plugin.ts index d66e249ce1173..5d3918193d48a 100644 --- a/x-pack/plugins/file_upload/public/plugin.ts +++ b/x-pack/plugins/file_upload/public/plugin.ts @@ -5,34 +5,35 @@ * 2.0. */ -import React from 'react'; -import { CoreSetup, CoreStart, Plugin } from 'kibana/server'; -import { FileUploadComponentProps, getFileUploadComponent } from './get_file_upload_component'; -// @ts-ignore -import { setupInitServicesAndConstants, startInitServicesAndConstants } from './kibana_services'; -import { IDataPluginServices } from '../../../../src/plugins/data/public'; +import { CoreStart, Plugin } from '../../../../src/core/public'; +import { FileUploadStartApi, getFileUploadComponent, importerFactory } from './api'; +import { setStartServices } from './kibana_services'; +import { DataPublicPluginStart } from '../../../../src/plugins/data/public'; // eslint-disable-next-line @typescript-eslint/no-empty-interface -export interface SetupDependencies {} -export interface StartDependencies { - data: IDataPluginServices; +export interface FileUploadSetupDependencies {} +export interface FileUploadStartDependencies { + data: DataPublicPluginStart; } -export type SetupContract = ReturnType; -export interface StartContract { - getFileUploadComponent: () => Promise>; -} +export type FileUploadPluginSetup = ReturnType; +export type FileUploadPluginStart = ReturnType; export class FileUploadPlugin - implements Plugin { - public setup(core: CoreSetup, plugins: SetupDependencies) { - setupInitServicesAndConstants(core); - } + implements + Plugin< + FileUploadPluginSetup, + FileUploadPluginStart, + FileUploadSetupDependencies, + FileUploadStartDependencies + > { + public setup() {} - public start(core: CoreStart, plugins: StartDependencies) { - 
startInitServicesAndConstants(core, plugins); + public start(core: CoreStart, plugins: FileUploadStartDependencies): FileUploadStartApi { + setStartServices(core, plugins); return { getFileUploadComponent, + importerFactory, }; } } diff --git a/x-pack/plugins/file_upload/public/util/http_service.js b/x-pack/plugins/file_upload/public/util/http_service.js index c3c080ddce792..33afebc514c36 100644 --- a/x-pack/plugins/file_upload/public/util/http_service.js +++ b/x-pack/plugins/file_upload/public/util/http_service.js @@ -6,7 +6,7 @@ */ import { i18n } from '@kbn/i18n'; -import { kbnFetch } from '../kibana_services'; +import { getHttp } from '../kibana_services'; export async function http(options) { if (!(options && options.url)) { @@ -38,7 +38,7 @@ export async function http(options) { async function doFetch(url, payload) { try { - return await kbnFetch(url, payload); + return await getHttp().fetch(url, payload); } catch (err) { return { failures: [ diff --git a/x-pack/plugins/file_upload/public/util/indexing_service.js b/x-pack/plugins/file_upload/public/util/indexing_service.js index 253681dad6a7d..92b1afbf16936 100644 --- a/x-pack/plugins/file_upload/public/util/indexing_service.js +++ b/x-pack/plugins/file_upload/public/util/indexing_service.js @@ -6,7 +6,7 @@ */ import { http as httpService } from './http_service'; -import { indexPatternService, savedObjectsClient } from '../kibana_services'; +import { getIndexPatternService, getSavedObjectsClient } from '../kibana_services'; import { getGeoJsonIndexingDetails } from './geo_processing'; import { sizeLimitedChunking } from './size_limited_chunking'; import { i18n } from '@kbn/i18n'; @@ -187,7 +187,7 @@ async function chunkDataAndWriteToIndex({ id, index, data, mappings, settings }) export async function createIndexPattern(indexPatternName) { try { - const indexPattern = await indexPatternService.createAndSave( + const indexPattern = await getIndexPatternService().createAndSave( { title: indexPatternName, }, @@ 
-215,7 +215,7 @@ export const getExistingIndexNames = async () => { }; export const getExistingIndexPatternNames = async () => { - const indexPatterns = await savedObjectsClient + const indexPatterns = await getSavedObjectsClient() .find({ type: 'index-pattern', fields: ['id', 'title', 'type', 'fields'], diff --git a/x-pack/plugins/maps/public/plugin.ts b/x-pack/plugins/maps/public/plugin.ts index 12cff9edf55ff..d423acf67bcd4 100644 --- a/x-pack/plugins/maps/public/plugin.ts +++ b/x-pack/plugins/maps/public/plugin.ts @@ -54,7 +54,7 @@ import { EmbeddableStart } from '../../../../src/plugins/embeddable/public'; import { MapsLegacyConfig } from '../../../../src/plugins/maps_legacy/config'; import { DataPublicPluginStart } from '../../../../src/plugins/data/public'; import { LicensingPluginSetup, LicensingPluginStart } from '../../licensing/public'; -import { StartContract as FileUploadStartContract } from '../../file_upload/public'; +import { FileUploadPluginStart } from '../../file_upload/public'; import { SavedObjectsStart } from '../../../../src/plugins/saved_objects/public'; import { PresentationUtilPluginStart } from '../../../../src/plugins/presentation_util/public'; import { @@ -80,7 +80,7 @@ export interface MapsPluginStartDependencies { charts: ChartsPluginStart; data: DataPublicPluginStart; embeddable: EmbeddableStart; - fileUpload: FileUploadStartContract; + fileUpload: FileUploadPluginStart; inspector: InspectorStartContract; licensing: LicensingPluginStart; navigation: NavigationPublicPluginStart; diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js index e22cca2746f99..99cc91d438a0e 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js +++ 
b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js @@ -20,7 +20,7 @@ import { import { i18n } from '@kbn/i18n'; import { debounce } from 'lodash'; -import { importerFactory } from './importer'; +import { getFileUpload } from '../../../../util/dependency_cache'; import { ResultsLinks } from '../results_links'; import { FilebeatConfigFlyout } from '../filebeat_config_flyout'; import { ImportProgress, IMPORT_STATUS } from '../import_progress'; @@ -222,7 +222,11 @@ export class ImportView extends Component { } if (success) { - const importer = importerFactory(format, results, indexCreationSettings); + const importer = await getFileUpload().importerFactory(format, { + excludeLinesPattern: results.exclude_lines_pattern, + multilineStartPattern: results.multiline_start_pattern, + importConfig: indexCreationSettings, + }); if (importer !== undefined) { const readResp = importer.read(data, this.setReadProgress); success = readResp.success; diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/utils/utils.ts b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/utils/utils.ts index 4412390d62c1f..2c1b02b53354a 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/utils/utils.ts +++ b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/utils/utils.ts @@ -11,6 +11,7 @@ import numeral from '@elastic/numeral'; import { ml } from '../../../../services/ml_api_service'; import { AnalysisResult, InputOverrides } from '../../../../../../common/types/file_datavisualizer'; import { + MB, MAX_FILE_SIZE, MAX_FILE_SIZE_BYTES, ABSOLUTE_MAX_FILE_SIZE_BYTES, @@ -49,7 +50,7 @@ export function readFile(file: File) { if (data === null || typeof data === 'string') { return reject(); } - const size = UPLOAD_SIZE_MB * Math.pow(2, 20); + const size = UPLOAD_SIZE_MB * MB; const fileContents = decoder.decode(data.slice(0, size)); if 
(fileContents === '') { diff --git a/x-pack/plugins/ml/public/application/services/ml_api_service/datavisualizer.ts b/x-pack/plugins/ml/public/application/services/ml_api_service/datavisualizer.ts index ce647b5401b0b..98a0d7b9b0a94 100644 --- a/x-pack/plugins/ml/public/application/services/ml_api_service/datavisualizer.ts +++ b/x-pack/plugins/ml/public/application/services/ml_api_service/datavisualizer.ts @@ -8,7 +8,6 @@ import { http } from '../http_service'; import { basePath } from './index'; -import { ImportResponse } from '../../../../../file_upload/common'; export const fileDatavisualizer = { analyzeFile(file: string, params: Record = {}) { @@ -20,36 +19,4 @@ export const fileDatavisualizer = { query: params, }); }, - - import({ - id, - index, - data, - settings, - mappings, - ingestPipeline, - }: { - id: string | undefined; - index: string; - data: any; - settings: any; - mappings: any; - ingestPipeline: any; - }) { - const query = id !== undefined ? { id } : {}; - const body = JSON.stringify({ - index, - data, - settings, - mappings, - ingestPipeline, - }); - - return http({ - path: `/api/file_upload/import`, - method: 'POST', - query, - body, - }); - }, }; diff --git a/x-pack/plugins/ml/public/application/util/dependency_cache.ts b/x-pack/plugins/ml/public/application/util/dependency_cache.ts index 17e5c50c4b00c..3df1e0a504649 100644 --- a/x-pack/plugins/ml/public/application/util/dependency_cache.ts +++ b/x-pack/plugins/ml/public/application/util/dependency_cache.ts @@ -23,6 +23,7 @@ import type { IndexPatternsContract, DataPublicPluginStart } from 'src/plugins/d import type { SharePluginStart } from 'src/plugins/share/public'; import type { SecurityPluginSetup } from '../../../../security/public'; import type { MapsStartApi } from '../../../../maps/public'; +import type { FileUploadPluginStart } from '../../../../fileUpload/public'; export interface DependencyCache { timefilter: DataPublicPluginSetup['query']['timefilter'] | null; @@ -43,6 +44,7 @@ 
export interface DependencyCache { i18n: I18nStart | null; urlGenerators: SharePluginStart['urlGenerators'] | null; maps: MapsStartApi | null; + fileUpload: FileUploadPluginStart | null; } const cache: DependencyCache = { @@ -64,6 +66,7 @@ const cache: DependencyCache = { i18n: null, urlGenerators: null, maps: null, + fileUpload: null, }; export function setDependencyCache(deps: Partial) { @@ -84,6 +87,9 @@ export function setDependencyCache(deps: Partial) { cache.security = deps.security || null; cache.i18n = deps.i18n || null; cache.urlGenerators = deps.urlGenerators || null; + if (deps.fileUpload) { + cache.fileUpload = deps.fileUpload; + } } export function getTimefilter() { @@ -209,3 +215,10 @@ export function clearCache() { cache[k as keyof DependencyCache] = null; }); } + +export function getFileUpload() { + if (cache.fileUpload === null) { + throw new Error("fileUpload hasn't been initialized"); + } + return cache.fileUpload; +} diff --git a/x-pack/plugins/ml/public/plugin.ts b/x-pack/plugins/ml/public/plugin.ts index b4eb5a6d702b7..65f26e0c26f08 100644 --- a/x-pack/plugins/ml/public/plugin.ts +++ b/x-pack/plugins/ml/public/plugin.ts @@ -52,6 +52,7 @@ import { TriggersAndActionsUIPublicPluginStart, } from '../../triggers_actions_ui/public'; import { registerMlAlerts } from './alerting/register_ml_alerts'; +import { FileUploadPluginStart } from '../../file_upload/public'; export interface MlStartDependencies { data: DataPublicPluginStart; @@ -63,6 +64,7 @@ export interface MlStartDependencies { maps?: MapsStartApi; lens?: LensPublicStart; triggersActionsUi: TriggersAndActionsUIPublicPluginStart; + fileUpload: FileUploadPluginStart; } export interface MlSetupDependencies { @@ -189,6 +191,7 @@ export class MlPlugin implements Plugin { basePath: core.http.basePath, http: core.http, i18n: core.i18n, + fileUpload: deps.fileUpload, }); registerMlAlerts(deps.triggersActionsUi.alertTypeRegistry); return { From 2f4d211bb47ca97c533d5ca8e55e9d15e1ae2fc9 Mon Sep 17 
00:00:00 2001 From: Nathan Reese Date: Mon, 15 Feb 2021 15:45:48 -0700 Subject: [PATCH 02/11] move file_parse logic into GeoJsonImporter --- .../components/json_index_file_picker.js | 105 +++++------- .../components/json_upload_and_parse.js | 19 ++- .../public/importer/geojson_importer.ts | 126 +++++++++++++++ .../file_upload/public/importer/importer.ts | 20 +-- .../public/importer/importer_factory.ts | 2 +- .../public/importer/message_importer.ts | 2 +- .../public/importer/ndjson_importer.ts | 4 +- .../file_upload/public/util/file_parser.js | 152 ------------------ .../public/util/file_parser.test.js | 18 +-- .../components/import_view/import_view.js | 14 +- 10 files changed, 204 insertions(+), 258 deletions(-) create mode 100644 x-pack/plugins/file_upload/public/importer/geojson_importer.ts delete mode 100644 x-pack/plugins/file_upload/public/util/file_parser.js diff --git a/x-pack/plugins/file_upload/public/components/json_index_file_picker.js b/x-pack/plugins/file_upload/public/components/json_index_file_picker.js index a92412ae9d697..bfc06d0fc7273 100644 --- a/x-pack/plugins/file_upload/public/components/json_index_file_picker.js +++ b/x-pack/plugins/file_upload/public/components/json_index_file_picker.js @@ -5,11 +5,11 @@ * 2.0. 
*/ +import _ from 'lodash'; import React, { Fragment, Component } from 'react'; import { EuiFilePicker, EuiFormRow, EuiProgress } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; import { i18n } from '@kbn/i18n'; -import { parseFile } from '../util/file_parser'; const MAX_FILE_SIZE = 52428800; const ACCEPTABLE_FILETYPES = ['json', 'geojson']; @@ -33,7 +33,7 @@ export class JsonIndexFilePicker extends Component { this._isMounted = false; } - getFileParseActive = () => this._isMounted && this.state.fileParseActive; + isFileParseActive = () => this._isMounted && this.state.fileParseActive; _fileHandler = (fileList) => { const fileArr = Array.from(fileList); @@ -61,36 +61,6 @@ export class JsonIndexFilePicker extends Component { ); }; - _checkFileSize = ({ size }) => { - const fileSizeValid = true; - try { - if (size > MAX_FILE_SIZE) { - const humanReadableSize = bytesToSize(size); - const humanReadableMaxSize = bytesToSize(MAX_FILE_SIZE); - throw new Error( - i18n.translate('xpack.fileUpload.jsonIndexFilePicker.acceptableFileSize', { - defaultMessage: 'File size {fileSize} exceeds max file size of {maxFileSize}', - values: { - fileSize: humanReadableSize, - maxFileSize: humanReadableMaxSize, - }, - }) - ); - } - } catch (error) { - this.setState({ - fileUploadError: i18n.translate('xpack.fileUpload.jsonIndexFilePicker.fileSizeError', { - defaultMessage: 'File size error: {errorMessage}', - values: { - errorMessage: error.message, - }, - }), - }); - return; - } - return fileSizeValid; - }; - _getFileNameAndCheckType({ name }) { let fileNameOnly; try { @@ -136,7 +106,7 @@ export class JsonIndexFilePicker extends Component { setFileProgress = ({ featuresProcessed, bytesProcessed, totalBytes }) => { const percentageProcessed = parseInt((100 * bytesProcessed) / totalBytes); - if (this.getFileParseActive()) { + if (this.isFileParseActive()) { this.setState({ featuresProcessed, percentageProcessed }); } }; @@ -148,42 +118,52 @@ export class 
JsonIndexFilePicker extends Component { setParsedFile, resetFileAndIndexSettings, onFileUpload, - transformDetails, setIndexName, } = this.props; - const fileSizeValid = this._checkFileSize(file); + if (file.size > MAX_FILE_SIZE) { + this.setState({ + fileUploadError: i18n.translate('xpack.fileUpload.jsonIndexFilePicker.acceptableFileSize', { + defaultMessage: 'File size {fileSize} exceeds maximum file size of {maxFileSize}', + values: { + fileSize: bytesToSize(file.size), + maxFileSize: bytesToSize(MAX_FILE_SIZE), + }, + }), + }); + resetFileAndIndexSettings(); + return; + } + const defaultIndexName = this._getFileNameAndCheckType(file); - if (!fileSizeValid || !defaultIndexName) { + if (!defaultIndexName) { resetFileAndIndexSettings(); return; } - // Parse file - const fileResult = await parseFile({ - file, - transformDetails, - onFileUpload, - setFileProgress: this.setFileProgress, - getFileParseActive: this.getFileParseActive, - }).catch((err) => { - if (this._isMounted) { - this.setState({ - fileParseActive: false, - percentageProcessed: 0, - featuresProcessed: 0, - fileUploadError: ( - - ), - }); - } - }); + const fileResult = await this.props.geojsonImporter + .readFile(file, this.setFileProgress, this.isFileParseActive) + .catch((err) => { + if (this._isMounted) { + this.setState({ + fileParseActive: false, + percentageProcessed: 0, + featuresProcessed: 0, + fileUploadError: ( + + ), + }); + resetFileAndIndexSettings(); + return; + } + }); + if (!this._isMounted) { return; } @@ -214,6 +194,9 @@ export class JsonIndexFilePicker extends Component { ), }); } + if (parsedGeojson) { + onFileUpload(parsedGeojson, _.get(parsedGeojson, 'name', file.name)); + } setIndexName(defaultIndexName); setFileRef(file); setParsedFile(parsedGeojson); diff --git a/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js b/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js index 6a9d7ce74fe84..0ab60a7de436c 100644 --- 
a/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js +++ b/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js @@ -15,6 +15,7 @@ import { IndexSettings } from './index_settings'; import { JsonIndexFilePicker } from './json_index_file_picker'; import { JsonImportProgress } from './json_import_progress'; import _ from 'lodash'; +import { GeoJsonImporter } from '../importer/geojson_importer'; const INDEXING_STAGE = { INDEXING_STARTED: i18n.translate('xpack.fileUpload.jsonUploadAndParse.dataIndexingStarted', { @@ -63,6 +64,7 @@ export class JsonUploadAndParse extends Component { currentIndexingStage: INDEXING_STAGE.INDEXING_STARTED, indexDataResp: '', indexPatternResp: '', + geojsonImporter: new GeoJsonImporter(), }; componentDidMount() { @@ -264,7 +266,6 @@ export class JsonUploadAndParse extends Component { indexTypes, showImportProgress, } = this.state; - const { onFileUpload, transformDetails } = this.props; return ( @@ -282,15 +283,13 @@ export class JsonUploadAndParse extends Component { ) : ( this.setState({ indexName }), - setFileRef: (fileRef) => this.setState({ fileRef }), - setParsedFile: (parsedFile) => this.setState({ parsedFile }), - transformDetails, - resetFileAndIndexSettings: this._resetFileAndIndexSettings, - }} + onFileUpload={this.props.onFileUpload} + fileRef={fileRef} + setIndexName={(indexName) => this.setState({ indexName })} + setFileRef={(fileRef) => this.setState({ fileRef })} + setParsedFile={(parsedFile) => this.setState({ parsedFile })} + resetFileAndIndexSettings={this._resetFileAndIndexSettings} + geojsonImporter={this.state.geojsonImporter} /> void, + isFileParseActive: () => boolean + ) { + if (!file) { + throw new Error( + i18n.translate('xpack.fileUpload.fileParser.noFileProvided', { + defaultMessage: 'Error, no file provided', + }) + ); + } + + const filePromise = new Promise(async (resolve, reject) => { + const batches = await loadInBatches(file, JSONLoader, { + json: { + jsonpaths: 
['$.features'], + _rootObjectBatches: true, + }, + }); + + let featuresProcessed = 0; + const features: Feature[] = []; + const errors: string = []; + let boolGeometryErrs = false; + let parsedGeojson; + for await (const batch of batches) { + if (!isFileParseActive()) { + break; + } + + if (batch.batchType === 'root-object-batch-complete') { + if (featuresProcessed > 0) { + parsedGeojson = { ...batch.container, features }; + } else { + // Handle single feature geoJson + const cleanedSingleFeature = geoJsonCleanAndValidate(batch.container); + if (cleanedSingleFeature.geometry && cleanedSingleFeature.geometry.type) { + parsedGeojson = cleanedSingleFeature; + featuresProcessed++; + } + } + } else { + for (const feature of batch.data) { + if (!feature.geometry || !feature.geometry.type) { + if (!boolGeometryErrs) { + boolGeometryErrs = true; + errors.push( + new Error( + i18n.translate('xpack.fileUpload.fileParser.featuresOmitted', { + defaultMessage: 'Some features without geometry omitted', + }) + ) + ); + } + } else { + const cleanFeature = geoJsonCleanAndValidate(feature); + features.push(cleanFeature); + featuresProcessed++; + } + } + } + + setFileProgress({ + featuresProcessed, + bytesProcessed: batch.bytesUsed, + totalBytes: file.size, + }); + } + + if (!isFileParseActive()) { + resolve(null); + return; + } + + if (featuresProcessed === 0) { + reject( + new Error( + i18n.translate('xpack.fileUpload.fileParser.noFeaturesDetected', { + defaultMessage: 'Error, no features detected', + }) + ) + ); + } else { + resolve({ + errors, + parsedGeojson, + }); + } + }); + + return filePromise; + } +} diff --git a/x-pack/plugins/file_upload/public/importer/importer.ts b/x-pack/plugins/file_upload/public/importer/importer.ts index 1c85797bddebb..2ec8fb5ed7e1c 100644 --- a/x-pack/plugins/file_upload/public/importer/importer.ts +++ b/x-pack/plugins/file_upload/public/importer/importer.ts @@ -26,18 +26,8 @@ const IMPORT_RETRIES = 5; const STRING_CHUNKS_MB = 100; export abstract 
class Importer implements IImporter { - private _settings: Settings; - private _mappings: Mappings; - private _pipeline: IngestPipeline; - protected _docArray: ImportDoc[] = []; - constructor({ settings, mappings, pipeline }: ImportConfig) { - this._settings = settings; - this._mappings = mappings; - this._pipeline = pipeline; - } - public read(data: ArrayBuffer) { const decoder = new TextDecoder(); const size = STRING_CHUNKS_MB * MB; @@ -64,10 +54,12 @@ export abstract class Importer implements IImporter { protected abstract _createDocs(t: string): CreateDocsResponse; - public async initializeImport(index: string) { - const settings = this._settings; - const mappings = this._mappings; - const pipeline = this._pipeline; + public async initializeImport( + index: string, + settings: Settings, + mappings: Mappings, + pipeline: IngestPipeline + ) { updatePipelineTimezone(pipeline); // if no pipeline has been supplied, diff --git a/x-pack/plugins/file_upload/public/importer/importer_factory.ts b/x-pack/plugins/file_upload/public/importer/importer_factory.ts index 0690b66a4582b..8d9432c697fe1 100644 --- a/x-pack/plugins/file_upload/public/importer/importer_factory.ts +++ b/x-pack/plugins/file_upload/public/importer/importer_factory.ts @@ -19,7 +19,7 @@ export function importerFactory(format: string, options: ImportFactoryOptions) { case 'semi_structured_text': return new MessageImporter(options); case 'ndjson': - return new NdjsonImporter(options); + return new NdjsonImporter(); default: return; } diff --git a/x-pack/plugins/file_upload/public/importer/message_importer.ts b/x-pack/plugins/file_upload/public/importer/message_importer.ts index ae72f5f1368f7..f3855340f87fa 100644 --- a/x-pack/plugins/file_upload/public/importer/message_importer.ts +++ b/x-pack/plugins/file_upload/public/importer/message_importer.ts @@ -14,7 +14,7 @@ export class MessageImporter extends Importer { private _multilineStartRegex: RegExp | null; constructor(options: ImportFactoryOptions) { - 
super(options.importConfig); + super(); this._excludeLinesRegex = options.excludeLinesPattern === undefined ? null : new RegExp(options.excludeLinesPattern); diff --git a/x-pack/plugins/file_upload/public/importer/ndjson_importer.ts b/x-pack/plugins/file_upload/public/importer/ndjson_importer.ts index 8f0b6e39157f0..552cd0783c0d8 100644 --- a/x-pack/plugins/file_upload/public/importer/ndjson_importer.ts +++ b/x-pack/plugins/file_upload/public/importer/ndjson_importer.ts @@ -9,8 +9,8 @@ import { Importer } from './importer'; import { CreateDocsResponse, ImportFactoryOptions } from './types'; export class NdjsonImporter extends Importer { - constructor(options: ImportFactoryOptions) { - super(options.importConfig); + constructor() { + super(); } protected _createDocs(json: string): CreateDocsResponse { diff --git a/x-pack/plugins/file_upload/public/util/file_parser.js b/x-pack/plugins/file_upload/public/util/file_parser.js deleted file mode 100644 index 7488533bd6345..0000000000000 --- a/x-pack/plugins/file_upload/public/util/file_parser.js +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import _ from 'lodash'; -import { geoJsonCleanAndValidate } from './geo_json_clean_and_validate'; -import { i18n } from '@kbn/i18n'; -import { JSONLoader } from '@loaders.gl/json'; -import { loadInBatches } from '@loaders.gl/core'; - -export const fileHandler = async ({ - file, - setFileProgress, - cleanAndValidate, - getFileParseActive, -}) => { - const filePromise = new Promise(async (resolve, reject) => { - if (!file) { - reject( - new Error( - i18n.translate('xpack.fileUpload.fileParser.noFileProvided', { - defaultMessage: 'Error, no file provided', - }) - ) - ); - return; - } - - const batches = await loadInBatches(file, JSONLoader, { - json: { - jsonpaths: ['$.features'], - _rootObjectBatches: true, - }, - }); - - let featuresProcessed = 0; - const features = []; - const errors = []; - let boolGeometryErrs = false; - let parsedGeojson; - for await (const batch of batches) { - if (getFileParseActive()) { - switch (batch.batchType) { - case 'root-object-batch-complete': - if (!getFileParseActive()) { - resolve(null); - return; - } - if (featuresProcessed) { - parsedGeojson = { ...batch.container, features }; - } else { - // Handle single feature geoJson - const cleanedSingleFeature = cleanAndValidate(batch.container); - if (cleanedSingleFeature.geometry && cleanedSingleFeature.geometry.type) { - parsedGeojson = cleanedSingleFeature; - featuresProcessed++; - } - } - break; - default: - for (const feature of batch.data) { - if (!feature.geometry || !feature.geometry.type) { - if (!boolGeometryErrs) { - boolGeometryErrs = true; - errors.push( - new Error( - i18n.translate('xpack.fileUpload.fileParser.featuresOmitted', { - defaultMessage: 'Some features without geometry omitted', - }) - ) - ); - } - } else { - const cleanFeature = cleanAndValidate(feature); - features.push(cleanFeature); - featuresProcessed++; - } - } - } - setFileProgress({ - featuresProcessed, - bytesProcessed: batch.bytesUsed, - totalBytes: file.size, - }); - } else { - break; - } - } - 
- if (!featuresProcessed && getFileParseActive()) { - reject( - new Error( - i18n.translate('xpack.fileUpload.fileParser.noFeaturesDetected', { - defaultMessage: 'Error, no features detected', - }) - ) - ); - } else if (!getFileParseActive()) { - resolve(null); - } else { - resolve({ - errors, - parsedGeojson, - }); - } - }); - - return filePromise; -}; - -export function jsonPreview(fileResults, previewFunction) { - if (fileResults && fileResults.parsedGeojson && previewFunction) { - const defaultName = _.get(fileResults.parsedGeojson, 'name', 'Import File'); - previewFunction(fileResults.parsedGeojson, defaultName); - } -} - -export async function parseFile({ - file, - transformDetails, - onFileUpload: previewCallback = null, - setFileProgress, - getFileParseActive, -}) { - let cleanAndValidate; - if (typeof transformDetails === 'object') { - cleanAndValidate = transformDetails.cleanAndValidate; - } else { - switch (transformDetails) { - case 'geo': - cleanAndValidate = geoJsonCleanAndValidate; - break; - default: - throw i18n.translate('xpack.fileUpload.fileParser.transformDetailsNotDefined', { - defaultMessage: 'Index options for {transformDetails} not defined', - values: { transformDetails }, - }); - } - } - - const fileResults = await fileHandler({ - file, - setFileProgress, - cleanAndValidate, - getFileParseActive, - }); - jsonPreview(fileResults, previewCallback); - return fileResults; -} diff --git a/x-pack/plugins/file_upload/public/util/file_parser.test.js b/x-pack/plugins/file_upload/public/util/file_parser.test.js index 876cec9a7cc65..382df3df2a79a 100644 --- a/x-pack/plugins/file_upload/public/util/file_parser.test.js +++ b/x-pack/plugins/file_upload/public/util/file_parser.test.js @@ -32,7 +32,7 @@ const getFileRef = (geoJsonObj = testJson) => { return new File([fileContent], 'test.json', { type: 'text/json' }); }; -const getFileParseActiveFactory = (boolActive = true) => { +const isFileParseActiveFactory = (boolActive = true) => { return jest.fn(() 
=> boolActive); }; @@ -52,13 +52,13 @@ describe('parse file', () => { const fileRef = getFileRef(); // Cancel file parse - const getFileParseActive = getFileParseActiveFactory(false); + const isFileParseActive = isFileParseActiveFactory(false); const fileHandlerResult = await fileHandler({ file: fileRef, setFileProgress, cleanAndValidate, - getFileParseActive, + isFileParseActive, }); expect(fileHandlerResult).toBeNull(); @@ -66,12 +66,12 @@ describe('parse file', () => { it('should normally read single feature valid data', async () => { const fileRef = getFileRef(); - const getFileParseActive = getFileParseActiveFactory(); + const isFileParseActive = isFileParseActiveFactory(); const { errors } = await fileHandler({ file: fileRef, setFileProgress, cleanAndValidate: (x) => x, - getFileParseActive, + isFileParseActive, }); expect(setFileProgress.mock.calls.length).toEqual(1); @@ -91,12 +91,12 @@ describe('parse file', () => { }; const fileRef = getFileRef(testSinglePointJson); - const getFileParseActive = getFileParseActiveFactory(); + const isFileParseActive = isFileParseActiveFactory(); const { errors } = await fileHandler({ file: fileRef, setFileProgress, cleanAndValidate: (x) => x, - getFileParseActive, + isFileParseActive, }); expect(setFileProgress.mock.calls.length).toEqual(1); @@ -105,13 +105,13 @@ describe('parse file', () => { it('should throw if no valid features', async () => { const fileRef = getFileRef(); - const getFileParseActive = getFileParseActiveFactory(); + const isFileParseActive = isFileParseActiveFactory(); await fileHandler({ file: fileRef, setFileProgress, cleanAndValidate: () => ({ not: 'the correct content' }), // Simulate clean and validate fail - getFileParseActive, + isFileParseActive, }).catch((e) => { expect(e.message).toMatch('Error, no features detected'); }); diff --git a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js 
b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js index 99cc91d438a0e..e19686aedd455 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js +++ b/x-pack/plugins/ml/public/application/datavisualizer/file_based/components/import_view/import_view.js @@ -187,15 +187,9 @@ export class ImportView extends Component { errors.push(`${parseError} ${error.message}`); } - const indexCreationSettings = { - settings, - mappings, - }; - try { if (createPipeline) { pipeline = JSON.parse(pipelineString); - indexCreationSettings.pipeline = pipeline; } } catch (error) { success = false; @@ -225,7 +219,6 @@ export class ImportView extends Component { const importer = await getFileUpload().importerFactory(format, { excludeLinesPattern: results.exclude_lines_pattern, multilineStartPattern: results.multiline_start_pattern, - importConfig: indexCreationSettings, }); if (importer !== undefined) { const readResp = importer.read(data, this.setReadProgress); @@ -241,7 +234,12 @@ export class ImportView extends Component { } if (success) { - const initializeImportResp = await importer.initializeImport(index); + const initializeImportResp = await importer.initializeImport( + index, + settings, + mappings, + pipeline + ); const indexCreated = initializeImportResp.index !== undefined; this.setState({ From 54bf38bd87f5d08f16cabe6bef9892ab32ed2d2b Mon Sep 17 00:00:00 2001 From: Nathan Reese Date: Tue, 16 Feb 2021 07:26:30 -0700 Subject: [PATCH 03/11] move file_parser tests to geojson_importer tests --- .../geo_json_clean_and_validate.js | 0 .../geo_json_clean_and_validate.test.js | 0 .../geojson_importer/geojson_importer.test.js | 192 ++++++++++++++++++ .../geojson_importer.ts | 97 ++++----- .../public/importer/geojson_importer/index.ts | 8 + .../file_upload/public/importer/importer.ts | 2 +- .../public/importer/ndjson_importer.ts | 2 +- .../file_upload/public/importer/types.ts | 7 +- 
.../public/util/file_parser.test.js | 119 ----------- 9 files changed, 257 insertions(+), 170 deletions(-) rename x-pack/plugins/file_upload/public/{util => importer/geojson_importer}/geo_json_clean_and_validate.js (100%) rename x-pack/plugins/file_upload/public/{util => importer/geojson_importer}/geo_json_clean_and_validate.test.js (100%) create mode 100644 x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.test.js rename x-pack/plugins/file_upload/public/importer/{ => geojson_importer}/geojson_importer.ts (52%) create mode 100644 x-pack/plugins/file_upload/public/importer/geojson_importer/index.ts delete mode 100644 x-pack/plugins/file_upload/public/util/file_parser.test.js diff --git a/x-pack/plugins/file_upload/public/util/geo_json_clean_and_validate.js b/x-pack/plugins/file_upload/public/importer/geojson_importer/geo_json_clean_and_validate.js similarity index 100% rename from x-pack/plugins/file_upload/public/util/geo_json_clean_and_validate.js rename to x-pack/plugins/file_upload/public/importer/geojson_importer/geo_json_clean_and_validate.js diff --git a/x-pack/plugins/file_upload/public/util/geo_json_clean_and_validate.test.js b/x-pack/plugins/file_upload/public/importer/geojson_importer/geo_json_clean_and_validate.test.js similarity index 100% rename from x-pack/plugins/file_upload/public/util/geo_json_clean_and_validate.test.js rename to x-pack/plugins/file_upload/public/importer/geojson_importer/geo_json_clean_and_validate.test.js diff --git a/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.test.js b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.test.js new file mode 100644 index 0000000000000..243361c26e834 --- /dev/null +++ b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.test.js @@ -0,0 +1,192 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { GeoJsonImporter } from './geojson_importer'; +import '@loaders.gl/polyfills'; + +describe('readFile', () => { + const setFileProgress = jest.fn((a) => a); + const FEATURE_COLLECTION = { + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { + population: 200, + }, + geometry: { + type: 'Point', + coordinates: [-112.0372, 46.608058], + }, + }, + ], + }; + const FILE_WITH_FEATURE_COLLECTION = new File( + [JSON.stringify(FEATURE_COLLECTION)], + 'testfile.json', + { type: 'text/json' } + ); + + beforeEach(() => { + jest.resetAllMocks(); + jest.restoreAllMocks(); + }); + + test('should throw error if no file provided', async () => { + const importer = new GeoJsonImporter(); + await importer + .readFile(null, setFileProgress, () => { + return true; + }) + .catch((e) => { + expect(e.message).toMatch('Error, no file provided'); + }); + }); + + test('should abort if file parse is cancelled', async () => { + const importer = new GeoJsonImporter(); + + const results = await importer.readFile(FILE_WITH_FEATURE_COLLECTION, setFileProgress, () => { + return false; + }); + + expect(results).toBeNull(); + }); + + test('should read features from feature collection', async () => { + const importer = new GeoJsonImporter(); + const results = await importer.readFile(FILE_WITH_FEATURE_COLLECTION, setFileProgress, () => { + return true; + }); + + expect(setFileProgress).toHaveBeenCalled(); + expect(results).toEqual({ + errors: [], + parsedGeojson: FEATURE_COLLECTION, + }); + }); + + test('should remove features without geometry', async () => { + const fileWithFeaturesWithoutGeometry = new File( + [ + JSON.stringify({ + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { + population: 200, + }, + geometry: { + type: 'Point', + coordinates: [-112.0372, 46.608058], + }, + }, + {}, + { geometry: {} 
}, + ], + }), + ], + 'testfile.json', + { type: 'text/json' } + ); + + const importer = new GeoJsonImporter(); + const results = await importer.readFile( + fileWithFeaturesWithoutGeometry, + setFileProgress, + () => { + return true; + } + ); + + expect(setFileProgress).toHaveBeenCalled(); + expect(results).toEqual({ + errors: ['2 features without geometry omitted'], + parsedGeojson: FEATURE_COLLECTION, + }); + }); + + test('should read unwrapped feature', async () => { + const fileWithUnwrapedFeature = new File( + [ + JSON.stringify({ + type: 'Feature', + properties: { + population: 200, + }, + geometry: { + type: 'Point', + coordinates: [-112.0372, 46.608058], + }, + }), + ], + 'testfile.json', + { type: 'text/json' } + ); + + const importer = new GeoJsonImporter(); + const results = await importer.readFile(fileWithUnwrapedFeature, setFileProgress, () => { + return true; + }); + + expect(setFileProgress).toHaveBeenCalled(); + expect(results).toEqual({ + errors: [], + parsedGeojson: FEATURE_COLLECTION, + }); + }); + + test('should throw if no features', async () => { + const fileWithNoFeatures = new File( + [ + JSON.stringify({ + type: 'FeatureCollection', + features: [], + }), + ], + 'testfile.json', + { type: 'text/json' } + ); + + const importer = new GeoJsonImporter(); + await importer + .readFile(fileWithNoFeatures, setFileProgress, () => { + return true; + }) + .catch((e) => { + expect(e.message).toMatch('Error, no features detected'); + }); + }); + + test('should throw if no features with geometry', async () => { + const fileWithFeaturesWithNoGeometry = new File( + [ + JSON.stringify({ + type: 'FeatureCollection', + features: [ + {}, + { + geometry: {}, + }, + ], + }), + ], + 'testfile.json', + { type: 'text/json' } + ); + + const importer = new GeoJsonImporter(); + await importer + .readFile(fileWithFeaturesWithNoGeometry, setFileProgress, () => { + return true; + }) + .catch((e) => { + expect(e.message).toMatch('Error, no features detected'); + }); + }); 
+}); diff --git a/x-pack/plugins/file_upload/public/importer/geojson_importer.ts b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts similarity index 52% rename from x-pack/plugins/file_upload/public/importer/geojson_importer.ts rename to x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts index 16ab862ad471d..69e1db5aa3e5b 100644 --- a/x-pack/plugins/file_upload/public/importer/geojson_importer.ts +++ b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts @@ -5,23 +5,29 @@ * 2.0. */ -import { Feature } from 'geojson'; +import { Feature, FeatureCollection } from 'geojson'; import { i18n } from '@kbn/i18n'; +// @ts-expect-error import { JSONLoader } from '@loaders.gl/json'; import { loadInBatches } from '@loaders.gl/core'; -import { ImportFactoryOptions } from './types'; -import { Importer } from './importer'; -import { geoJsonCleanAndValidate } from '../util/geo_json_clean_and_validate'; +import { CreateDocsResponse } from '../types'; +import { Importer } from '../importer'; +// @ts-expect-error +import { geoJsonCleanAndValidate } from './geo_json_clean_and_validate'; export class GeoJsonImporter extends Importer { constructor() { super(); } - public read(data: ArrayBuffer) { + public read(data: ArrayBuffer): { success: boolean } { throw new Error('read(data: ArrayBuffer) not supported, use readFile instead.'); } + protected _createDocs(text: string): CreateDocsResponse { + throw new Error('_createDocs not implemented.'); + } + public async readFile( file: File, setFileProgress: ({ @@ -34,7 +40,7 @@ export class GeoJsonImporter extends Importer { totalBytes: number; }) => void, isFileParseActive: () => boolean - ) { + ): Promise<{ errors: string[]; parsedGeojson: FeatureCollection } | null> { if (!file) { throw new Error( i18n.translate('xpack.fileUpload.fileParser.noFileProvided', { @@ -43,7 +49,7 @@ export class GeoJsonImporter extends Importer { ); } - const filePromise = new 
Promise(async (resolve, reject) => { + return new Promise(async (resolve, reject) => { const batches = await loadInBatches(file, JSONLoader, { json: { jsonpaths: ['$.features'], @@ -51,50 +57,23 @@ export class GeoJsonImporter extends Importer { }, }); - let featuresProcessed = 0; - const features: Feature[] = []; - const errors: string = []; - let boolGeometryErrs = false; - let parsedGeojson; + const rawFeatures: unknown[] = []; for await (const batch of batches) { if (!isFileParseActive()) { break; } if (batch.batchType === 'root-object-batch-complete') { - if (featuresProcessed > 0) { - parsedGeojson = { ...batch.container, features }; - } else { - // Handle single feature geoJson - const cleanedSingleFeature = geoJsonCleanAndValidate(batch.container); - if (cleanedSingleFeature.geometry && cleanedSingleFeature.geometry.type) { - parsedGeojson = cleanedSingleFeature; - featuresProcessed++; - } + // Handle single feature geoJson + if (rawFeatures.length === 0) { + rawFeatures.push(batch.container); } } else { - for (const feature of batch.data) { - if (!feature.geometry || !feature.geometry.type) { - if (!boolGeometryErrs) { - boolGeometryErrs = true; - errors.push( - new Error( - i18n.translate('xpack.fileUpload.fileParser.featuresOmitted', { - defaultMessage: 'Some features without geometry omitted', - }) - ) - ); - } - } else { - const cleanFeature = geoJsonCleanAndValidate(feature); - features.push(cleanFeature); - featuresProcessed++; - } - } + rawFeatures.push(...batch.data); } setFileProgress({ - featuresProcessed, + featuresProcessed: rawFeatures.length, bytesProcessed: batch.bytesUsed, totalBytes: file.size, }); @@ -105,7 +84,7 @@ export class GeoJsonImporter extends Importer { return; } - if (featuresProcessed === 0) { + if (rawFeatures.length === 0) { reject( new Error( i18n.translate('xpack.fileUpload.fileParser.noFeaturesDetected', { @@ -113,14 +92,36 @@ export class GeoJsonImporter extends Importer { }) ) ); - } else { - resolve({ - errors, - 
parsedGeojson, - }); + return; } - }); - return filePromise; + const features: Feature[] = []; + let invalidCount = 0; + for (let i = 0; i < rawFeatures.length; i++) { + const rawFeature = rawFeatures[i] as Feature; + if (!rawFeature.geometry || !rawFeature.geometry.type) { + invalidCount++; + } else { + features.push(geoJsonCleanAndValidate(rawFeature)); + } + } + + const errors: string[] = []; + if (invalidCount > 0) { + errors.push( + i18n.translate('xpack.fileUpload.fileParser.featuresOmitted', { + defaultMessage: '{invalidCount} features without geometry omitted', + values: { invalidCount }, + }) + ); + } + resolve({ + errors, + parsedGeojson: { + type: 'FeatureCollection', + features, + }, + }); + }); } } diff --git a/x-pack/plugins/file_upload/public/importer/geojson_importer/index.ts b/x-pack/plugins/file_upload/public/importer/geojson_importer/index.ts new file mode 100644 index 0000000000000..9ffb84e603161 --- /dev/null +++ b/x-pack/plugins/file_upload/public/importer/geojson_importer/index.ts @@ -0,0 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +export { GeoJsonImporter } from './geojson_importer'; diff --git a/x-pack/plugins/file_upload/public/importer/importer.ts b/x-pack/plugins/file_upload/public/importer/importer.ts index 2ec8fb5ed7e1c..146d6f7f2b4a6 100644 --- a/x-pack/plugins/file_upload/public/importer/importer.ts +++ b/x-pack/plugins/file_upload/public/importer/importer.ts @@ -18,7 +18,7 @@ import { IngestPipeline, MB, } from '../../common'; -import { CreateDocsResponse, IImporter, ImportConfig, ImportResults } from './types'; +import { CreateDocsResponse, IImporter, ImportResults } from './types'; const CHUNK_SIZE = 5000; const MAX_CHUNK_CHAR_COUNT = 1000000; diff --git a/x-pack/plugins/file_upload/public/importer/ndjson_importer.ts b/x-pack/plugins/file_upload/public/importer/ndjson_importer.ts index 552cd0783c0d8..7129a07440cf3 100644 --- a/x-pack/plugins/file_upload/public/importer/ndjson_importer.ts +++ b/x-pack/plugins/file_upload/public/importer/ndjson_importer.ts @@ -6,7 +6,7 @@ */ import { Importer } from './importer'; -import { CreateDocsResponse, ImportFactoryOptions } from './types'; +import { CreateDocsResponse } from './types'; export class NdjsonImporter extends Importer { constructor() { diff --git a/x-pack/plugins/file_upload/public/importer/types.ts b/x-pack/plugins/file_upload/public/importer/types.ts index 81324f02b68cc..2654b7dc3bbe8 100644 --- a/x-pack/plugins/file_upload/public/importer/types.ts +++ b/x-pack/plugins/file_upload/public/importer/types.ts @@ -35,7 +35,12 @@ export interface ImportFactoryOptions { export interface IImporter { read(data: ArrayBuffer): { success: boolean }; - initializeImport(index: string): Promise; + initializeImport( + index: string, + settings: Settings, + mappings: Mappings, + pipeline: IngestPipeline + ): Promise; import( id: string, index: string, diff --git a/x-pack/plugins/file_upload/public/util/file_parser.test.js b/x-pack/plugins/file_upload/public/util/file_parser.test.js deleted file mode 100644 index 
382df3df2a79a..0000000000000 --- a/x-pack/plugins/file_upload/public/util/file_parser.test.js +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import { fileHandler } from './file_parser'; -import '@loaders.gl/polyfills'; - -const cleanAndValidate = jest.fn((a) => a); -const setFileProgress = jest.fn((a) => a); - -const testJson = { - type: 'Feature', - geometry: { - type: 'Polygon', - coordinates: [ - [ - [-104.05, 78.99], - [-87.22, 78.98], - [-86.58, 75.94], - [-104.03, 75.94], - [-104.05, 78.99], - ], - ], - }, -}; - -const getFileRef = (geoJsonObj = testJson) => { - const fileContent = JSON.stringify(geoJsonObj); - return new File([fileContent], 'test.json', { type: 'text/json' }); -}; - -const isFileParseActiveFactory = (boolActive = true) => { - return jest.fn(() => boolActive); -}; - -describe('parse file', () => { - afterEach(() => { - jest.resetAllMocks(); - jest.restoreAllMocks(); - }); - - it('should reject and throw error if no file provided', async () => { - await fileHandler({ file: null }).catch((e) => { - expect(e.message).toMatch('Error, no file provided'); - }); - }); - - it('should abort and resolve to null if file parse cancelled', async () => { - const fileRef = getFileRef(); - - // Cancel file parse - const isFileParseActive = isFileParseActiveFactory(false); - - const fileHandlerResult = await fileHandler({ - file: fileRef, - setFileProgress, - cleanAndValidate, - isFileParseActive, - }); - - expect(fileHandlerResult).toBeNull(); - }); - - it('should normally read single feature valid data', async () => { - const fileRef = getFileRef(); - const isFileParseActive = isFileParseActiveFactory(); - const { errors } = await fileHandler({ - file: fileRef, - setFileProgress, - cleanAndValidate: (x) => x, - 
isFileParseActive, - }); - - expect(setFileProgress.mock.calls.length).toEqual(1); - expect(errors.length).toEqual(0); - }); - - it('should normally read a valid single feature file', async () => { - const testSinglePointJson = { - type: 'Feature', - geometry: { - type: 'Point', - coordinates: [30, 10], - }, - properties: { - name: 'Point island', - }, - }; - - const fileRef = getFileRef(testSinglePointJson); - const isFileParseActive = isFileParseActiveFactory(); - const { errors } = await fileHandler({ - file: fileRef, - setFileProgress, - cleanAndValidate: (x) => x, - isFileParseActive, - }); - - expect(setFileProgress.mock.calls.length).toEqual(1); - expect(errors.length).toEqual(0); - }); - - it('should throw if no valid features', async () => { - const fileRef = getFileRef(); - const isFileParseActive = isFileParseActiveFactory(); - - await fileHandler({ - file: fileRef, - setFileProgress, - cleanAndValidate: () => ({ not: 'the correct content' }), // Simulate clean and validate fail - isFileParseActive, - }).catch((e) => { - expect(e.message).toMatch('Error, no features detected'); - }); - }); -}); From 1fe21a11c2c8f2b645193175e48150df33d4eefb Mon Sep 17 00:00:00 2001 From: Nathan Reese Date: Tue, 16 Feb 2021 07:32:26 -0700 Subject: [PATCH 04/11] rename geo_json_clean_and_validate to geojson_clean_and_validate --- ...json_clean_and_validate.js => geojson_clean_and_validate.js} | 0 ..._and_validate.test.js => geojson_clean_and_validate.test.js} | 2 +- .../public/importer/geojson_importer/geojson_importer.ts | 2 +- 3 files changed, 2 insertions(+), 2 deletions(-) rename x-pack/plugins/file_upload/public/importer/geojson_importer/{geo_json_clean_and_validate.js => geojson_clean_and_validate.js} (100%) rename x-pack/plugins/file_upload/public/importer/geojson_importer/{geo_json_clean_and_validate.test.js => geojson_clean_and_validate.test.js} (97%) diff --git a/x-pack/plugins/file_upload/public/importer/geojson_importer/geo_json_clean_and_validate.js 
b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_clean_and_validate.js similarity index 100% rename from x-pack/plugins/file_upload/public/importer/geojson_importer/geo_json_clean_and_validate.js rename to x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_clean_and_validate.js diff --git a/x-pack/plugins/file_upload/public/importer/geojson_importer/geo_json_clean_and_validate.test.js b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_clean_and_validate.test.js similarity index 97% rename from x-pack/plugins/file_upload/public/importer/geojson_importer/geo_json_clean_and_validate.test.js rename to x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_clean_and_validate.test.js index 7203e50674c38..0f8d126251dfb 100644 --- a/x-pack/plugins/file_upload/public/importer/geojson_importer/geo_json_clean_and_validate.test.js +++ b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_clean_and_validate.test.js @@ -5,7 +5,7 @@ * 2.0. 
*/ -import { cleanGeometry, geoJsonCleanAndValidate } from './geo_json_clean_and_validate'; +import { cleanGeometry, geoJsonCleanAndValidate } from './geojson_clean_and_validate'; import * as jsts from 'jsts'; describe('geo_json_clean_and_validate', () => { diff --git a/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts index 69e1db5aa3e5b..f182383554d13 100644 --- a/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts +++ b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts @@ -13,7 +13,7 @@ import { loadInBatches } from '@loaders.gl/core'; import { CreateDocsResponse } from '../types'; import { Importer } from '../importer'; // @ts-expect-error -import { geoJsonCleanAndValidate } from './geo_json_clean_and_validate'; +import { geoJsonCleanAndValidate } from './geojson_clean_and_validate'; export class GeoJsonImporter extends Importer { constructor() { From 914a97ed6be5054861d8ebeab5726ef506c25c1a Mon Sep 17 00:00:00 2001 From: Nathan Reese Date: Tue, 16 Feb 2021 13:24:16 -0700 Subject: [PATCH 05/11] replace file_upload import with Importer.import --- x-pack/plugins/file_upload/common/types.ts | 2 +- .../components/json_index_file_picker.js | 15 +- .../components/json_upload_and_parse.js | 191 +++++++++-------- .../geojson_importer/geojson_importer.test.js | 63 ++++-- .../geojson_importer/geojson_importer.ts | 56 ++++- .../file_upload/public/importer/importer.ts | 4 +- .../file_upload/public/importer/types.ts | 4 +- .../file_upload/public/util/geo_processing.js | 78 ------- .../public/util/geo_processing.test.js | 149 ------------- .../public/util/indexing_service.js | 200 +----------------- .../public/util/size_limited_chunking.js | 37 ---- .../public/util/size_limited_chunking.test.js | 28 --- .../layers/file_upload_wizard/wizard.tsx | 40 ++-- 13 files changed, 230 insertions(+), 637 deletions(-) 
delete mode 100644 x-pack/plugins/file_upload/public/util/geo_processing.js delete mode 100644 x-pack/plugins/file_upload/public/util/geo_processing.test.js delete mode 100644 x-pack/plugins/file_upload/public/util/size_limited_chunking.js delete mode 100644 x-pack/plugins/file_upload/public/util/size_limited_chunking.test.js diff --git a/x-pack/plugins/file_upload/common/types.ts b/x-pack/plugins/file_upload/common/types.ts index eac001fc02f15..c01e514f0f720 100644 --- a/x-pack/plugins/file_upload/common/types.ts +++ b/x-pack/plugins/file_upload/common/types.ts @@ -26,7 +26,7 @@ export interface Doc { message: string; } -export type ImportDoc = Doc | string; +export type ImportDoc = Doc | string | object; export interface Settings { pipeline?: string; diff --git a/x-pack/plugins/file_upload/public/components/json_index_file_picker.js b/x-pack/plugins/file_upload/public/components/json_index_file_picker.js index bfc06d0fc7273..e8af6efa2c9bc 100644 --- a/x-pack/plugins/file_upload/public/components/json_index_file_picker.js +++ b/x-pack/plugins/file_upload/public/components/json_index_file_picker.js @@ -5,7 +5,6 @@ * 2.0. */ -import _ from 'lodash'; import React, { Fragment, Component } from 'react'; import { EuiFilePicker, EuiFormRow, EuiProgress } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; @@ -178,28 +177,24 @@ export class JsonIndexFilePicker extends Component { resetFileAndIndexSettings(); return; } - const { errors, parsedGeojson } = fileResult; - if (errors.length) { - // Set only the first error for now (since there's only one). 
- // TODO: Add handling in case of further errors - const error = errors[0]; + if (fileResult.errors.length) { this.setState({ fileUploadError: ( ), }); } - if (parsedGeojson) { - onFileUpload(parsedGeojson, _.get(parsedGeojson, 'name', file.name)); + if (fileResult.parsedGeojson) { + onFileUpload(fileResult.parsedGeojson, file.name); } setIndexName(defaultIndexName); setFileRef(file); - setParsedFile(parsedGeojson); + setParsedFile(fileResult); } render() { diff --git a/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js b/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js index 0ab60a7de436c..0742f2fcb4706 100644 --- a/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js +++ b/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js @@ -9,13 +9,13 @@ import React, { Component, Fragment } from 'react'; import { i18n } from '@kbn/i18n'; import { EuiForm } from '@elastic/eui'; import PropTypes from 'prop-types'; -import { indexData, createIndexPattern } from '../util/indexing_service'; -import { getGeoIndexTypesForFeatures } from '../util/geo_processing'; import { IndexSettings } from './index_settings'; import { JsonIndexFilePicker } from './json_index_file_picker'; import { JsonImportProgress } from './json_import_progress'; import _ from 'lodash'; import { GeoJsonImporter } from '../importer/geojson_importer'; +import { ES_FIELD_TYPES } from '../../../../../src/plugins/data/public'; +import { getIndexPatternService } from '../kibana_services'; const INDEXING_STAGE = { INDEXING_STARTED: i18n.translate('xpack.fileUpload.jsonUploadAndParse.dataIndexingStarted', { @@ -89,11 +89,8 @@ export class JsonUploadAndParse extends Component { }); }; - componentDidUpdate(prevProps, prevState) { - if (!_.isEqual(prevState.parsedFile, this.state.parsedFile)) { - this._setIndexTypes({ ...this.state, ...this.props }); - } - this._setSelectedType(this.state); + componentDidUpdate() { + this._updateIndexType(); 
this._setIndexReady({ ...this.state, ...this.props }); this._indexData({ ...this.state, ...this.props }); if (this.props.isIndexingTriggered && !this.state.showImportProgress && this._isMounted) { @@ -101,11 +98,30 @@ export class JsonUploadAndParse extends Component { } } - _setSelectedType = ({ selectedIndexType, indexTypes }) => { - if (!selectedIndexType && indexTypes.length) { - this.setState({ selectedIndexType: indexTypes[0] }); + _updateIndexType() { + let nextIndexTypes = []; + if (this.state.parsedFile) { + nextIndexTypes = + this.state.parsedFile.geometryTypes.includes('Point') || + this.state.parsedFile.geometryTypes.includes('MultiPoint') + ? [ES_FIELD_TYPES.GEO_POINT, ES_FIELD_TYPES.GEO_SHAPE] + : [ES_FIELD_TYPES.GEO_SHAPE]; } - }; + if (!_.isEqual(nextIndexTypes, this.state.indexTypes)) { + this.setState({ indexTypes: nextIndexTypes }); + } + + if (!this.state.selectedIndexType && nextIndexTypes.length) { + // auto select index type + this.setState({ selectedIndexType: nextIndexTypes[0] }); + } else if ( + this.state.selectedIndexType && + !nextIndexTypes.includes(this.state.selectedIndexType) + ) { + // unselected indexType if selected type is not longer an option + this.setState({ selectedIndexType: null }); + } + } _setIndexReady = ({ parsedFile, @@ -133,14 +149,12 @@ export class JsonUploadAndParse extends Component { indexedFile, parsedFile, indexRequestInFlight, - transformDetails, indexName, - appName, selectedIndexType, isIndexingTriggered, isIndexReady, onIndexingComplete, - boolCreateIndexPattern, + onIndexingError, }) => { // Check index ready const filesAreEqual = _.isEqual(indexedFile, parsedFile); @@ -152,108 +166,108 @@ export class JsonUploadAndParse extends Component { currentIndexingStage: INDEXING_STAGE.WRITING_TO_INDEX, }); - // Index data - const indexDataResp = await indexData( - parsedFile, - transformDetails, + this.state.geojsonImporter.setDocs(parsedFile.parsedGeojson, selectedIndexType); + + // initialize import + const 
settings = { + number_of_shards: 1, + }; + const mappings = { + properties: { + coordinates: { + type: this.state.selectedIndexType, + }, + }, + }; + const ingestPipeline = {}; + const initializeImportResp = await this.state.geojsonImporter.initializeImport( indexName, - selectedIndexType, - appName + settings, + mappings, + ingestPipeline ); - if (!this._isMounted) { return; } - - // Index error - if (!indexDataResp.success) { + if (initializeImportResp.index === undefined || initializeImportResp.id === undefined) { this.setState({ - indexedFile: null, - indexDataResp, indexRequestInFlight: false, currentIndexingStage: INDEXING_STAGE.INDEXING_ERROR, }); this._resetFileAndIndexSettings(); - if (onIndexingComplete) { - onIndexingComplete({ indexDataResp }); - } + onIndexingError(); return; } - // Index data success. Update state & create index pattern - this.setState({ - indexDataResp, - indexedFile: parsedFile, - currentIndexingStage: INDEXING_STAGE.INDEXING_COMPLETE, - }); - let indexPatternResp; - if (boolCreateIndexPattern) { - indexPatternResp = await this._createIndexPattern(this.state); + // import file + const importResp = await this.state.geojsonImporter.import( + initializeImportResp.id, + indexName, + initializeImportResp.pipelineId, + () => {} + ); + if (!this._isMounted) { + return; } - - // Indexing complete, update state & callback (if any) - if (!this._isMounted || !indexPatternResp) { + if (!importResp.success) { + this.setState({ + indexDataResp: importResp, + indexRequestInFlight: false, + currentIndexingStage: INDEXING_STAGE.INDEXING_ERROR, + }); + this._resetFileAndIndexSettings(); + onIndexingError(); return; } this.setState({ - currentIndexingStage: INDEXING_STAGE.INDEX_PATTERN_COMPLETE, + indexDataResp: importResp, + indexedFile: parsedFile, + currentIndexingStage: INDEXING_STAGE.INDEXING_COMPLETE, }); - if (onIndexingComplete) { - onIndexingComplete({ - indexDataResp, - ...(boolCreateIndexPattern ? 
{ indexPatternResp } : {}), - }); - } - }; - _createIndexPattern = async ({ indexName }) => { - if (!this._isMounted) { - return; - } + // create index pattern this.setState({ indexPatternRequestInFlight: true, currentIndexingStage: INDEXING_STAGE.CREATING_INDEX_PATTERN, }); - const indexPatternResp = await createIndexPattern(indexName); - + let indexPattern; + try { + indexPattern = await getIndexPatternService().createAndSave( + { + title: indexName, + }, + true + ); + } catch (error) { + if (this._isMounted) { + this.setState({ + indexPatternRequestInFlight: false, + currentIndexingStage: INDEXING_STAGE.INDEX_PATTERN_ERROR, + }); + this._resetFileAndIndexSettings(); + onIndexingError(); + } + return; + } if (!this._isMounted) { return; } this.setState({ - indexPatternResp, + indexPatternResp: { + success: true, + id: indexPattern.id, + fields: indexPattern.fields, + }, indexPatternRequestInFlight: false, }); + this.setState({ + currentIndexingStage: INDEXING_STAGE.INDEX_PATTERN_COMPLETE, + }); this._resetFileAndIndexSettings(); - - return indexPatternResp; - }; - - // This is mostly for geo. Some data have multiple valid index types that can - // be chosen from, such as 'geo_point' vs. 'geo_shape' for point data - _setIndexTypes = ({ transformDetails, parsedFile }) => { - if (parsedFile) { - // User-provided index types - if (typeof transformDetails === 'object') { - this.setState({ indexTypes: transformDetails.indexTypes }); - } else { - // Included index types - switch (transformDetails) { - case 'geo': - const featureTypes = _.uniq( - parsedFile.features - ? 
parsedFile.features.map(({ geometry }) => geometry.type) - : [parsedFile.geometry.type] - ); - this.setState({ - indexTypes: getGeoIndexTypesForFeatures(featureTypes), - }); - break; - default: - this.setState({ indexTypes: [] }); - return; - } - } - } + onIndexingComplete({ + indexDataResp: importResp, + indexPattern, + }); }; render() { @@ -308,16 +322,13 @@ export class JsonUploadAndParse extends Component { JsonUploadAndParse.defaultProps = { isIndexingTriggered: false, - boolCreateIndexPattern: true, }; JsonUploadAndParse.propTypes = { - appName: PropTypes.string, isIndexingTriggered: PropTypes.bool, - boolCreateIndexPattern: PropTypes.bool, - transformDetails: PropTypes.oneOfType([PropTypes.string, PropTypes.object]), onIndexReadyStatusChange: PropTypes.func, onIndexingComplete: PropTypes.func, + onIndexingError: PropTypes.func, onFileUpload: PropTypes.func, onFileRemove: PropTypes.func, }; diff --git a/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.test.js b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.test.js index 243361c26e834..e348686dc060a 100644 --- a/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.test.js +++ b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.test.js @@ -6,25 +6,28 @@ */ import { GeoJsonImporter } from './geojson_importer'; +import { ES_FIELD_TYPES } from '../../../../../../src/plugins/data/public'; import '@loaders.gl/polyfills'; +const FEATURE_COLLECTION = { + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { + population: 200, + }, + geometry: { + type: 'Point', + coordinates: [-112.0372, 46.608058], + }, + }, + ], +}; + describe('readFile', () => { const setFileProgress = jest.fn((a) => a); - const FEATURE_COLLECTION = { - type: 'FeatureCollection', - features: [ - { - type: 'Feature', - properties: { - population: 200, - }, - geometry: { - type: 'Point', - coordinates: [-112.0372, 
46.608058], - }, - }, - ], - }; + const FILE_WITH_FEATURE_COLLECTION = new File( [JSON.stringify(FEATURE_COLLECTION)], 'testfile.json', @@ -66,6 +69,7 @@ describe('readFile', () => { expect(setFileProgress).toHaveBeenCalled(); expect(results).toEqual({ errors: [], + geometryTypes: ['Point'], parsedGeojson: FEATURE_COLLECTION, }); }); @@ -107,6 +111,7 @@ describe('readFile', () => { expect(setFileProgress).toHaveBeenCalled(); expect(results).toEqual({ errors: ['2 features without geometry omitted'], + geometryTypes: ['Point'], parsedGeojson: FEATURE_COLLECTION, }); }); @@ -137,6 +142,7 @@ describe('readFile', () => { expect(setFileProgress).toHaveBeenCalled(); expect(results).toEqual({ errors: [], + geometryTypes: ['Point'], parsedGeojson: FEATURE_COLLECTION, }); }); @@ -190,3 +196,30 @@ describe('readFile', () => { }); }); }); + +describe('setDocs', () => { + test('should convert features to geo_point ES documents', () => { + const importer = new GeoJsonImporter(); + importer.setDocs(FEATURE_COLLECTION, ES_FIELD_TYPES.GEO_POINT); + expect(importer.getDocs()).toEqual([ + { + coordinates: [-112.0372, 46.608058], + population: 200, + }, + ]); + }); + + test('should convert features to geo_shape ES documents', () => { + const importer = new GeoJsonImporter(); + importer.setDocs(FEATURE_COLLECTION, ES_FIELD_TYPES.GEO_SHAPE); + expect(importer.getDocs()).toEqual([ + { + coordinates: { + type: 'point', + coordinates: [-112.0372, 46.608058], + }, + population: 200, + }, + ]); + }); +}); diff --git a/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts index f182383554d13..c904bff94eba6 100644 --- a/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts +++ b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts @@ -5,13 +5,23 @@ * 2.0. 
*/ -import { Feature, FeatureCollection } from 'geojson'; +import { + Feature, + FeatureCollection, + Point, + MultiPoint, + LineString, + MultiLineString, + Polygon, + MultiPolygon, +} from 'geojson'; import { i18n } from '@kbn/i18n'; // @ts-expect-error import { JSONLoader } from '@loaders.gl/json'; import { loadInBatches } from '@loaders.gl/core'; import { CreateDocsResponse } from '../types'; import { Importer } from '../importer'; +import { ES_FIELD_TYPES } from '../../../../../../src/plugins/data/public'; // @ts-expect-error import { geoJsonCleanAndValidate } from './geojson_clean_and_validate'; @@ -28,6 +38,39 @@ export class GeoJsonImporter extends Importer { throw new Error('_createDocs not implemented.'); } + public getDocs() { + return this._docArray; + } + + public setDocs( + featureCollection: FeatureCollection, + geoFieldType: ES_FIELD_TYPES.GEO_POINT | ES_FIELD_TYPES.GEO_SHAPE + ) { + this._docArray = []; + for (let i = 0; i < featureCollection.features.length; i++) { + const feature = featureCollection.features[i]; + const geometry = feature.geometry as + | Point + | MultiPoint + | LineString + | MultiLineString + | Polygon + | MultiPolygon; + const coordinates = + geoFieldType === ES_FIELD_TYPES.GEO_SHAPE + ? { + type: geometry.type.toLowerCase(), + coordinates: geometry.coordinates, + } + : geometry.coordinates; + const properties = feature.properties ? 
feature.properties : {}; + this._docArray.push({ + coordinates, + ...properties, + }); + } + } + public async readFile( file: File, setFileProgress: ({ @@ -40,7 +83,11 @@ export class GeoJsonImporter extends Importer { totalBytes: number; }) => void, isFileParseActive: () => boolean - ): Promise<{ errors: string[]; parsedGeojson: FeatureCollection } | null> { + ): Promise<{ + errors: string[]; + geometryTypes: string[]; + parsedGeojson: FeatureCollection; + } | null> { if (!file) { throw new Error( i18n.translate('xpack.fileUpload.fileParser.noFileProvided', { @@ -96,12 +143,16 @@ export class GeoJsonImporter extends Importer { } const features: Feature[] = []; + const geometryTypesMap = new Map(); let invalidCount = 0; for (let i = 0; i < rawFeatures.length; i++) { const rawFeature = rawFeatures[i] as Feature; if (!rawFeature.geometry || !rawFeature.geometry.type) { invalidCount++; } else { + if (!geometryTypesMap.has(rawFeature.geometry.type)) { + geometryTypesMap.set(rawFeature.geometry.type, true); + } features.push(geoJsonCleanAndValidate(rawFeature)); } } @@ -117,6 +168,7 @@ export class GeoJsonImporter extends Importer { } resolve({ errors, + geometryTypes: Array.from(geometryTypesMap.keys()), parsedGeojson: { type: 'FeatureCollection', features, diff --git a/x-pack/plugins/file_upload/public/importer/importer.ts b/x-pack/plugins/file_upload/public/importer/importer.ts index 146d6f7f2b4a6..831dd557e14ca 100644 --- a/x-pack/plugins/file_upload/public/importer/importer.ts +++ b/x-pack/plugins/file_upload/public/importer/importer.ts @@ -72,7 +72,7 @@ export abstract class Importer implements IImporter { } : {}; - const createIndexResp = await callImportRoute({ + return await callImportRoute({ id: undefined, index, data: [], @@ -80,8 +80,6 @@ export abstract class Importer implements IImporter { mappings, ingestPipeline, }); - - return createIndexResp; } public async import( diff --git a/x-pack/plugins/file_upload/public/importer/types.ts 
b/x-pack/plugins/file_upload/public/importer/types.ts index 2654b7dc3bbe8..a2baee6b1dcd0 100644 --- a/x-pack/plugins/file_upload/public/importer/types.ts +++ b/x-pack/plugins/file_upload/public/importer/types.ts @@ -5,7 +5,7 @@ * 2.0. */ -import { ImportDoc, Mappings, Settings, IngestPipeline } from '../../common'; +import { IngestPipeline, ImportDoc, ImportResponse, Mappings, Settings } from '../../common'; export interface ImportConfig { settings: Settings; @@ -40,7 +40,7 @@ export interface IImporter { settings: Settings, mappings: Mappings, pipeline: IngestPipeline - ): Promise; + ): Promise; import( id: string, index: string, diff --git a/x-pack/plugins/file_upload/public/util/geo_processing.js b/x-pack/plugins/file_upload/public/util/geo_processing.js deleted file mode 100644 index c90c55c2b49ac..0000000000000 --- a/x-pack/plugins/file_upload/public/util/geo_processing.js +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import _ from 'lodash'; - -export const ES_GEO_FIELD_TYPE = { - GEO_POINT: 'geo_point', - GEO_SHAPE: 'geo_shape', -}; - -export function getGeoIndexTypesForFeatures(featureTypes) { - const hasNoFeatureType = !featureTypes || !featureTypes.length; - if (hasNoFeatureType) { - return []; - } - - const isPoint = featureTypes.includes('Point') || featureTypes.includes('MultiPoint'); - if (!isPoint) { - return [ES_GEO_FIELD_TYPE.GEO_SHAPE]; - } else if (isPoint && featureTypes.length === 1) { - return [ES_GEO_FIELD_TYPE.GEO_POINT, ES_GEO_FIELD_TYPE.GEO_SHAPE]; - } - return [ES_GEO_FIELD_TYPE.GEO_SHAPE]; -} - -// Reduces & flattens geojson to coordinates and properties (if any) -export function geoJsonToEs(parsedGeojson, datatype) { - if (!parsedGeojson) { - return []; - } - const features = parsedGeojson.type === 'Feature' ? [parsedGeojson] : parsedGeojson.features; - - if (datatype === ES_GEO_FIELD_TYPE.GEO_SHAPE) { - return features.reduce((accu, { geometry, properties }) => { - const { coordinates } = geometry; - accu.push({ - coordinates: { - type: geometry.type.toLowerCase(), - coordinates: coordinates, - }, - ...(!_.isEmpty(properties) ? { ...properties } : {}), - }); - return accu; - }, []); - } else if (datatype === ES_GEO_FIELD_TYPE.GEO_POINT) { - return features.reduce((accu, { geometry, properties }) => { - const { coordinates } = geometry; - accu.push({ - coordinates, - ...(!_.isEmpty(properties) ? 
{ ...properties } : {}), - }); - return accu; - }, []); - } else { - return []; - } -} - -export function getGeoJsonIndexingDetails(parsedGeojson, dataType) { - return { - data: geoJsonToEs(parsedGeojson, dataType), - ingestPipeline: {}, - mappings: { - properties: { - coordinates: { - type: dataType, - }, - }, - }, - settings: { - number_of_shards: 1, - }, - }; -} diff --git a/x-pack/plugins/file_upload/public/util/geo_processing.test.js b/x-pack/plugins/file_upload/public/util/geo_processing.test.js deleted file mode 100644 index 37b665c0a3e16..0000000000000 --- a/x-pack/plugins/file_upload/public/util/geo_processing.test.js +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import { ES_GEO_FIELD_TYPE, geoJsonToEs } from './geo_processing'; - -describe('geo_processing', () => { - describe('getGeoJsonToEs', () => { - const parsedPointFeature = { - type: 'Feature', - geometry: { - type: 'Point', - coordinates: [105.7, 18.9], - }, - properties: { - name: 'Dogeville', - }, - }; - - it('should convert point feature to flattened ES compatible feature', () => { - const esFeatureArr = geoJsonToEs(parsedPointFeature, ES_GEO_FIELD_TYPE.GEO_POINT); - expect(esFeatureArr).toEqual([ - { - coordinates: [105.7, 18.9], - name: 'Dogeville', - }, - ]); - }); - - it('should convert point feature collection to flattened ES compatible feature', () => { - const parsedPointFeatureCollection = { - type: 'FeatureCollection', - features: [ - { - type: 'Feature', - geometry: { - type: 'Point', - coordinates: [34.1, 15.3], - }, - properties: { - name: 'Meowsers City', - }, - }, - ], - }; - - const esFeatureArr = geoJsonToEs(parsedPointFeatureCollection, ES_GEO_FIELD_TYPE.GEO_POINT); - expect(esFeatureArr).toEqual([ - { - coordinates: [34.1, 15.3], - 
name: 'Meowsers City', - }, - ]); - }); - - it('should convert shape feature to flattened ES compatible feature', () => { - const parsedShapeFeature = { - type: 'Feature', - geometry: { - type: 'Polygon', - coordinates: [ - [ - [-104.05, 78.99], - [-87.22, 78.98], - [-86.58, 75.94], - [-104.03, 75.94], - [-104.05, 78.99], - ], - ], - }, - properties: { - name: 'Whiskers City', - }, - }; - - const esFeatureArr = geoJsonToEs(parsedShapeFeature, ES_GEO_FIELD_TYPE.GEO_SHAPE); - expect(esFeatureArr).toEqual([ - { - coordinates: { - coordinates: [ - [ - [-104.05, 78.99], - [-87.22, 78.98], - [-86.58, 75.94], - [-104.03, 75.94], - [-104.05, 78.99], - ], - ], - type: 'polygon', - }, - name: 'Whiskers City', - }, - ]); - }); - - it('should convert shape feature collection to flattened ES compatible feature', () => { - const parsedShapeFeatureCollection = { - type: 'FeatureCollection', - features: [ - { - type: 'Feature', - geometry: { - type: 'Polygon', - coordinates: [ - [ - [-104.05, 79.89], - [-87.22, 79.88], - [-86.58, 74.84], - [-104.03, 75.84], - [-104.05, 78.89], - ], - ], - }, - properties: { - name: 'Woof Crossing', - }, - }, - ], - }; - - const esFeatureArr = geoJsonToEs(parsedShapeFeatureCollection, ES_GEO_FIELD_TYPE.GEO_SHAPE); - expect(esFeatureArr).toEqual([ - { - coordinates: { - coordinates: [ - [ - [-104.05, 79.89], - [-87.22, 79.88], - [-86.58, 74.84], - [-104.03, 75.84], - [-104.05, 78.89], - ], - ], - type: 'polygon', - }, - name: 'Woof Crossing', - }, - ]); - }); - - it('should return an empty for an unhandled datatype', () => { - const esFeatureArr = geoJsonToEs(parsedPointFeature, 'different datatype'); - expect(esFeatureArr).toEqual([]); - }); - }); -}); diff --git a/x-pack/plugins/file_upload/public/util/indexing_service.js b/x-pack/plugins/file_upload/public/util/indexing_service.js index 92b1afbf16936..cb9bc9a2e1ce6 100644 --- a/x-pack/plugins/file_upload/public/util/indexing_service.js +++ 
b/x-pack/plugins/file_upload/public/util/indexing_service.js @@ -6,205 +6,7 @@ */ import { http as httpService } from './http_service'; -import { getIndexPatternService, getSavedObjectsClient } from '../kibana_services'; -import { getGeoJsonIndexingDetails } from './geo_processing'; -import { sizeLimitedChunking } from './size_limited_chunking'; -import { i18n } from '@kbn/i18n'; - -export async function indexData(parsedFile, transformDetails, indexName, dataType, appName) { - if (!parsedFile) { - throw i18n.translate('xpack.fileUpload.indexingService.noFileImported', { - defaultMessage: 'No file imported.', - }); - } - - // Perform any processing required on file prior to indexing - const transformResult = transformDataByFormatForIndexing(transformDetails, parsedFile, dataType); - if (!transformResult.success) { - throw i18n.translate('xpack.fileUpload.indexingService.transformResultError', { - defaultMessage: 'Error transforming data: {error}', - values: { error: transformResult.error }, - }); - } - - // Create new index - const { indexingDetails } = transformResult; - const createdIndex = await writeToIndex({ - appName, - ...indexingDetails, - id: undefined, - data: [], - index: indexName, - }); - const id = createdIndex && createdIndex.id; - try { - if (!id) { - throw i18n.translate('xpack.fileUpload.indexingService.errorCreatingIndex', { - defaultMessage: 'Error creating index', - }); - } - } catch (error) { - return { - error, - success: false, - }; - } - - // Write to index - const indexWriteResults = await chunkDataAndWriteToIndex({ - id, - index: indexName, - ...indexingDetails, - settings: {}, - mappings: {}, - }); - return indexWriteResults; -} - -function transformDataByFormatForIndexing(transform, parsedFile, dataType) { - let indexingDetails; - if (!transform) { - return { - success: false, - error: i18n.translate('xpack.fileUpload.indexingService.noTransformDefined', { - defaultMessage: 'No transform defined', - }), - }; - } - if (typeof transform 
!== 'object') { - switch (transform) { - case 'geo': - indexingDetails = getGeoJsonIndexingDetails(parsedFile, dataType); - break; - default: - return { - success: false, - error: i18n.translate('xpack.fileUpload.indexingService.noHandlingForTransform', { - defaultMessage: 'No handling defined for transform: {transform}', - values: { transform }, - }), - }; - } - } else { - // Custom transform - indexingDetails = transform.getIndexingDetails(parsedFile); - } - if (indexingDetails && indexingDetails.data && indexingDetails.data.length) { - return { - success: true, - indexingDetails, - }; - } else if (indexingDetails && indexingDetails.data) { - return { - success: false, - error: i18n.translate('xpack.fileUpload.indexingService.noIndexingDetailsForDatatype', { - defaultMessage: `No indexing details defined for datatype: {dataType}`, - values: { dataType }, - }), - }; - } else { - return { - success: false, - error: i18n.translate('xpack.fileUpload.indexingService.unknownTransformError', { - defaultMessage: 'Unknown error performing transform: {transform}', - values: { transform }, - }), - }; - } -} - -async function writeToIndex(indexingDetails) { - const query = indexingDetails.id ? { id: indexingDetails.id } : null; - const { index, data, settings, mappings, ingestPipeline } = indexingDetails; - - return await httpService({ - url: `/api/file_upload/import`, - method: 'POST', - ...(query ? 
{ query } : {}), - data: { - index, - data, - settings, - mappings, - ingestPipeline, - }, - }); -} - -async function chunkDataAndWriteToIndex({ id, index, data, mappings, settings }) { - if (!index) { - return { - success: false, - error: i18n.translate('xpack.fileUpload.noIndexSuppliedErrorMessage', { - defaultMessage: 'No index provided.', - }), - }; - } - - const chunks = sizeLimitedChunking(data); - - let success = true; - let failures = []; - let error; - let docCount = 0; - - for (let i = 0; i < chunks.length; i++) { - const aggs = { - id, - index, - data: chunks[i], - settings, - mappings, - ingestPipeline: {}, // TODO: Support custom ingest pipelines - }; - - let resp = { - success: false, - failures: [], - docCount: 0, - }; - resp = await writeToIndex(aggs); - - failures = [...failures, ...resp.failures]; - if (resp.success) { - ({ success } = resp); - docCount = docCount + resp.docCount; - } else { - success = false; - error = resp.error; - docCount = 0; - break; - } - } - - return { - success, - failures, - docCount, - ...(error ? { error } : {}), - }; -} - -export async function createIndexPattern(indexPatternName) { - try { - const indexPattern = await getIndexPatternService().createAndSave( - { - title: indexPatternName, - }, - true - ); - return { - success: true, - id: indexPattern.id, - fields: indexPattern.fields, - }; - } catch (error) { - return { - success: false, - error, - }; - } -} +import { getSavedObjectsClient } from '../kibana_services'; export const getExistingIndexNames = async () => { const indexes = await httpService({ diff --git a/x-pack/plugins/file_upload/public/util/size_limited_chunking.js b/x-pack/plugins/file_upload/public/util/size_limited_chunking.js deleted file mode 100644 index 09d4e8ca8e3a2..0000000000000 --- a/x-pack/plugins/file_upload/public/util/size_limited_chunking.js +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -const MAX_BYTES = 31457280; - -// MAX_BYTES is a good guideline for splitting up posts, but this logic -// occasionally sizes chunks so closely to the limit, that the remaining content -// of a post (besides features) tips it over the max. Adding a 2MB buffer -// to ensure this doesn't happen -const CHUNK_BUFFER = 2097152; - -// Add data elements to chunk until limit is met -export function sizeLimitedChunking(dataArr, maxByteSize = MAX_BYTES - CHUNK_BUFFER) { - let chunkSize = 0; - - return dataArr.reduce( - (accu, el) => { - const featureByteSize = new Blob([JSON.stringify(el)], { type: 'application/json' }).size; - if (featureByteSize > maxByteSize) { - throw `Some features exceed maximum chunk size of ${maxByteSize}`; - } else if (chunkSize + featureByteSize < maxByteSize) { - const lastChunkRef = accu.length - 1; - chunkSize += featureByteSize; - accu[lastChunkRef].push(el); - } else { - chunkSize = featureByteSize; - accu.push([el]); - } - return accu; - }, - [[]] - ); -} diff --git a/x-pack/plugins/file_upload/public/util/size_limited_chunking.test.js b/x-pack/plugins/file_upload/public/util/size_limited_chunking.test.js deleted file mode 100644 index a87c7a93ad83a..0000000000000 --- a/x-pack/plugins/file_upload/public/util/size_limited_chunking.test.js +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import { sizeLimitedChunking } from './size_limited_chunking'; - -describe('size_limited_chunking', () => { - // 1000 elements where element value === index - const testArr = Array.from(Array(1000), (_, x) => x); - - it('should limit each sub-array to the max chunk size', () => { - // Confirm valid geometry - const chunkLimit = 100; - const chunkedArr = sizeLimitedChunking(testArr, chunkLimit); - chunkedArr.forEach((sizeLimitedArr) => { - const arrByteSize = new Blob(sizeLimitedArr, { type: 'application/json' }).size; - - // Chunk size should be less than chunk limit - expect(arrByteSize).toBeLessThan(chunkLimit); - // # of arrays generated should be greater than original array length - // divided by chunk limit - expect(chunkedArr.length).toBeGreaterThanOrEqual(testArr.length / chunkLimit); - }); - }); -}); diff --git a/x-pack/plugins/maps/public/classes/layers/file_upload_wizard/wizard.tsx b/x-pack/plugins/maps/public/classes/layers/file_upload_wizard/wizard.tsx index 44a22f1529f18..5502eafe19ff2 100644 --- a/x-pack/plugins/maps/public/classes/layers/file_upload_wizard/wizard.tsx +++ b/x-pack/plugins/maps/public/classes/layers/file_upload_wizard/wizard.tsx @@ -8,7 +8,7 @@ import React, { Component } from 'react'; import { FeatureCollection } from 'geojson'; import { EuiPanel } from '@elastic/eui'; -import { IFieldType } from 'src/plugins/data/public'; +import { IndexPattern, IFieldType } from 'src/plugins/data/public'; import { ES_GEO_FIELD_TYPE, DEFAULT_MAX_RESULT_WINDOW, @@ -91,43 +91,28 @@ export class ClientFileCreateSourceEditor extends Component { + _onIndexingComplete = (results: { indexDataResp: unknown; indexPattern: IndexPattern }) => { if (!this._isMounted) { return; } this.props.advanceToNextStep(); - const { indexDataResp, indexPatternResp } = indexResponses; - - // @ts-ignore - const indexCreationFailed = !(indexDataResp && indexDataResp.success); - // @ts-ignore - const allDocsFailed = indexDataResp.failures.length === 
indexDataResp.docCount; - // @ts-ignore - const indexPatternCreationFailed = !(indexPatternResp && indexPatternResp.success); - if (indexCreationFailed || allDocsFailed || indexPatternCreationFailed) { - this.setState({ indexingStage: INDEXING_STAGE.ERROR }); - return; - } - - // @ts-ignore - const { fields, id: indexPatternId } = indexPatternResp; - const geoField = fields.find((field: IFieldType) => + const geoField = results.indexPattern.fields.find((field: IFieldType) => [ES_GEO_FIELD_TYPE.GEO_POINT as string, ES_GEO_FIELD_TYPE.GEO_SHAPE as string].includes( field.type ) ); - if (!indexPatternId || !geoField) { + if (!results.indexPattern.id || !geoField) { this.setState({ indexingStage: INDEXING_STAGE.ERROR }); this.props.previewLayers([]); } else { const esSearchSourceConfig = { - indexPatternId, + indexPatternId: results.indexPattern.id, geoField: geoField.name, // Only turn on bounds filter for large doc counts // @ts-ignore - filterByMapBounds: indexDataResp.docCount > DEFAULT_MAX_RESULT_WINDOW, + filterByMapBounds: results.indexDataResp.docCount > DEFAULT_MAX_RESULT_WINDOW, scalingType: geoField.type === ES_GEO_FIELD_TYPE.GEO_POINT ? 
SCALING_TYPES.CLUSTERS @@ -140,6 +125,16 @@ export class ClientFileCreateSourceEditor extends Component { + if (!this._isMounted) { + return; + } + + this.props.advanceToNextStep(); + + this.setState({ indexingStage: INDEXING_STAGE.ERROR }); + }; + // Called on file upload screen when UI state changes _onIndexReady = (indexReady: boolean) => { if (!this._isMounted) { @@ -167,13 +162,12 @@ export class ClientFileCreateSourceEditor extends Component ); From 4fc4e53e0ca69f5daa43ec106c6fe9d3ce177b21 Mon Sep 17 00:00:00 2001 From: Nathan Reese Date: Tue, 16 Feb 2021 13:37:18 -0700 Subject: [PATCH 06/11] simplify JsonIndexFilePicker props --- .../public/components/json_index_file_picker.js | 14 ++------------ .../public/components/json_upload_and_parse.js | 7 ++++--- 2 files changed, 6 insertions(+), 15 deletions(-) diff --git a/x-pack/plugins/file_upload/public/components/json_index_file_picker.js b/x-pack/plugins/file_upload/public/components/json_index_file_picker.js index e8af6efa2c9bc..78bf7378578de 100644 --- a/x-pack/plugins/file_upload/public/components/json_index_file_picker.js +++ b/x-pack/plugins/file_upload/public/components/json_index_file_picker.js @@ -112,13 +112,7 @@ export class JsonIndexFilePicker extends Component { async _parseFile(file) { const { currentFileTracker } = this.state; - const { - setFileRef, - setParsedFile, - resetFileAndIndexSettings, - onFileUpload, - setIndexName, - } = this.props; + const { setFileRef, setParsedFile, resetFileAndIndexSettings } = this.props; if (file.size > MAX_FILE_SIZE) { this.setState({ @@ -189,12 +183,8 @@ export class JsonIndexFilePicker extends Component { ), }); } - if (fileResult.parsedGeojson) { - onFileUpload(fileResult.parsedGeojson, file.name); - } - setIndexName(defaultIndexName); setFileRef(file); - setParsedFile(fileResult); + setParsedFile(fileResult, defaultIndexName); } render() { diff --git a/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js 
b/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js index 0742f2fcb4706..c735122709b03 100644 --- a/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js +++ b/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js @@ -297,11 +297,12 @@ export class JsonUploadAndParse extends Component { ) : ( this.setState({ indexName })} setFileRef={(fileRef) => this.setState({ fileRef })} - setParsedFile={(parsedFile) => this.setState({ parsedFile })} + setParsedFile={(parsedFile, indexName) => { + this.setState({ parsedFile, indexName }); + this.props.onFileUpload(parsedFile.parsedGeojson, indexName); + }} resetFileAndIndexSettings={this._resetFileAndIndexSettings} geojsonImporter={this.state.geojsonImporter} /> From c4f7e4100230163c63d9c1a429ceca3c3d7ae62e Mon Sep 17 00:00:00 2001 From: Nathan Reese Date: Tue, 16 Feb 2021 13:56:48 -0700 Subject: [PATCH 07/11] tslint --- .../file_upload/public/lazy_load_bundle/index.ts | 12 ++++++------ .../classes/layers/file_upload_wizard/wizard.tsx | 4 ++-- .../ml/public/application/util/dependency_cache.ts | 2 +- x-pack/plugins/ml/public/plugin.ts | 1 + 4 files changed, 10 insertions(+), 9 deletions(-) diff --git a/x-pack/plugins/file_upload/public/lazy_load_bundle/index.ts b/x-pack/plugins/file_upload/public/lazy_load_bundle/index.ts index 5ed0887d1375d..9cfc0896f5c2d 100644 --- a/x-pack/plugins/file_upload/public/lazy_load_bundle/index.ts +++ b/x-pack/plugins/file_upload/public/lazy_load_bundle/index.ts @@ -7,19 +7,19 @@ import React from 'react'; import { FeatureCollection } from 'geojson'; -import { IImporter, ImportFactoryOptions } from '../importer'; +import { IndexPattern } from 'src/plugins/data/public'; +import { IImporter, ImportFactoryOptions, ImportResults } from '../importer'; export interface FileUploadComponentProps { - appName: string; isIndexingTriggered: boolean; onFileUpload: (geojsonFile: FeatureCollection, name: string) => void; onFileRemove: () => void; onIndexReady: 
(indexReady: boolean) => void; - transformDetails: string; - onIndexingComplete: (indexResponses: { - indexDataResp: unknown; - indexPatternResp: unknown; + onIndexingComplete: (results: { + indexDataResp: ImportResults; + indexPattern: IndexPattern; }) => void; + onIndexingError: () => void; } let loadModulesPromise: Promise; diff --git a/x-pack/plugins/maps/public/classes/layers/file_upload_wizard/wizard.tsx b/x-pack/plugins/maps/public/classes/layers/file_upload_wizard/wizard.tsx index 5502eafe19ff2..138ed7a8cd0b1 100644 --- a/x-pack/plugins/maps/public/classes/layers/file_upload_wizard/wizard.tsx +++ b/x-pack/plugins/maps/public/classes/layers/file_upload_wizard/wizard.tsx @@ -19,7 +19,7 @@ import { GeoJsonFileSource } from '../../sources/geojson_file_source'; import { VectorLayer } from '../../layers/vector_layer'; import { createDefaultLayerDescriptor } from '../../sources/es_search_source'; import { RenderWizardArguments } from '../../layers/layer_wizard_registry'; -import { FileUploadComponentProps } from '../../../../../file_upload/public'; +import { FileUploadComponentProps, ImportResults } from '../../../../../file_upload/public'; export const INDEX_SETUP_STEP_ID = 'INDEX_SETUP_STEP_ID'; export const INDEXING_STEP_ID = 'INDEXING_STEP_ID'; @@ -91,7 +91,7 @@ export class ClientFileCreateSourceEditor extends Component { + _onIndexingComplete = (results: { indexDataResp: ImportResults; indexPattern: IndexPattern }) => { if (!this._isMounted) { return; } diff --git a/x-pack/plugins/ml/public/application/util/dependency_cache.ts b/x-pack/plugins/ml/public/application/util/dependency_cache.ts index 3df1e0a504649..c49113ffe156e 100644 --- a/x-pack/plugins/ml/public/application/util/dependency_cache.ts +++ b/x-pack/plugins/ml/public/application/util/dependency_cache.ts @@ -23,7 +23,7 @@ import type { IndexPatternsContract, DataPublicPluginStart } from 'src/plugins/d import type { SharePluginStart } from 'src/plugins/share/public'; import type { 
SecurityPluginSetup } from '../../../../security/public'; import type { MapsStartApi } from '../../../../maps/public'; -import type { FileUploadPluginStart } from '../../../../fileUpload/public'; +import type { FileUploadPluginStart } from '../../../../file_upload/public'; export interface DependencyCache { timefilter: DataPublicPluginSetup['query']['timefilter'] | null; diff --git a/x-pack/plugins/ml/public/plugin.ts b/x-pack/plugins/ml/public/plugin.ts index 65f26e0c26f08..a2d4114a96108 100644 --- a/x-pack/plugins/ml/public/plugin.ts +++ b/x-pack/plugins/ml/public/plugin.ts @@ -121,6 +121,7 @@ export class MlPlugin implements Plugin { lens: pluginsStart.lens, kibanaVersion, triggersActionsUi: pluginsStart.triggersActionsUi, + fileUpload: pluginsStart.fileUpload, }, params ); From 5e392d46a9e74655c4e577620042f5ad67f66ec3 Mon Sep 17 00:00:00 2001 From: Nathan Reese Date: Tue, 16 Feb 2021 15:50:38 -0700 Subject: [PATCH 08/11] i18n fixes and tslint fixes --- .../importer/geojson_importer/geojson_importer.ts | 3 +-- .../public/importer/geojson_importer/loaders.js | 10 ++++++++++ .../plugins/file_upload/public/importer/importer.ts | 9 +++------ x-pack/plugins/translations/translations/ja-JP.json | 12 ------------ x-pack/plugins/translations/translations/zh-CN.json | 12 ------------ 5 files changed, 14 insertions(+), 32 deletions(-) create mode 100644 x-pack/plugins/file_upload/public/importer/geojson_importer/loaders.js diff --git a/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts index c904bff94eba6..189084e9180da 100644 --- a/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts +++ b/x-pack/plugins/file_upload/public/importer/geojson_importer/geojson_importer.ts @@ -17,8 +17,7 @@ import { } from 'geojson'; import { i18n } from '@kbn/i18n'; // @ts-expect-error -import { JSONLoader } from '@loaders.gl/json'; -import { loadInBatches 
} from '@loaders.gl/core'; +import { JSONLoader, loadInBatches } from './loaders'; import { CreateDocsResponse } from '../types'; import { Importer } from '../importer'; import { ES_FIELD_TYPES } from '../../../../../../src/plugins/data/public'; diff --git a/x-pack/plugins/file_upload/public/importer/geojson_importer/loaders.js b/x-pack/plugins/file_upload/public/importer/geojson_importer/loaders.js new file mode 100644 index 0000000000000..eb6d69a4b57b7 --- /dev/null +++ b/x-pack/plugins/file_upload/public/importer/geojson_importer/loaders.js @@ -0,0 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +// Loading @loaders.gl from javascript file to avoid typescript compilation failures within @loaders.gl. +export { JSONLoader } from '@loaders.gl/json'; +export { loadInBatches } from '@loaders.gl/core'; diff --git a/x-pack/plugins/file_upload/public/importer/importer.ts b/x-pack/plugins/file_upload/public/importer/importer.ts index 831dd557e14ca..8bdb465bd69cf 100644 --- a/x-pack/plugins/file_upload/public/importer/importer.ts +++ b/x-pack/plugins/file_upload/public/importer/importer.ts @@ -91,12 +91,9 @@ export abstract class Importer implements IImporter { if (!id || !index) { return { success: false, - error: i18n.translate( - 'xpack.ml.fileDatavisualizer.importView.noIdOrIndexSuppliedErrorMessage', - { - defaultMessage: 'no ID or index supplied', - } - ), + error: i18n.translate('xpack.fileUpload.import.noIdOrIndexSuppliedErrorMessage', { + defaultMessage: 'no ID or index supplied', + }), }; } diff --git a/x-pack/plugins/translations/translations/ja-JP.json b/x-pack/plugins/translations/translations/ja-JP.json index 697cc18ad41d1..a57cf93ca1894 100644 --- a/x-pack/plugins/translations/translations/ja-JP.json +++ 
b/x-pack/plugins/translations/translations/ja-JP.json @@ -7470,19 +7470,10 @@ "xpack.features.savedObjectsManagementFeatureName": "保存されたオブジェクトの管理", "xpack.features.visualizeFeatureName": "可視化", "xpack.fileUpload.enterIndexName": "インデックス名を入力", - "xpack.fileUpload.fileParser.featuresOmitted": "ジオメトリのない一部の機能は省略されました", "xpack.fileUpload.fileParser.noFeaturesDetected": "エラー、機能が検出されませんでした", "xpack.fileUpload.fileParser.noFileProvided": "エラー、ファイルが提供されていません", - "xpack.fileUpload.fileParser.transformDetailsNotDefined": "{transformDetails}のインデックスオプションが定義されていません", "xpack.fileUpload.httpService.fetchError": "フェッチ実行エラー:{error}", "xpack.fileUpload.httpService.noUrl": "URLが指定されていません", - "xpack.fileUpload.indexingService.errorCreatingIndex": "インデックスの作成中にエラーが発生しました", - "xpack.fileUpload.indexingService.noFileImported": "ファイルはインポートされていません。", - "xpack.fileUpload.indexingService.noHandlingForTransform": "変換の処理が定義されていません。{transform}", - "xpack.fileUpload.indexingService.noIndexingDetailsForDatatype": "データ型のインデックス詳細が定義されていません。{dataType}", - "xpack.fileUpload.indexingService.noTransformDefined": "変換が定義されていません", - "xpack.fileUpload.indexingService.transformResultError": "データの変換エラー:{error}", - "xpack.fileUpload.indexingService.unknownTransformError": "変換の実行中に不明なエラーが発生しました。{transform}", "xpack.fileUpload.indexNameReqField": "インデックス名、必須フィールド", "xpack.fileUpload.indexSettings.enterIndexNameLabel": "インデックス名", "xpack.fileUpload.indexSettings.enterIndexTypeLabel": "インデックスタイプ", @@ -7507,7 +7498,6 @@ "xpack.fileUpload.jsonIndexFilePicker.filePicker": "ファイルをアップロード", "xpack.fileUpload.jsonIndexFilePicker.filePickerLabel": "アップロードするファイルを選択", "xpack.fileUpload.jsonIndexFilePicker.fileProcessingError": "ファイル処理エラー: {errorMessage}", - "xpack.fileUpload.jsonIndexFilePicker.fileSizeError": "ファイルサイズエラー:{errorMessage}", "xpack.fileUpload.jsonIndexFilePicker.formatsAccepted": "許可されているフォーマット:{acceptedFileTypeStringMessage}", "xpack.fileUpload.jsonIndexFilePicker.maxSize": "最大サイズ:{maxFileSize}", 
"xpack.fileUpload.jsonIndexFilePicker.noFileNameError": "ファイル名が指定されていません", @@ -7520,7 +7510,6 @@ "xpack.fileUpload.jsonUploadAndParse.indexPatternComplete": "インデックスパターンの完了", "xpack.fileUpload.jsonUploadAndParse.indexPatternError": "インデックスパターンエラー", "xpack.fileUpload.jsonUploadAndParse.writingToIndex": "インデックスに書き込み中", - "xpack.fileUpload.noIndexSuppliedErrorMessage": "インデックスが指定されていません。", "xpack.fleet.agentBulkActions.agentsSelected": "{count, plural, other {#個のエージェント}}が選択されました", "xpack.fleet.agentBulkActions.clearSelection": "選択した項目をクリア", "xpack.fleet.agentBulkActions.reassignPolicy": "新しいポリシーに割り当てる", @@ -13167,7 +13156,6 @@ "xpack.ml.fileDatavisualizer.importView.indexNameContainsIllegalCharactersErrorMessage": "インデックス名に許可されていない文字が含まれています", "xpack.ml.fileDatavisualizer.importView.indexPatternDoesNotMatchIndexNameErrorMessage": "インデックスパターンがインデックス名と一致しません", "xpack.ml.fileDatavisualizer.importView.indexPatternNameAlreadyExistsErrorMessage": "インデックスパターン名が既に存在します", - "xpack.ml.fileDatavisualizer.importView.noIdOrIndexSuppliedErrorMessage": "ID またはインデックスが提供されていません", "xpack.ml.fileDatavisualizer.importView.parseMappingsError": "マッピングのパース中にエラーが発生しました:", "xpack.ml.fileDatavisualizer.importView.parsePipelineError": "投入パイプラインのパース中にエラーが発生しました:", "xpack.ml.fileDatavisualizer.importView.parseSettingsError": "設定のパース中にエラーが発生しました:", diff --git a/x-pack/plugins/translations/translations/zh-CN.json b/x-pack/plugins/translations/translations/zh-CN.json index 7c62261317334..d73524ed9b683 100644 --- a/x-pack/plugins/translations/translations/zh-CN.json +++ b/x-pack/plugins/translations/translations/zh-CN.json @@ -7489,19 +7489,10 @@ "xpack.features.savedObjectsManagementFeatureName": "已保存对象管理", "xpack.features.visualizeFeatureName": "Visualize", "xpack.fileUpload.enterIndexName": "输入索引名称", - "xpack.fileUpload.fileParser.featuresOmitted": "不具有几何形状的一些特征已省略", "xpack.fileUpload.fileParser.noFeaturesDetected": "错误,未检测到特征", "xpack.fileUpload.fileParser.noFileProvided": "错误,未提供任何文件", - 
"xpack.fileUpload.fileParser.transformDetailsNotDefined": "未定义 {transformDetails} 的索引选项", "xpack.fileUpload.httpService.fetchError": "执行提取时出错:{error}", "xpack.fileUpload.httpService.noUrl": "未提供 URL", - "xpack.fileUpload.indexingService.errorCreatingIndex": "创建索引时出错", - "xpack.fileUpload.indexingService.noFileImported": "未导入任何文件。", - "xpack.fileUpload.indexingService.noHandlingForTransform": "没有为转换 {transform} 定义任何处理方式", - "xpack.fileUpload.indexingService.noIndexingDetailsForDatatype": "没有为数据类型 {dataType} 定义任何索引详情", - "xpack.fileUpload.indexingService.noTransformDefined": "未定义任何转换", - "xpack.fileUpload.indexingService.transformResultError": "转换数据时出错:{error}", - "xpack.fileUpload.indexingService.unknownTransformError": "执行转换 {transform} 时出现未知错误", "xpack.fileUpload.indexNameReqField": "索引名称,必填字段", "xpack.fileUpload.indexSettings.enterIndexNameLabel": "索引名称", "xpack.fileUpload.indexSettings.enterIndexTypeLabel": "索引类型", @@ -7526,7 +7517,6 @@ "xpack.fileUpload.jsonIndexFilePicker.filePicker": "上传文件", "xpack.fileUpload.jsonIndexFilePicker.filePickerLabel": "选择文件进行上传", "xpack.fileUpload.jsonIndexFilePicker.fileProcessingError": "文件处理错误:{errorMessage}", - "xpack.fileUpload.jsonIndexFilePicker.fileSizeError": "文件大小错误:{errorMessage}", "xpack.fileUpload.jsonIndexFilePicker.formatsAccepted": "接受的格式:{acceptedFileTypeStringMessage}", "xpack.fileUpload.jsonIndexFilePicker.maxSize": "最大大小:{maxFileSize}", "xpack.fileUpload.jsonIndexFilePicker.noFileNameError": "未提供任何文件名称", @@ -7539,7 +7529,6 @@ "xpack.fileUpload.jsonUploadAndParse.indexPatternComplete": "索引模式完成", "xpack.fileUpload.jsonUploadAndParse.indexPatternError": "索引模式错误", "xpack.fileUpload.jsonUploadAndParse.writingToIndex": "正在写入索引", - "xpack.fileUpload.noIndexSuppliedErrorMessage": "未提供任何索引。", "xpack.fleet.agentBulkActions.agentsSelected": "已选择 {count, plural, other {# 个代理}}", "xpack.fleet.agentBulkActions.clearSelection": "清除所选内容", "xpack.fleet.agentBulkActions.reassignPolicy": "分配到新策略", @@ -13198,7 +13187,6 @@ 
"xpack.ml.fileDatavisualizer.importView.indexNameContainsIllegalCharactersErrorMessage": "索引名称包含非法字符", "xpack.ml.fileDatavisualizer.importView.indexPatternDoesNotMatchIndexNameErrorMessage": "索引模式与索引名称不匹配", "xpack.ml.fileDatavisualizer.importView.indexPatternNameAlreadyExistsErrorMessage": "索引模式名称已存在", - "xpack.ml.fileDatavisualizer.importView.noIdOrIndexSuppliedErrorMessage": "未提供任何 ID 或索引", "xpack.ml.fileDatavisualizer.importView.parseMappingsError": "解析映射时出错:", "xpack.ml.fileDatavisualizer.importView.parsePipelineError": "解析采集管道时出错:", "xpack.ml.fileDatavisualizer.importView.parseSettingsError": "解析设置时出错:", From bb9d62d7cd92793e227075f0b7724ff760504a02 Mon Sep 17 00:00:00 2001 From: Nathan Reese Date: Wed, 17 Feb 2021 08:24:17 -0700 Subject: [PATCH 09/11] update functional test to account for change in layer name --- .../maps/import_geojson/add_layer_import_panel.js | 15 +++++++-------- x-pack/test/functional/page_objects/gis_page.ts | 5 +++++ 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/x-pack/test/functional/apps/maps/import_geojson/add_layer_import_panel.js b/x-pack/test/functional/apps/maps/import_geojson/add_layer_import_panel.js index 46b87b1c4195c..b40f9a4bc233e 100644 --- a/x-pack/test/functional/apps/maps/import_geojson/add_layer_import_panel.js +++ b/x-pack/test/functional/apps/maps/import_geojson/add_layer_import_panel.js @@ -11,7 +11,6 @@ import path from 'path'; export default function ({ getPageObjects, getService }) { const PageObjects = getPageObjects(['maps', 'common']); - const IMPORT_FILE_PREVIEW_NAME = 'Import File'; const FILE_LOAD_DIR = 'test_upload_files'; const DEFAULT_LOAD_FILE_NAME = 'point.json'; const security = getService('security'); @@ -39,8 +38,8 @@ export default function ({ getPageObjects, getService }) { }); it('should add GeoJSON file to map', async () => { - const layerLoadedInToc = await PageObjects.maps.doesLayerExist(IMPORT_FILE_PREVIEW_NAME); - expect(layerLoadedInToc).to.be(true); + const numberOfLayers 
= await PageObjects.maps.getNumberOfLayers(); + expect(numberOfLayers).to.be(2); const filePickerLoadedFile = await PageObjects.maps.hasFilePickerLoadedFile( DEFAULT_LOAD_FILE_NAME @@ -51,9 +50,9 @@ export default function ({ getPageObjects, getService }) { it('should remove layer on cancel', async () => { await PageObjects.maps.cancelLayerAdd(); - await PageObjects.maps.waitForLayerDeleted(IMPORT_FILE_PREVIEW_NAME); - const layerLoadedInToc = await PageObjects.maps.doesLayerExist(IMPORT_FILE_PREVIEW_NAME); - expect(layerLoadedInToc).to.be(false); + await PageObjects.maps.waitForLayerDeleted('point'); + const numberOfLayers = await PageObjects.maps.getNumberOfLayers(); + expect(numberOfLayers).to.be(1); }); it('should replace layer on input change', async () => { @@ -83,8 +82,8 @@ export default function ({ getPageObjects, getService }) { ); expect(filePickerLoadedFile).to.be(true); // Check that no file is loaded in layer preview - const layerLoadedInToc = await PageObjects.maps.doesLayerExist(IMPORT_FILE_PREVIEW_NAME); - expect(layerLoadedInToc).to.be(false); + const numberOfLayers = await PageObjects.maps.getNumberOfLayers(); + expect(numberOfLayers).to.be(1); }); it('should prevent import button from activating unless valid index name provided', async () => { diff --git a/x-pack/test/functional/page_objects/gis_page.ts b/x-pack/test/functional/page_objects/gis_page.ts index b0649db248621..097176b72579c 100644 --- a/x-pack/test/functional/page_objects/gis_page.ts +++ b/x-pack/test/functional/page_objects/gis_page.ts @@ -335,6 +335,11 @@ export function GisPageProvider({ getService, getPageObjects }: FtrProviderConte } } + async getNumberOfLayers() { + const tocEntries = await find.allByCssSelector('.mapTocEntry'); + return tocEntries.length; + } + async doesLayerExist(layerName: string) { return await testSubjects.exists( `layerTocActionsPanelToggleButton${escapeLayerName(layerName)}` From 93b7757805c5cf3e13bad09217810d5e46a172cf Mon Sep 17 00:00:00 2001 From: 
Nathan Reese Date: Fri, 19 Feb 2021 06:56:14 -0700 Subject: [PATCH 10/11] review feedback --- .../public/components/json_upload_and_parse.js | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js b/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js index c735122709b03..d4f6858eb5995 100644 --- a/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js +++ b/x-pack/plugins/file_upload/public/components/json_upload_and_parse.js @@ -44,6 +44,8 @@ const INDEXING_STAGE = { }; export class JsonUploadAndParse extends Component { + geojsonImporter = new GeoJsonImporter(); + state = { // File state fileRef: null, @@ -64,7 +66,6 @@ export class JsonUploadAndParse extends Component { currentIndexingStage: INDEXING_STAGE.INDEXING_STARTED, indexDataResp: '', indexPatternResp: '', - geojsonImporter: new GeoJsonImporter(), }; componentDidMount() { @@ -166,7 +167,7 @@ export class JsonUploadAndParse extends Component { currentIndexingStage: INDEXING_STAGE.WRITING_TO_INDEX, }); - this.state.geojsonImporter.setDocs(parsedFile.parsedGeojson, selectedIndexType); + this.geojsonImporter.setDocs(parsedFile.parsedGeojson, selectedIndexType); // initialize import const settings = { @@ -180,7 +181,7 @@ export class JsonUploadAndParse extends Component { }, }; const ingestPipeline = {}; - const initializeImportResp = await this.state.geojsonImporter.initializeImport( + const initializeImportResp = await this.geojsonImporter.initializeImport( indexName, settings, mappings, @@ -200,7 +201,7 @@ export class JsonUploadAndParse extends Component { } // import file - const importResp = await this.state.geojsonImporter.import( + const importResp = await this.geojsonImporter.import( initializeImportResp.id, indexName, initializeImportResp.pipelineId, @@ -304,7 +305,7 @@ export class JsonUploadAndParse extends Component { this.props.onFileUpload(parsedFile.parsedGeojson, 
indexName); }} resetFileAndIndexSettings={this._resetFileAndIndexSettings} - geojsonImporter={this.state.geojsonImporter} + geojsonImporter={this.geojsonImporter} /> Date: Fri, 19 Feb 2021 10:04:48 -0700 Subject: [PATCH 11/11] dependency_cache review feedback --- x-pack/plugins/ml/public/application/app.tsx | 1 + x-pack/plugins/ml/public/application/util/dependency_cache.ts | 4 +--- x-pack/plugins/ml/public/plugin.ts | 1 - 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/x-pack/plugins/ml/public/application/app.tsx b/x-pack/plugins/ml/public/application/app.tsx index 3df67bc16ab05..107bbda23ecb9 100644 --- a/x-pack/plugins/ml/public/application/app.tsx +++ b/x-pack/plugins/ml/public/application/app.tsx @@ -124,6 +124,7 @@ export const renderApp = ( security: deps.security, urlGenerators: deps.share.urlGenerators, maps: deps.maps, + fileUpload: deps.fileUpload, }); appMountParams.onAppLeave((actions) => actions.default()); diff --git a/x-pack/plugins/ml/public/application/util/dependency_cache.ts b/x-pack/plugins/ml/public/application/util/dependency_cache.ts index c49113ffe156e..215f087020d6f 100644 --- a/x-pack/plugins/ml/public/application/util/dependency_cache.ts +++ b/x-pack/plugins/ml/public/application/util/dependency_cache.ts @@ -87,9 +87,7 @@ export function setDependencyCache(deps: Partial) { cache.security = deps.security || null; cache.i18n = deps.i18n || null; cache.urlGenerators = deps.urlGenerators || null; - if (deps.fileUpload) { - cache.fileUpload = deps.fileUpload; - } + cache.fileUpload = deps.fileUpload || null; } export function getTimefilter() { diff --git a/x-pack/plugins/ml/public/plugin.ts b/x-pack/plugins/ml/public/plugin.ts index 70079ec3ffa78..f6d5da92f5e71 100644 --- a/x-pack/plugins/ml/public/plugin.ts +++ b/x-pack/plugins/ml/public/plugin.ts @@ -196,7 +196,6 @@ export class MlPlugin implements Plugin { basePath: core.http.basePath, http: core.http, i18n: core.i18n, - fileUpload: deps.fileUpload, }); return {