feat(Cross-platform Diagnostics): provide Cross-platform Diagnostics … #113

Merged: 5 commits, Feb 25, 2019
1 change: 1 addition & 0 deletions .gitignore
@@ -24,6 +24,7 @@ coverage.lcov
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
lib
lib-web

# Dependency directories
node_modules
3 changes: 2 additions & 1 deletion .npmignore
@@ -8,4 +8,5 @@ test/
.gitmodules
node_modules/
webpack.config.js
test/fixtures
test/static-fixtures
2 changes: 0 additions & 2 deletions dist/babel-polyfill.js

This file was deleted.

1 change: 0 additions & 1 deletion dist/babel-polyfill.js.map

This file was deleted.

17 changes: 17 additions & 0 deletions dist/vizabi-ddfcsv-reader-polyfill.js

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions dist/vizabi-ddfcsv-reader-polyfill.js.map

Large diffs are not rendered by default.

10 changes: 2 additions & 8 deletions dist/vizabi-ddfcsv-reader.js

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion dist/vizabi-ddfcsv-reader.js.map

Large diffs are not rendered by default.

25 changes: 13 additions & 12 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "vizabi-ddfcsv-reader",
"version": "2.5.0",
"version": "3.0.10",
"description": "Vizabi DDFcsv reader",
"author": "Vyacheslav Chub<[email protected]>",
"license": "GPL-3.0",
@@ -11,21 +11,24 @@
"tslint:src": "./node_modules/.bin/tslint -c ./tslint.json 'src/**/*.ts'",
"tslint:tests": "./node_modules/.bin/tslint -c ./test/tslint.json 'test/**/*.ts'",
"tslint": "npm run tslint:src && npm run tslint:tests",
"tsc": "tsc --project tsconfig.json",
"tsc-node": "tsc --project tsconfig.json && TARGET_DIR=lib npm run set-own-version",
"tsc-web": "tsc --project tsconfig-web.json && TARGET_DIR=lib-web npm run set-own-version",
"submodules:init": "git submodule update --remote --init -f",
"coverage:clean": "rm -rf coverage && rm -rf .nyc_output && rm -rf coverage.lcov",
"remove-unneeded": "rm -rf test/fixtures/systema_globalis/master-HEAD/etl",
"pree2e": "npm run submodules:init && npm run remove-unneeded && npm run coverage:clean && npm run tslint && npm run tsc",
"pree2e": "npm run submodules:init && npm run remove-unneeded && npm run coverage:clean && npm run tslint && npm run tsc-node",
"e2e": "nyc mocha",
"pree2e:doc": "npm run pree2e",
"e2e:doc": "mocha --require ts-node/register -R markdown > API.SPECIFICATION.md --recursive test/*.spec.ts",
"test": "npm run e2e",
"test-travis": "npm run e2e && nyc report --reporter=text-lcov > coverage.lcov && codecov",
"test-diag": "mocha --require ts-node/register --recursive test/diagnostics.spec.ts",
"set-own-version": "cd scripts && node ./set-own-version.js",
"changelog": "conventional-changelog -i CHANGELOG.md -s -p angular",
"github-release": "conventional-github-releaser -p angular",
"build": "npm run build:web && rimraf lib && npm run build:node",
"build:node": "npm run tsc",
"build:web": "npm run tsc && npm run webpack",
"build:node": "npm run tsc-node",
"build:web": "npm run tsc-web && npm run webpack",
"preversion": "npm run build",
"version": "npm run changelog && git add CHANGELOG.md",
"postversion": "git push origin master && git push --tags && npm run github-release",
@@ -52,13 +55,10 @@
"instrument": true
},
"dependencies": {
"@babel/code-frame": "7.0.0",
"@google-cloud/storage": "^2.3.1",
"@types/core-js": "2.5.0",
"@types/node": "10.5.7",
"aws-sdk": "2.308.0",
"ddf-query-validator": "1.2.1",
"cross-project-diagnostics": "0.1.5",
"ddf-query-validator": "1.3.4",
"fetch-polyfill": "0.8.2",
"https": "1.0.0",
"lodash.clonedeep": "4.5.0",
"lodash.compact": "3.0.1",
"lodash.concat": "4.5.0",
@@ -85,6 +85,7 @@
"papaparse": "4.3.6"
},
"devDependencies": {
"@babel/code-frame": "7.0.0",
"@types/chai": "4.1.4",
"@types/core-js": "2.5.0",
"@types/lodash": "4.14.116",
@@ -116,12 +117,12 @@
"source-map-support": "0.5.6",
"ts-loader": "4.3.0",
"ts-node": "7.0.0",
"tslib": "1.9.3",
"tslint": "5.11.0",
"tslint-no-unused-expression-chai": "0.1.3",
"typescript": "3.2.2",
"uglifyjs-webpack-plugin": "1.2.5",
"webpack": "4.8.1",
"webpack-bundle-analyzer": "^3.0.4",
"webpack-cli": "2.1.3",
"webpack-dev-server": "3.1.4",
"webpack-merge": "4.1.2",
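
For context, the reworked build produces two compilation targets: a Node build in lib and a browser build in lib-web. A sketch of what `npm run build` now executes end to end, using only the script names visible in the diff above (tsconfig-web.json itself is not part of this PR):

const { execSync } = require('child_process');

const run = (cmd, env = {}) =>
  execSync(cmd, { stdio: 'inherit', env: { ...process.env, ...env } });

// build:web: compile the browser target, stamp name/version into lib-web, then
// bundle (the "webpack" script exists in package.json but is not shown in this diff)
run('tsc --project tsconfig-web.json');
run('npm run set-own-version', { TARGET_DIR: 'lib-web' });
run('npm run webpack');

// rimraf lib: clear the Node output before rebuilding it
run('npx rimraf lib');

// build:node: compile the Node target into lib and stamp it
run('tsc --project tsconfig.json');
run('npm run set-own-version', { TARGET_DIR: 'lib' });
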
21 changes: 21 additions & 0 deletions scripts/set-own-version.js
@@ -0,0 +1,21 @@
const path = require('path');
const fs = require('fs');

try {
if (!process.env.npm_package_version || !process.env.npm_package_name) {
console.log(`process.env.npm_package_version or process.env.npm_package_name is not defined!`);
process.exit(1);
}

const fileName = path.resolve('..', process.env.TARGET_DIR, 'src', 'ddfcsv-reader.js');
const content = fs.readFileSync(fileName, 'utf-8');
const oldVersionContent = /const myVersion = '';/;
const newVersionContent = `const myVersion = '${process.env.npm_package_version}';`;
const oldNameContent = /const myName = '';/;
const newNameContent = `const myName = '${process.env.npm_package_name}';`;
const newContent = content.replace(oldVersionContent, newVersionContent).replace(oldNameContent, newNameContent);
fs.writeFileSync(fileName, newContent);
} catch (e) {
console.log(e);
process.exit(1);
}
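
The regular expressions above imply that the compiled entry point ships with empty placeholder constants. A minimal before/after sketch of <TARGET_DIR>/src/ddfcsv-reader.js (the surrounding module is not shown in this PR):

// As emitted by tsc, before stamping:
//   const myVersion = '';
//   const myName = '';
// After `TARGET_DIR=lib npm run set-own-version`, with values read from npm's environment:
const myVersion = '3.0.10';            // process.env.npm_package_version
const myName = 'vizabi-ddfcsv-reader'; // process.env.npm_package_name
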
99 changes: 73 additions & 26 deletions src/ddf-csv.ts
@@ -38,32 +38,42 @@ export function ddfCsvReader (logger?: any) {
const resourcesLookup = new Map();

let optimalFilesSet = [];
let datapackage;

function loadDataPackage (baseOptions: IBaseReaderOptions): Promise<IDatapackage> {
const datapackagePath = getFilePath(baseOptions.basePath);
const { debug, error } = baseOptions.diagnostic.prepareDiagnosticFor('loadDataPackage');

return new Promise((resolve, reject) => {
if (datapackage) {
return resolve(datapackage);
}

baseOptions.fileReader.readText(datapackagePath, (err, data) => {
if (err) {
error('file reading', err);
return reject(new DdfCsvError(FILE_READING_ERROR, err, datapackagePath));
}

let datapackage;

try {
datapackage = JSON.parse(data);
optimalFilesSet = [];
buildResourcesLookup(datapackage);
buildKeyValueLookup(datapackage);
} catch (parseErr) {
error('json file parsing', parseErr);
return reject(new DdfCsvError(JSON_PARSING_ERROR, parseErr.message, datapackagePath));
}

debug('datapackage content is ready');

resolve(datapackage);
});
});
}

async function loadConcepts (queryParam, options): Promise<object> {
async function loadConcepts (queryParam, options: IBaseReaderOptions): Promise<object> {
const { error } = options.diagnostic.prepareDiagnosticFor('loadConcepts');
// start off with internal concepts
setConceptsLookup(internalConcepts, options);
// query concepts
@@ -81,8 +91,9 @@ export function ddfCsvReader (logger?: any) {
// with conceptsLookup built, we can parse other concept properties
// according to their concept_type
result = await reparseConcepts(options);
} catch (error) {
throw error;
} catch (err) {
error('concepts processing', err);
throw err;
}
return result;
}
@@ -152,8 +163,9 @@ export function ddfCsvReader (logger?: any) {
concepts.forEach(row => options.conceptsLookup.set(row.concept, row));
}

async function query (queryParam, baseOptions) {
async function query (queryParam, baseOptions: IBaseReaderOptions) {
// console.log(JSON.stringify(queryParam, null, '\t'))
const { warning, error } = baseOptions.diagnostic.prepareDiagnosticFor('query');
let data;

try {
@@ -163,25 +175,31 @@
await validateQueryDefinitions(queryParam, baseOptions);

if (isSchemaQuery(queryParam)) {
data = await querySchema(queryParam, { datapackage: baseOptions.datapackage });
data = await querySchema(queryParam, baseOptions);
} else {
const appropriatePlugin = getAppropriatePlugin(queryParam, baseOptions);

if (appropriatePlugin) {
optimalFilesSet = [];
const files = await appropriatePlugin.getRecommendedFilesSet();
optimalFilesSet = files;
queryParam.optimalFilesSet = [].concat(files, queryParam.optimalFilesSet);

warning('get custom optimal files list by a plugin', optimalFilesSet);
}

data = await queryData(queryParam, baseOptions);
}
} catch (error) {
throw error;
} catch (err) {
error('general query error', err);
throw err;
}

return data;
}

function queryData (queryParam, options) {
function queryData (queryParam, options: IBaseReaderOptions) {
const { debug } = options.diagnostic.prepareDiagnosticFor('queryData');
const {
select: { key = [], value = [] },
from = '',
@@ -192,32 +210,39 @@
} = queryParam;
const select = { key, value };

debug('start all data loading', queryParam);

const projection = new Set(select.key.concat(select.value));
const filterFields = getFilterFields(where).filter(field => !projection.has(field));
// load all relevant resources
const resourcesPromise = loadResources(select.key, [ ...select.value, ...filterFields ], language, options);
const resourcesPromise = loadResources(select.key, [ ...select.value, ...filterFields ], language, options, queryParam);
// list of entities selected from a join clause, later inserted into the where clause
const joinsPromise = getJoinFilters(join, queryParam, options);
// filter which ensures result only includes queried entity sets
const entitySetFilterPromise = getEntitySetFilter(select.key, queryParam, options);

return Promise.all([ resourcesPromise, entitySetFilterPromise, joinsPromise ])
.then(([ resourceResponses, entitySetFilter, joinFilters ]) => {
debug('finish all data loading', queryParam);
// create filter from where, join filters and entity set filters
const whereResolved = processWhere(where, joinFilters);
const filter = mergeFilters(entitySetFilter, whereResolved);

debug('dataTables processing', queryParam);
const dataTables = resourceResponses
// rename key-columns and remove irrelevant value-columns
.map(response => processResourceResponse(response, select, filterFields, options));

debug('queryResult processing', queryParam);
// join (reduce) data to one data table
const queryResult = joinData(select.key, 'overwrite', ...dataTables)
.filter(row => applyFilterRow(row, filter)) // apply filters (entity sets and where (including join))
.map(row => fillMissingValues(row, projection)) // fill any missing values with null values
.map(row => projectRow(row, projection)); // remove fields used only for filtering

debug('result ordering', queryParam);
orderData(queryResult, order_by);
debug('final result is ready', queryParam);

return queryResult;
});
@@ -307,23 +332,27 @@ export function ddfCsvReader (logger?: any) {
}, { $and: [] });
}

function querySchema (queryParam, { datapackage }) {
function querySchema (queryParam, baseOptions: IBaseReaderOptions) {
const { debug, error } = baseOptions.diagnostic.prepareDiagnosticFor('query');
const getSchemaFromCollection = collectionPar => {
return datapackage.ddfSchema[ collectionPar ].map(
debug(`get schema for collection ${collectionPar}`);
return baseOptions.datapackage.ddfSchema[ collectionPar ].map(
({ primaryKey, value }) => ({ key: primaryKey, value })
);
};

const collection = queryParam.from.split('.')[ 0 ];

if (datapackage.ddfSchema[ collection ]) {
if (baseOptions.datapackage.ddfSchema[ collection ]) {
return getSchemaFromCollection(collection);
} else if (collection === '*') {
return Object.keys(datapackage.ddfSchema)
return Object.keys(baseOptions.datapackage.ddfSchema)
.map(getSchemaFromCollection)
.reduce((a, b) => a.concat(b));
} else {
throwError(new DdfCsvError(DDF_ERROR, `No valid collection (${collection}) for schema query`));
const message = `No valid collection (${collection}) for schema query`;
error(message);
throwError(new DdfCsvError(DDF_ERROR, message));
}
}

@@ -546,10 +575,12 @@ export function ddfCsvReader (logger?: any) {
.map(row => renameHeaderRow(row, renameMap)); // rename header rows (must happen **after** projection)
}

function loadResources (key, value, language, options) {

function loadResources (key, value, language, options, queryParam) {
const { debug } = options.diagnostic.prepareDiagnosticFor('loadResource');
const resources = getResources(key, value);

debug('resources list by query', {queryParam, resources: [ ...resources ]});

return Promise.all([ ...resources ].map(
resource => loadResource(resource, language, options)
));
@@ -558,7 +589,7 @@
function projectRow (row, projectionSet) {
const result = {};

for (const concept in row) {
for (const concept of Object.keys(row)) {
if (projectionSet.has(concept)) {
result[ concept ] = row[ concept ];
}
@@ -570,7 +601,7 @@
function renameHeaderRow (row, renameMap) {
const result = {};

for (const concept in row) {
for (const concept of Object.keys(row)) {
result[ renameMap.get(concept) || concept ] = row[ concept ];
}

@@ -653,6 +684,7 @@
}

function loadResource (resource, language, options) {
const { warning } = options.diagnostic.prepareDiagnosticFor('loadResource');
const filePromises = [];

if (typeof resource.data === 'undefined') {
@@ -670,7 +702,11 @@

// error loading translation file is expected when specific file is not translated
// more correct would be to only resolve file-not-found errors but current solution is sufficient
resource.translations[ language ] = loadFile(translationPath, options).catch(err => Promise.resolve({}));
resource.translations[ language ] = loadFile(translationPath, options)
.catch(err => {
warning(`translation file ${translationPath}`, err);
return Promise.resolve({});
});
}

filePromises.push(resource.translations[ language ]);
@@ -687,19 +723,24 @@

}

function getLanguages ({ datapackage }): string[] {
if (!datapackage.translations) {
function getLanguages (options: {datapackage}): string[] {
if (!options.datapackage.translations) {
return [];
}

return datapackage.translations.map(lang => lang.id);
return options.datapackage.translations.map(lang => lang.id);
}

function loadFile (filePath, options) {
const { debug, error } = options.diagnostic.prepareDiagnosticFor('loadFile');
const fullFilePath = getFilePath(options.basePath, filePath);

debug(`start reading "${filePath}"`);

return new Promise((resolve, reject) => {
options.fileReader.readText(fullFilePath, (err, data) => {
if (err) {
error(`fail "${filePath}" reading`, err);
return reject(new DdfCsvError(FILE_READING_ERROR, err, fullFilePath));
}

@@ -719,8 +760,14 @@

return includes([ 'boolean', 'measure' ], concept.concept_type);
},
complete: result => resolve(result),
error: error => reject(new DdfCsvError(CSV_PARSING_ERROR, error, filePath))
complete: result => {
debug(`finish reading "${filePath}"`);
resolve(result);
},
error: parseErr => {
error(`fail "${filePath}" parsing`, parseErr);
reject(new DdfCsvError(CSV_PARSING_ERROR, parseErr, filePath));
}
});
});
});
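
The changes in this file all follow one pattern: each function asks the injected diagnostics object for emitters scoped to its own name and reports progress, warnings, and failures through them. A condensed sketch of that pattern (the cross-project-diagnostics API surface is inferred from its usage in this diff, not from the library's documentation):

// Assumed shapes, reconstructed from usage; only the members exercised here are declared.
interface DiagnosticEmitters {
  debug (message: string, details?: any): void;
  warning (message: string, details?: any): void;
  error (message: string, details?: any): void;
}

interface ReaderOptionsSketch {
  basePath: string;
  fileReader: { readText (path: string, cb: (err: any, data: string) => void): void };
  diagnostic: { prepareDiagnosticFor (functionName: string): DiagnosticEmitters };
}

function loadSomething (options: ReaderOptionsSketch): Promise<string> {
  // Emitters are tagged with the enclosing function's name, so every
  // diagnostic record can be traced back to its call site.
  const { debug, error } = options.diagnostic.prepareDiagnosticFor('loadSomething');

  return new Promise((resolve, reject) => {
    debug(`start reading "${options.basePath}"`);
    options.fileReader.readText(options.basePath, (err, data) => {
      if (err) {
        error('file reading', err); // report before rejecting, as loadFile and loadDataPackage do
        return reject(err);
      }
      debug('content is ready');
      resolve(data);
    });
  });
}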