From 10f5c7b13675cd71da0e5c311846ad4048212f3b Mon Sep 17 00:00:00 2001 From: Fran McDade Date: Wed, 29 Sep 2021 13:11:49 +1000 Subject: [PATCH 1/2] Bump Gatsby to 3.14. #1130. --- package-lock.json | 1678 +++++++++++++++++++++------------------------ package.json | 32 +- 2 files changed, 806 insertions(+), 904 deletions(-) diff --git a/package-lock.json b/package-lock.json index 5338d62ae..4f67e846a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -75,9 +75,9 @@ } }, "@babel/eslint-parser": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/eslint-parser/-/eslint-parser-7.15.4.tgz", - "integrity": "sha512-hPMIAmGNbmQzXJIo2P43Zj9UhRmGev5f9nqdBFOWNGDGh6XKmjby79woBvg6y0Jur6yRfQBneDbUQ8ZVc1krFw==", + "version": "7.15.7", + "resolved": "https://registry.npmjs.org/@babel/eslint-parser/-/eslint-parser-7.15.7.tgz", + "integrity": "sha512-yJkHyomClm6A2Xzb8pdAo4HzYMSXFn1O5zrCYvbFP0yQFvHueLedV8WiEno8yJOKStjUXzBZzJFeWQ7b3YMsqQ==", "requires": { "eslint-scope": "^5.1.1", "eslint-visitor-keys": "^2.1.0", @@ -470,15 +470,15 @@ } }, "@babel/plugin-proposal-object-rest-spread": { - "version": "7.14.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.14.7.tgz", - "integrity": "sha512-082hsZz+sVabfmDWo1Oct1u1AgbKbUAyVgmX4otIc7bdsRgHBXwTwb3DpDmD4Eyyx6DNiuz5UAATT655k+kL5g==", + "version": "7.15.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.15.6.tgz", + "integrity": "sha512-qtOHo7A1Vt+O23qEAX+GdBpqaIuD3i9VRrWgCJeq7WO6H2d14EK3q11urj5Te2MAeK97nMiIdRpwd/ST4JFbNg==", "requires": { - "@babel/compat-data": "^7.14.7", - "@babel/helper-compilation-targets": "^7.14.5", + "@babel/compat-data": "^7.15.0", + "@babel/helper-compilation-targets": "^7.15.4", "@babel/helper-plugin-utils": "^7.14.5", "@babel/plugin-syntax-object-rest-spread": "^7.8.3", - "@babel/plugin-transform-parameters": "^7.14.5" + 
"@babel/plugin-transform-parameters": "^7.15.4" } }, "@babel/plugin-proposal-optional-catch-binding": { @@ -1006,9 +1006,9 @@ } }, "@babel/preset-env": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.15.4.tgz", - "integrity": "sha512-4f2nLw+q6ht8gl3sHCmNhmA5W6b1ItLzbH3UrKuJxACHr2eCpk96jwjrAfCAaXaaVwTQGnyUYHY2EWXJGt7TUQ==", + "version": "7.15.6", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.15.6.tgz", + "integrity": "sha512-L+6jcGn7EWu7zqaO2uoTDjjMBW+88FXzV8KvrBl2z6MtRNxlsmUNRlZPaNNPUTgqhyC5DHNFk/2Jmra+ublZWw==", "requires": { "@babel/compat-data": "^7.15.0", "@babel/helper-compilation-targets": "^7.15.4", @@ -1024,7 +1024,7 @@ "@babel/plugin-proposal-logical-assignment-operators": "^7.14.5", "@babel/plugin-proposal-nullish-coalescing-operator": "^7.14.5", "@babel/plugin-proposal-numeric-separator": "^7.14.5", - "@babel/plugin-proposal-object-rest-spread": "^7.14.7", + "@babel/plugin-proposal-object-rest-spread": "^7.15.6", "@babel/plugin-proposal-optional-catch-binding": "^7.14.5", "@babel/plugin-proposal-optional-chaining": "^7.14.5", "@babel/plugin-proposal-private-methods": "^7.14.5", @@ -1077,7 +1077,7 @@ "@babel/plugin-transform-unicode-escapes": "^7.14.5", "@babel/plugin-transform-unicode-regex": "^7.14.5", "@babel/preset-modules": "^0.1.4", - "@babel/types": "^7.15.4", + "@babel/types": "^7.15.6", "babel-plugin-polyfill-corejs2": "^0.2.2", "babel-plugin-polyfill-corejs3": "^0.2.2", "babel-plugin-polyfill-regenerator": "^0.2.2", @@ -1154,9 +1154,9 @@ } }, "@babel/standalone": { - "version": "7.15.5", - "resolved": "https://registry.npmjs.org/@babel/standalone/-/standalone-7.15.5.tgz", - "integrity": "sha512-rho2fzDGLrdYVbl0S71I8z6AREWnVvADzv7Gb4TLKhqpE6cJAvno0ALMuF253+wqhN8futx4ELWQpBYMxi4jmA==" + "version": "7.15.7", + "resolved": "https://registry.npmjs.org/@babel/standalone/-/standalone-7.15.7.tgz", + "integrity": 
"sha512-1dPLi+eQEJE0g1GnUM0Ik2GcS5SMXivoxt6meQxQxGWEd/DCdSBRJClUVlQ25Vbqe49g1HG5Ej0ULhmsqtSMmg==" }, "@babel/template": { "version": "7.15.4", @@ -1195,9 +1195,9 @@ } }, "@babel/types": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.15.4.tgz", - "integrity": "sha512-0f1HJFuGmmbrKTCZtbm3cU+b/AqdEYk5toj5iQur58xkVMlS0JWaKxTBSmCXd47uiN7vbcozAupm6Mvs80GNhw==", + "version": "7.15.6", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.15.6.tgz", + "integrity": "sha512-BPU+7QhqNjmWyDO0/vitH/CuhpV8ZmK1wpKva8nuyNF5MJfuRNWMc+hc14+u9xT93kvykMdncrJT19h74uB1Ig==", "requires": { "@babel/helper-validator-identifier": "^7.14.9", "to-fast-properties": "^2.0.0" @@ -1370,11 +1370,11 @@ }, "dependencies": { "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "requires": { - "ansi-regex": "^5.0.0" + "ansi-regex": "^5.0.1" } } } @@ -1436,9 +1436,9 @@ } }, "@graphql-tools/import": { - "version": "6.4.0", - "resolved": "https://registry.npmjs.org/@graphql-tools/import/-/import-6.4.0.tgz", - "integrity": "sha512-jfE01oPcmc4vzAcYLs6xT7XC4jJWrM1HNtIwc7HyyHTxrC3nf36XrF3txEZ2l20GT53+OWnMgYx1HhauLGdJmA==", + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/import/-/import-6.4.1.tgz", + "integrity": "sha512-nFWo2dI9XXs0hsBscHnTSJNfgFq2gA1bw0qbCXyQga1PJclZViO8SxcHqCf2JmShRpTFsyzsDjKA8xGKDDs8PQ==", "requires": { "resolve-from": "5.0.0", "tslib": "~2.3.0" @@ -1899,9 +1899,9 @@ "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==" }, "@trysound/sax": { - "version": "0.1.1", - 
"resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.1.1.tgz", - "integrity": "sha512-Z6DoceYb/1xSg5+e+ZlPZ9v0N16ZvZ+wYMraFue4HYrE4ttONKtsvruIRf6t9TBR0YvSOfi1hUU0fJfBLCDYow==" + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz", + "integrity": "sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==" }, "@turist/fetch": { "version": "7.1.7", @@ -2061,9 +2061,9 @@ } }, "@types/lodash": { - "version": "4.14.172", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.172.tgz", - "integrity": "sha512-/BHF5HAx3em7/KkzVKm3LrsD6HZAXuXO1AJZQ3cRRBZj4oHZDviWPYu0aEplAqDFNHZPW6d3G7KN+ONcCCC7pw==" + "version": "4.14.175", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.175.tgz", + "integrity": "sha512-XmdEOrKQ8a1Y/yxQFOMbC47G/V2VDO1GvMRnl4O75M4GW/abC5tnfzadQYkqEveqRM1dEJGFFegfPNA2vvx2iw==" }, "@types/mdast": { "version": "3.0.10", @@ -2236,14 +2236,15 @@ "integrity": "sha512-S9q47ByT2pPvD65IvrWp7qppVMpk9WGMbVq9wbWZOHg6tnXSD4vyhao6nOSBwwfDdV2p3Kx9evA9vI+XWTfDvw==" }, "@typescript-eslint/eslint-plugin": { - "version": "4.31.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.31.0.tgz", - "integrity": "sha512-iPKZTZNavAlOhfF4gymiSuUkgLne/nh5Oz2/mdiUmuZVD42m9PapnCnzjxuDsnpnbH3wT5s2D8bw6S39TC6GNw==", + "version": "4.32.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.32.0.tgz", + "integrity": "sha512-+OWTuWRSbWI1KDK8iEyG/6uK2rTm3kpS38wuVifGUTDB6kjEuNrzBI1MUtxnkneuWG/23QehABe2zHHrj+4yuA==", "requires": { - "@typescript-eslint/experimental-utils": "4.31.0", - "@typescript-eslint/scope-manager": "4.31.0", + "@typescript-eslint/experimental-utils": "4.32.0", + "@typescript-eslint/scope-manager": "4.32.0", "debug": "^4.3.1", "functional-red-black-tree": "^1.0.1", + "ignore": "^5.1.8", "regexpp": "^3.1.0", "semver": "^7.3.5", "tsutils": "^3.21.0" @@ -2260,26 +2261,26 @@ 
} }, "@typescript-eslint/experimental-utils": { - "version": "4.31.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-4.31.0.tgz", - "integrity": "sha512-Hld+EQiKLMppgKKkdUsLeVIeEOrwKc2G983NmznY/r5/ZtZCDvIOXnXtwqJIgYz/ymsy7n7RGvMyrzf1WaSQrw==", + "version": "4.32.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-4.32.0.tgz", + "integrity": "sha512-WLoXcc+cQufxRYjTWr4kFt0DyEv6hDgSaFqYhIzQZ05cF+kXfqXdUh+//kgquPJVUBbL3oQGKQxwPbLxHRqm6A==", "requires": { "@types/json-schema": "^7.0.7", - "@typescript-eslint/scope-manager": "4.31.0", - "@typescript-eslint/types": "4.31.0", - "@typescript-eslint/typescript-estree": "4.31.0", + "@typescript-eslint/scope-manager": "4.32.0", + "@typescript-eslint/types": "4.32.0", + "@typescript-eslint/typescript-estree": "4.32.0", "eslint-scope": "^5.1.1", "eslint-utils": "^3.0.0" } }, "@typescript-eslint/parser": { - "version": "4.31.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-4.31.0.tgz", - "integrity": "sha512-oWbzvPh5amMuTmKaf1wp0ySxPt2ZXHnFQBN2Szu1O//7LmOvgaKTCIDNLK2NvzpmVd5A2M/1j/rujBqO37hj3w==", + "version": "4.32.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-4.32.0.tgz", + "integrity": "sha512-lhtYqQ2iEPV5JqV7K+uOVlPePjClj4dOw7K4/Z1F2yvjIUvyr13yJnDzkK6uon4BjHYuHy3EG0c2Z9jEhFk56w==", "requires": { - "@typescript-eslint/scope-manager": "4.31.0", - "@typescript-eslint/types": "4.31.0", - "@typescript-eslint/typescript-estree": "4.31.0", + "@typescript-eslint/scope-manager": "4.32.0", + "@typescript-eslint/types": "4.32.0", + "@typescript-eslint/typescript-estree": "4.32.0", "debug": "^4.3.1" }, "dependencies": { @@ -2294,26 +2295,26 @@ } }, "@typescript-eslint/scope-manager": { - "version": "4.31.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-4.31.0.tgz", - "integrity": 
"sha512-LJ+xtl34W76JMRLjbaQorhR0hfRAlp3Lscdiz9NeI/8i+q0hdBZ7BsiYieLoYWqy+AnRigaD3hUwPFugSzdocg==", + "version": "4.32.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-4.32.0.tgz", + "integrity": "sha512-DK+fMSHdM216C0OM/KR1lHXjP1CNtVIhJ54kQxfOE6x8UGFAjha8cXgDMBEIYS2XCYjjCtvTkjQYwL3uvGOo0w==", "requires": { - "@typescript-eslint/types": "4.31.0", - "@typescript-eslint/visitor-keys": "4.31.0" + "@typescript-eslint/types": "4.32.0", + "@typescript-eslint/visitor-keys": "4.32.0" } }, "@typescript-eslint/types": { - "version": "4.31.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-4.31.0.tgz", - "integrity": "sha512-9XR5q9mk7DCXgXLS7REIVs+BaAswfdHhx91XqlJklmqWpTALGjygWVIb/UnLh4NWhfwhR5wNe1yTyCInxVhLqQ==" + "version": "4.32.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-4.32.0.tgz", + "integrity": "sha512-LE7Z7BAv0E2UvqzogssGf1x7GPpUalgG07nGCBYb1oK4mFsOiFC/VrSMKbZQzFJdN2JL5XYmsx7C7FX9p9ns0w==" }, "@typescript-eslint/typescript-estree": { - "version": "4.31.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-4.31.0.tgz", - "integrity": "sha512-QHl2014t3ptg+xpmOSSPn5hm4mY8D4s97ftzyk9BZ8RxYQ3j73XcwuijnJ9cMa6DO4aLXeo8XS3z1omT9LA/Eg==", + "version": "4.32.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-4.32.0.tgz", + "integrity": "sha512-tRYCgJ3g1UjMw1cGG8Yn1KzOzNlQ6u1h9AmEtPhb5V5a1TmiHWcRyF/Ic+91M4f43QeChyYlVTcf3DvDTZR9vw==", "requires": { - "@typescript-eslint/types": "4.31.0", - "@typescript-eslint/visitor-keys": "4.31.0", + "@typescript-eslint/types": "4.32.0", + "@typescript-eslint/visitor-keys": "4.32.0", "debug": "^4.3.1", "globby": "^11.0.3", "is-glob": "^4.0.1", @@ -2332,14 +2333,19 @@ } }, "@typescript-eslint/visitor-keys": { - "version": "4.31.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-4.31.0.tgz", - 
"integrity": "sha512-HUcRp2a9I+P21+O21yu3ezv3GEPGjyGiXoEUQwZXjR8UxRApGeLyWH4ZIIUSalE28aG4YsV6GjtaAVB3QKOu0w==", + "version": "4.32.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-4.32.0.tgz", + "integrity": "sha512-e7NE0qz8W+atzv3Cy9qaQ7BTLwWsm084Z0c4nIO2l3Bp6u9WIgdqCgyPyV5oSPDMIW3b20H59OOCmVk3jw3Ptw==", "requires": { - "@typescript-eslint/types": "4.31.0", + "@typescript-eslint/types": "4.32.0", "eslint-visitor-keys": "^2.0.0" } }, + "@vercel/webpack-asset-relocator-loader": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/@vercel/webpack-asset-relocator-loader/-/webpack-asset-relocator-loader-1.7.0.tgz", + "integrity": "sha512-1Dy3BdOliDwxA7VZSIg55E1d/us2KvsCQOZV25fgufG//CsnZBGiSAL7qewTQf7YVHH0A9PHgzwMmKIZ8aFYVw==" + }, "@webassemblyjs/ast": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz", @@ -2599,9 +2605,9 @@ "integrity": "sha1-gTWEAhliqenm/QOflA0S9WynhZ4=" }, "ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==" + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" }, "ansi-styles": { "version": "3.2.1", @@ -2833,11 +2839,6 @@ "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" }, - "at-least-node": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", - "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==" - }, "atob": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", @@ -2849,16 +2850,23 @@ "integrity": 
"sha512-7prDjvt9HmqiZ0cl5CRjtS84sEyhsHP2coDkaZKRKVfCDo9s7iw7ChVmar78Gu9pC4SoR/28wFu/G5JJhTnqEg==" }, "autoprefixer": { - "version": "10.3.4", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.3.4.tgz", - "integrity": "sha512-EKjKDXOq7ug+jagLzmnoTRpTT0q1KVzEJqrJd0hCBa7FiG0WbFOBCcJCy2QkW1OckpO3qgttA1aWjVbeIPAecw==", + "version": "10.3.6", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.3.6.tgz", + "integrity": "sha512-3bDjTfF0MfZntwVCSd18XAT2Zndufh3Mep+mafbzdIQEeWbncVRUVDjH8/EPANV9Hq40seJ24QcYAyhUsFz7gQ==", "requires": { - "browserslist": "^4.16.8", - "caniuse-lite": "^1.0.30001252", - "colorette": "^1.3.0", + "browserslist": "^4.17.1", + "caniuse-lite": "^1.0.30001260", "fraction.js": "^4.1.1", + "nanocolors": "^0.2.8", "normalize-range": "^0.1.2", "postcss-value-parser": "^4.1.0" + }, + "dependencies": { + "nanocolors": { + "version": "0.2.12", + "resolved": "https://registry.npmjs.org/nanocolors/-/nanocolors-0.2.12.tgz", + "integrity": "sha512-SFNdALvzW+rVlzqexid6epYdt8H9Zol7xDoQarioEFcFN0JHo4CYNztAxmtfgGTVRCmFlEOqqhBpoFGKqSAMug==" + } } }, "axe-core": { @@ -2943,12 +2951,12 @@ } }, "babel-plugin-polyfill-corejs3": { - "version": "0.2.4", - "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.2.4.tgz", - "integrity": "sha512-z3HnJE5TY/j4EFEa/qpQMSbcUJZ5JQi+3UFjXzn6pQCmIKc5Ug5j98SuYyH+m4xQnvKlMDIW4plLfgyVnd0IcQ==", + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.2.5.tgz", + "integrity": "sha512-ninF5MQNwAX9Z7c9ED+H2pGt1mXdP4TqzlHKyPIYmJIYz0N+++uwdM7RnJukklhzJ54Q84vA4ZJkgs7lu5vqcw==", "requires": { "@babel/helper-define-polyfill-provider": "^0.2.2", - "core-js-compat": "^3.14.0" + "core-js-compat": "^3.16.2" } }, "babel-plugin-polyfill-regenerator": { @@ -2960,9 +2968,13 @@ } }, "babel-plugin-remove-graphql-queries": { - "version": "3.13.0", - "resolved": 
"https://registry.npmjs.org/babel-plugin-remove-graphql-queries/-/babel-plugin-remove-graphql-queries-3.13.0.tgz", - "integrity": "sha512-6kznO5vqrcVAZLwwJwDev4QyjjRhW+G665NZgdUsjF/j9pikyZ5zp58c75wijkY7eXH2W1lhzd4GS3zxLQibCQ==" + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/babel-plugin-remove-graphql-queries/-/babel-plugin-remove-graphql-queries-3.14.0.tgz", + "integrity": "sha512-uRqbsHOcJ1kWn6IK6clZOGHBnQCddiz1LuoGIpv/hcGZCO1nCy16z9KMgEM8TdGG6L6cO31mNr1RcVmvGtcCEw==", + "requires": { + "@babel/runtime": "^7.15.4", + "gatsby-core-utils": "^2.14.0" + } }, "babel-plugin-transform-react-remove-prop-types": { "version": "0.4.24", @@ -2970,25 +2982,25 @@ "integrity": "sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA==" }, "babel-preset-gatsby": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/babel-preset-gatsby/-/babel-preset-gatsby-1.13.0.tgz", - "integrity": "sha512-m7j5P06nl3QWSm1LQUXO0dWuiF85Cko02pqLuTXndOHlpBbRMLks72Q3MTzEZw2R6ePj/ib+dzdkarYQ86P4Uw==", + "version": "1.14.0", + "resolved": "https://registry.npmjs.org/babel-preset-gatsby/-/babel-preset-gatsby-1.14.0.tgz", + "integrity": "sha512-weu2mSxvlzWUUaSfO67AS005W2+UncMgyTwkGWMoqeNe4MaZxWMtEimxBRVDPHvhW/VQIzeh3aL+gjZ2v9P4oQ==", "requires": { "@babel/plugin-proposal-class-properties": "^7.14.0", "@babel/plugin-proposal-nullish-coalescing-operator": "^7.14.5", "@babel/plugin-proposal-optional-chaining": "^7.14.5", "@babel/plugin-syntax-dynamic-import": "^7.8.3", - "@babel/plugin-transform-classes": "^7.14.9", - "@babel/plugin-transform-runtime": "^7.14.5", + "@babel/plugin-transform-classes": "^7.15.4", + "@babel/plugin-transform-runtime": "^7.15.0", "@babel/plugin-transform-spread": "^7.14.6", - "@babel/preset-env": "^7.14.9", + "@babel/preset-env": "^7.15.4", "@babel/preset-react": "^7.14.0", - "@babel/runtime": "^7.14.8", + "@babel/runtime": "^7.15.4", "babel-plugin-dynamic-import-node": "^2.3.3", "babel-plugin-macros": 
"^2.8.0", "babel-plugin-transform-react-remove-prop-types": "^0.4.24", - "gatsby-core-utils": "^2.13.0", - "gatsby-legacy-polyfills": "^1.13.0" + "gatsby-core-utils": "^2.14.0", + "gatsby-legacy-polyfills": "^1.14.0" }, "dependencies": { "@babel/plugin-transform-spread": { @@ -3281,15 +3293,15 @@ } }, "browserslist": { - "version": "4.17.0", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.17.0.tgz", - "integrity": "sha512-g2BJ2a0nEYvEFQC208q8mVAhfNwpZ5Mu8BwgtCdZKO3qx98HChmeg448fPdUzld8aFmfLgVh7yymqV+q1lJZ5g==", + "version": "4.17.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.17.1.tgz", + "integrity": "sha512-aLD0ZMDSnF4lUt4ZDNgqi5BUn9BZ7YdQdI/cYlILrhdSSZJLU9aNZoD5/NBmM4SK34APB2e83MOsRt1EnkuyaQ==", "requires": { - "caniuse-lite": "^1.0.30001254", - "colorette": "^1.3.0", - "electron-to-chromium": "^1.3.830", + "caniuse-lite": "^1.0.30001259", + "electron-to-chromium": "^1.3.846", "escalade": "^3.1.1", - "node-releases": "^1.1.75" + "nanocolors": "^0.1.5", + "node-releases": "^1.1.76" } }, "buffer": { @@ -3409,30 +3421,6 @@ } } }, - "cacheable-lookup": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-2.0.1.tgz", - "integrity": "sha512-EMMbsiOTcdngM/K6gV/OxF2x0t07+vMOWxZNSCRQMjO2MY2nhZQ6OYhOOpyQrbhqsgtvKGI7hcq6xjnA92USjg==", - "requires": { - "@types/keyv": "^3.1.1", - "keyv": "^4.0.0" - }, - "dependencies": { - "json-buffer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", - "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==" - }, - "keyv": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.0.3.tgz", - "integrity": "sha512-zdGa2TOpSZPq5mU6iowDARnMBZgtCqJ11dJROFi6tg6kTn4nuUdU09lFyLFSaHrWqpIJ+EBq4E8/Dc0Vx5vLdA==", - "requires": { - "json-buffer": "3.0.1" - } - } - } - }, "cacheable-request": { "version": "6.1.0", 
"resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz", @@ -3514,9 +3502,9 @@ } }, "caniuse-lite": { - "version": "1.0.30001255", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001255.tgz", - "integrity": "sha512-F+A3N9jTZL882f/fg/WWVnKSu6IOo3ueLz4zwaOPbPYHNmM/ZaDUyzyJwS1mZhX7Ex5jqTyW599Gdelh5PDYLQ==" + "version": "1.0.30001261", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001261.tgz", + "integrity": "sha512-vM8D9Uvp7bHIN0fZ2KQ4wnmYFpJo/Etb4Vwsuc+ka0tfGDHvOPrFm6S/7CCNLSOkAUjenT2HnUPESdOIL91FaA==" }, "ccount": { "version": "1.1.0", @@ -3868,6 +3856,30 @@ "object-visit": "^1.0.0" } }, + "color": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/color/-/color-4.0.1.tgz", + "integrity": "sha512-rpZjOKN5O7naJxkH2Rx1sZzzBgaiWECc6BYXjeCE6kF0kcASJYbUq02u7JqIHwCb/j3NhV+QhRL2683aICeGZA==", + "requires": { + "color-convert": "^2.0.1", + "color-string": "^1.6.0" + }, + "dependencies": { + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + } + } + }, "color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -3891,14 +3903,14 @@ } }, "colord": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/colord/-/colord-2.7.0.tgz", - "integrity": "sha512-pZJBqsHz+pYyw3zpX6ZRXWoCHM1/cvFikY9TV8G3zcejCaKE0lhankoj8iScyrrePA8C7yJ5FStfA9zbcOnw7Q==" + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/colord/-/colord-2.8.0.tgz", + 
"integrity": "sha512-kNkVV4KFta3TYQv0bzs4xNwLaeag261pxgzGQSh4cQ1rEhYjcTJfFRP0SDlbhLONg0eSoLzrDd79PosjbltufA==" }, "colorette": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/colorette/-/colorette-1.3.0.tgz", - "integrity": "sha512-ecORCqbSFP7Wm8Y6lyqMJjexBQqXSF7SSeaTyGGphogUjBlFP9m9o08wy86HL2uB7fMTxtOUzLMk7ogKcxMg1w==" + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-1.4.0.tgz", + "integrity": "sha512-Y2oEozpomLn7Q3HFP7dpww7AtMJplbM9lGZP6RDfHqmbeRjiwRg4n6VM6j4KLmRke85uWEI7JqF17f3pqdRA0g==" }, "combined-stream": { "version": "1.0.8", @@ -4078,13 +4090,13 @@ "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" }, "contentful-management": { - "version": "7.36.1", - "resolved": "https://registry.npmjs.org/contentful-management/-/contentful-management-7.36.1.tgz", - "integrity": "sha512-RmK1CKslvyQ9gQEMUmH2oT9EtX+AtY1nh7kqEmvA18Ye70qZtRgtYDhOoC8XnxbwZ+tEQ6vJEdLhWavpJ/FThQ==", + "version": "7.41.0", + "resolved": "https://registry.npmjs.org/contentful-management/-/contentful-management-7.41.0.tgz", + "integrity": "sha512-JBpSdnG2F5rxjm/gmt1f9yy+HHoqKf+20kPMsSuuzsZ3czddoBjprIRzk8RFn2hInIBrcd3UCT/pFhnNg9TPTw==", "requires": { "@types/json-patch": "0.0.30", - "axios": "^0.21.0", - "contentful-sdk-core": "^6.8.0", + "axios": "^0.21.4", + "contentful-sdk-core": "^6.9.0", "fast-copy": "^2.1.0", "lodash.isplainobject": "^4.0.6", "type-fest": "^0.21.3" @@ -4098,11 +4110,12 @@ } }, "contentful-sdk-core": { - "version": "6.8.5", - "resolved": "https://registry.npmjs.org/contentful-sdk-core/-/contentful-sdk-core-6.8.5.tgz", - "integrity": "sha512-Efmv/Jf0zeTdRNqCW6y+iMsNbDa/+KpxYOaYYz0z1qVd4q88qtZDJrvLdjPHtYvrcrvkhYtucVRFr9oe2b+cAA==", + "version": "6.9.0", + "resolved": "https://registry.npmjs.org/contentful-sdk-core/-/contentful-sdk-core-6.9.0.tgz", + "integrity": "sha512-fLwE0avEf81iDdJGVFUB5nC8AzI1OPg+YY33V8aFBgHkKMXpHI6zNInWnQGUekXCl2OAGKk5QkVfEAjvpkFGig==", 
"requires": { "fast-copy": "^2.1.0", + "lodash": "^4.17.21", "qs": "^6.9.4" }, "dependencies": { @@ -4185,11 +4198,11 @@ "integrity": "sha512-lyvajs+wd8N1hXfzob1LdOCCHFU4bGMbqqmLn1Q4QlCpDqWPpGf+p0nj+LNrvDDG33j0hZXw2nsvvVpHysxyNw==" }, "core-js-compat": { - "version": "3.17.2", - "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.17.2.tgz", - "integrity": "sha512-lHnt7A1Oqplebl5i0MrQyFv/yyEzr9p29OjlkcsFRDDgHwwQyVckfRGJ790qzXhkwM8ba4SFHHa2sO+T5f1zGg==", + "version": "3.18.1", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.18.1.tgz", + "integrity": "sha512-XJMYx58zo4W0kLPmIingVZA10+7TuKrMLPt83+EzDmxFJQUMcTVVmQ+n5JP4r6Z14qSzhQBRi3NSWoeVyKKXUg==", "requires": { - "browserslist": "^4.16.8", + "browserslist": "^4.17.1", "semver": "7.0.0" }, "dependencies": { @@ -4201,9 +4214,9 @@ } }, "core-js-pure": { - "version": "3.17.2", - "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.17.2.tgz", - "integrity": "sha512-2VV7DlIbooyTI7Bh+yzOOWL9tGwLnQKHno7qATE+fqZzDKYr6llVjVQOzpD/QLZFgXDPb8T71pJokHEZHEYJhQ==" + "version": "3.18.1", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.18.1.tgz", + "integrity": "sha512-kmW/k8MaSuqpvA1xm2l3TVlBuvW+XBkcaOroFUpO3D4lsTGQWBTb/tBDCf/PNkkPLrwgrkQRIYNPB0CeqGJWGQ==" }, "core-util-is": { "version": "1.0.3", @@ -4240,9 +4253,12 @@ } }, "create-gatsby": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/create-gatsby/-/create-gatsby-1.13.0.tgz", - "integrity": "sha512-ypJeb+nj5uZybFeic+ab5myxGh21oZQ+OeCRkKHPL9NPZbzcvQE/y5lWXgVXHqy2/xf2IBnotkImrmiQiqPOxg==" + "version": "1.14.0", + "resolved": "https://registry.npmjs.org/create-gatsby/-/create-gatsby-1.14.0.tgz", + "integrity": "sha512-ba081Li7A7T7cHmcoE4oL+MO12k4ck5MWENPcuF9U8fTbOfICf+r3S0Mr+35YKbxr0w25RzhN5VcOS3+rokgbA==", + "requires": { + "@babel/runtime": "^7.15.4" + } }, "create-require": { "version": "1.1.1", @@ -4533,9 +4549,9 @@ "integrity": 
"sha512-YzhyDAwA4TaQIhM5go+vCLmU0UikghC/t9DTQYZR2M/UvZ1MdOhPezSDZcjj9uqQJOMqjLcpWtyW2iNINdlatQ==" }, "date-fns": { - "version": "2.23.0", - "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.23.0.tgz", - "integrity": "sha512-5ycpauovVyAk0kXNZz6ZoB9AYMZB4DObse7P3BPWmyEjXNORTI8EJ6X0uaSAq4sCHzM1uajzrkr6HnsLQpxGXA==" + "version": "2.24.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.24.0.tgz", + "integrity": "sha512-6ujwvwgPID6zbI0o7UbURi2vlLDR9uP26+tW6Lg+Ji3w7dd0i3DOcjcClLjLPranT60SSEFBwdSyYwn/ZkPIuw==" }, "debug": { "version": "3.2.7", @@ -4927,9 +4943,9 @@ "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" }, "electron-to-chromium": { - "version": "1.3.830", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.830.tgz", - "integrity": "sha512-gBN7wNAxV5vl1430dG+XRcQhD4pIeYeak6p6rjdCtlz5wWNwDad8jwvphe5oi1chL5MV6RNRikfffBBiFuj+rQ==" + "version": "1.3.853", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.853.tgz", + "integrity": "sha512-W4U8n+U8I5/SUaFcqZgbKRmYZwcyEIQVBDf+j5QQK6xChjXnQD+wj248eGR9X4u+dDmDR//8vIfbu4PrdBBIoQ==" }, "emoji-regex": { "version": "8.0.0", @@ -5299,11 +5315,11 @@ "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" }, "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "requires": { - "ansi-regex": "^5.0.0" + "ansi-regex": "^5.0.1" } }, "type-fest": { @@ -5401,9 +5417,9 @@ } }, "eslint-plugin-flowtype": { - "version": "5.9.2", - "resolved": 
"https://registry.npmjs.org/eslint-plugin-flowtype/-/eslint-plugin-flowtype-5.9.2.tgz", - "integrity": "sha512-qxE/eo9DCN7800MIB/O1ToOiFuOPOlaMJWQY2BEm69oY7RCm3s2X1z4CdgtFvDDWf9RSSugZm1KRhdBMBueKbg==", + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-flowtype/-/eslint-plugin-flowtype-5.10.0.tgz", + "integrity": "sha512-vcz32f+7TP+kvTUyMXZmCnNujBQZDNmcqPImw8b9PZ+16w1Qdm6ryRuYZYVaG9xRqqmAPr2Cs9FAX5gN+x/bjw==", "requires": { "lodash": "^4.17.15", "string-natural-compare": "^3.0.1" @@ -5534,22 +5550,23 @@ } }, "eslint-plugin-react": { - "version": "7.25.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.25.1.tgz", - "integrity": "sha512-P4j9K1dHoFXxDNP05AtixcJEvIT6ht8FhYKsrkY0MPCPaUMYijhpWwNiRDZVtA8KFuZOkGSeft6QwH8KuVpJug==", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.26.0.tgz", + "integrity": "sha512-dceliS5itjk4EZdQYtLMz6GulcsasguIs+VTXuiC7Q5IPIdGTkyfXVdmsQOqEhlD9MciofH4cMcT1bw1WWNxCQ==", "requires": { "array-includes": "^3.1.3", "array.prototype.flatmap": "^1.2.4", "doctrine": "^2.1.0", "estraverse": "^5.2.0", - "has": "^1.0.3", "jsx-ast-utils": "^2.4.1 || ^3.0.0", "minimatch": "^3.0.4", "object.entries": "^1.1.4", "object.fromentries": "^2.0.4", + "object.hasown": "^1.0.0", "object.values": "^1.1.4", "prop-types": "^15.7.2", "resolve": "^2.0.0-next.3", + "semver": "^6.3.0", "string.prototype.matchall": "^4.0.5" }, "dependencies": { @@ -5574,6 +5591,11 @@ "is-core-module": "^2.2.0", "path-parse": "^1.0.6" } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, @@ -5966,9 +5988,9 @@ } }, "ext": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/ext/-/ext-1.5.0.tgz", - "integrity": 
"sha512-+ONcYoWj/SoQwUofMr94aGu05Ou4FepKi7N7b+O8T4jVfyIsZQV1/xeS8jpaBzF0csAk0KLXoHCxU7cKYZjo1Q==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/ext/-/ext-1.6.0.tgz", + "integrity": "sha512-sdBImtzkq2HpkdRLtlLWDa6w4DX22ijZLKx8BMPUuKe1c5lbN6xwQDQCxSfxBQnHZ13ls/FH0MQZx/q/gr6FQg==", "requires": { "type": "^2.5.0" }, @@ -6488,39 +6510,6 @@ "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" }, - "from2": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", - "integrity": "sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8=", - "requires": { - "inherits": "^2.0.1", - "readable-stream": "^2.0.0" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "requires": { - "safe-buffer": "~5.1.0" - } - } - } - }, "fs-constants": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", @@ -6532,13 +6521,13 @@ "integrity": "sha1-zyVVTKBQ3EmuZla0HeQiWJidy84=" }, "fs-extra": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz", - "integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==", + "version": "10.0.0", + "resolved": 
"https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.0.tgz", + "integrity": "sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ==", "requires": { "graceful-fs": "^4.2.0", - "jsonfile": "^4.0.0", - "universalify": "^0.1.0" + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" } }, "fs-monkey": { @@ -6568,17 +6557,18 @@ "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=" }, "gatsby": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/gatsby/-/gatsby-3.13.0.tgz", - "integrity": "sha512-nZOQkV6CF8ixtkbr+VNeiD2ISwuSkRLafeK+x/1btPB/l+b/w8ar0XrJGIWNX4DHr2Pohf3wy166IPfEkNqNTA==", + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/gatsby/-/gatsby-3.14.0.tgz", + "integrity": "sha512-ikXoVZ9LQO9lZBxVvQxrYkjhX1zz9/7/Iv/6WCt/UPdgYCWCKc6GWOqlbLKmjShM0fBxJHfjmiahNxuTzJIZsw==", "requires": { "@babel/code-frame": "^7.14.0", - "@babel/core": "^7.14.8", - "@babel/eslint-parser": "^7.14.9", - "@babel/parser": "^7.14.9", - "@babel/runtime": "^7.14.8", - "@babel/traverse": "^7.14.9", - "@babel/types": "^7.14.9", + "@babel/core": "^7.15.5", + "@babel/eslint-parser": "^7.15.4", + "@babel/helper-plugin-utils": "^7.14.5", + "@babel/parser": "^7.15.5", + "@babel/runtime": "^7.15.4", + "@babel/traverse": "^7.15.4", + "@babel/types": "^7.15.4", "@gatsbyjs/reach-router": "^1.3.6", "@gatsbyjs/webpack-hot-middleware": "^2.25.2", "@nodelib/fs.walk": "^1.2.4", @@ -6586,6 +6576,7 @@ "@types/http-proxy": "^1.17.4", "@typescript-eslint/eslint-plugin": "^4.29.3", "@typescript-eslint/parser": "^4.29.3", + "@vercel/webpack-asset-relocator-loader": "^1.6.0", "address": "1.1.2", "anser": "^2.0.1", "autoprefixer": "^10.2.4", @@ -6594,36 +6585,37 @@ "babel-plugin-add-module-exports": "^1.0.4", "babel-plugin-dynamic-import-node": "^2.3.3", "babel-plugin-lodash": "^3.3.4", - "babel-plugin-remove-graphql-queries": "^3.13.0", - "babel-preset-gatsby": "^1.13.0", + "babel-plugin-remove-graphql-queries": "^3.14.0", + "babel-preset-gatsby": 
"^1.14.0", "better-opn": "^2.0.0", "bluebird": "^3.7.2", "body-parser": "^1.19.0", "browserslist": "^4.12.2", "cache-manager": "^2.11.1", "chalk": "^4.1.2", - "chokidar": "^3.4.2", + "chokidar": "^3.5.2", "common-tags": "^1.8.0", "compression": "^1.7.4", "cookie": "^0.4.1", - "core-js": "^3.9.0", + "core-js": "^3.17.2", "cors": "^2.8.5", "css-loader": "^5.0.1", "css-minimizer-webpack-plugin": "^2.0.0", "css.escape": "^1.5.1", "date-fns": "^2.14.0", "debug": "^3.2.7", + "deepmerge": "^4.2.2", "del": "^5.1.0", "detect-port": "^1.3.0", "devcert": "^1.1.3", "dotenv": "^8.2.0", "eslint": "^7.32.0", "eslint-config-react-app": "^6.0.0", - "eslint-plugin-flowtype": "^5.8.2", + "eslint-plugin-flowtype": "^5.9.2", "eslint-plugin-graphql": "^4.0.0", - "eslint-plugin-import": "^2.23.4", + "eslint-plugin-import": "^2.24.2", "eslint-plugin-jsx-a11y": "^6.4.1", - "eslint-plugin-react": "^7.24.0", + "eslint-plugin-react": "^7.25.1", "eslint-plugin-react-hooks": "^4.2.0", "eslint-webpack-plugin": "^2.5.4", "event-source-polyfill": "^1.0.15", @@ -6635,20 +6627,20 @@ "file-loader": "^6.2.0", "find-cache-dir": "^3.3.1", "fs-exists-cached": "1.0.0", - "fs-extra": "^8.1.0", - "gatsby-cli": "^3.13.0", - "gatsby-core-utils": "^2.13.0", - "gatsby-graphiql-explorer": "^1.13.0", - "gatsby-legacy-polyfills": "^1.13.0", - "gatsby-link": "^3.13.0", - "gatsby-plugin-page-creator": "^3.13.0", - "gatsby-plugin-typescript": "^3.13.0", - "gatsby-plugin-utils": "^1.13.0", - "gatsby-react-router-scroll": "^4.13.0", - "gatsby-telemetry": "^2.13.0", - "gatsby-worker": "^0.4.0", + "fs-extra": "^10.0.0", + "gatsby-cli": "^3.14.0", + "gatsby-core-utils": "^2.14.0", + "gatsby-graphiql-explorer": "^1.14.0", + "gatsby-legacy-polyfills": "^1.14.0", + "gatsby-link": "^3.14.0", + "gatsby-plugin-page-creator": "^3.14.0", + "gatsby-plugin-typescript": "^3.14.0", + "gatsby-plugin-utils": "^1.14.0", + "gatsby-react-router-scroll": "^4.14.0", + "gatsby-telemetry": "^2.14.0", + "gatsby-worker": "^0.5.0", "glob": 
"^7.1.6", - "got": "8.3.2", + "got": "^11.8.2", "graphql": "^15.4.0", "graphql-compose": "~7.25.0", "graphql-playground-middleware-express": "^1.7.18", @@ -6690,7 +6682,7 @@ "redux": "^4.0.5", "redux-thunk": "^2.3.0", "resolve-from": "^5.0.0", - "semver": "^7.3.2", + "semver": "^7.3.5", "shallow-compare": "^1.2.2", "signal-exit": "^3.0.3", "slugify": "^1.4.4", @@ -6717,14 +6709,41 @@ "webpack-virtual-modules": "^0.3.2", "xstate": "^4.11.0", "yaml-loader": "^0.6.0" + }, + "dependencies": { + "fs-extra": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.0.tgz", + "integrity": "sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ==", + "requires": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + } + }, + "jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "requires": { + "graceful-fs": "^4.1.6", + "universalify": "^2.0.0" + } + }, + "universalify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", + "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" + } } }, "gatsby-cli": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/gatsby-cli/-/gatsby-cli-3.13.0.tgz", - "integrity": "sha512-QTJZUY4wPwXLuK4aP3GCqBpklruV2hv/jtf65ED5zfeF2YnZlFvrJXt40n9o1ptc5XYe/FF6yFBxu1Lwbt9qtg==", + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/gatsby-cli/-/gatsby-cli-3.14.0.tgz", + "integrity": "sha512-1Az1EEQu0txRE8eZmOo9GOxRSjhJtFseinraiIEtSeHkYuM0/gjuoKVSrtmbBFNWdOZll6QYCO3sRl6sOrwb+g==", "requires": { "@babel/code-frame": "^7.14.0", + "@babel/runtime": "^7.15.4", "@types/common-tags": "^1.8.0", "better-opn": "^2.0.0", "chalk": "^4.1.2", @@ -6732,14 +6751,14 @@ 
"common-tags": "^1.8.0", "configstore": "^5.0.1", "convert-hrtime": "^3.0.0", - "create-gatsby": "^1.13.0", + "create-gatsby": "^1.14.0", "envinfo": "^7.7.3", "execa": "^5.1.1", "fs-exists-cached": "^1.0.0", - "fs-extra": "^8.1.0", - "gatsby-core-utils": "^2.13.0", - "gatsby-recipes": "^0.24.0", - "gatsby-telemetry": "^2.13.0", + "fs-extra": "^10.0.0", + "gatsby-core-utils": "^2.14.0", + "gatsby-recipes": "^0.25.0", + "gatsby-telemetry": "^2.14.0", "hosted-git-info": "^3.0.6", "is-valid-path": "^0.1.1", "joi": "^17.4.0", @@ -6752,7 +6771,7 @@ "prompts": "^2.3.2", "redux": "^4.0.5", "resolve-cwd": "^3.0.0", - "semver": "^7.3.2", + "semver": "^7.3.5", "signal-exit": "^3.0.3", "source-map": "0.7.3", "stack-trace": "^0.0.10", @@ -6764,6 +6783,16 @@ "yurnalist": "^2.1.0" }, "dependencies": { + "fs-extra": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.0.tgz", + "integrity": "sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ==", + "requires": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + } + }, "hosted-git-info": { "version": "3.0.8", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-3.0.8.tgz", @@ -6771,37 +6800,80 @@ "requires": { "lru-cache": "^6.0.0" } + }, + "jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "requires": { + "graceful-fs": "^4.1.6", + "universalify": "^2.0.0" + } + }, + "universalify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", + "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" } } }, "gatsby-core-utils": { - "version": "2.13.0", - "resolved": 
"https://registry.npmjs.org/gatsby-core-utils/-/gatsby-core-utils-2.13.0.tgz", - "integrity": "sha512-fkMAxiWFY8N26Iui/Wq8yfE2FY2b31bGJVtlIlSwLgfsoO7fpta64lxeivRtfpNLbAoywmWY/L8TC74GFlnuWg==", + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/gatsby-core-utils/-/gatsby-core-utils-2.14.0.tgz", + "integrity": "sha512-HDMb1XMqysup9raLYWB0wIQU568R9qPounF7iAwjf2esFUVV5mdBTvxEpune/7yG0RmwhNPhgrEZo2rBHeJf7A==", "requires": { + "@babel/runtime": "^7.15.4", "ci-info": "2.0.0", "configstore": "^5.0.1", "file-type": "^16.5.3", - "fs-extra": "^8.1.0", - "node-object-hash": "^2.3.8", + "fs-extra": "^10.0.0", + "got": "^11.8.2", + "node-object-hash": "^2.3.9", "proper-lockfile": "^4.1.2", "tmp": "^0.2.1", "xdg-basedir": "^4.0.0" + }, + "dependencies": { + "fs-extra": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.0.tgz", + "integrity": "sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ==", + "requires": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + } + }, + "jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "requires": { + "graceful-fs": "^4.1.6", + "universalify": "^2.0.0" + } + }, + "universalify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", + "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" + } } }, "gatsby-graphiql-explorer": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/gatsby-graphiql-explorer/-/gatsby-graphiql-explorer-1.13.0.tgz", - "integrity": "sha512-0vsbFEMFZlYwaL7jLHf5t3DYNet2vEXXSdg3JGtr26mnefK0+PeZemIvCp8EBVun+DUZK6mpYw5hbWFjfCNbTQ==", + "version": "1.14.0", + "resolved": 
"https://registry.npmjs.org/gatsby-graphiql-explorer/-/gatsby-graphiql-explorer-1.14.0.tgz", + "integrity": "sha512-OdwNGWDzrzmLHx8n02yrnuQo2ePsEsnrZHI/EZvb6I14fnSBizeW7rV35/5ppxdqV/1nsfNSMpzmFK+5ySVSEA==", "requires": { - "@babel/runtime": "^7.15.3" + "@babel/runtime": "^7.15.4" } }, "gatsby-legacy-polyfills": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/gatsby-legacy-polyfills/-/gatsby-legacy-polyfills-1.13.0.tgz", - "integrity": "sha512-DjSW8KgQ+DMzIc47XrOJvIaTe9xpwIV7NYjVNo7Sh3gP78Dij7i/snwhJyGdHY+dg+GeoIw/RUiwCVJIoR2Kfg==", + "version": "1.14.0", + "resolved": "https://registry.npmjs.org/gatsby-legacy-polyfills/-/gatsby-legacy-polyfills-1.14.0.tgz", + "integrity": "sha512-IGto7YurB4cEm6r07Lr/hSPZZvrkT1/0YdGpZQp7rC6CdSLqyWO9X5CS9F111NJyJhLusHCr9ZuRJG5cA0SYxQ==", "requires": { + "@babel/runtime": "^7.15.4", "core-js-compat": "3.9.0" }, "dependencies": { @@ -6822,141 +6894,84 @@ } }, "gatsby-link": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/gatsby-link/-/gatsby-link-3.13.0.tgz", - "integrity": "sha512-18KOZEOdsD3wgCozZeT1EWuXVv3ESwHefCkTFKyeL4hjTM9MaLSt64e4tD4EpfDwOaXfDeRM0qY6ABcM3pM8wA==", + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/gatsby-link/-/gatsby-link-3.14.0.tgz", + "integrity": "sha512-a7ZC6aQZ+dz6lhkW0nrg33zlFQq9DADvtl/wwk3W3GdTlseDNOC+iry11tLMEthisUQZ2H3SZGJyVeNuQkdFsw==", "requires": { - "@babel/runtime": "^7.14.8", + "@babel/runtime": "^7.15.4", "@types/reach__router": "^1.3.9", "prop-types": "^15.7.2" } }, "gatsby-page-utils": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/gatsby-page-utils/-/gatsby-page-utils-1.13.0.tgz", - "integrity": "sha512-IKWFP/JA7MrFjOeAVnHxg0bxxnCUIaT+Ip3E4xmBqUC3dDh8CjWJ0/5ice4Gad4njBNOvFrEfdGZSOPM0yVeLw==", + "version": "1.14.0", + "resolved": "https://registry.npmjs.org/gatsby-page-utils/-/gatsby-page-utils-1.14.0.tgz", + "integrity": "sha512-Hjyxq4XnbUYCaYc5Ta7xXML1S3qLNkTv3xYQn2W91LuVDY4/u27LaOgzIYOVPMlHUSfocfhu0CMFmXw4fOjGFg==", 
"requires": { - "@babel/runtime": "^7.14.8", + "@babel/runtime": "^7.15.4", "bluebird": "^3.7.2", - "chokidar": "^3.5.1", + "chokidar": "^3.5.2", "fs-exists-cached": "^1.0.0", - "gatsby-core-utils": "^2.13.0", + "gatsby-core-utils": "^2.14.0", "glob": "^7.1.7", "lodash": "^4.17.21", "micromatch": "^4.0.4" } }, "gatsby-plugin-catch-links": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/gatsby-plugin-catch-links/-/gatsby-plugin-catch-links-3.13.0.tgz", - "integrity": "sha512-IdxpkO6yLUHxFao4lSSDIbvD7UXYFxCBbVvl/PSOUcahMGWuVLudXgP5vrJfq95pbM3M7KjaX8NxuRyaz51OHg==", + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/gatsby-plugin-catch-links/-/gatsby-plugin-catch-links-3.14.0.tgz", + "integrity": "sha512-F1cydnQWrD6ObDJdBqyPmobEpS3rWYvD9rjLszIkCCcO/9orqOgpffbVJf0xdMnzStxqeyUPGze06Ld8YCtFIw==", "requires": { - "@babel/runtime": "^7.14.8", + "@babel/runtime": "^7.15.4", "escape-string-regexp": "^1.0.5" } }, "gatsby-plugin-google-tagmanager": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/gatsby-plugin-google-tagmanager/-/gatsby-plugin-google-tagmanager-3.13.0.tgz", - "integrity": "sha512-TItPS6Mv+1XN0yyGN112Fy0Tq0YGaNYW20B9nu7j44nbmpEGeHHTzGtq4En/98lhVa3oW8lRaLHH0n4xWAg+bg==", + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/gatsby-plugin-google-tagmanager/-/gatsby-plugin-google-tagmanager-3.14.0.tgz", + "integrity": "sha512-rASL5Y9HRXMxcNg3oJRbbfqJLO8CyrYPjQRleiAsE7iBK51uiPGMUVcj6HNqHtniqM9q7YkzX6E42fGUA8GDLQ==", "requires": { - "@babel/runtime": "^7.14.8", + "@babel/runtime": "^7.15.4", "web-vitals": "^1.1.2" } }, "gatsby-plugin-manifest": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/gatsby-plugin-manifest/-/gatsby-plugin-manifest-3.13.0.tgz", - "integrity": "sha512-izBaVUUnOId7IlX7nDjgnFqxx2insGbm4r+CwlmyjF5slETdvxifERPX8ng+WAscbh3qvJ/vlgLlbbjjfvJe9w==", + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/gatsby-plugin-manifest/-/gatsby-plugin-manifest-3.14.0.tgz", 
+ "integrity": "sha512-l++KGG/3/8iwUExJ8oXUHF5ra7P//xQkkhoDybUu3N7+9jpp9S2j4NWqJvgpMhRbh09zcUfuw7usII0sJO24lA==", "requires": { - "@babel/runtime": "^7.14.8", - "gatsby-core-utils": "^2.13.0", - "gatsby-plugin-utils": "^1.13.0", + "@babel/runtime": "^7.15.4", + "gatsby-core-utils": "^2.14.0", + "gatsby-plugin-utils": "^1.14.0", "semver": "^7.3.5", "sharp": "^0.29.0" - }, - "dependencies": { - "color": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/color/-/color-4.0.1.tgz", - "integrity": "sha512-rpZjOKN5O7naJxkH2Rx1sZzzBgaiWECc6BYXjeCE6kF0kcASJYbUq02u7JqIHwCb/j3NhV+QhRL2683aICeGZA==", - "requires": { - "color-convert": "^2.0.1", - "color-string": "^1.6.0" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "prebuild-install": { - "version": "6.1.4", - "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-6.1.4.tgz", - "integrity": "sha512-Z4vpywnK1lBg+zdPCVCsKq0xO66eEV9rWo2zrROGGiRS4JtueBOdlB1FnY8lcy7JsUud/Q3ijUxyWN26Ika0vQ==", - "requires": { - "detect-libc": "^1.0.3", - "expand-template": "^2.0.3", - "github-from-package": "0.0.0", - "minimist": "^1.2.3", - "mkdirp-classic": "^0.5.3", - "napi-build-utils": "^1.0.1", - "node-abi": "^2.21.0", - "npmlog": "^4.0.1", - "pump": "^3.0.0", - "rc": "^1.2.7", - "simple-get": "^3.0.3", - "tar-fs": "^2.0.0", - "tunnel-agent": "^0.6.0" - } - }, - "sharp": { - "version": "0.29.0", - "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.29.0.tgz", - "integrity": 
"sha512-mdN1Up0eN+SwyForPls59dWO0nx64J1XRQYy5ZiKSADAccGYCB10UAGJHSVG9VObzJdhHqrVJzQcq6gx8USyoA==", - "requires": { - "color": "^4.0.1", - "detect-libc": "^1.0.3", - "node-addon-api": "^4.0.0", - "prebuild-install": "^6.1.4", - "semver": "^7.3.5", - "simple-get": "^3.1.0", - "tar-fs": "^2.1.1", - "tunnel-agent": "^0.6.0" - } - } } }, "gatsby-plugin-page-creator": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/gatsby-plugin-page-creator/-/gatsby-plugin-page-creator-3.13.0.tgz", - "integrity": "sha512-4uoMCMw7+zLuEIs0jIIaatjjPnWIOWOmKEaxOhryhQiV+nxci1HSuLqs9/msxvCtFob1QQJQH61bxidLXBWSXw==", + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/gatsby-plugin-page-creator/-/gatsby-plugin-page-creator-3.14.0.tgz", + "integrity": "sha512-Y7Ims8CkdDpDYrr/42aFM4wTdpBTxIYe7VakdV8m0fJGb1OdD1W/7Wc9yOj+yBTqMgeeXXp45pg26wsjiG5H9w==", "requires": { - "@babel/traverse": "^7.14.9", + "@babel/runtime": "^7.15.4", + "@babel/traverse": "^7.15.4", "@sindresorhus/slugify": "^1.1.2", - "chokidar": "^3.5.1", + "chokidar": "^3.5.2", "fs-exists-cached": "^1.0.0", - "gatsby-core-utils": "^2.13.0", - "gatsby-page-utils": "^1.13.0", - "gatsby-telemetry": "^2.13.0", + "gatsby-core-utils": "^2.14.0", + "gatsby-page-utils": "^1.14.0", + "gatsby-plugin-utils": "^1.14.0", + "gatsby-telemetry": "^2.14.0", "globby": "^11.0.4", "lodash": "^4.17.21" } }, "gatsby-plugin-react-helmet": { - "version": "4.13.0", - "resolved": "https://registry.npmjs.org/gatsby-plugin-react-helmet/-/gatsby-plugin-react-helmet-4.13.0.tgz", - "integrity": "sha512-GwIqbhFtPDryftM6NEWdjgWNDpWhyifgshDxtIh8PMSEIs7VMg2tgLQf8W8EpSP7yXu3aKDm/pxFXRlPZPk9cA==", + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/gatsby-plugin-react-helmet/-/gatsby-plugin-react-helmet-4.14.0.tgz", + "integrity": "sha512-IpLC0mWRNP+E0ezDBXHciVATW+mv2MCvCP3lEYtFQ8mfcm3K//MpeynouNQSPCXn9cH7fr5w0Y355Wl5w1kw1A==", "requires": { - "@babel/runtime": "^7.14.8" + "@babel/runtime": "^7.15.4" } }, 
"gatsby-plugin-react-svg": { @@ -6968,62 +6983,35 @@ } }, "gatsby-plugin-sharp": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/gatsby-plugin-sharp/-/gatsby-plugin-sharp-3.13.0.tgz", - "integrity": "sha512-3ifLeUrTUqyaNh4QY6CvKVeggL6GYlcxCjx/JO0R+We1iT6ogKSssQno0cltHnmwsfki+DH2lyV9l7uCNvBVIQ==", + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/gatsby-plugin-sharp/-/gatsby-plugin-sharp-3.14.0.tgz", + "integrity": "sha512-i4jrmOElr0mB2tWlpfxzX72zimiheGny0GF+jPPYY77Uoie43AH7GGZm6wyhdtqKlphYDhgatOMU7xeiWQar6g==", "requires": { - "@babel/runtime": "^7.14.8", - "async": "^3.2.0", + "@babel/runtime": "^7.15.4", + "async": "^3.2.1", "bluebird": "^3.7.2", "filenamify": "^4.3.0", - "fs-extra": "^9.1.0", - "gatsby-core-utils": "^2.13.0", - "gatsby-plugin-utils": "^1.13.0", - "gatsby-telemetry": "^2.13.0", - "got": "^10.7.0", + "fs-extra": "^10.0.0", + "gatsby-core-utils": "^2.14.0", + "gatsby-plugin-utils": "^1.14.0", + "gatsby-telemetry": "^2.14.0", + "got": "^11.8.2", "lodash": "^4.17.21", "mini-svg-data-uri": "^1.3.3", "potrace": "^2.1.8", "probe-image-size": "^6.0.0", "progress": "^2.0.3", - "semver": "^7.3.4", + "semver": "^7.3.5", "sharp": "^0.29.0", "svgo": "1.3.2", "uuid": "3.4.0" }, "dependencies": { - "@sindresorhus/is": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-2.1.1.tgz", - "integrity": "sha512-/aPsuoj/1Dw/kzhkgz+ES6TxG0zfTMGLwuK2ZG00k/iJzYHTLCE8mVU8EPqEOp/lmxPoq1C1C9RYToRKb2KEfg==" - }, - "@szmarczak/http-timer": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz", - "integrity": "sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==", - "requires": { - "defer-to-connect": "^2.0.0" - } - }, "async": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/async/-/async-3.2.1.tgz", "integrity": 
"sha512-XdD5lRO/87udXCMC9meWdYiR+Nq6ZjUfXidViUZGu2F1MO4T3XwZ1et0hb2++BgLfhyJwy44BGB/yx80ABx8hg==" }, - "cacheable-request": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.2.tgz", - "integrity": "sha512-pouW8/FmiPQbuGpkXQ9BAPv/Mo5xDGANgSNXzTzJ8DrKGuXOssM4wIQRjfanNRh3Yu5cfYPvcorqbhg2KIJtew==", - "requires": { - "clone-response": "^1.0.2", - "get-stream": "^5.1.0", - "http-cache-semantics": "^4.0.0", - "keyv": "^4.0.0", - "lowercase-keys": "^2.0.0", - "normalize-url": "^6.0.1", - "responselike": "^2.0.0" - } - }, "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -7034,28 +7022,6 @@ "supports-color": "^5.3.0" } }, - "color": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/color/-/color-4.0.1.tgz", - "integrity": "sha512-rpZjOKN5O7naJxkH2Rx1sZzzBgaiWECc6BYXjeCE6kF0kcASJYbUq02u7JqIHwCb/j3NhV+QhRL2683aICeGZA==", - "requires": { - "color-convert": "^2.0.1", - "color-string": "^1.6.0" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, "css-select": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/css-select/-/css-select-2.1.0.tgz", @@ -7081,19 +7047,6 @@ "resolved": "https://registry.npmjs.org/css-what/-/css-what-3.4.2.tgz", "integrity": "sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ==" }, - "decompress-response": { - "version": "5.0.0", - "resolved": 
"https://registry.npmjs.org/decompress-response/-/decompress-response-5.0.0.tgz", - "integrity": "sha512-TLZWWybuxWgoW7Lykv+gq9xvzOsUjQ9tF09Tj6NSTYGMTCHNXzrPnD6Hi+TgZq19PyTAGH4Ll/NIM/eTGglnMw==", - "requires": { - "mimic-response": "^2.0.0" - } - }, - "defer-to-connect": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", - "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==" - }, "dom-serializer": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.2.2.tgz", @@ -7125,51 +7078,15 @@ } }, "fs-extra": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", - "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.0.tgz", + "integrity": "sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ==", "requires": { - "at-least-node": "^1.0.0", "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" } }, - "get-stream": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", - "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", - "requires": { - "pump": "^3.0.0" - } - }, - "got": { - "version": "10.7.0", - "resolved": "https://registry.npmjs.org/got/-/got-10.7.0.tgz", - "integrity": "sha512-aWTDeNw9g+XqEZNcTjMMZSy7B7yE9toWOFYip7ofFTLleJhvZwUxxTxkTpKvF+p1SAA4VHmuEy7PiHTHyq8tJg==", - "requires": { - "@sindresorhus/is": "^2.0.0", - "@szmarczak/http-timer": "^4.0.0", - "@types/cacheable-request": "^6.0.1", - "cacheable-lookup": "^2.0.0", - "cacheable-request": "^7.0.1", - "decompress-response": "^5.0.0", - "duplexer3": "^0.1.4", - "get-stream": "^5.0.0", - "lowercase-keys": 
"^2.0.0", - "mimic-response": "^2.1.0", - "p-cancelable": "^2.0.0", - "p-event": "^4.0.0", - "responselike": "^2.0.0", - "to-readable-stream": "^2.0.0", - "type-fest": "^0.10.0" - } - }, - "json-buffer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", - "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==" - }, "jsonfile": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", @@ -7179,29 +7096,11 @@ "universalify": "^2.0.0" } }, - "keyv": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.0.3.tgz", - "integrity": "sha512-zdGa2TOpSZPq5mU6iowDARnMBZgtCqJ11dJROFi6tg6kTn4nuUdU09lFyLFSaHrWqpIJ+EBq4E8/Dc0Vx5vLdA==", - "requires": { - "json-buffer": "3.0.1" - } - }, - "lowercase-keys": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", - "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==" - }, "mdn-data": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.4.tgz", "integrity": "sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA==" }, - "mimic-response": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-2.1.0.tgz", - "integrity": "sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA==" - }, "nth-check": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-1.0.2.tgz", @@ -7210,54 +7109,6 @@ "boolbase": "~1.0.0" } }, - "p-cancelable": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-2.1.1.tgz", - "integrity": "sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==" - }, - "prebuild-install": { - 
"version": "6.1.4", - "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-6.1.4.tgz", - "integrity": "sha512-Z4vpywnK1lBg+zdPCVCsKq0xO66eEV9rWo2zrROGGiRS4JtueBOdlB1FnY8lcy7JsUud/Q3ijUxyWN26Ika0vQ==", - "requires": { - "detect-libc": "^1.0.3", - "expand-template": "^2.0.3", - "github-from-package": "0.0.0", - "minimist": "^1.2.3", - "mkdirp-classic": "^0.5.3", - "napi-build-utils": "^1.0.1", - "node-abi": "^2.21.0", - "npmlog": "^4.0.1", - "pump": "^3.0.0", - "rc": "^1.2.7", - "simple-get": "^3.0.3", - "tar-fs": "^2.0.0", - "tunnel-agent": "^0.6.0" - } - }, - "responselike": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/responselike/-/responselike-2.0.0.tgz", - "integrity": "sha512-xH48u3FTB9VsZw7R+vvgaKeLKzT6jOogbQhEe/jewwnZgzPcnyWui2Av6JpoYZF/91uueC+lqhWqeURw5/qhCw==", - "requires": { - "lowercase-keys": "^2.0.0" - } - }, - "sharp": { - "version": "0.29.0", - "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.29.0.tgz", - "integrity": "sha512-mdN1Up0eN+SwyForPls59dWO0nx64J1XRQYy5ZiKSADAccGYCB10UAGJHSVG9VObzJdhHqrVJzQcq6gx8USyoA==", - "requires": { - "color": "^4.0.1", - "detect-libc": "^1.0.3", - "node-addon-api": "^4.0.0", - "prebuild-install": "^6.1.4", - "semver": "^7.3.5", - "simple-get": "^3.1.0", - "tar-fs": "^2.1.1", - "tunnel-agent": "^0.6.0" - } - }, "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", @@ -7283,16 +7134,6 @@ "util.promisify": "~1.0.0" } }, - "to-readable-stream": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-2.1.0.tgz", - "integrity": "sha512-o3Qa6DGg1CEXshSdvWNX2sN4QHqg03SPq7U6jPXRahlQdl5dK8oXjkU/2/sGrnOZKeGV1zLSO8qPwyKklPPE7w==" - }, - "type-fest": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.10.0.tgz", - "integrity": "sha512-EUV9jo4sffrwlg8s0zDhP0T2WD3pru5Xi0+HTE3zTUmBaZNhfkite9PdSJwdXLwPVW0jnAHT56pZHIOYckPEiw==" - }, 
"universalify": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", @@ -7301,73 +7142,75 @@ } }, "gatsby-plugin-sitemap": { - "version": "4.9.0", - "resolved": "https://registry.npmjs.org/gatsby-plugin-sitemap/-/gatsby-plugin-sitemap-4.9.0.tgz", - "integrity": "sha512-frbbsp8woF7WXY+Np5b3qLHZ2tK/uVF9kOVkTPZS3i4tfT0fRFoOamTqyWxFZbFwFTuZsJBUc7Rp0y41TtoPLw==", + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/gatsby-plugin-sitemap/-/gatsby-plugin-sitemap-4.10.0.tgz", + "integrity": "sha512-q7WdaZLzOQnSJDZ2/ArTnSpCG26Eqgpt9jvni6wUqPxLic9irwvzIHhZxmZp8I7iq6Ue1Ii1MD5kWO2VTYb7GA==", "requires": { - "@babel/runtime": "^7.14.8", + "@babel/runtime": "^7.15.4", "common-tags": "^1.8.0", "minimatch": "^3.0.4", "sitemap": "^7.0.0" } }, "gatsby-plugin-typescript": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/gatsby-plugin-typescript/-/gatsby-plugin-typescript-3.13.0.tgz", - "integrity": "sha512-qYE5ZdpV8XdbEXRjkpD7aiDwDRVnhdYzBSrTDUmjfTrwUK+QmZ7BxDQjic+hA5fjTMDMgLR+dYmvwotkqp8hxA==", + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/gatsby-plugin-typescript/-/gatsby-plugin-typescript-3.14.0.tgz", + "integrity": "sha512-gQVkLFPvO9g+O+DdY9nw+1SAelF2yOQ+CqpFJ9aDllf/JUyxNbajND7nbYkLCiDja86yi3ZNCkZR2yp8qWZnpQ==", "requires": { - "@babel/core": "^7.14.8", + "@babel/core": "^7.15.5", "@babel/plugin-proposal-nullish-coalescing-operator": "^7.14.5", "@babel/plugin-proposal-numeric-separator": "^7.14.5", "@babel/plugin-proposal-optional-chaining": "^7.14.5", - "@babel/preset-typescript": "^7.14.0", - "@babel/runtime": "^7.14.8", - "babel-plugin-remove-graphql-queries": "^3.13.0" + "@babel/preset-typescript": "^7.15.0", + "@babel/runtime": "^7.15.4", + "babel-plugin-remove-graphql-queries": "^3.14.0" } }, "gatsby-plugin-typography": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/gatsby-plugin-typography/-/gatsby-plugin-typography-3.13.0.tgz", - "integrity": 
"sha512-sIo8HtEtFp79H59Ydc0BU6ReQ5IhQUCYvApZ1K3Z1efXWVkJ6Se2jJAntk3brstrkSQQAt9Ena5O+HBvZsBkJw==", + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/gatsby-plugin-typography/-/gatsby-plugin-typography-3.14.0.tgz", + "integrity": "sha512-N9JPpB2N75esPSWkP8J5H2zeaKjH2a4QOLnRhxVqtu+bB79vMacxi0ZkYhlRqCPTaV0rurae9xOa0baOrxKcAg==", "requires": { - "@babel/runtime": "^7.14.8" + "@babel/runtime": "^7.15.4" } }, "gatsby-plugin-utils": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/gatsby-plugin-utils/-/gatsby-plugin-utils-1.13.0.tgz", - "integrity": "sha512-Qen1qBe4qOm32856V7lCoPH3XxRhcVej36LxPElWWknLmLcmLY2sBnzXgx2gSMaOcepAw3jtoqgaSuVLFeYA6Q==", + "version": "1.14.0", + "resolved": "https://registry.npmjs.org/gatsby-plugin-utils/-/gatsby-plugin-utils-1.14.0.tgz", + "integrity": "sha512-lYzr9R9yTH/PzgRTWB878yB1xBlJULvyosEoF8LnE62+UyuPXxv+e/frfwZCeCoqsqstuciR0yaMELIPYMna+Q==", "requires": { - "joi": "^17.2.1" + "@babel/runtime": "^7.15.4", + "joi": "^17.4.2" } }, "gatsby-react-router-scroll": { - "version": "4.13.0", - "resolved": "https://registry.npmjs.org/gatsby-react-router-scroll/-/gatsby-react-router-scroll-4.13.0.tgz", - "integrity": "sha512-SfvZ8fWxVPrX5jXo6HyhC33i/rT9L/naCzjUAawp4dunTuSJV1ZJquFJXqPIvUMiXNz/OgRoCzxHzq3zHIevEg==", + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/gatsby-react-router-scroll/-/gatsby-react-router-scroll-4.14.0.tgz", + "integrity": "sha512-ahsJqhqSroRsm+BySUUNNrTLWOzjxb8zBP6UNja/VssEYAiGnG3V7ycVqpzMXDnWnZAKTSGIO7B3ZiM5sf6mYw==", "requires": { - "@babel/runtime": "^7.14.8" + "@babel/runtime": "^7.15.4" } }, "gatsby-recipes": { - "version": "0.24.0", - "resolved": "https://registry.npmjs.org/gatsby-recipes/-/gatsby-recipes-0.24.0.tgz", - "integrity": "sha512-azDY4tnOCy5/CK+Kv53CBIgzmEroAGe/mLaiW2PuizTQIdhoY3lg63ZXK6kPQHAq1F4qAYHGkBM4ECgSfaq5HA==", + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/gatsby-recipes/-/gatsby-recipes-0.25.0.tgz", + "integrity": 
"sha512-eEbmmAWY78pL1zLrx0M0CNC4fMbzKza/Ug0vSQ7egfAqNk74Lt0csgODRGdBLVHbmRRKYmJpJIXK7NdE+ZWh4A==", "requires": { - "@babel/core": "^7.14.8", - "@babel/generator": "^7.14.9", + "@babel/core": "^7.15.5", + "@babel/generator": "^7.15.4", "@babel/helper-plugin-utils": "^7.14.0", "@babel/plugin-proposal-optional-chaining": "^7.14.5", "@babel/plugin-transform-react-jsx": "^7.14.9", - "@babel/standalone": "^7.14.9", - "@babel/template": "^7.14.0", - "@babel/types": "^7.14.9", + "@babel/runtime": "^7.15.4", + "@babel/standalone": "^7.15.5", + "@babel/template": "^7.15.4", + "@babel/types": "^7.15.4", "@graphql-tools/schema": "^7.0.0", "@graphql-tools/utils": "^7.0.2", "@hapi/hoek": "8.x.x", "@hapi/joi": "^15.1.1", "better-queue": "^3.8.10", - "chokidar": "^3.4.2", + "chokidar": "^3.5.2", "contentful-management": "^7.5.1", "cors": "^2.8.5", "debug": "^4.3.1", @@ -7376,9 +7219,9 @@ "execa": "^5.1.1", "express": "^4.17.1", "express-graphql": "^0.12.0", - "fs-extra": "^8.1.0", - "gatsby-core-utils": "^2.13.0", - "gatsby-telemetry": "^2.13.0", + "fs-extra": "^10.0.0", + "gatsby-core-utils": "^2.14.0", + "gatsby-telemetry": "^2.14.0", "glob": "^7.1.6", "graphql": "^15.4.0", "graphql-compose": "~7.25.0", @@ -7401,7 +7244,7 @@ "remark-parse": "^6.0.3", "remark-stringify": "^8.1.0", "resolve-from": "^5.0.0", - "semver": "^7.3.2", + "semver": "^7.3.5", "single-trailing-newline": "^1.0.0", "strip-ansi": "^6.0.0", "style-to-object": "^0.3.0", @@ -7422,18 +7265,37 @@ "ms": "2.1.2" } }, - "prettier": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.3.2.tgz", - "integrity": "sha512-lnJzDfJ66zkMy58OL5/NY5zp70S7Nz6KqcKkXYzn2tMVrNxvbqaBpg7H3qHaLxCJ5lNMsGuM8+ohS7cZrthdLQ==" + "fs-extra": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.0.tgz", + "integrity": "sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ==", + "requires": { + "graceful-fs": "^4.2.0", + "jsonfile": 
"^6.0.1", + "universalify": "^2.0.0" + } + }, + "jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "requires": { + "graceful-fs": "^4.1.6", + "universalify": "^2.0.0" + } }, "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "requires": { - "ansi-regex": "^5.0.0" + "ansi-regex": "^5.0.1" } + }, + "universalify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", + "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" } } }, @@ -7500,18 +7362,44 @@ } }, "gatsby-remark-copy-linked-files": { - "version": "4.10.0", - "resolved": "https://registry.npmjs.org/gatsby-remark-copy-linked-files/-/gatsby-remark-copy-linked-files-4.10.0.tgz", - "integrity": "sha512-NaMMExnjnebrRty9poDXu57cdxU13GMoR2OAASriFzw9xJuvqJug+kyUdBxuZ71AQ+LOnyXNJjFGXiz4RVwCQQ==", + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/gatsby-remark-copy-linked-files/-/gatsby-remark-copy-linked-files-4.11.0.tgz", + "integrity": "sha512-24VI4ZM7767b+2x/J5Ww7yzeTJhVtGCJOQGjH2NZgEAw4ryBoZLJ2WwHiVwBD03+JSMPKGutQxus95jkLmMD4w==", "requires": { - "@babel/runtime": "^7.14.8", + "@babel/runtime": "^7.15.4", "cheerio": "^1.0.0-rc.10", - "fs-extra": "^8.1.0", + "fs-extra": "^10.0.0", "is-relative-url": "^3.0.0", "lodash": "^4.17.21", "path-is-inside": "^1.0.2", "probe-image-size": "^6.0.0", "unist-util-visit": "^2.0.3" + }, + 
"dependencies": { + "fs-extra": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.0.tgz", + "integrity": "sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ==", + "requires": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + } + }, + "jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "requires": { + "graceful-fs": "^4.1.6", + "universalify": "^2.0.0" + } + }, + "universalify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", + "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" + } } }, "gatsby-remark-embed-video": { @@ -7572,14 +7460,14 @@ } }, "gatsby-remark-images": { - "version": "5.10.0", - "resolved": "https://registry.npmjs.org/gatsby-remark-images/-/gatsby-remark-images-5.10.0.tgz", - "integrity": "sha512-p5mQ1R6tEeRqYEnyhVLwNe626tIjvijkb4N/LHwRIX+sC3c1WISZrHmGj7IedCV3z5Gezmgs1ST6mQAdATwzwA==", + "version": "5.11.0", + "resolved": "https://registry.npmjs.org/gatsby-remark-images/-/gatsby-remark-images-5.11.0.tgz", + "integrity": "sha512-KP5dWp8AQ6wwhFN4+IIKvxAm2BsL6HyvLU9w61HxvDtYrT7aRRAHj2YFPaPrgeJQ5ncAzwn+knB5Ofy4a/wx0g==", "requires": { - "@babel/runtime": "^7.14.8", + "@babel/runtime": "^7.15.4", "chalk": "^4.1.2", "cheerio": "^1.0.0-rc.10", - "gatsby-core-utils": "^2.13.0", + "gatsby-core-utils": "^2.14.0", "is-relative-url": "^3.0.0", "lodash": "^4.17.21", "mdast-util-definitions": "^4.0.0", @@ -7606,11 +7494,11 @@ } }, "gatsby-remark-katex": { - "version": "5.10.0", - "resolved": "https://registry.npmjs.org/gatsby-remark-katex/-/gatsby-remark-katex-5.10.0.tgz", - "integrity": 
"sha512-K/WxPs69NaLsNLp+AgFO2SGgzBlgvWv931zuQyYz0rtEBSctCMva6dCfC0NbKyEXHsnO9tp8eE23YFTl2V+sLA==", + "version": "5.11.0", + "resolved": "https://registry.npmjs.org/gatsby-remark-katex/-/gatsby-remark-katex-5.11.0.tgz", + "integrity": "sha512-mmvWZrGXla9M8tQW9FKHIkGZTppDwQMVuTwSBlrXFc+ci85A8KerPRwlLWNiviun9W8gVlD8T1m6DTNLtHiW1A==", "requires": { - "@babel/runtime": "^7.14.8", + "@babel/runtime": "^7.15.4", "rehype-parse": "^7.0.1", "remark-math": "^4.0.0", "unified": "^9.2.2", @@ -7633,21 +7521,21 @@ } }, "gatsby-remark-prismjs": { - "version": "5.10.0", - "resolved": "https://registry.npmjs.org/gatsby-remark-prismjs/-/gatsby-remark-prismjs-5.10.0.tgz", - "integrity": "sha512-t6C7mKDUb8Wm7NSctyYkZ7cKZ8o0pWna5xPsd6z4TApo/kTrSrvllqfMPLOdWJCRDd3mJ4maxAzvPs/C/mVGTg==", + "version": "5.11.0", + "resolved": "https://registry.npmjs.org/gatsby-remark-prismjs/-/gatsby-remark-prismjs-5.11.0.tgz", + "integrity": "sha512-WrS074GqW0st5chlYxTrFxLZLjAubWvNdw23DYW8qcaAfnH527hmgEDQgynjVnkSxhxbhnhTqT5MYWBdTBLk5A==", "requires": { - "@babel/runtime": "^7.14.8", + "@babel/runtime": "^7.15.4", "parse-numeric-range": "^1.2.0", "unist-util-visit": "^2.0.3" } }, "gatsby-remark-responsive-iframe": { - "version": "4.10.0", - "resolved": "https://registry.npmjs.org/gatsby-remark-responsive-iframe/-/gatsby-remark-responsive-iframe-4.10.0.tgz", - "integrity": "sha512-Z8VD88n2U+G9hNXkP+fJu9rd778bHEN0UfPjamtEaNQ01jrO9BxJW/J7bJd/9fJXuANRKJd0qQn1Y9RMZJDQ3A==", + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/gatsby-remark-responsive-iframe/-/gatsby-remark-responsive-iframe-4.11.0.tgz", + "integrity": "sha512-b4Nl9wOzRIjxRUHSkwKDczsKvSF5l8FcUf37Mbnecx+eTzfvQtMccIG87C488C5FTBsOh44jjokhR9QwqnWkww==", "requires": { - "@babel/runtime": "^7.14.8", + "@babel/runtime": "^7.15.4", "cheerio": "^1.0.0-rc.10", "common-tags": "^1.8.0", "lodash": "^4.17.21", @@ -7655,16 +7543,16 @@ } }, "gatsby-source-filesystem": { - "version": "3.13.0", - "resolved": 
"https://registry.npmjs.org/gatsby-source-filesystem/-/gatsby-source-filesystem-3.13.0.tgz", - "integrity": "sha512-SR0OFKunE4OtsZ0E7kpX6kipQcbrw3jTfROuhqD6EJPM5hkTxeEI/6I2r/bUo4ZS1rpwrEYd3UF89Q0YdAhLqQ==", + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/gatsby-source-filesystem/-/gatsby-source-filesystem-3.14.0.tgz", + "integrity": "sha512-Gg5GGxiWXhjapWMYdXOGk7zp+ajYowS+xNmaDUkL1gH+IQLvE18XbvKh00B/HiFaHm4azJfS2QRrRI/mPTZX+w==", "requires": { - "@babel/runtime": "^7.14.8", - "better-queue": "^3.8.10", - "chokidar": "^3.4.3", - "file-type": "^16.0.0", - "fs-extra": "^8.1.0", - "gatsby-core-utils": "^2.13.0", + "@babel/runtime": "^7.15.4", + "chokidar": "^3.5.2", + "fastq": "^1.11.1", + "file-type": "^16.5.3", + "fs-extra": "^10.0.0", + "gatsby-core-utils": "^2.14.0", "got": "^9.6.0", "md5-file": "^5.0.0", "mime": "^2.5.2", @@ -7703,42 +7591,68 @@ } }, "gatsby-telemetry": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/gatsby-telemetry/-/gatsby-telemetry-2.13.0.tgz", - "integrity": "sha512-PN9kKbZd0i2QkoVvHyCa3VjuRVIvBwjXTyZHwL+se5yrbYufZQXoyMiMMXFV48FvxMgE53ON1U2vtzeRvE8U2w==", + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/gatsby-telemetry/-/gatsby-telemetry-2.14.0.tgz", + "integrity": "sha512-c8/1L1nkK1OcxYV7axyoyM+7nzM1WL7DXvgxJloI7NSwb6M3EgcWvgq9bmqUAfmWM29/whR07mO7nnl1jZntyA==", "requires": { "@babel/code-frame": "^7.14.0", - "@babel/runtime": "^7.14.8", + "@babel/runtime": "^7.15.4", "@turist/fetch": "^7.1.7", "@turist/time": "^0.0.2", "async-retry-ng": "^2.0.1", "boxen": "^4.2.0", "configstore": "^5.0.1", - "fs-extra": "^8.1.0", - "gatsby-core-utils": "^2.13.0", + "fs-extra": "^10.0.0", + "gatsby-core-utils": "^2.14.0", "git-up": "^4.0.5", "is-docker": "^2.2.1", "lodash": "^4.17.21", "node-fetch": "^2.6.1", "uuid": "3.4.0" + }, + "dependencies": { + "fs-extra": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.0.tgz", + "integrity": 
"sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ==", + "requires": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + } + }, + "jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "requires": { + "graceful-fs": "^4.1.6", + "universalify": "^2.0.0" + } + }, + "universalify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", + "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" + } } }, "gatsby-transformer-json": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/gatsby-transformer-json/-/gatsby-transformer-json-3.13.0.tgz", - "integrity": "sha512-yfZoMq83nzaxyRTsOSeSlTTKMmK8CffpjA4XTACScc3acpTOMV9dTuDlBV81Yttw6wkrxf53b6CCWYzGPnpLew==", + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/gatsby-transformer-json/-/gatsby-transformer-json-3.14.0.tgz", + "integrity": "sha512-bEewDHg9o4ghIe28gXy5kVm7ARqhTlzkfN1E7RZeWzd0jmMZAJHBmboQOE1cbJopODspkNvCySvDXfGF/sgUBQ==", "requires": { - "@babel/runtime": "^7.14.8", + "@babel/runtime": "^7.15.4", "bluebird": "^3.7.2" } }, "gatsby-transformer-remark": { - "version": "4.10.0", - "resolved": "https://registry.npmjs.org/gatsby-transformer-remark/-/gatsby-transformer-remark-4.10.0.tgz", - "integrity": "sha512-laIDi9fY4PntsD+4uhpL+BmXA7Kf8u82ubuzyt7J8kQT6VATXx9Tks84dVzSvtvDaN/bhNk+CdLAo+brJ49oFg==", + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/gatsby-transformer-remark/-/gatsby-transformer-remark-4.11.0.tgz", + "integrity": "sha512-K00qbvSVq5puyDmZZp+WOU8CmBJRpjmJt1t8yl30My66PiDT8u242xVYUE0qdsKEr2gz5npE0w4nj/5Hgtfw7Q==", "requires": { - "@babel/runtime": "^7.14.8", - "gatsby-core-utils": "^2.13.0", + "@babel/runtime": 
"^7.15.4", + "gatsby-core-utils": "^2.14.0", "gray-matter": "^4.0.2", "hast-util-raw": "^6.0.2", "hast-util-to-html": "^7.1.2", @@ -7793,11 +7707,11 @@ } }, "gatsby-transformer-yaml": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/gatsby-transformer-yaml/-/gatsby-transformer-yaml-3.13.0.tgz", - "integrity": "sha512-IImXPLVPhXRYHz1fzOUIwLVW9NQGk2wYkmp2HtlFUg1zZHli7QevcKDgl6ZJyCboDyh+qKSPK4MRLAO2QLLL2w==", + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/gatsby-transformer-yaml/-/gatsby-transformer-yaml-3.14.0.tgz", + "integrity": "sha512-rdXV4JEOPvnXgAdKzPaprpyZZMje5GqdLPCi6TXOK+uTGMWWBckjEjx63yMIWmqR9u0RfJik/DtcfKZ2U4YbCg==", "requires": { - "@babel/runtime": "^7.14.8", + "@babel/runtime": "^7.15.4", "js-yaml": "^3.14.1", "lodash": "^4.17.21", "unist-util-select": "^1.5.0" @@ -7837,11 +7751,12 @@ } }, "gatsby-worker": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/gatsby-worker/-/gatsby-worker-0.4.0.tgz", - "integrity": "sha512-yVp4R7UvzTTi+zyzknJK21qMfgBou9+O03zM4zwA+8GN/ibYOHalIMJCbnJWkPPL4MArIy0/NIlwi0JNPkmaow==", + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/gatsby-worker/-/gatsby-worker-0.5.0.tgz", + "integrity": "sha512-r9BBUqCfHESSHfVvBW4tajacZ+tSxqWm+j5RB+Av8sBEhbMBFCHmWdU2USs7Bt0lvRpybwU5oxswb6nmeKkaSg==", "requires": { - "@babel/core": "^7.14.8" + "@babel/core": "^7.15.5", + "@babel/runtime": "^7.15.4" } }, "gauge": { @@ -8052,102 +7967,111 @@ } }, "got": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/got/-/got-8.3.2.tgz", - "integrity": "sha512-qjUJ5U/hawxosMryILofZCkm3C84PLJS/0grRIpjAwu+Lkxxj5cxeCU25BG0/3mDSpXKTyZr8oh8wIgLaH0QCw==", - "requires": { - "@sindresorhus/is": "^0.7.0", - "cacheable-request": "^2.1.1", - "decompress-response": "^3.3.0", - "duplexer3": "^0.1.4", - "get-stream": "^3.0.0", - "into-stream": "^3.1.0", - "is-retry-allowed": "^1.1.0", - "isurl": "^1.0.0-alpha5", - "lowercase-keys": "^1.0.0", - "mimic-response": "^1.0.0", - "p-cancelable": 
"^0.4.0", - "p-timeout": "^2.0.1", - "pify": "^3.0.0", - "safe-buffer": "^5.1.1", - "timed-out": "^4.0.1", - "url-parse-lax": "^3.0.0", - "url-to-options": "^1.0.1" + "version": "11.8.2", + "resolved": "https://registry.npmjs.org/got/-/got-11.8.2.tgz", + "integrity": "sha512-D0QywKgIe30ODs+fm8wMZiAcZjypcCodPNuMz5H9Mny7RJ+IjJ10BdmGW7OM7fHXP+O7r6ZwapQ/YQmMSvB0UQ==", + "requires": { + "@sindresorhus/is": "^4.0.0", + "@szmarczak/http-timer": "^4.0.5", + "@types/cacheable-request": "^6.0.1", + "@types/responselike": "^1.0.0", + "cacheable-lookup": "^5.0.3", + "cacheable-request": "^7.0.1", + "decompress-response": "^6.0.0", + "http2-wrapper": "^1.0.0-beta.5.2", + "lowercase-keys": "^2.0.0", + "p-cancelable": "^2.0.0", + "responselike": "^2.0.0" }, "dependencies": { "@sindresorhus/is": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.7.0.tgz", - "integrity": "sha512-ONhaKPIufzzrlNbqtWFFd+jlnemX6lJAgq9ZeiZtS7I1PIf/la7CW4m83rTXRnVnsMbW2k56pGYu7AUFJD9Pow==" + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.2.0.tgz", + "integrity": "sha512-VkE3KLBmJwcCaVARtQpfuKcKv8gcBmUubrfHGF84dXuuW6jgsRYxPtzcIhPyK9WAPpRt2/xY6zkD9MnRaJzSyw==" }, - "cacheable-request": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-2.1.4.tgz", - "integrity": "sha1-DYCIAbY0KtM8kd+dC0TcCbkeXD0=", - "requires": { - "clone-response": "1.0.2", - "get-stream": "3.0.0", - "http-cache-semantics": "3.8.1", - "keyv": "3.0.0", - "lowercase-keys": "1.0.0", - "normalize-url": "2.0.1", - "responselike": "1.0.2" - }, - "dependencies": { - "lowercase-keys": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.0.tgz", - "integrity": "sha1-TjNms55/VFfjXxMkvfb4jQv8cwY=" - } + "@szmarczak/http-timer": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz", + "integrity": 
"sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==", + "requires": { + "defer-to-connect": "^2.0.0" } }, - "get-stream": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", - "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + "cacheable-lookup": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz", + "integrity": "sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==" }, - "http-cache-semantics": { - "version": "3.8.1", - "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz", - "integrity": "sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w==" + "cacheable-request": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.2.tgz", + "integrity": "sha512-pouW8/FmiPQbuGpkXQ9BAPv/Mo5xDGANgSNXzTzJ8DrKGuXOssM4wIQRjfanNRh3Yu5cfYPvcorqbhg2KIJtew==", + "requires": { + "clone-response": "^1.0.2", + "get-stream": "^5.1.0", + "http-cache-semantics": "^4.0.0", + "keyv": "^4.0.0", + "lowercase-keys": "^2.0.0", + "normalize-url": "^6.0.1", + "responselike": "^2.0.0" + } }, - "keyv": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.0.0.tgz", - "integrity": "sha512-eguHnq22OE3uVoSYG0LVWNP+4ppamWr9+zWBe1bsNcovIMy6huUJFPgy4mGwCd/rnl3vOLGW1MTlu4c57CT1xA==", + "decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", "requires": { - "json-buffer": "3.0.0" + "mimic-response": "^3.1.0" } }, - "normalize-url": { + "defer-to-connect": { "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/normalize-url/-/normalize-url-2.0.1.tgz", - "integrity": "sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw==", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", + "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==" + }, + "get-stream": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", "requires": { - "prepend-http": "^2.0.0", - "query-string": "^5.0.1", - "sort-keys": "^2.0.0" + "pump": "^3.0.0" } }, - "p-cancelable": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-0.4.1.tgz", - "integrity": "sha512-HNa1A8LvB1kie7cERyy21VNeHb2CWJJYqyyC2o3klWFfMGlFmWv2Z7sFgZH8ZiaYL95ydToKTFVXgMV/Os0bBQ==" + "json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==" }, - "query-string": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/query-string/-/query-string-5.1.1.tgz", - "integrity": "sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw==", + "keyv": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.0.3.tgz", + "integrity": "sha512-zdGa2TOpSZPq5mU6iowDARnMBZgtCqJ11dJROFi6tg6kTn4nuUdU09lFyLFSaHrWqpIJ+EBq4E8/Dc0Vx5vLdA==", "requires": { - "decode-uri-component": "^0.2.0", - "object-assign": "^4.1.0", - "strict-uri-encode": "^1.0.0" + "json-buffer": "3.0.1" } }, - "strict-uri-encode": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz", - "integrity": "sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM=" 
+ "lowercase-keys": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", + "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==" + }, + "mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==" + }, + "p-cancelable": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-2.1.1.tgz", + "integrity": "sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==" + }, + "responselike": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-2.0.0.tgz", + "integrity": "sha512-xH48u3FTB9VsZw7R+vvgaKeLKzT6jOogbQhEe/jewwnZgzPcnyWui2Av6JpoYZF/91uueC+lqhWqeURw5/qhCw==", + "requires": { + "lowercase-keys": "^2.0.0" + } } } }, @@ -8291,24 +8215,11 @@ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" }, - "has-symbol-support-x": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz", - "integrity": "sha512-3ToOva++HaW+eCpgqZrCfN51IPB+7bJNVT6CUATzueB5Heb8o6Nam0V3HG5dlDvZU1Gn5QLcbahiKw/XVk5JJw==" - }, "has-symbols": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==" }, - "has-to-string-tag-x": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz", - "integrity": "sha512-vdbKfmw+3LoOYVr+mtxHaX5a96+0f3DljYd8JOqvOLsf5mw2Otda2qCDT9qRqLAhrjyQ0h7ual5nOiASpsGNFw==", - "requires": { - "has-symbol-support-x": "^1.4.1" - } - }, 
"has-tostringtag": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", @@ -8650,6 +8561,15 @@ } } }, + "http2-wrapper": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-1.0.3.tgz", + "integrity": "sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==", + "requires": { + "quick-lru": "^5.1.1", + "resolve-alpn": "^1.0.0" + } + }, "human-signals": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", @@ -8804,15 +8724,6 @@ "side-channel": "^1.0.4" } }, - "into-stream": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/into-stream/-/into-stream-3.1.0.tgz", - "integrity": "sha1-lvsKk2wSur1v8XUqF9BWFqvQlMY=", - "requires": { - "from2": "^2.1.1", - "p-is-promise": "^1.1.0" - } - }, "invariant": { "version": "2.2.4", "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", @@ -9089,11 +9000,6 @@ "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" }, - "is-object": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-object/-/is-object-1.0.2.tgz", - "integrity": "sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA==" - }, "is-path-cwd": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz", @@ -9152,11 +9058,6 @@ "resolved": "https://registry.npmjs.org/is-resolvable/-/is-resolvable-1.1.0.tgz", "integrity": "sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg==" }, - "is-retry-allowed": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz", - "integrity": 
"sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==" - }, "is-root": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-root/-/is-root-2.1.0.tgz", @@ -9265,15 +9166,6 @@ "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz", "integrity": "sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w==" }, - "isurl": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isurl/-/isurl-1.0.0.tgz", - "integrity": "sha512-1P/yWsxPlDtn7QeRD+ULKQPaIaN6yF368GZ2vDfv0AL0NwpStafjWCDDdn0k8wgFMWpVAqG7oJhxHnlud42i9w==", - "requires": { - "has-to-string-tag-x": "^1.2.0", - "is-object": "^1.0.1" - } - }, "iterall": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/iterall/-/iterall-1.3.0.tgz", @@ -9467,19 +9359,20 @@ } }, "jsonfile": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", - "integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", "requires": { - "graceful-fs": "^4.1.6" + "graceful-fs": "^4.1.6", + "universalify": "^2.0.0" } }, "jsx-ast-utils": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.2.0.tgz", - "integrity": "sha512-EIsmt3O3ljsU6sot/J4E1zDRxfBNrhjyf/OKjlydwgEimQuznlM4Wv7U+ueONJMyEn1WRE0K8dhi3dVAXYT24Q==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.2.1.tgz", + "integrity": "sha512-uP5vu8xfy2F9A6LGC22KO7e2/vGTS1MhP+18f++ZNlf0Ohaxbc9nIEwHAsejlJKyzfZzU5UIhe5ItYkitcZnZA==", "requires": { - "array-includes": "^3.1.2", + "array-includes": "^3.1.3", "object.assign": "^4.1.2" } }, @@ -10356,9 +10249,9 @@ } }, "memfs": { - "version": "3.2.4", - "resolved": 
"https://registry.npmjs.org/memfs/-/memfs-3.2.4.tgz", - "integrity": "sha512-2mDCPhuduRPOxlfgsXF9V+uqC6Jgz8zt/bNe4d4W7d5f6pCzHrWkxLNr17jKGXd4+j2kQNsAG2HARPnt74sqVQ==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.3.0.tgz", + "integrity": "sha512-BEE62uMfKOavX3iG7GYX43QJ+hAeeWnwIAuJ/R6q96jaMtiLzhsxHJC8B1L7fK7Pt/vXDRwb3SG/yBpNGDPqzg==", "requires": { "fs-monkey": "1.0.3" } @@ -10763,6 +10656,11 @@ "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==" }, + "nanocolors": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/nanocolors/-/nanocolors-0.1.12.tgz", + "integrity": "sha512-2nMHqg1x5PU+unxX7PGY7AuYxl2qDx7PSrTRjizr8sxdd3l/3hBuWWaki62qmtYm2U5i4Z5E7GbjlyDFhs9/EQ==" + }, "nanoid": { "version": "3.1.25", "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.25.tgz", @@ -10897,9 +10795,9 @@ "integrity": "sha512-NQt1YURrMPeQGZzW4lRbshUEF2PqxJEZYY4XJ/L+q33dI8yPYvnb7QXmwUcl1EuXluzeY4TEV+H6H0EmtI6f5g==" }, "node-releases": { - "version": "1.1.75", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.75.tgz", - "integrity": "sha512-Qe5OUajvqrqDSy6wrWFmMwfJ0jVgwiw4T3KqmbTcZ62qW0gQkheXYhcFM1+lOVcGUoRxcEcfyvFMAnDgaF1VWw==" + "version": "1.1.76", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.76.tgz", + "integrity": "sha512-9/IECtNr8dXNmPWmFXepT0/7o5eolGesHUa3mtr0KlgnCvnZxwh2qensKL42JJY2vQKC3nIBXetFAqR+PW1CmA==" }, "normalize-package-data": { "version": "2.5.0", @@ -11117,6 +11015,15 @@ "es-abstract": "^1.18.0-next.2" } }, + "object.hasown": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.0.0.tgz", + "integrity": "sha512-qYMF2CLIjxxLGleeM0jrcB4kiv3loGVAjKQKvH8pSU/i2VcRRvUNmxbD+nEMmrXRfORhuVJuH8OtSYCZoue3zA==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.18.1" + } 
+ }, "object.pick": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", @@ -11226,34 +11133,11 @@ "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-3.0.0.tgz", "integrity": "sha512-ugZxsxmtTln604yeYd29EGrNhazN2lywetzpKhfmQjW/VJmhpDmWbiX+h0zL8V91R0UXkhb3KtPmyq9PZw3aYw==" }, - "p-event": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/p-event/-/p-event-4.2.0.tgz", - "integrity": "sha512-KXatOjCRXXkSePPb1Nbi0p0m+gQAwdlbhi4wQKJPI1HsMQS9g+Sqp2o+QHziPr7eYJyOZet836KoHEVM1mwOrQ==", - "requires": { - "p-timeout": "^3.1.0" - }, - "dependencies": { - "p-timeout": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz", - "integrity": "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==", - "requires": { - "p-finally": "^1.0.0" - } - } - } - }, "p-finally": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=" }, - "p-is-promise": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-1.1.0.tgz", - "integrity": "sha1-nJRWmJ6fZYgBewQ01WCXZ1w9oF4=" - }, "p-limit": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", @@ -11278,14 +11162,6 @@ "aggregate-error": "^3.0.0" } }, - "p-timeout": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-2.0.1.tgz", - "integrity": "sha512-88em58dDVB/KzPEx1X0N3LwFfYZPyDc4B6eF38M1rk9VTZMbxXXgjugz8mmwpS9Ox4BDZ+t6t3QP5+/gazweIA==", - "requires": { - "p-finally": "^1.0.0" - } - }, "p-try": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", @@ -11667,13 +11543,20 @@ "integrity": "sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=" }, "postcss": { - "version": "8.3.6", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.3.6.tgz", - "integrity": 
"sha512-wG1cc/JhRgdqB6WHEuyLTedf3KIRuD0hG6ldkFEZNCjRxiC+3i6kkWUUbiJQayP28iwG35cEmAbe98585BYV0A==", + "version": "8.3.8", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.3.8.tgz", + "integrity": "sha512-GT5bTjjZnwDifajzczOC+r3FI3Cu+PgPvrsjhQdRqa2kTJ4968/X9CUce9xttIB0xOs5c6xf0TCWZo/y9lF6bA==", "requires": { - "colorette": "^1.2.2", - "nanoid": "^3.1.23", + "nanocolors": "^0.2.2", + "nanoid": "^3.1.25", "source-map-js": "^0.6.2" + }, + "dependencies": { + "nanocolors": { + "version": "0.2.12", + "resolved": "https://registry.npmjs.org/nanocolors/-/nanocolors-0.2.12.tgz", + "integrity": "sha512-SFNdALvzW+rVlzqexid6epYdt8H9Zol7xDoQarioEFcFN0JHo4CYNztAxmtfgGTVRCmFlEOqqhBpoFGKqSAMug==" + } } }, "postcss-calc": { @@ -12333,6 +12216,26 @@ } } }, + "prebuild-install": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-6.1.4.tgz", + "integrity": "sha512-Z4vpywnK1lBg+zdPCVCsKq0xO66eEV9rWo2zrROGGiRS4JtueBOdlB1FnY8lcy7JsUud/Q3ijUxyWN26Ika0vQ==", + "requires": { + "detect-libc": "^1.0.3", + "expand-template": "^2.0.3", + "github-from-package": "0.0.0", + "minimist": "^1.2.3", + "mkdirp-classic": "^0.5.3", + "napi-build-utils": "^1.0.1", + "node-abi": "^2.21.0", + "npmlog": "^4.0.1", + "pump": "^3.0.0", + "rc": "^1.2.7", + "simple-get": "^3.0.3", + "tar-fs": "^2.0.0", + "tunnel-agent": "^0.6.0" + } + }, "prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -12346,8 +12249,7 @@ "prettier": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.4.1.tgz", - "integrity": "sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA==", - "dev": true + "integrity": "sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA==" }, "pretty-bytes": { "version": "5.6.0", @@ -12542,6 +12444,11 @@ "resolved": 
"https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==" }, + "quick-lru": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", + "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==" + }, "ramda": { "version": "0.21.0", "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.21.0.tgz", @@ -13123,11 +13030,11 @@ "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==" }, "regenerate-unicode-properties": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-8.2.0.tgz", - "integrity": "sha512-F9DjY1vKLo/tPePDycuH3dn9H1OTPIkVD9Kz4LODu+F2C75mgjAJ7x/gwy6ZcSNRAAkhNlJSOHRe8k3p+K9WhA==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-9.0.0.tgz", + "integrity": "sha512-3E12UeNSPfjrgwjkR81m5J7Aw/T55Tu7nUyZVQYCKEOs+2dkxEY+DpPtZzO4YruuiPb7NkYLVcyJC4+zCbk5pA==", "requires": { - "regenerate": "^1.4.0" + "regenerate": "^1.4.2" } }, "regenerator-runtime": { @@ -13167,16 +13074,16 @@ "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==" }, "regexpu-core": { - "version": "4.7.1", - "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-4.7.1.tgz", - "integrity": "sha512-ywH2VUraA44DZQuRKzARmw6S66mr48pQVva4LBeRhcOltJ6hExvWly5ZjFLYo67xbIxb6W1q4bAGtgfEl20zfQ==", + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-4.8.0.tgz", + "integrity": "sha512-1F6bYsoYiz6is+oz70NWur2Vlh9KWtswuRuzJOfeYUrfPX2o8n74AnUVaOGDbUqVGO9fNHu48/pjJO4sNVwsOg==", "requires": { - "regenerate": "^1.4.0", - "regenerate-unicode-properties": "^8.2.0", - "regjsgen": "^0.5.1", - 
"regjsparser": "^0.6.4", - "unicode-match-property-ecmascript": "^1.0.4", - "unicode-match-property-value-ecmascript": "^1.2.0" + "regenerate": "^1.4.2", + "regenerate-unicode-properties": "^9.0.0", + "regjsgen": "^0.5.2", + "regjsparser": "^0.7.0", + "unicode-match-property-ecmascript": "^2.0.0", + "unicode-match-property-value-ecmascript": "^2.0.0" } }, "registry-auth-token": { @@ -13201,9 +13108,9 @@ "integrity": "sha512-OFFT3MfrH90xIW8OOSyUrk6QHD5E9JOTeGodiJeBS3J6IwlgzJMNE/1bZklWz5oTg+9dCMyEetclvCVXOPoN3A==" }, "regjsparser": { - "version": "0.6.9", - "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.6.9.tgz", - "integrity": "sha512-ZqbNRz1SNjLAiYuwY0zoXW8Ne675IX5q+YHioAGbCw4X96Mjl2+dcX9B2ciaeyYjViDAfvIjFpQjJgLttTEERQ==", + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.7.0.tgz", + "integrity": "sha512-A4pcaORqmNMDVwUjWoTzuhwMGpP+NykpfqAsEgI1FSH/EzC7lrN5TMd+kN8YCovX+jMpu8eaqXgXPCa0g8FQNQ==", "requires": { "jsesc": "~0.5.0" }, @@ -13666,6 +13573,11 @@ "path-parse": "^1.0.6" } }, + "resolve-alpn": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", + "integrity": "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==" + }, "resolve-cwd": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", @@ -14074,6 +13986,21 @@ "resolved": "https://registry.npmjs.org/shallow-equal/-/shallow-equal-1.2.1.tgz", "integrity": "sha512-S4vJDjHHMBaiZuT9NPb616CSmLf618jawtv3sufLl6ivK8WocjAo58cXwbRV1cgqxH0Qbv+iUt6m05eqEa2IRA==" }, + "sharp": { + "version": "0.29.1", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.29.1.tgz", + "integrity": "sha512-DpgdAny9TuS+oWCQ7MRS8XyY9x6q1+yW3a5wNx0J3HrGuB/Jot/8WcT+lElHY9iJu2pwtegSGxqMaqFiMhs4rQ==", + "requires": { + "color": "^4.0.1", + "detect-libc": "^1.0.3", + "node-addon-api": "^4.1.0", + "prebuild-install": "^6.1.4", + "semver": 
"^7.3.5", + "simple-get": "^3.1.0", + "tar-fs": "^2.1.1", + "tunnel-agent": "^0.6.0" + } + }, "shebang-command": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", @@ -14368,9 +14295,9 @@ }, "dependencies": { "@types/node": { - "version": "14.17.15", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.17.15.tgz", - "integrity": "sha512-D1sdW0EcSCmNdLKBGMYb38YsHUS6JcM7yQ6sLQ9KuZ35ck7LYCKE7kYFHOO59ayFOY3zobWVZxf4KXhYHcHYFA==" + "version": "14.17.19", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.17.19.tgz", + "integrity": "sha512-jjYI6NkyfXykucU6ELEoT64QyKOdvaA6enOqKtP4xUsGY0X0ZUZz29fUmrTRo+7v7c6TgDu82q3GHHaCEkqZwA==" }, "debug": { "version": "4.3.2", @@ -14445,21 +14372,6 @@ "url-parse": "^1.5.3" } }, - "sort-keys": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-2.0.0.tgz", - "integrity": "sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg=", - "requires": { - "is-plain-obj": "^1.0.0" - }, - "dependencies": { - "is-plain-obj": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", - "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4=" - } - } - }, "source-list-map": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/source-list-map/-/source-list-map-2.0.1.tgz", @@ -14956,16 +14868,16 @@ } }, "svgo": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/svgo/-/svgo-2.5.0.tgz", - "integrity": "sha512-FSdBOOo271VyF/qZnOn1PgwCdt1v4Dx0Sey+U1jgqm1vqRYjPGdip0RGrFW6ItwtkBB8rHgHk26dlVr0uCs82Q==", + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/svgo/-/svgo-2.7.0.tgz", + "integrity": "sha512-aDLsGkre4fTDCWvolyW+fs8ZJFABpzLXbtdK1y71CKnHzAnpDxKXPj2mNKj+pyOXUCzFHzuxRJ94XOFygOWV3w==", "requires": { - "@trysound/sax": "0.1.1", - "colorette": "^1.3.0", + "@trysound/sax": "0.2.0", "commander": "^7.2.0", "css-select": "^4.1.3", "css-tree": "^1.1.3", "csso": "^4.2.0", + "nanocolors": 
"^0.1.12", "stable": "^0.1.8" } }, @@ -14997,9 +14909,9 @@ }, "dependencies": { "ajv": { - "version": "8.6.2", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.6.2.tgz", - "integrity": "sha512-9807RlWAgT564wT+DjeyU5OFMPjmzxVobvDFmNAhY+5zD6A2ly3jDp6sgnfyDtlIQ+7H97oc/DGCzzfu9rjw9w==", + "version": "8.6.3", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.6.3.tgz", + "integrity": "sha512-SMJOdDP6LqTkD0Uq8qLi+gMwSt0imXLSV080qFVwJCpH9U6Mb+SUGHAXM0KNbcBPguytWyvFxcHgMLe2D2XSpw==", "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -15013,11 +14925,11 @@ "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" }, "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "requires": { - "ansi-regex": "^5.0.0" + "ansi-regex": "^5.0.1" } } } @@ -15151,11 +15063,6 @@ "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" }, - "timed-out": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", - "integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8=" - }, "timers-ext": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/timers-ext/-/timers-ext-0.1.7.tgz", @@ -15469,28 +15376,28 @@ } }, "unicode-canonical-property-names-ecmascript": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz", - "integrity": 
"sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ==" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz", + "integrity": "sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==" }, "unicode-match-property-ecmascript": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz", - "integrity": "sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", + "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", "requires": { - "unicode-canonical-property-names-ecmascript": "^1.0.4", - "unicode-property-aliases-ecmascript": "^1.0.4" + "unicode-canonical-property-names-ecmascript": "^2.0.0", + "unicode-property-aliases-ecmascript": "^2.0.0" } }, "unicode-match-property-value-ecmascript": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.2.0.tgz", - "integrity": "sha512-wjuQHGQVofmSJv1uVISKLE5zO2rNGzM/KCYZch/QQvez7C1hUhBIuZ701fYXExuufJFMPhv2SyL8CyoIfMLbIQ==" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz", + "integrity": "sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw==" }, "unicode-property-aliases-ecmascript": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.1.0.tgz", - "integrity": 
"sha512-PqSoPh/pWetQ2phoj5RLiaqIk4kCNwoV3CI+LfGmWLKI3rE3kl1h59XpX2BjgDrmbxD9ARtQobPGU1SguCYuQg==" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz", + "integrity": "sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ==" }, "unified": { "version": "8.4.2", @@ -15649,9 +15556,9 @@ } }, "universalify": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", - "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", + "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" }, "unixify": { "version": "1.0.0", @@ -15828,11 +15735,6 @@ "prepend-http": "^2.0.0" } }, - "url-to-options": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/url-to-options/-/url-to-options-1.0.1.tgz", - "integrity": "sha1-FQWgOiiaSMvXpDTvuu7FBV9WM6k=" - }, "use": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", diff --git a/package.json b/package.json index 412122ca1..231226670 100644 --- a/package.json +++ b/package.json @@ -6,29 +6,29 @@ "@fastly/performance-observer-polyfill": "^2.0.0", "bowser": "^2.9.0", "classnames": "^2.3.1", - "gatsby": "^3.13.0", - "gatsby-plugin-catch-links": "^3.13.0", - "gatsby-plugin-google-tagmanager": "^3.13.0", - "gatsby-plugin-manifest": "^3.13.0", - "gatsby-plugin-react-helmet": "^4.13.0", + "gatsby": "^3.14.0", + "gatsby-plugin-catch-links": "^3.14.0", + "gatsby-plugin-google-tagmanager": "^3.14.0", + "gatsby-plugin-manifest": "^3.14.0", + "gatsby-plugin-react-helmet": "^4.14.0", "gatsby-plugin-react-svg": "^3.0.1", - "gatsby-plugin-sharp": "^3.13.0", - "gatsby-plugin-sitemap": "^4.9.0", - 
"gatsby-plugin-typography": "^3.13.0", + "gatsby-plugin-sharp": "^3.14.0", + "gatsby-plugin-sitemap": "^4.10.0", + "gatsby-plugin-typography": "^3.14.0", "gatsby-remark-component": "^1.1.3", "gatsby-remark-component-parent2div": "^1.2.3", - "gatsby-remark-copy-linked-files": "^4.10.0", + "gatsby-remark-copy-linked-files": "^4.11.0", "gatsby-remark-embed-video": "^3.1.1", "gatsby-remark-external-links": "0.0.4", - "gatsby-remark-images": "^5.10.0", + "gatsby-remark-images": "^5.11.0", "gatsby-remark-images-medium-zoom": "^1.7.0", - "gatsby-remark-katex": "^5.10.0", - "gatsby-remark-prismjs": "^5.10.0", - "gatsby-remark-responsive-iframe": "^4.10.0", - "gatsby-source-filesystem": "^3.13.0", - "gatsby-transformer-json": "^3.13.0", + "gatsby-remark-katex": "^5.11.0", + "gatsby-remark-prismjs": "^5.11.0", + "gatsby-remark-responsive-iframe": "^4.11.0", + "gatsby-source-filesystem": "^3.14.0", + "gatsby-transformer-json": "^3.14.0", "gatsby-transformer-remark": "^4.10.0", - "gatsby-transformer-yaml": "^3.13.0", + "gatsby-transformer-yaml": "^3.14.0", "js-cookie": "^3.0.1", "katex": "^0.13.18", "lunr": "^2.3.9", From 76e81f834eb45bb2fd77d1b9e901c19c7ddad903 Mon Sep 17 00:00:00 2001 From: Fran McDade Date: Wed, 29 Sep 2021 16:26:00 +1000 Subject: [PATCH 2/2] Review markdown rendering errors as part of Gatsby upgrade. #1137. 
--- content/about/platform/attributions.md | 6 +- content/about/platform/data-use-agreement.md | 5 +- content/about/platform/dcp-roadmap.md | 7 +- content/about/platform/dcp.md | 11 +- content/about/platform/hca.md | 4 +- content/analyze/methods/methods-packages.md | 11 +- .../analyze/methods/methods-packages/infer.md | 15 +- .../analyze/methods/methods-packages/magic.md | 14 +- .../analyze/methods/methods-packages/phate.md | 13 +- .../analyze/methods/methods-packages/sc3.md | 12 +- .../methods/methods-packages/slingshot.md | 12 +- .../methods/methods-packages/spectre.md | 14 +- .../methods/methods-packages/stream.md | 21 ++- content/analyze/portals/analysis-portals.md | 8 +- .../analyze/portals/analysis-portals/asap.md | 11 +- .../analysis-portals/bioturing-browser.md | 12 +- .../portals/analysis-portals/cellxgene.md | 11 +- .../portals/analysis-portals/cytoscape.md | 17 +- .../portals/analysis-portals/dnastack.md | 9 +- .../portals/analysis-portals/fastgenomics.md | 21 +-- .../genepattern-notebook-environment.md | 11 +- .../portals/analysis-portals/granatum-x.md | 25 ++- .../analysis-portals/hca-galaxy-instance.md | 12 +- .../portals/analysis-portals/omnibrowser.md | 11 +- .../single-cell-expression-atlas.md | 11 +- .../analysis-portals/single-cell-portal.md | 12 +- .../analyze/portals/analysis-portals/terra.md | 11 +- .../analysis-portals/ucsc-cell-browser.md | 11 +- .../portals/analysis-portals/ucsc-xena.md | 22 +-- .../visualization/visualization-packages.md | 8 +- .../visualization-packages/anatomogram.md | 14 +- .../visualization-packages/ideogram.md | 16 +- .../visualization-packages/igv.md | 13 +- .../visualization-packages/morpheus.md | 15 +- content/apis/api-documentation/apis.md | 3 +- .../api-documentation/data-browser-api.md | 16 +- ...with-support-for-controlled-access-data.md | 21 +-- .../dcp-matrix-ux-study-spring-2021.md | 11 +- .../community-update/dcp-updates.md | 65 +++----- .../what-is-the-dcp-20-data-preview.md | 8 +- 
content/contact/contact/contact-us.md | 12 +- .../contact/contact/join-the-discussion.md | 9 +- .../data/analysis-tools-registry.md | 24 ++- .../registry-standards.md | 33 ++-- .../contribute/data/contributing-to-hca.md | 9 +- .../contributing-data-processing-results.md | 7 +- .../contributing-data-suitability.md | 4 +- .../contributing-expect-prepare.md | 5 +- .../contribute/data/contributing-vignettes.md | 4 +- .../creating-content/content-style-guide.md | 89 +++++----- .../creating-content/creating-a-new-page.md | 39 ++--- .../creating-content/creating-links.md | 33 ++-- .../editing-an-existing-page.md | 46 +++--- .../document/creating-content/example-page.md | 20 +-- content/document/creating-content/overview.md | 30 +--- .../document/creating-content/using-images.md | 16 +- content/feedback/feedback/feedback.md | 8 +- .../consumer-vignettes/export-to-terra.md | 104 ++++-------- content/guides/userguides/data-lifecycle.md | 7 +- .../guides/userguides/exploring-projects.md | 15 +- content/guides/userguides/matrices.md | 47 +++--- .../guides/userguides/quick-start-guide.md | 54 +++--- content/help/help/help-and-faq.md | 14 +- content/metadata/metadata-overview/modules.md | 6 +- content/metadata/metadata-overview/types.md | 6 +- .../data-processing-pipelines-user-guides.md | 18 +- .../data-processing-pipelines/file-formats.md | 11 +- .../optimus-workflow.md | 32 ++-- .../pipeline-best-practices.md | 49 ++++-- .../data-processing-pipelines/qc-metrics.md | 156 +++++++++--------- .../smart-seq2-workflow.md | 38 ++--- content/privacy/privacy/lungmap-privacy.md | 14 +- content/privacy/privacy/privacy.md | 31 ++-- .../2020-mar/acknowledgements.md | 40 ++--- .../documentation/2020-mar/feedback.md | 5 +- .../documentation/2020-mar/methods.md | 134 +++++++-------- .../documentation/2020-mar/overview.md | 12 +- .../replicating-the-release-analysis.md | 64 +++++-- .../2020-mar/working-with-release-files.md | 90 ++++++---- 79 files changed, 921 insertions(+), 964 deletions(-) 
diff --git a/content/about/platform/attributions.md b/content/about/platform/attributions.md index 69c8b91e2..d460c3b83 100644 --- a/content/about/platform/attributions.md +++ b/content/about/platform/attributions.md @@ -1,7 +1,7 @@ --- -path: "/about/platform/attributions" +componentName: "attributions" date: "2018-05-03" -title: "Attributions" description: "Credits, copyrights and attributions for data.humancellatlas.org." -componentName: "attributions" +path: "/about/platform/attributions" +title: "Attributions" --- diff --git a/content/about/platform/data-use-agreement.md b/content/about/platform/data-use-agreement.md index 40e854b2a..30ad0f525 100644 --- a/content/about/platform/data-use-agreement.md +++ b/content/about/platform/data-use-agreement.md @@ -1,9 +1,9 @@ --- -path: "/about/platform/data-use-agreement" date: "2018-05-03" -title: "Data Use Agreement" description: "Data Use Agreement for the HCA DCP." +path: "/about/platform/data-use-agreement" subTitle: "" +title: "Data Use Agreement" --- # Data Use Agreement @@ -13,4 +13,3 @@ Each dataset in this release is licensed under a [Creative Commons Attribution 4 ## Data Use Policy For information regarding data sharing and data use, please see our [Data Release Policy](https://www.humancellatlas.org/data-release-policy/) - diff --git a/content/about/platform/dcp-roadmap.md b/content/about/platform/dcp-roadmap.md index df6d354aa..424e2617c 100644 --- a/content/about/platform/dcp-roadmap.md +++ b/content/about/platform/dcp-roadmap.md @@ -1,16 +1,15 @@ --- -path: "/about/platform/dcp-roadmap" date: "2019-12-03" -title: "DCP Roadmap" description: "The Data Coordination Platform’s (DCP) purpose is to support the creation of the Human Cell Atlas by providing a cloud-based platform for researchers to share, organize, analyze, and interrogate single-cell data, as described in five Strategic Aims." 
draft: true +path: "/about/platform/dcp-roadmap" +title: "DCP Roadmap" --- # Our Mission The Data Coordination Platform’s (DCP) purpose is to support the creation of the Human Cell Atlas by providing a cloud-based platform for researchers to share, organize, analyze, and interrogate single-cell data, as described in five Strategic Aims. You can read more about these aims on the DCP Strategy page. - # DCP Strategic Aims Aim 1. Create a data resource that maximizes the value and use of Human Cell Atlas data across the scientific community. @@ -23,7 +22,6 @@ Aim 4. Build alignment amongst the Human Cell Atlas community around a core set Aim 5. Create standards for the community to use to describe single-cell experimental designs, including assay types, data and metadata. - # Current Role of the DCP in HCA Atlas Development To support initial efforts in atlas development, the DCP team is focusing on describing experimental design, collecting and harmonizing data, and assuring data integrity, serving both the biologists generating single-cell data and the computational biologists needing to access it. @@ -35,4 +33,3 @@ To support initial efforts in atlas development, the DCP team is focusing on des The DCP Quarterly Roadmap lists the in progress and upcoming activities for achieving the DCP Strategic Aims as they relate to our current role in atlas development. ![Practice_table](../_images/Practice_table.png) - diff --git a/content/about/platform/dcp.md b/content/about/platform/dcp.md index c63b557ea..c0a259ac9 100644 --- a/content/about/platform/dcp.md +++ b/content/about/platform/dcp.md @@ -1,8 +1,8 @@ --- date: "2018-05-03" -title: "About the Data Coordination Platform" description: "The Human Cell Atlas Data Coordination Platform (HCA DCP) is an open source, cloud-based platform developed to organize, standardize, and make accessible the data that constitute the Human Cell Atlas." 
draft: false +title: "About the Data Coordination Platform" --- # About the Data Coordination Platform @@ -11,14 +11,12 @@ The Human Cell Atlas (HCA) community is profiling millions of human cells, a pro To help coordinate this data collection and processing, the HCA established the **Data Coordination Platform (DCP), a public, cloud-based platform where scientists can share, organise and interrogate single-cell data**. +Data Flow - - -\ The platform was developed and is operated by a dedicated team of scientists, engineers and bioinformaticians from the European Bioinformatics Institute (EBI), the Broad Institute (Broad), the Chan Zuckerberg Initiative (CZI) and the University of California, Santa Cruz (UCSC). - ## Data Coordination Platform Strategic Aims + The DCP Strategic Aims detail the DCP team’s long-term goals for atlas development: Aim 1. Create a data resource that maximizes the value and use of Human Cell Atlas data across the scientific community. @@ -32,8 +30,9 @@ Aim 4. Build alignment amongst the Human Cell Atlas community around a core set Aim 5. Create standards for the community to use to describe single-cell experimental designs, including assay types, data and metadata. ## Contribute - + We encourage you to contribute your data or analysis portals and methods. Learn more on the [Contribute](/contribute) page. ## Have Questions or Feedback? Contact Us! + The Data Coordination Platform is continuously developed and improved in response to researchers’ needs and feedback. Navigate to the [Contact](/contact) page to ask questions or provide feedback. 
diff --git a/content/about/platform/hca.md b/content/about/platform/hca.md index 292498859..0a15443d8 100644 --- a/content/about/platform/hca.md +++ b/content/about/platform/hca.md @@ -1,10 +1,11 @@ --- date: "2018-05-03" -title: "About the Human Cell Atlas" description: "Our mission is to create comprehensive reference maps of all the cells in the human body as a basis for both understanding human health and diagnosing, monitoring, and treating disease." +title: "About the Human Cell Atlas" --- # About the Human Cell Atlas + The Human Cell Atlas (HCA) is a collaborative community of international scientists. Our mission is to create comprehensive reference maps of all the cells in the human body as a basis for both understanding human health and diagnosing, monitoring, and treating disease. The HCA registry has more than one thousand member scientists from hundreds of institutions around the world. The project is steered and governed by an Organizing Committee, co-chaired by Aviv Regev and Sarah Teichmann. @@ -12,4 +13,3 @@ The HCA registry has more than one thousand member scientists from hundreds of i To learn more about the HCA, visit . To join the HCA community, you can register on this page: - diff --git a/content/analyze/methods/methods-packages.md b/content/analyze/methods/methods-packages.md index 1a93b3438..273ffc234 100644 --- a/content/analyze/methods/methods-packages.md +++ b/content/analyze/methods/methods-packages.md @@ -1,9 +1,7 @@ --- -path: "/analyze/methods/methods-packages" +componentName: "analyze" date: "2018-05-03" -title: "Methods Packages" description: "Methods packages for performing analyses involving computational biology approaches for analyzing single-cell data." 
-componentName: "analyze" linked: - ./methods-packages/infer.md - ./methods-packages/magic.md @@ -12,13 +10,14 @@ linked: - ./methods-packages/slingshot.md - ./methods-packages/spectre.md - ./methods-packages/stream.md +path: "/analyze/methods/methods-packages" +title: "Methods Packages" --- -# Methods Packages +# Methods Packages Methods packages listed below are tools for performing analyses involving computational biology approaches for analyzing single-cell data. Method software is pre-installed in container images. Registry methods can be called programmatically for easy integration into portals. These methods provide domain-specific ways to analyze biological data produced by Human Cell Atlas. These solutions are built by third parties. This information is provided as a service to the community and does not constitute an endorsement by the HCA. - ->Are you developing a package that can consume HCA data? Please [submit it](/contribute/analysis-tools-registry) for inclusion in the registry. +> Are you developing a package that can consume HCA data? Please [submit it](/contribute/analysis-tools-registry) for inclusion in the registry. diff --git a/content/analyze/methods/methods-packages/infer.md b/content/analyze/methods/methods-packages/infer.md index d2b0cef6b..70027efaf 100644 --- a/content/analyze/methods/methods-packages/infer.md +++ b/content/analyze/methods/methods-packages/infer.md @@ -1,13 +1,13 @@ --- -path: "/analyze/methods/methods-packages/infer" -date: "2019-03-26" -title: "inferCNV" +appUrl: "https://github.com/broadinstitute/infercnv" author: "Brian J. Haas, Christophe H. Georgescu, Maxwell P. Brown, Timothy L. Tickle, Livnat Jerby, Matan Hofree, Itay Tirosh, Aviv Regev" +componentName: "analysisDetail" +date: "2019-03-26" description: "InferCNV is used to explore tumor single cell RNA-Seq data to identify evidence for large-scale chromosomal copy number variations." 
githubUrl: "https://github.com/broadinstitute/infercnv" -appUrl: "https://github.com/broadinstitute/infercnv" +path: "/analyze/methods/methods-packages/infer" +title: "inferCNV" upstreamRegistryUrl: "Submitted to bioconductor on 03/13/2019" -componentName: "analysisDetail" --- [![Build Status](https://travis-ci.com/broadinstitute/infercnv.svg?branch=master )](https://travis-ci.com/broadinstitute/infercnv) @@ -44,9 +44,8 @@ docker run -v ${PWD}:/data -w /data --rm -it singlecellportal/infercnv:0-99-5 /i --denoise ``` - - ## Validate + Run this command to confirm your container produces correct reference output: ``` @@ -54,7 +53,9 @@ docker run -v ${PWD}:/data -w /data --rm -it singlecellportal/infercnv:0-99-5 R ``` ## Integrate + [Run inferCNV](https://github.com/broadinstitute/single_cell_portal/wiki/Running-inferCNV) in Single Cell Portal. ## Contact + Christophe Georgescu ([cgeorges@broadinsitute.org](mailto:cgeorges@broadinsitute.org)) diff --git a/content/analyze/methods/methods-packages/magic.md b/content/analyze/methods/methods-packages/magic.md index d88686764..4dd15d520 100644 --- a/content/analyze/methods/methods-packages/magic.md +++ b/content/analyze/methods/methods-packages/magic.md @@ -1,13 +1,13 @@ --- -path: "/analyze/methods/methods-packages/magic" -date: "2019-01-23" -title: "Markov Affinity-based Graph Imputation of Cells (MAGIC)" +appUrl: "https://pypi.org/project/magic-impute/" author: "David van Dijk, Kevin Moon, Scott Gigante, Daniel Dager, Guy Wolf, Smita Krishnaswamy" +componentName: "analysisDetail" +date: "2019-01-23" description: "Markov Affinity-based Graph Imputation of Cells (MAGIC) is an algorithm for denoising and imputation of single cells applied to single-cell RNA sequencing data" githubUrl: "https://github.com/KrishnaswamyLab/MAGIC/" -appUrl: "https://pypi.org/project/magic-impute/" +path: "/analyze/methods/methods-packages/magic" +title: "Markov Affinity-based Graph Imputation of Cells (MAGIC)" upstreamRegistryUrl: 
"https://pypi.org/project/magic-impute/" -componentName: "analysisDetail" --- [![Build Status](https://travis-ci.com/KrishnaswamyLab/MAGIC.svg?branch=master)](https://travis-ci.com/KrishnaswamyLab/MAGIC#) @@ -22,13 +22,10 @@ docker pull scottgigante/magic:release-1.1 Here we download a csv file containing raw scRNA-seq counts, preprocess it by filtering cells with less than 2000 counts, library size normalize and then apply a square root transform before running MAGIC, then save the smoothed data matrix to magic_output.csv in your current working directory. - - ``` docker run -v ${PWD}:/data --rm scottgigante/magic:release-1.1 --filename https://github.com/KrishnaswamyLab/MAGIC/raw/master/data/HMLE_TGFb_day_8_10.csv.gz --min-library-size 2000 --normalize --transform sqrt --knn 5 --decay 15 --all-genes --output /data/magic_output.csv ``` - ## Validate Run this command to confirm your container produces correct reference output: @@ -37,4 +34,5 @@ docker run --rm scottgigante/magic:release-1.1 --validate ``` ## Contact + Scott Gigante ([scott.gigante@yale.edu](mailto:scott.gigante@yale.edu)) diff --git a/content/analyze/methods/methods-packages/phate.md b/content/analyze/methods/methods-packages/phate.md index 93211c0ba..257c72865 100644 --- a/content/analyze/methods/methods-packages/phate.md +++ b/content/analyze/methods/methods-packages/phate.md @@ -1,13 +1,13 @@ --- -path: "/analyze/methods/methods-packages/phate" -date: "2019-01-23" -title: "Potential of Heat-diffusion for Affinity-based Transition Embedding (PHATE)" +appUrl: "https://pypi.org/project/phate/" author: "Kevin Moon, David van Dijk, Scott Gigante, Smita Krishnaswamy" +componentName: "analysisDetail" +date: "2019-01-23" description: "PHATE is a tool for visualizing high dimensional single-cell data with natural progressions or trajectories." 
githubUrl: "https://github.com/KrishnaswamyLab/PHATE/" -appUrl: "https://pypi.org/project/phate/" +path: "/analyze/methods/methods-packages/phate" +title: "Potential of Heat-diffusion for Affinity-based Transition Embedding (PHATE)" upstreamRegistryUrl: "https://pypi.org/project/phate/" -componentName: "analysisDetail" --- [![Build Status](https://travis-ci.com/KrishnaswamyLab/PHATE.svg?branch=master)](https://travis-ci.com/KrishnaswamyLab/PHATE#) @@ -26,8 +26,8 @@ Here we download a csv file containing raw scRNA-seq counts, preprocess it by fi docker run -v ${PWD}:/data --rm scottgigante/phate:release-1.1 --filename https://github.com/KrishnaswamyLab/MAGIC/raw/master/data/HMLE_TGFb_day_8_10.csv.gz --min-library-size 2000 --normalize --transform sqrt --knn 5 --decay 15 --output /data/phate_output.csv ``` - ## Validate + Run this command to confirm your container produces correct reference output: ``` @@ -35,4 +35,5 @@ docker run --rm scottgigante/phate:release-1.1 --validate ``` ## Contact + Scott Gigante ([scott.gigante@yale.edu](mailto:scott.gigante@yale.edu)) diff --git a/content/analyze/methods/methods-packages/sc3.md b/content/analyze/methods/methods-packages/sc3.md index ba7c88d8d..301d80786 100644 --- a/content/analyze/methods/methods-packages/sc3.md +++ b/content/analyze/methods/methods-packages/sc3.md @@ -1,13 +1,13 @@ --- -path: "/analyze/methods/methods-packages/sc3" -date: "2019-02-28" -title: "Single-cell consensus clustering (SC3)" +appUrl: "http://bioconductor.org/packages/SC3" author: "Martin Hemberg (SC3), Gene Expression Team (sc3-scripts)" +componentName: "analysisDetail" +date: "2019-02-28" description: "SC3 is an unsupervised clustering method for scRNA-seq data." 
githubUrl: "https://github.com/hemberg-lab/SC3" -appUrl: "http://bioconductor.org/packages/SC3" +path: "/analyze/methods/methods-packages/sc3" +title: "Single-cell consensus clustering (SC3)" upstreamRegistryUrl: "http://bioconductor.org/packages/SC3" -componentName: "analysisDetail" --- [![Build Status](http://www.bioconductor.org/shields/build/release/bioc/SC3.svg)](https://git.bioconductor.org/packages/SC3) @@ -31,6 +31,7 @@ docker run -v ${PWD}:/data -w /data --rm quay.io/biocontainers/bioconductor-sc3- ``` ## Validate + Run this command to confirm your container produces correct reference output: ``` @@ -40,5 +41,6 @@ docker run -v ${PWD}:/data -w /data --rm quay.io/biocontainers/bioconductor-sc3- ``` ## Contact + Martin Hemberg, SC3 ([mh26@sanger.ac.uk](mailto:mh26@sanger.ac.uk))\ Gene Expression Team, sc3-scripts ([gene-expression@ebi.ac.uk](mailto:gene-expression@ebi.ac.uk)) diff --git a/content/analyze/methods/methods-packages/slingshot.md b/content/analyze/methods/methods-packages/slingshot.md index ffb1e0296..8ebd23644 100644 --- a/content/analyze/methods/methods-packages/slingshot.md +++ b/content/analyze/methods/methods-packages/slingshot.md @@ -1,13 +1,13 @@ --- -path: "/analyze/methods/methods-packages/slingshot" -date: "2018-09-10" -title: "Slingshot" +appUrl: "http://bioconductor.org/packages/slingshot" author: "Kelly Street, Davide Risso, Diya Das, Sandrine Dudoit, Koen Van den Berge, and Robrecht Cannoodt" +componentName: "analysisDetail" +date: "2018-09-10" description: "Slingshot provides functions for inferring continuous, branching lineage structures in low-dimensional data." 
githubUrl: "https://github.com/kstreet13/slingshot" -appUrl: "http://bioconductor.org/packages/slingshot" +path: "/analyze/methods/methods-packages/slingshot" +title: "Slingshot" upstreamRegistryUrl: "http://bioconductor.org/packages/slingshot" -componentName: "analysisDetail" --- [![Build Status](https://travis-ci.org/kstreet13/slingshot.svg?branch=master)](https://travis-ci.org/kstreet13/slingshot) @@ -46,6 +46,7 @@ docker run -v ${PWD}:/data -w /data --rm -it quay.io/kstreet13/slingshot-docker: ``` ## Validate + Run this command to confirm your container produces correct reference output: ``` @@ -53,4 +54,5 @@ docker run -v ${PWD}:/data -w /data --rm -it quay.io/kstreet13/slingshot-docker: ``` ## Contact + Kelly Street ([street.kelly@gmail.com](mailto:street.kelly@gmail.com)) diff --git a/content/analyze/methods/methods-packages/spectre.md b/content/analyze/methods/methods-packages/spectre.md index 2c0703cf1..bdc2f8d36 100644 --- a/content/analyze/methods/methods-packages/spectre.md +++ b/content/analyze/methods/methods-packages/spectre.md @@ -1,13 +1,13 @@ --- -path: "/analyze/methods/methods-packages/spectre" -date: "2021-08-03" -title: "Spectre" +appUrl: "" author: "Thomas Ashhurst, Felix Marsh-Wakefield, Givanna Putri" +componentName: "analysisDetail" +date: "2021-08-03" description: "Spectre is an R package and computational toolkit that enables comprehensive end-to-end integration, exploration, and analysis of high-dimensional cytometry or imaging data." 
githubUrl: "https://github.com/ImmuneDynamics/Spectre" -appUrl: "" +path: "/analyze/methods/methods-packages/spectre" +title: "Spectre" upstreamRegistryUrl: "" -componentName: "analysisDetail" --- ![Build Status](https://camo.githubusercontent.com/e778875bb2f2b2aa0bcf9e842d1a5d4a97e6185a02c4b3118d286b39364e2dcf/68747470733a2f2f63692e6170707665796f722e636f6d2f6170692f70726f6a656374732f7374617475732f616b68766238777562366436786874643f7376673d74727565) @@ -21,6 +21,7 @@ Spectre streamlines the analytical stages of raw data pre-processing, batch alig For users unfamiliar with using R, we also provide workflow instructions for replicating many of our analysis approaches in programs such as FlowJo. ## Docker URL + [Method-ready Docker image URL](https://hub.docker.com/r/immunedynamics/spectre) ![Spectre](../../_images/methods/spectre.png) @@ -29,7 +30,6 @@ For users unfamiliar with using R, we also provide workflow instructions for rep Extensive command line usage in R or RStudio provided at https://immunedynamics.github.io/spectre/. - ## Contact -Thomas Ashhurst () +Thomas Ashhurst () diff --git a/content/analyze/methods/methods-packages/stream.md b/content/analyze/methods/methods-packages/stream.md index b3aa82b2e..dd0767e2e 100644 --- a/content/analyze/methods/methods-packages/stream.md +++ b/content/analyze/methods/methods-packages/stream.md @@ -1,13 +1,13 @@ --- -path: "/analyze/methods/methods-packages/stream" -date: "2018-12-07" -title: "STREAM: Single-cell Trajectories Reconstruction, Exploration And Mapping of single-cell data" +appUrl: " http://stream.pinellolab.org/" author: "Huidong Chen, Luca Pinello" +componentName: "analysisDetail" +date: "2018-12-07" description: "STREAM is an interactive computational pipeline for reconstructing complex cellular developmental trajectories from sc-qPCR, scRNA-seq or scATAC-seq data." 
githubUrl: "https://github.com/pinellolab/STREAM/" -appUrl: " http://stream.pinellolab.org/" +path: "/analyze/methods/methods-packages/stream" +title: "STREAM: Single-cell Trajectories Reconstruction, Exploration And Mapping of single-cell data" upstreamRegistryUrl: "https://bioconda.github.io/recipes/stream/README.html" -componentName: "analysisDetail" --- [![Build Status](https://travis-ci.org/pinellolab/STREAM.svg)](https://travis-ci.org/pinellolab/STREAM) @@ -37,6 +37,7 @@ unzip testData.zip __Input files must be located in the directory where the docker container will be launched__ ### How to run trajectory inference (transcriptomic data) with marker gene exploration + Perform trajectory inference analysis on single cell transcriptomic data then detect DE (differentially expressed) genes, transition genes, and leaf genes ``` @@ -45,8 +46,8 @@ cd Nestorowa_2016 docker run -v ${PWD}:/data -w /data pinellolab/stream:0.3.2 -m data_Nestorowa.tsv.gz -l cell_label.tsv.gz -c cell_label_color.tsv.gz --DE --TG --LG ``` - ### How to run feature mapping + Reuse a previously inferred principal graph as reference to map new cells ``` @@ -58,15 +59,17 @@ docker run -v ${PWD}:/data -w /data pinellolab/stream:0.3.2 --new data_perturba ``` ### How to run trajectory inference (scATAC-seq data) using precomputed z-score file -Reconstructs trajectories and pseudotime from epigenomic data + +Reconstructs trajectories and pseudotime from epigenomic data + ``` cd Buenrostro_2018 docker run -v ${PWD}:/data -w /data pinellolab/stream:0.3.2 --atac -m zscore.tsv.gz --atac_samples sample_file.tsv.gz --atac_regions region_file.bed.gz -l cell_label.tsv.gz -c cell_label_color.tsv.gz --lle_components 4 ``` - ## Validate + Run this command to confirm your container produces correct reference output: ``` @@ -76,8 +79,10 @@ docker run --entrypoint stream_run_test pinellolab/stream:0.3.2 The validation test includes marker gene detction and will take ~15m to finish. 
## Integrate + View STREAM in its [production portal](http://stream.pinellolab.org/). ## Contact + Huidong Chen ([huidong.chen@mgh.harvard.edu](mailto:huidong.chen@mgh.harvard.edu))\ Luca Pinello ([lpinello@mgh.harvard.edu](mailto:lpinello@mgh.harvard.edu)) diff --git a/content/analyze/portals/analysis-portals.md b/content/analyze/portals/analysis-portals.md index b90de40af..d10cc8779 100644 --- a/content/analyze/portals/analysis-portals.md +++ b/content/analyze/portals/analysis-portals.md @@ -1,9 +1,7 @@ --- -path: "/analyze/portals/analysis-portals" +componentName: "analyze" date: "2018-05-03" -title: "Analysis Portals" description: "Analysis portals providing a human-friendly UI in a web or native app to search and explore biological data." -componentName: "analyze" linked: - ./analysis-portals/asap.md - ./analysis-portals/bioturing-browser.md @@ -20,11 +18,13 @@ linked: - ./analysis-portals/terra.md - ./analysis-portals/ucsc-cell-browser.md - ./analysis-portals/ucsc-xena.md +path: "/analyze/portals/analysis-portals" +title: "Analysis Portals" --- # Analysis Portals -Analysis portals listed below provide a human-friendly UI in a web or native app to search and explore biological data. Portals sometimes include packages for analysis [methods](/analyze/methods) and biological [visualizations](/analyze/visualization), and integrate data from Human Cell Atlas. +Analysis portals listed below provide a human-friendly UI in a web or native app to search and explore biological data. Portals sometimes include packages for analysis [methods](/analyze/methods) and biological [visualizations](/analyze/visualization), and integrate data from Human Cell Atlas. These solutions are built by third parties. This information is provided as a service to the community and does not constitute an endorsement by the HCA. 
diff --git a/content/analyze/portals/analysis-portals/asap.md b/content/analyze/portals/analysis-portals/asap.md index f45741941..77fd813d1 100644 --- a/content/analyze/portals/analysis-portals/asap.md +++ b/content/analyze/portals/analysis-portals/asap.md @@ -1,12 +1,12 @@ --- -path: "/analyze/portals/analysis-portals/asap" -date: "2018-05-03" -title: "Automated Single cell Analysis Platform (ASAP)" +appUrl: "https://asap.epfl.ch/" author: "Vincent Gardeux, Fabrice David, Bart Deplancke" +componentName: "analysisDetail" +date: "2018-05-03" description: "ASAP allows the user to perform custom analyses and compare algorithms for each step of the single cell or bulk RNA-seq analysis pipeline post genome alignment." -appUrl: "https://asap.epfl.ch/" githubUrl: "https://asap.epfl.ch/" -componentName: "analysisDetail" +path: "/analyze/portals/analysis-portals/asap" +title: "Automated Single cell Analysis Platform (ASAP)" --- [ASAP](https://asap.epfl.ch/) allows the user to perform custom analyses and compare algorithms for each step of the single cell or bulk RNA-seq analysis pipeline post genome alignment via an intuitive web interface (Gardeux et al., Bioinformatics, 2017). These steps include parsing, filtering, and normalization of the input gene expression matrix, visual (2D and 3D) representation, differential expression, clustering, heatmaps, trajectory inference and functional enrichment analyses to characterize novel cell clusters, specific cell types, or differentiation processes. Thus, ASAP has been developed to lower the bioinformatic entry level to single cell experiments and to catalyze collaborations between computational biologists and experimentalists via an easy-to-use data interaction portal. 
@@ -14,5 +14,6 @@ componentName: "analysisDetail" ![ASAP](../../_images/portals/asap.png) ## Contact + Vincent Gardeux ([Vincent.Gardeux@epfl.ch](mailto:Vincent.Gardeux@epfl.ch))\ Fabrice David ([Fabrice.David@epfl.ch](mailto:Fabrice.David@epfl.ch)) diff --git a/content/analyze/portals/analysis-portals/bioturing-browser.md b/content/analyze/portals/analysis-portals/bioturing-browser.md index ee2e74472..a6a4a3db5 100644 --- a/content/analyze/portals/analysis-portals/bioturing-browser.md +++ b/content/analyze/portals/analysis-portals/bioturing-browser.md @@ -1,17 +1,16 @@ --- -path: "/analyze/portals/analysis-portals/bioturing-browser" -date: "2021-07-16" -title: "BioTuring Browser" +appUrl: "https://bioturing.com/product/bbrowser" author: "BioTuring Team" +componentName: "analysisDetail" +date: "2021-07-16" description: "BioTuring Browser combines modern data visualization techniques, statistical machine learning toolboxes, and a rich knowledge base to create a unique platform for single-cell data analytics." -appUrl: "https://bioturing.com/product/bbrowser" githubUrl: "" -componentName: "analysisDetail" +path: "/analyze/portals/analysis-portals/bioturing-browser" +title: "BioTuring Browser" --- [BioTuring Browser](https://bioturing.com/product/bbrowser) (or BBrowser) combines modern data visualization techniques, statistical machine learning toolboxes, and a rich knowledge base to create a unique platform for single-cell data analytics. - BBrowser provides interactive access to a comprehensive and growing single-cell RNA-seq and CITE-seq database. Here scientists can instantly view the t-SNE or UMAP of the data, plot and compare the expression of multiple genes/proteins across cell groups, find markers and differentially expressed genes y populations, run enrichment analysis, study how cell composition changes across conditions, study subtypes, reconstruct cell trajectories and study genes transitioning along a biological process, etc. 
This single-cell database is also supported with a wide range of cross-study analyses and data integration to facilitate target validation across millions of cells. @@ -21,5 +20,6 @@ In addition to providing a portal for public single-cell data, BBrowser is also ![BioTuring Browser](../../_images/portals/bioturing-browser.png) ## Contact + BioTuring Team () diff --git a/content/analyze/portals/analysis-portals/cellxgene.md b/content/analyze/portals/analysis-portals/cellxgene.md index 0603a3893..1067b3ab4 100644 --- a/content/analyze/portals/analysis-portals/cellxgene.md +++ b/content/analyze/portals/analysis-portals/cellxgene.md @@ -1,12 +1,12 @@ --- -path: "/analyze/portals/analysis-portals/cellxgene" -date: "2018-05-03" -title: "cellxgene" +appUrl: "https://github.com/chanzuckerberg/cellxgene" author: "Chan Zuckerberg Initiative" +componentName: "analysisDetail" +date: "2018-05-03" description: "cellxgene is an interactive, performant explorer for single cell transcriptomics data." githubUrl: "https://github.com/chanzuckerberg/cellxgene" -appUrl: "https://github.com/chanzuckerberg/cellxgene" -componentName: "analysisDetail" +path: "/analyze/portals/analysis-portals/cellxgene" +title: "cellxgene" --- [cellxgene](https://github.com/chanzuckerberg/cellxgene) is an interactive, performant explorer for single cell transcriptomics data - an open-source experiment in how to bring powerful tools from modern web development to visualize and explore large single-cell transcriptomics datasets. cellxgene hopes to both enable scientists to explore their data and to equip developers with scalable, reusable patterns and frameworks for visualizing large scientific datasets. Features will include scalable visualization of at least 1 million cells, interactive exploration via performant cross-filtering and comparison, and a flexible API that supports a range of existing analysis packages (e.g. scanpy) for backend computational tasks integrated with client-side visualization. 
@@ -18,4 +18,5 @@ componentName: "analysisDetail" ## Contact + Fiona Griffin ([fiona.griffin@chanzuckerberg.com](mailto:fiona.griffin@chanzuckerberg.com)) diff --git a/content/analyze/portals/analysis-portals/cytoscape.md b/content/analyze/portals/analysis-portals/cytoscape.md index 727e313df..6f8f27d51 100644 --- a/content/analyze/portals/analysis-portals/cytoscape.md +++ b/content/analyze/portals/analysis-portals/cytoscape.md @@ -1,21 +1,22 @@ --- -path: "/analyze/portals/analysis-portals/cytoscape" -date: "2018-05-03" -title: "Cytoscape" +appUrl: "http://www.cytoscape.org/" author: "Cytoscape consortium" +componentName: "analysisDetail" +date: "2018-05-03" description: "Cytoscape is an open source software platform for visualizing molecular interaction networks and biological pathways." -appUrl: "http://www.cytoscape.org/" githubUrl: "http://www.cytoscape.org/" -componentName: "analysisDetail" +path: "/analyze/portals/analysis-portals/cytoscape" +title: "Cytoscape" --- -[Cytoscape](http://www.cytoscape.org/) is an open source software platform for visualizing molecular interaction networks and biological pathways and integrating these networks with annotations, gene expression profiles and other state data. Although Cytoscape was originally designed for biological research, now it is a general platform for complex network analysis and visualization. +[Cytoscape](http://www.cytoscape.org/) is an open source software platform for visualizing molecular interaction networks and biological pathways and integrating these networks with annotations, gene expression profiles and other state data. Although Cytoscape was originally designed for biological research, now it is a general platform for complex network analysis and visualization. -Cytoscape's core distribution provides a basic set of features for data integration, analysis, and visualization. Additional features are available as Apps (formerly called Plugins). 
Apps are available for network and molecular profiling analyses, new layouts, additional file format support, scripting, and connection with databases. +Cytoscape's core distribution provides a basic set of features for data integration, analysis, and visualization. Additional features are available as Apps (formerly called Plugins). Apps are available for network and molecular profiling analyses, new layouts, additional file format support, scripting, and connection with databases. -Apps may be developed by anyone using the Cytoscape open API based on Java™ technology and App community development is encouraged. Most of the Apps are freely available from Cytoscape App Store. Cytoscape is being extended through apps and core features to search, extract, visualize, and analyze data from the Human Cell Atlas, with a focus on network and pathway analysis. +Apps may be developed by anyone using the Cytoscape open API based on Java™ technology and App community development is encouraged. Most of the Apps are freely available from Cytoscape App Store. Cytoscape is being extended through apps and core features to search, extract, visualize, and analyze data from the Human Cell Atlas, with a focus on network and pathway analysis. 
![Cytoscape](../../_images/portals/cytoscape.png) ## Contact + [Cytoscape Helpdesk](https://groups.google.com/forum/#!forum/cytoscape-helpdesk) ([cytoscape-helpdesk@googlegroups.com](mailto:cytoscape-helpdesk@googlegroups.com)) diff --git a/content/analyze/portals/analysis-portals/dnastack.md b/content/analyze/portals/analysis-portals/dnastack.md index 7ad5a71f1..c77c26a28 100644 --- a/content/analyze/portals/analysis-portals/dnastack.md +++ b/content/analyze/portals/analysis-portals/dnastack.md @@ -1,11 +1,11 @@ --- -path: "/analyze/portals/analysis-portals/dnastack" +appUrl: "https://www.dnastack.com" +componentName: "analysisDetail" date: "2018-05-03" -title: "DNAstack" description: "Cloud platform for bioinformatics based on open standards like Workflow Description Language and Common Workflow Language." -appUrl: "https://www.dnastack.com" githubUrl: "https://www.dnastack.com" -componentName: "analysisDetail" +path: "/analyze/portals/analysis-portals/dnastack" +title: "DNAstack" --- [DNAstack](https://www.dnastack.com) is a cloud platform for bioinformatics based on open standards like Workflow Description Language and Common Workflow Language. @@ -17,4 +17,5 @@ componentName: "analysisDetail" ## Contact + General information ([info@dnastack.com](mailto:info@dnastack.com)) diff --git a/content/analyze/portals/analysis-portals/fastgenomics.md b/content/analyze/portals/analysis-portals/fastgenomics.md index 6b2d19b68..528e87456 100644 --- a/content/analyze/portals/analysis-portals/fastgenomics.md +++ b/content/analyze/portals/analysis-portals/fastgenomics.md @@ -1,25 +1,24 @@ --- -path: "/analyze/portals/analysis-portals/fastgenomics" -date: "2021-04-23" -title: "FASTGenomics" +appUrl: "https://www.fastgenomics.org/" author: "Comma Soft AG" +componentName: "analysisDetail" +date: "2021-04-23" description: "FASTGenomics is a platform to share scRNA-seq data and analyses. 
Users can either choose from best practices or create individual workflows for the exploration of gene expression data." githubUrl: "https://fastgenomics.org/login" -appUrl: "https://www.fastgenomics.org/" +path: "/analyze/portals/analysis-portals/fastgenomics" +title: "FASTGenomics" upstreamRegistryUrl: "" -componentName: "analysisDetail" --- [FASTGenomics](https://www.fastgenomics.org/) - a cloud-based collaboration platform for data management and reproducible analyses of scRNA-seq and omics data. ![FASTGenomics](../../_images/portals/fastgenomics.png) - Collaboration and data sharing is key in biomedical research. It involves experts from several fields of study such as Molecular Biology, Immunology, Data Science and Computer Science as well as storage and re-use of data in a reproducible environment. Our Life and Data Science experts at [Comma Soft](https://comma-soft.com) have therefore developed the open platform [FASTGenomics](https://beta.fastgenomics.org), which provides a common infrastructure, smart data management, is easy to use and allows direct access to data and results. It thus acts as a single point of truth and brings together all collaborators of your project. -The aim of FASTGenomics is to provide the highest reproducibility and transparency for single-cell and omics data analysis to the whole community. The platform offers publicly available datasets, reproducible analyses, and interactive projects for the exploration and visualization of gene expression data. Docker containers provide full reproducibility and help to avoid the "works only on my machine" problems. +The aim of FASTGenomics is to provide the highest reproducibility and transparency for single-cell and omics data analysis to the whole community. The platform offers publicly available datasets, reproducible analyses, and interactive projects for the exploration and visualization of gene expression data. 
Docker containers provide full reproducibility and help to avoid the "works only on my machine" problems. FASTGenomics is an open-access platform and is used as the central data and analytics platform in various European research projects such as the Human Cell Atlas project [discovAIR](https://www.discovair.org) and the EU H2020 project [SYSCID](http://www.syscid.eu). @@ -27,7 +26,6 @@ We are an experienced partner with a tight network of leading experts from Bioin Together, we can help you get started with your research project, assist in data management, and leverage the power of state-of-the-art AI-based techniques. Our hybrid design also allows custom solutions such as FASTGenomics on-premises for clinical and pharmaceutical research facilities. - ![FASTGenomics Screenshot](../../_images/portals/fastgenomics_screenshot.jpg) ## Where to find us: @@ -36,15 +34,14 @@ Together, we can help you get started with your research project, assist in data **Twitter**: [@FASTGenomics](https://twitter.com/FASTGenomics) -**Youtube**: [FASTGenomics channel](https://www.youtube.com/channel/UCuox5j5_QeW2mc4-gO2mncA) +**YouTube**: [FASTGenomics channel](https://www.youtube.com/channel/UCuox5j5_QeW2mc4-gO2mncA) **Slack**: [Slack support channel](https://join.slack.com/t/fastgenomics/shared_invite/enQtNjU2ODk0OTk5MTA3LTkwZTgxN2EzYzAyMmExZTJiYmYxMjRhYjM2ODBiMWIwYmQ3MzZhYmIzZDkxZTI4OGFhYjQ4ODIzMTU3OWQ2NTc) -**Github**: [https://github.com/FASTGenomics](https://github.com/FASTGenomics) +**GitHub**: [https://github.com/FASTGenomics](https://github.com/FASTGenomics) **Docker**: [https://hub.docker.com/u/fastgenomics](https://hub.docker.com/u/fastgenomics) - ## Contact -Team FASTGenomics (contact@fastgenomics.org) +Team FASTGenomics (contact@fastgenomics.org) diff --git a/content/analyze/portals/analysis-portals/genepattern-notebook-environment.md b/content/analyze/portals/analysis-portals/genepattern-notebook-environment.md index 257083231..a6dea7d49 100644 --- 
a/content/analyze/portals/analysis-portals/genepattern-notebook-environment.md +++ b/content/analyze/portals/analysis-portals/genepattern-notebook-environment.md @@ -1,12 +1,12 @@ --- -path: "/analyze/portals/analysis-portals/genepattern-notebook-environment" -date: "2018-05-03" -title: "GenePattern Notebook Environment" +appUrl: "http://www.genepattern-notebook.org" author: "Mesirov lab, UCSD" +componentName: "analysisDetail" +date: "2018-05-03" description: "GenePattern Notebook integrates the popular Jupyter Notebook platform, which interleaves text, graphics, and code, with the hundreds of genomic analyses available in the GenePattern platform." -appUrl: "http://www.genepattern-notebook.org" githubUrl: "http://www.genepattern-notebook.org" -componentName: "analysisDetail" +path: "/analyze/portals/analysis-portals/genepattern-notebook-environment" +title: "GenePattern Notebook Environment" --- [GenePattern Notebook](http://www.genepattern-notebook.org) integrates the popular Jupyter Notebook platform, which interleaves text, graphics, and code, with the hundreds of genomic analyses available in the GenePattern platform, providing a workspace for reproducible research and open science to all researchers, regardless of their programming experience. 
@@ -16,4 +16,5 @@ The environment allows researchers to create, share, and publish detailed descri ![GenePattern Notebook](../../_images/portals/genepattern-notebook.png) ## Contact + Genepattern Team ([genepattern-team@broadinstitute.org](mailto:genepattern-team@broadinstitute.org)) diff --git a/content/analyze/portals/analysis-portals/granatum-x.md b/content/analyze/portals/analysis-portals/granatum-x.md index 45b642c29..1ad667fd1 100644 --- a/content/analyze/portals/analysis-portals/granatum-x.md +++ b/content/analyze/portals/analysis-portals/granatum-x.md @@ -1,16 +1,14 @@ --- -path: "/analyze/portals/analysis-portals/granatum-x" -date: "2021-03-12" -title: "GranatumX" +appUrl: "http://garmiregroup.org/granatumx/app" author: "Lana Garmire, David Garmire, Xun Zhu et al." +componentName: "analysisDetail" +date: "2021-03-12" description: "GranatumX is a next-generation software environment for single-cell data analysis. GranatumX is inspired by the interactive web tool Granatum (published in Genome Medicine, 2017). It enables biologists to flexibly design their own pipelines for single-cell analysis in a web-based graphical environment." -appUrl: "http://garmiregroup.org/granatumx/app" githubUrl: "https://github.com/granatumx" -componentName: "analysisDetail" +path: "/analyze/portals/analysis-portals/granatum-x" +title: "GranatumX" --- - - The objective of [GranatumX](http://garmiregroup.org/granatumx/app) is to provide scRNA-seq biologists better access to bioinformatics tools and the ability to conduct single-cell data analysis independently. Currently, other scRNA-seq platforms usually only provide a fixed set of methods implemented by the authors themselves. It is difficult to add new methods developed by the community due to programming language lock-in as well as monolithic code architectures. If a pipeline is assembled between heterogeneous tools, it is manually crafted and inhibits a repeatable execution of data analysis tools by other wet-lab scientists. 
@@ -18,23 +16,22 @@ tools and the ability to conduct single-cell data analysis independently. Curren As a solution, GranatumX uses the plugin and virtualized framework that provides an easy and unified approach to add new methods in a data-analysis pipeline. The plugin system is agnostic to developer code and the choice of the original scripting language. It also eliminates inter-module incompatibilities, by isolating the dependencies of each module. As a data portal, GranatumX provides a graphical user interface (GUI) that requires no programming experience. -The users can self-learn GraantumX extremely easily. Watching this short youtube video is also helpful: https://www.youtube.com/watch?v=Y5LcG7qLQ5s - - +The users can self-learn GraantumX extremely easily. Watching this short YouTube video is also helpful: https://www.youtube.com/watch?v=Y5LcG7qLQ5s ## Deployment of GranatumX + The web-based GUI can be accessed on various devices including desktop, tablets, and smartphones. In addition to the web-based format, GranatumX is also deployable on a broad variety of computational environments, such as private PCs, cloud services, and High-Performance Computing (HPC) platforms with minimal effort by system administrators. The deployment process is unified on all platforms because all components of GranatumX are containerized in Docker (also portable to Singularity). GranatumX can handle larger-scale scRNA-seq datasets coming online, with an adequate cloud configuration setup and appropriate Gboxes. For example, after uploading data, it took GranatumX 12 minutes to finish the recommended pipeline on an AMD 3950x with 16 cores and 128GB of DRAM memory running Ubuntu 20.04, using 10K cells downsampled from the dataset of “1.3 Million Brain Cells from E18 Mice'' on the 10x Genomics website. The most time-consuming step is imputation using DeepImpute (⅖ time). ## Unique Gbox Modules + Gbox is a unique concept of GrantumX. 
It represents a containerized version of a scientific package that handles its input and output by a format understood by the GranatumX core. GranatumX has a set of pre-installable Gboxes that enable complete scRNA-seq analysis out of the box. Various Gboxes for data entry, preprocessing, and processing can be customized and organized together, to form a complete analysis pipeline. One highlight feature of the Gbox is that it stands alone and the user can assume any Gbox without the need to restart the full pipeline, in case one implemented by the user fails. Another highlight of the Gbox feature is that the entire GranatumX platform is fully interactive, with addition or removal of some Gboxes or parameter changes on the go, while some other Gboxes are being executed. - A comprehensive set of over 50 Gboxes are implemented in GranatumX to perform tasks all the way from data entry and processing to downstream functional analysis. The data processing tasks help to minimize the biases in the data and increase the signal-to-noise ratio. For each of these quality improvement categories, GranatumX provides multiple popular methods from which users can pick. To assist functional analysis, GranatumX provides a core list of methods for dimension reduction, visualization (including PCA, t-SNE, and UMAP), clustering, differential expression, marker gene identification, Gene Set Enrichment Analysis, network analysis and pseudo-time construction. Versioning for each of these Gboxes has been implemented so that users can use a specific tested version of a Gbox. Developers on the other hand can work on newer versions separately before the official upgrade. Gboxes can be stored on DockerHub for public use which maintains its own versioning system . 
@@ -42,6 +39,7 @@ To assist functional analysis, GranatumX provides a core list of methods for dim For advanced devlopers interesting in writing/contributing their own Gbox plug-ins, please click the link at the bottom of this webpage: http://garmiregroup.org/granatumx/app/. You can also sign up GranatumX developer's Slack group: https://granatumx-developer.slack.com/ for quick responses, rather than waiting for email reply from the contact email: lana.garmire.group@gmail.com ## Input Files + The input files of GranatumX include expression matrices and optional sample metadata tables, acceptable in a variety of formats such as CSV, TSV, or Excel format. GranatumX even accepts zip files and gz files (GNU zip), and the user can choose that format for large expression matrices. Expression matrices are raw read counts for all genes (rows) in all cells (columns). The sample metadata tables annotate each cell with a pre-assigned cell type, state, or other quality information. The parsing step creates a sparse matrix using the coordinate list (COO) format, and this representation ensures swift upload onto the back end, even for large input datasets (>10K cells). @@ -49,16 +47,17 @@ The sample metadata tables annotate each cell with a pre-assigned cell type, sta Such information will either be used to generate computational results (such as Gene Set Analysis) or be mapped onto the PCA, t-SNE, or UMAP plot for visualization. Once the user uploads the gene expression matrix, the data are read into a dataframe using Pandas and the step updates the user with a “preview”, consisting of the first few rows and columns of the gene expression matrix, along with the number of genes and samples present. ## User-centric Design + As a user-friendly tool, GranatumX allows multiple users to be affiliated with the same project for data and result sharing, while restricting one user to run the pipeline at a time to avoid data conflicts. 
It allows dynamically adding, removing, and reordering pipeline steps on the go. It also allows users to reset the current step. All relevant data in the analysis pipeline and all results generated by each module are stored in a database, allowing users to access and download them. To ensure reproducibility, GranatumX can automatically generate a human-readable report detailing the inputs, running arguments, and the results of all steps. All of these features are designed with the mindset of “consumer reports'' to facilitate research in experimental labs or genomics cores. ## Source Code and Gbox Implementation + The source code and Gbox tutorial for GranatumX is available at under MIT license. All builds are deployed via Docker Hub at . - ## Contact -GranatumX Development Team () +GranatumX Development Team () diff --git a/content/analyze/portals/analysis-portals/hca-galaxy-instance.md b/content/analyze/portals/analysis-portals/hca-galaxy-instance.md index 2cadf2631..80648663e 100644 --- a/content/analyze/portals/analysis-portals/hca-galaxy-instance.md +++ b/content/analyze/portals/analysis-portals/hca-galaxy-instance.md @@ -1,12 +1,12 @@ --- -path: "/analyze/portals/analysis-portals/hca-galaxy-instance" -date: "2019-09-26" -title: "Human Cell Atlas Galaxy Instance" +appUrl: "https://humancellatlas.usegalaxy.eu" author: "P Moreno, N Huang, J Manning, S Mohammed, C Talavera-Lopez, K Polanski, W Bacon, B Gruening, H Rasche, K Meyer, S Teichmann, A Brazma, I Papatheodorou" +componentName: "analysisDetail" +date: "2019-09-26" description: "The Human Cell Atlas Galaxy instance contains tools for Single Cell analysis and interactive visualisation (UCSC CellBrowser). It can import matrix data from the Human Cell Atlas and the EBI Single Cell Expression Atlas, for re-analysis." 
-appUrl: "https://humancellatlas.usegalaxy.eu" githubUrl: "https://github.com/ebi-gene-expression-group/container-galaxy-sc-tertiary" -componentName: "analysisDetail" +path: "/analyze/portals/analysis-portals/hca-galaxy-instance" +title: "Human Cell Atlas Galaxy Instance" --- The [Human Cell Atlas Galaxy instance](https://humancellatlas.usegalaxy.eu) contains tools for Single Cell Clustering (Scanpy, Seurat, Scater/SC3), Trajectories analysis (Scanpy, Monocle) and interactive visualisation (UCSC CellBrowser), allowing simple data analysis through the proven Galaxy user interface. @@ -17,8 +17,10 @@ The portal runs as part of the larger usegalaxy.eu infrastructure and has access ![Human Cell Atlas Galaxy Instance](../../_images/portals/hca-galaxy-instance.png) ## Contact + Website: [https://www.ebi.ac.uk/support/gxasc](https://www.ebi.ac.uk/support/gxasc)\ Twitter: [@ExpressionAtlas](https://twitter.com/ExpressionAtlas) ## Source code + diff --git a/content/analyze/portals/analysis-portals/omnibrowser.md b/content/analyze/portals/analysis-portals/omnibrowser.md index 4e54d0dff..2c4243dc8 100644 --- a/content/analyze/portals/analysis-portals/omnibrowser.md +++ b/content/analyze/portals/analysis-portals/omnibrowser.md @@ -1,13 +1,13 @@ --- -path: "/analyze/portals/analysis-portals/omnibrowser" -date: "2021-04-23" -title: "OmniBrowser" +appUrl: "https://omnibrowser.abiosciences.com/" author: "Analytical BioSciences Team" +componentName: "analysisDetail" +date: "2021-04-23" description: "OmniBrowser is a comprehensive curated single cell data compendium with visualization and analysis capabilities that further empower and innovate drug discovery and development." 
githubUrl: "" -appUrl: "https://omnibrowser.abiosciences.com/" +path: "/analyze/portals/analysis-portals/omnibrowser" +title: "OmniBrowser" upstreamRegistryUrl: "" -componentName: "analysisDetail" --- [OmniBrowser](https://omnibrowser.abiosciences.com/) is a web portal for scRNA-seq data exploration, supported by a spectrum of commonly used tools for data visualization and analysis. The OmniBrowserTM toolset is designed with a special attention to support biologists with limited programming and analytical bandwidth or depth. @@ -19,4 +19,5 @@ Datasets are constantly updated on OmniBrowser in order to support the constantl ![OmniBrowser](../../_images/portals/omnibrowser.jpg) ## Contact + Analytical BioSciences () diff --git a/content/analyze/portals/analysis-portals/single-cell-expression-atlas.md b/content/analyze/portals/analysis-portals/single-cell-expression-atlas.md index 71a26b087..e33893525 100644 --- a/content/analyze/portals/analysis-portals/single-cell-expression-atlas.md +++ b/content/analyze/portals/analysis-portals/single-cell-expression-atlas.md @@ -1,12 +1,12 @@ --- -path: "/analyze/portals/analysis-portals/single-cell-expression-atlas" -date: "2018-05-03" -title: "Single Cell Expression Atlas" +appUrl: "https://www.ebi.ac.uk/gxa/sc/home" author: "Gene Expression Team at EMBL-EBI, Irene Papatheodorou" +componentName: "analysisDetail" +date: "2018-05-03" description: "Single Cell Expression Atlas supports research in single cell transcriptomics." githubUrl: "https://www.ebi.ac.uk/gxa/sc/home" -appUrl: "https://www.ebi.ac.uk/gxa/sc/home" -componentName: "analysisDetail" +path: "/analyze/portals/analysis-portals/single-cell-expression-atlas" +title: "Single Cell Expression Atlas" --- [Single Cell Expression Atlas](https://www.ebi.ac.uk/gxa/sc/home) supports research in single cell transcriptomics. 
The Atlas annotates publicly available single cell RNA-Seq experiments with ontology identifiers and re-analyses them using standardised pipelines available through iRAP, our RNA-Seq analysis toolkit. The browser enables visualisation of clusters of cells, their annotations and supports searches for gene expression within and across studies. @@ -18,5 +18,6 @@ componentName: "analysisDetail" ## Contact + Website: [https://www.ebi.ac.uk/support/gxasc](https://www.ebi.ac.uk/support/gxasc)\ Twitter: [@ExpressionAtlas](https://twitter.com/ExpressionAtlas) diff --git a/content/analyze/portals/analysis-portals/single-cell-portal.md b/content/analyze/portals/analysis-portals/single-cell-portal.md index 50e01d192..66a5b07ca 100644 --- a/content/analyze/portals/analysis-portals/single-cell-portal.md +++ b/content/analyze/portals/analysis-portals/single-cell-portal.md @@ -1,15 +1,16 @@ --- -path: "/analyze/portals/analysis-portals/single-cell-portal" -date: "2018-05-03" -title: "Single Cell Portal" +appUrl: "https://portals.broadinstitute.org/single_cell" author: "Jon Bistline, Eric Weitz, Jean Chang, Vicky Horst, Timothy Tickle" +componentName: "analysisDetail" +date: "2018-05-03" description: "Single Cell Portal (SCP) provides interactive visualizations, easy-to-run cloud scalable workflows and analyses, and secure sharing permissions that support all stages of scientific inquiry." -appUrl: "https://portals.broadinstitute.org/single_cell" githubUrl: "https://portals.broadinstitute.org/single_cell" -componentName: "analysisDetail" +path: "/analyze/portals/analysis-portals/single-cell-portal" +title: "Single Cell Portal" --- [Single Cell Portal](https://portals.broadinstitute.org/single_cell) (SCP) is a cloud-based, scalable web application that is organized by studies with downloadable raw data, aiming to accelerate reproducible single-cell research through: + 1. 
enabling scientists to transform complex data into actionable insights through interactive visualizations, such as plots showing cell clusters, three dimensional trajectories, gene expression within and across studies, genome browsers to explore read level evidence, chromosomal rearrangements and more; 2. extending single cell genomics analysis to any scientist by providing curated pipelines and analysis; 3. centralizing downloadable data, visualizations, and analysis to enable reproducible analysis; @@ -18,4 +19,5 @@ componentName: "analysisDetail" ![Single Cell Portal](../../_images/portals/single-cell-portal.png) ## Contact + Single Cell Portal team ([scp-support@broadinstitute.zendesk.com](mailto:scp-support@broadinstitute.zendesk.com)) diff --git a/content/analyze/portals/analysis-portals/terra.md b/content/analyze/portals/analysis-portals/terra.md index e9a38ecb5..fca161706 100644 --- a/content/analyze/portals/analysis-portals/terra.md +++ b/content/analyze/portals/analysis-portals/terra.md @@ -1,12 +1,12 @@ --- -path: "/analyze/portals/analysis-portals/terra" -date: "2021-09-01" -title: "Terra" +appUrl: "https://app.terra.bio/" author: " Broad Institute, Microsoft, and Verily" +componentName: "analysisDetail" +date: "2021-09-01" description: "Terra is a secure, scalable, open-source platform for biomedical researchers to access data, run analysis tools and collaborate." -appUrl: "https://app.terra.bio/" githubUrl: "" -componentName: "analysisDetail" +path: "/analyze/portals/analysis-portals/terra" +title: "Terra" --- [Terra](https://app.terra.bio/) is a scalable, open-source platform for biomedical researchers to access data, run analysis tools, and collaborate securely in the cloud. It powers important scientific projects including NHGRI’s AnVIL, NHLBI’s BioData Catalyst, the Human Cell Atlas Data Coordination Platform, the BRAIN Initiative Cell Census Network, and many others. 
@@ -26,4 +26,5 @@ To learn more about Terra, visit [terra.bio](https://terra.bio/). ![Terra](../../_images/portals/terra.png) ## Contact + Terra team () diff --git a/content/analyze/portals/analysis-portals/ucsc-cell-browser.md b/content/analyze/portals/analysis-portals/ucsc-cell-browser.md index 490990395..e88eb72b9 100644 --- a/content/analyze/portals/analysis-portals/ucsc-cell-browser.md +++ b/content/analyze/portals/analysis-portals/ucsc-cell-browser.md @@ -1,12 +1,12 @@ --- -path: "/analyze/portals/analysis-portals/ucsc-cell-browser" -date: "2018-05-03" -title: "UCSC Cell Browser" +appUrl: "http://cells.ucsc.edu/" author: "Max Haeussler" +componentName: "analysisDetail" +date: "2018-05-03" description: "UCSC Cell Browser is a software tool for single cell RNA expression." -appUrl: "http://cells.ucsc.edu/" githubUrl: "http://cells.ucsc.edu/" -componentName: "analysisDetail" +path: "/analyze/portals/analysis-portals/ucsc-cell-browser" +title: "UCSC Cell Browser" --- [UCSC Cell Browser](http://cells.ucsc.edu/) is a software tool for single cell RNA expression, a 2D viewer that shows cells as a dimensionality reduction plot with the expression data overlaid. The viewer allows a visual comparison of large single-cell datasets in 2D, overlaying metadata, marker gene levels and cell clustering information. This is useful when comparing single cell layout (dimensionality reduction) methods and batch correction methods. 
@@ -14,4 +14,5 @@ componentName: "analysisDetail" ![UCSC Cell Browser](../../_images/portals/ucsc-cell-browser.png) ## Contact + Max Haeussler ([max@soe.ucsc.edu](mailto:max@soe.ucsc.edu)) diff --git a/content/analyze/portals/analysis-portals/ucsc-xena.md b/content/analyze/portals/analysis-portals/ucsc-xena.md index 73999db6f..051525e9b 100644 --- a/content/analyze/portals/analysis-portals/ucsc-xena.md +++ b/content/analyze/portals/analysis-portals/ucsc-xena.md @@ -1,30 +1,30 @@ --- -path: "/analyze/portals/analysis-portals/ucsc-xena" -date: "2019-02-05" -title: "UCSC Xena" +appUrl: "https://singlecell.xenahubs.net" author: "Brian Craft, Mary Goldman, Jingchun Zhu, David Haussler" +componentName: "analysisDetail" +date: "2019-02-05" description: "UCSC Xena is an online exploration tool for multi-omic functional genomics data and associated meta-data and annotations." -appUrl: "https://singlecell.xenahubs.net" githubUrl: "https://singlecell.xenahubs.net" -componentName: "analysisDetail" +path: "/analyze/portals/analysis-portals/ucsc-xena" +title: "UCSC Xena" --- [UCSC Xena single cell browser](https://singlecell.xenahubs.net) is an online exploration tool for single cell RNA-seq data and associated meta-data and annotations. - - Xena is built for performance, allowing you to dynamically view 1 million cells at a time. Xena downloads the latest HCA gene expression matrices available at [HCA Data Portal](/) and makes them available for visualization via our Visual Spreadsheet. + +Xena is built for performance, allowing you to dynamically view 1 million cells at a time. Xena downloads the latest HCA gene expression matrices available at [HCA Data Portal](/) and makes them available for visualization via our Visual Spreadsheet. ![UCSC Xena single cell browser](../../_images/portals/ucsc-xena.png) Xena's Visual Spreadsheet is analogous to an office spreadsheet: it is a visual representation of a data grid where each column is a slice of genomic data (e.g. 
a gene or a set of genes' expression, inferred cell type, cell location), and each row is a single cell. - - Users interactively build their spreadsheet, enabling discovery across multiple genes and cell annotations. Researchers can dynamically combine their own data, such as cluster calls, with the data from the HCA while still keeping their data private. - +Users interactively build their spreadsheet, enabling discovery across multiple genes and cell annotations. Researchers can dynamically combine their own data, such as cluster calls, with the data from the HCA while still keeping their data private. ## Contact + [ucsc-cancer-genomics-browser@googlegroups.com](mailto:ucsc-cancer-genomics-browser@googlegroups.com)\ [genome-cancer@soe.ucsc.edu](mailto:genome-cancer@soe.ucsc.edu)\ Follow [@UCSCXena](https://twitter.com/UCSCXena) ## Source code -https://github.com/ucscXena + + diff --git a/content/analyze/visualization/visualization-packages.md b/content/analyze/visualization/visualization-packages.md index 41350a717..e6a8320cc 100644 --- a/content/analyze/visualization/visualization-packages.md +++ b/content/analyze/visualization/visualization-packages.md @@ -1,19 +1,19 @@ --- -path: "/analyze/visualization/visualization-packages" +componentName: "analyze" date: "2018-05-03" -title: "Visualization Packages" description: "JavaScript visualization packages that enable analysis through interactive data exploration on the web." -componentName: "analyze" linked: - ./visualization-packages/anatomogram.md - ./visualization-packages/ideogram.md - ./visualization-packages/igv.md - ./visualization-packages/morpheus.md +path: "/analyze/visualization/visualization-packages" +title: "Visualization Packages" --- # Visualization Packages -Visualization packages listed below are JavaScript modules that enable analysis through interactive data exploration on the web. 
To ease integration into [portals](/analyze), these web components are all available as NPM packages and can be imported using standard ES6 syntax. They each provide domain-specific ways to visualize biological data produced by Human Cell Atlas. +Visualization packages listed below are JavaScript modules that enable analysis through interactive data exploration on the web. To ease integration into [portals](/analyze), these web components are all available as NPM packages and can be imported using standard ES6 syntax. They each provide domain-specific ways to visualize biological data produced by Human Cell Atlas. These solutions are built by third parties. This information is provided as a service to the community and does not constitute an endorsement by the HCA. diff --git a/content/analyze/visualization/visualization-packages/anatomogram.md b/content/analyze/visualization/visualization-packages/anatomogram.md index 92d2fd8df..0a5722c99 100644 --- a/content/analyze/visualization/visualization-packages/anatomogram.md +++ b/content/analyze/visualization/visualization-packages/anatomogram.md @@ -1,12 +1,12 @@ --- -path: "/analyze/visualization/visualization-packages/anatomogram" -date: "2018-05-03" -title: "Expression Atlas Anatomogram" -author: "Expression Atlas developers" -githubUrl: "https://github.com/ebi-gene-expression-group/anatomogram" appUrl: "https://github.com/ebi-gene-expression-group/anatomogram" -description: "Anatomogram component for Expression Atlas heatmap." +author: "Expression Atlas developers" componentName: "analysisDetail" +date: "2018-05-03" +description: "Anatomogram component for Expression Atlas heatmap." 
+githubUrl: "https://github.com/ebi-gene-expression-group/anatomogram" +path: "/analyze/visualization/visualization-packages/anatomogram" +title: "Expression Atlas Anatomogram" --- [![Build Status](https://travis-ci.org/ebi-gene-expression-group/anatomogram.svg?branch=master)](https://travis-ci.org/ebi-gene-expression-group/anatomogram) @@ -17,7 +17,9 @@ Anatomogram is an interactive component to display an anatomical view of an orga ![Anatomogram](../../_images/visualization/anatomogram.png) ## Install + `npm install anatomogram` ## Contact + Irene Papatheodorou ([irenep@ebi.ac.uk](mailto:irenep@ebi.ac.uk)) diff --git a/content/analyze/visualization/visualization-packages/ideogram.md b/content/analyze/visualization/visualization-packages/ideogram.md index e4f8d9397..0aa20e79c 100644 --- a/content/analyze/visualization/visualization-packages/ideogram.md +++ b/content/analyze/visualization/visualization-packages/ideogram.md @@ -1,12 +1,12 @@ --- -path: "/analyze/visualization/visualization-packages/ideogram" -date: "2018-05-03" -title: "Ideogram.js" +appUrl: "https://github.com/eweitz/ideogram" author: "Eric Weitz" +componentName: "analysisDetail" +date: "2018-05-03" description: "Chromosome visualization with D3.js." githubUrl: "https://github.com/eweitz/ideogram" -appUrl: "https://github.com/eweitz/ideogram" -componentName: "analysisDetail" +path: "/analyze/visualization/visualization-packages/ideogram" +title: "Ideogram.js" --- [![Build Status](https://travis-ci.org/eweitz/ideogram.svg?branch=master)](https://travis-ci.org/eweitz/ideogram) @@ -15,18 +15,20 @@ componentName: "analysisDetail" [Ideogram.js](https://eweitz.github.io/ideogram/) is a JavaScript library for chromosome visualization. -Ideogram supports drawing and animating genome-wide datasets for [human](https://eweitz.github.io/ideogram/human), [mouse](https://eweitz.github.io/ideogram/mouse), and [many other eukaryotes](https://eweitz.github.io/ideogram/eukaryotes). 
The [Ideogram API](https://github.com/eweitz/ideogram/blob/master/api.md) for annotations supports [histograms](https://eweitz.github.io/ideogram/annotations-histogram), [heatmaps](https://eweitz.github.io/ideogram/annotations-heatmap), [overlays](https://eweitz.github.io/ideogram/annotations-overlaid), and points of arbitrary shape and color layered in [tracks](https://eweitz.github.io/ideogram/annotations-tracks). Ideogram can depict haploid, [diploid](https://eweitz.github.io/ideogram/ploidy-basic) or higher ploidy genomes (e.g. plants), as well as aneuploidy, [genetic recombination](https://eweitz.github.io/ideogram/ploidy-recombination), and [homologous features](https://eweitz.github.io/ideogram/homology-basic) between chromosomes. +Ideogram supports drawing and animating genome-wide datasets for [human](https://eweitz.github.io/ideogram/human), [mouse](https://eweitz.github.io/ideogram/mouse), and [many other eukaryotes](https://eweitz.github.io/ideogram/eukaryotes). The [Ideogram API](https://github.com/eweitz/ideogram/blob/master/api.md) for annotations supports [histograms](https://eweitz.github.io/ideogram/annotations-histogram), [heatmaps](https://eweitz.github.io/ideogram/annotations-heatmap), [overlays](https://eweitz.github.io/ideogram/annotations-overlaid), and points of arbitrary shape and color layered in [tracks](https://eweitz.github.io/ideogram/annotations-tracks). Ideogram can depict haploid, [diploid](https://eweitz.github.io/ideogram/ploidy-basic) or higher ploidy genomes (e.g. plants), as well as aneuploidy, [genetic recombination](https://eweitz.github.io/ideogram/ploidy-recombination), and [homologous features](https://eweitz.github.io/ideogram/homology-basic) between chromosomes. Ideogram can be embedded as a [reusable component](https://github.com/eweitz/ideogram#usage) in any web page or application, and leverages D3.js and SVG to achieve fast, crisp client-side rendering. 
You can also integrate Ideogram with JavaScript frameworks like [Angular](https://github.com/eweitz/ideogram/tree/master/examples/angular), [React](https://github.com/eweitz/ideogram/tree/master/examples/react), and [Vue](https://github.com/eweitz/ideogram/tree/master/examples/vue), as well as data science platforms like [R](https://github.com/eweitz/ideogram/tree/master/examples/r) and [Jupyter Notebook](https://github.com/eweitz/ideogram/tree/master/examples/jupyter). ![Ideogram](../../_images/visualization/ideogram.png) ## Install + `npm install ideogram` ## Integrate + Learn how to use Ideogram.js in your portal by exploring a minimal example of [Ideogram.js in React](https://github.com/eweitz/ideogram/blob/master/examples/react/README.md#ideogram-in-react). ## Contact -Eric Weitz ([eric.m.weitz@gmail.com](mailto://eric.m.weitz@gmail.com)) +Eric Weitz ([eric.m.weitz@gmail.com](mailto://eric.m.weitz@gmail.com)) diff --git a/content/analyze/visualization/visualization-packages/igv.md b/content/analyze/visualization/visualization-packages/igv.md index 1c3179cf9..b56132020 100644 --- a/content/analyze/visualization/visualization-packages/igv.md +++ b/content/analyze/visualization/visualization-packages/igv.md @@ -1,12 +1,12 @@ --- -path: "/analyze/visualization/visualization-packages/igv" -date: "2018-05-03" -title: "igv.js" +appUrl: "https://github.com/igvteam/igv.js" author: "James Robinson, Douglass Turner, Helga Thorvaldsdottir, Jill Mesirov" +componentName: "analysisDetail" +date: "2018-05-03" description: "igv.js is an embeddable interactive genome visualization component based on the desktop Integrative Genomics Viewer (IGV)." 
githubUrl: "https://github.com/igvteam/igv.js" -appUrl: "https://github.com/igvteam/igv.js" -componentName: "analysisDetail" +path: "/analyze/visualization/visualization-packages/igv" +title: "igv.js" --- [![Build Status](https://travis-ci.org/igvteam/igv.js.svg?branch=master)](https://travis-ci.org/igvteam/igv.js) @@ -17,10 +17,13 @@ igv.js is an embeddable interactive genome visualization component based on the ![ivg](../../_images/visualization/igv.png) ## Install + `npm install igv` ## Integrate + Learn how to use igv.js in your portal by exploring a [minimal example of igv.js in React](https://github.com/eweitz/igv.js-react/blob/master/README.md#igvjs-in-react). ## Contact + IGV team ([igv-team@broadinstitute.org](mailto://igv-team@broadinstitute.org)) diff --git a/content/analyze/visualization/visualization-packages/morpheus.md b/content/analyze/visualization/visualization-packages/morpheus.md index e8be98672..f94722f1a 100644 --- a/content/analyze/visualization/visualization-packages/morpheus.md +++ b/content/analyze/visualization/visualization-packages/morpheus.md @@ -1,12 +1,12 @@ --- -path: "/analyze/visualization/visualization-packages/morpheus" -date: "2018-05-03" -title: "Morpheus" -author: "Joshua Gould" -githubUrl: "https://github.com/cmap/morpheus.js" appUrl: "https://github.com/cmap/morpheus.js" -description: "Versatile matrix visualization and analysis software." +author: "Joshua Gould" componentName: "analysisDetail" +date: "2018-05-03" +description: "Versatile matrix visualization and analysis software." +githubUrl: "https://github.com/cmap/morpheus.js" +path: "/analyze/visualization/visualization-packages/morpheus" +title: "Morpheus" --- [![Build Status](https://travis-ci.org/cmap/morpheus.js.svg?branch=master)](https://travis-ci.org/cmap/morpheus.js) @@ -17,10 +17,13 @@ Morpheus is versatile matrix visualization and analysis software. 
View your data ![Morpheus](../../_images/visualization/morpheus.png) ## Install + `npm install morpheus-app` ## Integrate + Learn how to use Morpheus in your portal by exploring a [minimal example of Morpheus in React](https://github.com/eweitz/morpheus.js-react/blob/master/README.md#morpheusjs-in-react). ## Contact + Joshua Gould ([morpheus@broadinstitute.org](mailto:morpheus@broadinstitute.org)) diff --git a/content/apis/api-documentation/apis.md b/content/apis/api-documentation/apis.md index 968872481..cf421b3fd 100644 --- a/content/apis/api-documentation/apis.md +++ b/content/apis/api-documentation/apis.md @@ -1,7 +1,8 @@ --- -title: "DCP APIs" description: "Overview of the DCP APIs." +title: "DCP APIs" --- + # DCP APIs The Data Coordination Platform (DCP) provides APIs for programmatic access of data and metadata. diff --git a/content/apis/api-documentation/data-browser-api.md b/content/apis/api-documentation/data-browser-api.md index 4c645310c..277bdb502 100644 --- a/content/apis/api-documentation/data-browser-api.md +++ b/content/apis/api-documentation/data-browser-api.md @@ -1,6 +1,6 @@ --- -title: "DCP Data Browser API" description: "An overview of the Azul Data Browser API." +title: "DCP Data Browser API" --- # Data Browser API @@ -15,17 +15,5 @@ For more information on the Data Browser API, see the [Data Browser API Specific ## API Usage Examples -- **Project matrices download notebook** - The [Downloading Project Matrices](https://github.com/DataBiosphere/azul/blob/develop/docs/download-project-matrices.ipynb) notebook demonstrates the process of making a request to the Azul [/index/projects](https://service.azul.data.humancellatlas.org/#/Index/get_index_projects__project_id_) endpoint for a single project and downloading all the project level matrix files contained within the response. 
- - +- **Project matrices download notebook** - The [Downloading Project Matrices](https://github.com/DataBiosphere/azul/blob/develop/docs/download-project-matrices.ipynb) notebook demonstrates the process of making a request to the Azul [/index/projects](https://service.azul.data.humancellatlas.org/#/Index/get_index_projects__project_id_) endpoint for a single project and downloading all the project level matrix files contained within the response. - **Command-line tool** - The [Azul command-line tool](https://github.com/DataBiosphere/azul/blob/develop/docs/hca_file_downloader.py) demonstrates how to use the [Data Browser API](https://service.azul.data.humancellatlas.org/) to programmatically list and download project data matrices. - - - - - - - - - - diff --git a/content/community-update/community-update/dcp-2-coming-soon-with-support-for-controlled-access-data.md b/content/community-update/community-update/dcp-2-coming-soon-with-support-for-controlled-access-data.md index d8ec61154..5698d17c4 100644 --- a/content/community-update/community-update/dcp-2-coming-soon-with-support-for-controlled-access-data.md +++ b/content/community-update/community-update/dcp-2-coming-soon-with-support-for-controlled-access-data.md @@ -1,10 +1,9 @@ --- date: "2020-10-09" -title: Coming Soon - DCP 2.0 description: "An important update for the community - The HCA DCP is undergoing a major upgrade." +title: Coming Soon - DCP 2.0 --- - # Coming Soon - DCP 2.0 Dear HCA Community Member, @@ -15,18 +14,12 @@ We have made significant changes, including GA4GH-compliance, more data—50% mo ## Changes Coming to Key Components - To accomplish this, we will be making the following changes and migrations: - -* The current Data Storage Service (DSS) will be retired and we will migrate to the Terra Data Repo, which supports managed access, for storage and metadata management. 
Both Ingest and the Data Browser will access the Terra Data Repo via self-service APIs, making for minimal disruption to the HCA scientific community. - - -* The matrix service API will be retired. The per-project matrices that are most commonly used will be available in static form directly from the data browser. +* The current Data Storage Service (DSS) will be retired and we will migrate to the Terra Data Repo, which supports managed access, for storage and metadata management. Both Ingest and the Data Browser will access the Terra Data Repo via self-service APIs, making for minimal disruption to the HCA scientific community. +* The matrix service API will be retired. The per-project matrices that are most commonly used will be available in static form directly from the data browser. * As a higher-level replacement for the DSS API, the Data Browser API is being prepared for use by the wider developer community and will be officially documented and supported by the HCA DCP team. - -* The HCA-CLI will be retired and the Data Browser will provide a bulk download capability via curl (or similar) commands. Because the internal organization of the data store is subject to change with upcoming work on the metadata, direct calls against it are discouraged in favor of the new Data Browser API. - +* The HCA-CLI will be retired and the Data Browser will provide a bulk download capability via curl (or similar) commands. Because the internal organization of the data store is subject to change with upcoming work on the metadata, direct calls against it are discouraged in favor of the new Data Browser API. ## Transition Path to DCP 2.0 @@ -34,10 +27,8 @@ More information about the transition to DCP 2.0 will be announced in the coming > To enable a smooth transition, the DCP 1 data browser, APIs and data will remain available until January 1, 2021. 
-We will continue to integrate third party portals and applications into the HCA ecosystem by linking from and integrating directly into the HCA Data Browser. - +We will continue to integrate third party portals and applications into the HCA ecosystem by linking from and integrating directly into the HCA Data Browser. + Regards, The HCA DCP Team - - diff --git a/content/community-update/community-update/dcp-matrix-ux-study-spring-2021.md b/content/community-update/community-update/dcp-matrix-ux-study-spring-2021.md index def6b8e0a..77d7cdcde 100644 --- a/content/community-update/community-update/dcp-matrix-ux-study-spring-2021.md +++ b/content/community-update/community-update/dcp-matrix-ux-study-spring-2021.md @@ -1,7 +1,7 @@ --- date: "2021-05-18" -title: 2021 DCP Matrix UX Study description: "The HCA DCP product team is recruiting volunteers to participate in a 15 - 20 minute virtual interview to help us understand how to improve the findability and reusability of the Data Portal project matrices." +title: 2021 DCP Matrix UX Study --- # 2021 DCP Matrix UX Study @@ -11,7 +11,6 @@ The HCA DCP product team is recruiting volunteers to participate in a 15 - 20 mi ### What are the study goals? - Learn how researchers use the Data Portal when accessing the various matrices (DCP-generated matrix and contributor-generated matrix). - - Identify improvements to the presentation, schema, and file format of matrices and their metadata. ### How do I participate? @@ -27,11 +26,7 @@ The HCA DCP product team is recruiting volunteers to participate in a 15 - 20 mi We’ll ask you questions relating to: - Your research; we want to learn more about your needs, how you use the Data Portal, and if you can find the types of projects that interest you. 
- - - Your experience working with the matrix files: how you access them and use them with downstream tools, how you link them together and with metadata, whether you can use their formats, and whether anything prevents you from using them. - - - How we can make the matrices and their metadata more useful to you. ### How long is the session? @@ -41,7 +36,3 @@ We’ll ask you questions relating to: ### How is my information used after the survey/interview? - The HCA DCP product team will de-identify your information, keep it private, and only use it for the internal purposes of improving the portal, matrices, and related documentation. - - - - diff --git a/content/community-update/community-update/dcp-updates.md b/content/community-update/community-update/dcp-updates.md index 75b56a0e6..ee58e2e1a 100644 --- a/content/community-update/community-update/dcp-updates.md +++ b/content/community-update/community-update/dcp-updates.md @@ -1,12 +1,11 @@ --- date: "2020-12-07" -title: DCP Updates description: "Latest updates for the HCA Data Coordination Platform (DCP)." +title: DCP Updates --- # DCP Platform Updates - #### September 17, 2021 ### New Raw Data @@ -36,9 +35,6 @@ The following 3 projects have updated files: 1. [Single cell RNA-Seq of E18.5 developing mouse kidney and human kidney organoids](https://data.humancellatlas.org/explore/projects/7b947aa2-43a7-4082-afff-222a3e3a4635) 1. [Single-cell transcriptomics uncovers human corneal limbal stem cells and their differentiation trajectory.](https://data.humancellatlas.org/explore/projects/24c654a5-caa5-440a-8f02-582921f2db4a) - - - ## DCP now contains data for 14 million estimated cells #### August 25, 2021 @@ -60,9 +56,8 @@ The DCP has added raw data for the following 12 new projects: 1. [The Immune Atlas of Human Deciduas With Unexplained Recurrent Pregnancy Loss](https://data.humancellatlas.org/explore/projects/3cfcdff5-dee1-4a7b-a591-c09c6e850b11) 1. 
[Transcriptional analysis of cystic fibrosis airways at single-cell resolution reveals altered epithelial cell states and composition](https://data.humancellatlas.org/explore/projects/e526d91d-cf3a-44cb-80c5-fd7676b55a1d) - - ### New Contributor Data + The following projects have new contributor-generated matrix files: 1. [Integrated single cell analysis of blood and cerebrospinal fluid leukocytes in multiple sclerosis.](https://data.humancellatlas.org/explore/projects/d3ac7c1b-5302-4804-b611-dad9f89c049d) @@ -71,15 +66,16 @@ The following projects have new contributor-generated matrix files: ### Browser Updates #### Selecting "Normal" Samples + To enable easier discovery of normal tissues, we've modified the Specimen Disease search facet on the Explore page so that the "Normal" option is now at the top of the drop-down. -![](_images/normal.png) +![Selecting Normal Samples](_images/normal.png) #### Selecting Files by "Content Description" + The File facet has a new Content Description column to enable searching for files by the type of data they contain. -![](_images/content.png) - +![Selecting Files](_images/content.png) ## New Managed Access and Seed Network projects @@ -97,9 +93,7 @@ Additionally, DCP now has the first project data from the [HCA Seed Networks](ht Managed access raw data is available from the [Synapse database](https://www.synapse.org/#!Synapse:syn22213200) upon successful completion of a data use certificate. To find supplementary links to the managed access datasets in Synapse, navigate to the [project detail page](https://data.humancellatlas.org/explore/projects/dc1a41f6-9e09-42a6-959e-3be23db6da56) in the Data Portal. 
- -![](_images/Supplementary_links.png) - +![Supplementary Links](_images/Supplementary_links.png) ### HCA Seed Networks Projects @@ -148,58 +142,52 @@ Managed access raw data is available from the [Synapse database](https://www.syn * Adipose tissue, blood, bone marrow, CSF, heart, kidney, lung, oral cavity, ovary, pancreas, prostate, skeletal muscle, testis, and umbilical vein * Disease states including COVID-19 infection, intracranial hypertension, multiple sclerosis, and renal cell carcinoma * Developmental stages including fetal, child, adolescent, and adult - * Standardized data, including aligned BAMs and cell-by-gene count matrices (Loom format), are available for 8 additional projects with data derived from: * 10x v2 and v3 3’ sequencing technologies * Brain (superior parietal cortex, middle temporal gyrus, and temporal cortex), epididymis, immune tissue, cortex of the kidney organoid, lymph nodes, placenta (chorionic villus and decidua), spine, testis, and thymus * Disease states including Alzheimer disease and cognitive impairment with or without cerebellar ataxia - - * New Jupyter Notebook tutorials for analyzing standardized DCP matrix files are available in an [Intro-to-HCA-data-on-Terra](https://app.terra.bio/#workspaces/featured-workspaces-hca/Intro-to-HCA-data-on-Terra) workspace on the cloud-based platform Terra. * After registering, you can try the step-by-step instructions for importing HCA data and analyzing in common community tools such as Bioconductor, Cumulus, Pegasus, Scanpy, and Seurat. ## Raw sequencing data and contributor-generated matrices for 23 new projects available for download + #### May 10, 2021 * Data Portal now has data for 12.2 million cells, including new standardized analyses for 13 projects as well as raw sequencing data and contributor-generated matrices for 23 new projects. 
- - * The standardized data, including aligned BAMs and cell-by-gene count matrices (Loom format), are derived from: * Human and mouse * Single-cell and single-nucleus * 10x V2 and V3 3’ chemistry * Blood, brain (including substantia nigra, developing hippocampus, cortex, retina), liver, lung, mouth, skeletal muscle, skin, spleen, and developing thymus * Disease states including HIV, drug hypersensitivity syndrome, multiple sclerosis, and thoracic aortic aneurysm - - * The 23 new projects and contributor-generated matrices include data derived from: * Human and mouse * Smart-seq2, 10x V2 and V3 3’ chemistry, Drop-seq, and Fluidigm C1 sequencing methods -* Blood, bone marrow, brain, cord blood, epididymis, fetal gonads, immune organ, kidney organoid, lymph nodes, pancreas, skin, and trachea +* Blood, bone marrow, brain, cord blood, epididymis, fetal gonads, immune organ, kidney organoid, lymph nodes, pancreas, skin, and trachea * Disease states including Alzheimer’s Disease, multiple sclerosis, autoimmune encephalitis, and type 2 diabetes - - * The Matrix Overview guide has been updated to include additional information on matrix [batch correction and normalization](/guides/consumer-vignettes/matrices#matrix-normalization-and-batch-correction). ## Raw data for 16 new projects now available + #### April 12, 2021 Raw sequencing data for 16 new projects are now available in the DCP [Data Browser](https://data.humancellatlas.org/explore/projects). 
These projects include single-cell data derived from: + - Human and mouse - 10x 3’, 10x 5’, Smart-seq2 technologies - Small intestine, aorta, brain, skeletal muscle, blood, pancreas, tonsil, lung, skin, immune system, kidney, and eye - Airway basal stem cells exposed to SARS-CoV-2 - Disease states, including Crohn’s Disease, aneurysm, Multiple Sclerosis, HIV, Type 2 Diabetes, and glioblastoma - ## Processed data now available for 26 HCA 10x datasets + #### April 02, 2021 The [DCP 2.0 Preview](https://data.humancellatlas.org/explore/projects) now has standardized BAMs and count matrices (Loom file format) available for 26 HCA projects, including 15 new projects. These projects contain both human and mouse single-cell and single-nucleus data generated with 3’ 10x V2 and V3 sequencing technology. This data was processed using the latest version of the Optimus pipeline (see the [Optimus Overview](https://data.humancellatlas.org/pipelines/optimus-workflow)). In addition to individual sample count matrices, each newly processed project also has standardized, project-level DCP-generated matrices that are stratified by organ, species, and library construction method. These matrices are minimally filtered to include only cells with more than 100 UMIs. You can download the project-level DCP-generated Matrices from the Data Browser (see image below) or from the individual Project page (see the [Exploring Projects](https://data.humancellatlas.org/guides) guide) -![](../../guides/_images/explore_dcp_2_matrices.png) +![DCP Preview](../../guides/_images/explore_dcp_2_matrices.png) ### New raw data and Contributor-Generated Matrices @@ -212,13 +200,14 @@ With the addition of these new data, the DCP now has 55 total projects with over All files (raw data, CGMs, and DCP-generated BAMs and matrices) can be downloaded following the instructions in the [Accessing HCA Data and Metadata](/guides/quick-start-guide) guide. 
Additionally, you can access matrix files programmatically using the new [Programmatic Download](https://colab.research.google.com/drive/1h14mbunsepfogcnG9VEF4FIGpuyGLA-P#scrollTo=jxk27LZk4373) guide. - ## DCP 2.0 launches; new projects, contributor generated matrices and DCP 2.0 infrastructure + #### December 11, 2020 In the spirit of bringing HCA data to the community as quickly as possible, we are releasing the new DCP 2.0 data and infrastructure incrementally. This initial launch -- the first of several planned to roll out new functionality, pipelines, and data -- includes: + * Raw data and standardized metadata for 5.8 million cells * Contributor-generated matrices, embeddings, and annotations for existing and **16 new DCP projects** * New DCP 2.0 infrastructure (details below) @@ -226,6 +215,7 @@ This initial launch -- the first of several planned to roll out new functionalit ### What’s included in the new projects? The new projects include a mix of human and mouse data from a variety of organs including adipose tissue, heart, hindlimb, spleen skin, yolk sac, diaphragm, tongue, trachea, and more. 
These data encompass: + * 143 donors * 248 specimens * estimated 1.3M additional cells @@ -237,16 +227,13 @@ The new projects include a mix of human and mouse data from a variety of organs Along with the new data view, we have updated DCP infrastructure by **retiring and replacing** the following features: * HCA Command Line Interface (CLI); data are now downloaded using curl commands (see the [Accessing HCA Data and Metadata guide](/guides/quick-start-guide)) - - * HCA Matrix Service; DCP 1.0 project matrices remain available on the individual Project Matrices page (see the [Exploring Projects guide](/guides)) - - -* HCA Data Storage Service (DSS) and API; data is now stored in the Terra Data Repo (TDR), an alternative storage and metadata management service that supports managed access +* HCA Data Storage Service (DSS) and API; data is now stored in the Terra Data Repo (TDR), an alternative storage and metadata management service that supports managed access ### What’s Coming Next? Between now and the end of February look for: + * Standardized analysis results (BAM and index files) made by the latest HCA 10x and SmartSeq-2 pipelines for all projects * DCP-generated count matrices stratified by species, library construction method, and organ for each project @@ -256,8 +243,8 @@ Once processing is complete, we will retire DCP 1.0 View. --- - ## Coming Soon - DCP 2.0 + #### October 9, 2020 We are excited to announce the launch of the DCP 2.0 this fall. This has been a collaborative effort over the past 6 months to take on board the HCA community feedback and understand the unmet needs in order to align the goals of the DCP with the wider HCA community goals. 
@@ -266,18 +253,12 @@ We have made significant changes, including GA4GH-compliance, more data—50% mo ## Changes Coming to Key Components - To accomplish this, we will be making the following changes and migrations: * The current Data Storage Service (DSS) will be retired and we will migrate to the Terra Data Repo, which supports managed access, for storage and metadata management. Both Ingest and the Data Browser will access the Terra Data Repo via self-service APIs, making for minimal disruption to the HCA scientific community. - - -* The matrix service API will be retired. The per-project matrices that are most commonly used will be available in static form directly from the data browser. - +* The matrix service API will be retired. The per-project matrices that are most commonly used will be available in static form directly from the data browser. * As a higher-level replacement for the DSS API, the Data Browser API is being prepared for use by the wider developer community and will be officially documented and supported by the HCA DCP team. - -* The HCA-CLI will be retired and the Data Browser will provide a bulk download capability via curl (or similar) commands. Because the internal organization of the data store is subject to change with upcoming work on the metadata, direct calls against it are discouraged in favor of the new Data Browser API. - +* The HCA-CLI will be retired and the Data Browser will provide a bulk download capability via curl (or similar) commands. Because the internal organization of the data store is subject to change with upcoming work on the metadata, direct calls against it are discouraged in favor of the new Data Browser API. 
## Transition Path to DCP 2.0 @@ -290,7 +271,3 @@ We will continue to integrate third-party portals and applications into the HCA Regards, The HCA DCP Team - - - - diff --git a/content/community-update/community-update/what-is-the-dcp-20-data-preview.md b/content/community-update/community-update/what-is-the-dcp-20-data-preview.md index e4d729270..2c4e5554a 100644 --- a/content/community-update/community-update/what-is-the-dcp-20-data-preview.md +++ b/content/community-update/community-update/what-is-the-dcp-20-data-preview.md @@ -1,7 +1,7 @@ --- date: "2020-12-07" -title: "What is the DCP 2.0 Data?" description: "The DCP 2.0 is a new data view enabling you to explore and access the new and reprocessed DCP data as soon as they become available." +title: "What is the DCP 2.0 Data?" --- # What is the DCP 2.0 Data View? @@ -16,6 +16,7 @@ The DCP 2.0 Data View enables you to explore and access the new contributor-gene The [DCP 1.0 Data View](https://data.humancellatlas.org/explore/projects?catalog=dcp1) lists all the original DCP (DCP 1.0) projects. From this view, you can: + - Access raw data for DCP 1.0 projects - Access DCP 1.0 processed data generated with standardized pipelines (BAMs, etc.) - Download DCP 1.0 project matrices (in CSV, MTX, and Loom formats) @@ -24,6 +25,7 @@ From this view, you can: The [DCP 2.0 Data View](https://data.humancellatlas.org/explore/projects) lists all DCP projects, including the 16 new DCP 2.0 projects. 
From this view, you can: + - Download the new contributor-generated matrices for each project* - Access the raw data for all DCP projects; for DCP 1.0 projects, this raw data is the same as that in the DCP 1.0 view - Access data generated with updated DCP standardized pipelines** @@ -38,12 +40,8 @@ Use the banner at the top of the Data Portal to switch between the two views: ![Data Preview](./_images/dcp2view.png "DCP 2.0") - As we continue to (re)process old and new DCP projects, we will incrementally add the newly processed data and DCP-generated matrices to the DCP 2.0 Data View. Once processing is complete, we will retire the DCP 1.0 Data View. > To start exploring DCP data, see the [Exploring Projects guide](/guides). - - - diff --git a/content/contact/contact/contact-us.md b/content/contact/contact/contact-us.md index fb74d09f5..d5edb87a2 100644 --- a/content/contact/contact/contact-us.md +++ b/content/contact/contact/contact-us.md @@ -1,22 +1,22 @@ --- -path: "/contact/contact/contact-us" date: "2018-05-03" -title: "Contact Us" description: "Get involved to give us feedback, ask questions, and learn about the newest updates to the HCA Data Portal." +path: "/contact/contact/contact-us" +title: "Contact Us" --- # Contact Us -We are constantly improving the Data Portal to be a better resource for our community. Get involved to give us feedback, ask questions, and learn about the newest updates to the HCA Data Portal. +We are constantly improving the Data Portal to be a better resource for our community. Get involved to give us feedback, ask questions, and learn about the newest updates to the HCA Data Portal. ## Email us with questions -Find something that’s not working right? Have questions about the Data Portal or the datasets? Email us at [data-help@humancellatlas.org](mailto:data-help@humancellatlas.org) to ask questions or report issues. +Find something that’s not working right? Have questions about the Data Portal or the datasets? 
Email us at [data-help@humancellatlas.org](mailto:data-help@humancellatlas.org) to ask questions or report issues. ## Register with the HCA -Register with the Human Cell Atlas to find collaborators and stay up-to-date about news and events. [Register with the HCA](https://www.humancellatlas.org/join-hca). +Register with the Human Cell Atlas to find collaborators and stay up-to-date about news and events. [Register with the HCA](https://www.humancellatlas.org/join-hca). ## Contribute to the HCA -If you have data that you’d like to add to the HCA, visit this page to learn more about contributing data. If you’d like to contribute an analysis or visualization application, email [data-help@humancellatlas.org](mailto:data-help@humancellatlas.org). +If you have data that you’d like to add to the HCA, visit this page to learn more about contributing data. If you’d like to contribute an analysis or visualization application, email [data-help@humancellatlas.org](mailto:data-help@humancellatlas.org). diff --git a/content/contact/contact/join-the-discussion.md b/content/contact/contact/join-the-discussion.md index aa1ea2198..766d48545 100644 --- a/content/contact/contact/join-the-discussion.md +++ b/content/contact/contact/join-the-discussion.md @@ -1,17 +1,18 @@ --- -path: "/contact/contact/join-the-discussion" date: "2018-05-03" -title: "Join the Discussion" description: "The Human Cell Atlas is a community-driven effort. There are many opportunities to get involved." +path: "/contact/contact/join-the-discussion" +title: "Join the Discussion" --- # Join the Discussion The Human Cell Atlas is a community-driven effort. There are many opportunities to get involved. -We use a Slack workspace to coordinate our efforts as we build the HCA Data Portal. This is an open group, and we welcome community members who want to be learn more about the project. [Join up here](https://humancellatlas.slack.com/join/shared_invite/zt-8xpzlu3k-P9M6bKwAJNx~YI_ACLdrFg#/). 
+We use a Slack workspace to coordinate our efforts as we build the HCA Data Portal. This is an open group, and we welcome community members who want to learn more about the project. [Join up here](https://humancellatlas.slack.com/join/shared_invite/zt-8xpzlu3k-P9M6bKwAJNx~YI_ACLdrFg#/). + +Some channels to explore: -Some channels to explore: * **#general** - A forum for wide-ranging discussion and questions. * **#data-portal** - Changes, issues, and feature requests for the Data Portal website. * **#data-store** - Discussion about accessing the HCA data directly, and reusing the HCA storage system. diff --git a/content/contribute/data/analysis-tools-registry.md b/content/contribute/data/analysis-tools-registry.md index 709061f5b..f90711751 100644 --- a/content/contribute/data/analysis-tools-registry.md +++ b/content/contribute/data/analysis-tools-registry.md @@ -1,15 +1,16 @@ --- -path: "/contribute/data/analysis-tools-registry" date: "2019-02-01" -title: "Contributing to the Analysis Tools Registry" description: "The Analysis Tools Registry lists portals, methods packages, and visualization packages suitable for working with HCA DCP data." +path: "/contribute/data/analysis-tools-registry" +title: "Contributing to the Analysis Tools Registry" --- # Contributing to the Analysis Tools Registry -The [Analysis Tools Registry](/analyze) lists portals, methods packages, and visualization packages. Computational biologists submit packages for use by software engineers in portal development. [Analysis Tools Registry standards](/contribute/analysis-tools-registry/registry-standards) promote software best practices and help facilitate ease of package deployment by non-biologists (e.g. software engineers) and non-computational biologists. +The [Analysis Tools Registry](/analyze) lists portals, methods packages, and visualization packages. Computational biologists submit packages for use by software engineers in portal development. 
[Analysis Tools Registry standards](/contribute/analysis-tools-registry/registry-standards) promote software best practices and help facilitate ease of package deployment by non-biologists (e.g. software engineers) and non-computational biologists. ## Submission Forms + Submissions are contributed via GitHub - use these links to access the submission forms: [Portal submission](https://github.com/HumanCellAtlas/data-portal/issues/new/?template=submit-portal.md)\ @@ -25,6 +26,7 @@ Below, we provide details about the information requested in the methods and vis The package details pages provide software engineers with information (basic command line usage, code repository location, etc.) and resources (Docker image URL, contact name and email etc.) to support rapid incorporation of these packages into web portals. ### Required Submission Fields for Methods and Visualizations + - Package title - Name of method or visualization - Each Methods Registry entry is associated with one container image. Methods Registry recommends authors provide one canonical package. However, some authors may wish to submit registry entries for different language implementations or minimal vs extended docker images; the package title should reflect the difference between such entries. @@ -52,8 +54,9 @@ The package details pages provide software engineers with information (basic com - Example 2-3 sentence description: STREAM is an interactive computational pipeline for reconstructing complex cellular developmental trajectories from sc-qPCR, scRNA-seq or scATAC-seq data. ### Optional Fields for Methods and Visualizations + - Build badge URL - - Packages should execute automated tests upon every push to their default branch (e.g. master) on GitHub using a continuous integration service. Such services include [Travis CI](https://docs.travis-ci.com/user/tutorial/) or [Circle CI](https://circleci.com/docs/2.0/first-steps/), which report whether the package build passes its own tests. 
+ - Packages should execute automated tests upon every push to their default branch (e.g. master) on GitHub using a continuous integration service. Such services include [Travis CI](https://docs.travis-ci.com/user/tutorial/) or [Circle CI](https://circleci.com/docs/2.0/first-steps/), which report whether the package build passes its own tests. - Example Build badge URL: https\://travis-ci.org/pinellolab/STREAM.svg - Coverage badge URL - Packages should measure the code coverage of their automated tests using services like [Coveralls](https://docs.coveralls.io/) or [Codecov](https://docs.codecov.io/docs), which report the percentage of lines of code, conditional branches, and other metrics covered by tests. @@ -72,13 +75,13 @@ The package details pages provide software engineers with information (basic com - Example URL: http\://stream.pinellolab.org ### Method-Specific Required Fields + - Method-ready Docker image: - Containerized images allow consistent deployment of packages by bundling OS requirements, necessary dependencies and configurations so portal developers can rapidly incorporate a package while bypassing the need to build unfamiliar software. - Images should be tagged with method version so the docker pull command requests a versioned image compatible with the example command line(s) and validation commands provided below. - Recommended container registries: [quay.io](https://docs.quay.io/solution/getting-started.html), [Docker Hub](https://docs.docker.com/docker-hub/) - Container [best practices](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/) and [gotchas](https://runnable.com/blog/9-common-dockerfile-mistakes) - Example Method-ready Docker image: `docker pull pinellolab/stream:0.3.2` - - Language - Methods Registry recommends authors provide one canonical package but recognize that some packages are implemented in multiple languages. 
Language implementations should be submitted separately so docker images are kept small to minimize deployment costs. - Example Language: Python @@ -111,7 +114,7 @@ The package details pages provide software engineers with information (basic com - Command(s) to validate installation - Please provide a call of the methodology using the test data to verify that the program ran to completion and the output is valid. - Command line calls should programmatically return zero if validation is successful and non-zero for validation failure. - - Validation may be a single command line (eg. --test parameter provided via CLI) or a series of commands that + - Validation may be a single command line (e.g. --test parameter provided via CLI) or a series of commands that - runs the method on the reference test data set - compares the locally generated result with the reference output(s) - indicates whether the locally generated result is valid @@ -119,12 +122,15 @@ The package details pages provide software engineers with information (basic com - Example Command to validate installation: docker run --entrypoint stream\_run\_test pinellolab/stream ### Visualization Component-Specific Optional Fields: + - URL to implementation of this package in an active portal: - Please provide a URL to a simple page with a visualization demo. 
- In our examples we use “Mashups”, see example here https://github.com/eweitz/igv.js-react/blob/master/README.md#igvjs-in-react ## Packages Submitted in Collaboration with a Methodologist -with separate maintenance of supporting scripts should provide parallel information for "methodologist" and "accessibility maintainer" for the following fields: + +With separate maintenance of supporting scripts should provide parallel information for "methodologist" and "accessibility maintainer" for the following fields: + - Contact name - Contact email - Who to attribute @@ -137,7 +143,9 @@ with separate maintenance of supporting scripts should provide parallel informat For such collaborative submissions the long description may be used to describe the relationship between the supporting scripts and the listed package. ## How to Update Your Package Entry + Package information may be updated by creating a GitHub pull request: + - From the package details page, click "Improve this page" - Edit the page content to reflect your updates -- Start a pull request by clicking "Commit changes". Your edits will be committed to a new branch in github and queued for the Methods Registry Maintainer to push to production. +- Start a pull request by clicking "Commit changes". Your edits will be committed to a new branch in GitHub and queued for the Methods Registry Maintainer to push to production. 
diff --git a/content/contribute/data/analysis-tools-registry/registry-standards.md b/content/contribute/data/analysis-tools-registry/registry-standards.md index 2afa7950d..af7818676 100644 --- a/content/contribute/data/analysis-tools-registry/registry-standards.md +++ b/content/contribute/data/analysis-tools-registry/registry-standards.md @@ -1,45 +1,56 @@ --- -path: "/contribute/data/analysis-tools-registry/registry-standards" date: "2019-02-01" -title: "Registry Standards" description: "HCA DCP Analysis Tools Registry standards ensure reusability and ease deployment of methods and visualizations for analyses of Human Cell Atlas datasets for portals and others." +path: "/contribute/data/analysis-tools-registry/registry-standards" +title: "Registry Standards" --- # Analysis Tools Registry Standards Overview -The following standards of the HCA DCP Analysis Tools Registry ensure reusability and ease deployment of methods and visualizations for analyses of Human Cell Atlas datasets for portals and others. The primary audience for this document is developers of packages for methods and visualizations. +The following standards of the HCA DCP Analysis Tools Registry ensure reusability and ease deployment of methods and visualizations for analyses of Human Cell Atlas datasets for portals and others. The primary audience for this document is developers of packages for methods and visualizations. -Standards listed below are required or optional. Packages must conform to all required standards to be listed in the Analysis Tools Registry. Packages should conform to all recommended standards to enhance their registry entry. +Standards listed below are required or optional. Packages must conform to all required standards to be listed in the Analysis Tools Registry. Packages should conform to all recommended standards to enhance their registry entry. ## Required Standards + The required standards must be met for packages to be listed in the Registry. 
### Be Free and Open Source -Source code for packages listed in the Analysis Tools Registry must be freely licensed and under source control in a public repository on GitHub. The license must be contained in the code repository. + +Source code for packages listed in the Analysis Tools Registry must be freely licensed and under source control in a public repository on GitHub. The license must be contained in the code repository. ### Use Containers and Modules -Method packages must be containerized in Docker and listed in a container registry, e.g. Docker Hub. Visualization packages must be modular and importable using both ES6 export syntax and traditional script tags. + +Method packages must be containerized in Docker and listed in a container registry, e.g. Docker Hub. Visualization packages must be modular and importable using both ES6 export syntax and traditional script tags. ### Register Upstream -Packages must be published in at least one upstream registry used by their respective implementation language; e.g. Bioconda for Python, Bioconductor for R, or npm for JavaScript. Participation in an upstream registry ensures that the package conforms to standards common to the implementation language. + +Packages must be published in at least one upstream registry used by their respective implementation language; e.g. Bioconda for Python, Bioconductor for R, or npm for JavaScript. Participation in an upstream registry ensures that the package conforms to standards common to the implementation language. ### Support Standard Data Formats + Packages must support standard data exchange format(s) for input and output, as defined by the relevant analysis community. When applicable, file formats used as standards by the HCA project should be used for input/output by Methods Registry tools. ### Document Installation and Usage -Packages must have at least brief documentation on how developers can install and use them. 
Any package with a command-line interface (CLI) must implement support for “--help” or “-h” arguments that show at least a basic program summary, parameter descriptions, and an example usage call. Documentation should describe major use cases of the method and provide example commands for each use case. + +Packages must have at least brief documentation on how developers can install and use them. Any package with a command-line interface (CLI) must implement support for “--help” or “-h” arguments that show at least a basic program summary, parameter descriptions, and an example usage call. Documentation should describe major use cases of the method and provide example commands for each use case. ### Provide Testing Data -Packages must provide a small data set that successfully runs (aka. toy data) in a reasonably short amount of time, so that developers can verify their local deployments work as expected. Packages should provide a way to validate new instantiations of the method. Validation uses methodologist-provided reference output file(s) for comparison with the results from a new docker instance running the method on methodologist-provided test data. Package documentation may also provide links to synthetic or real data for testing in realistic scenarios. Methods and visualization vignettes should (but are not required to) use Human Cell Atlas data. + +Packages must provide a small data set that successfully runs (aka. toy data) in a reasonably short amount of time, so that developers can verify their local deployments work as expected. Packages should provide a way to validate new instantiations of the method. Validation uses methodologist-provided reference output file(s) for comparison with the results from a new docker instance running the method on methodologist-provided test data. Package documentation may also provide links to synthetic or real data for testing in realistic scenarios. 
Methods and visualization vignettes should (but are not required to) use Human Cell Atlas data. ## Recommended Standards + The recommended standards in this section are encouraged, but not required, by the Registry. ### Use Continuous Integration -Packages should execute automated tests upon every push to their default branch (e.g. master) on GitHub using a continuous integration service. Such services include Travis CI or Circle CI, which report whether the package build passes its own tests. + +Packages should execute automated tests upon every push to their default branch (e.g. master) on GitHub using a continuous integration service. Such services include Travis CI or Circle CI, which report whether the package build passes its own tests. ### Measure Code Coverage + Packages should measure the code coverage of their automated tests using services like Coveralls or Codecov, which report the percentage of lines of code, conditional branches, and other metrics covered by tests. ### Use Conventional Parameter Names -Methods packages should use parameter names that match other methods packages in the same domain wherever possible. Consistency across package API’s will enhance interoperability. + +Methods packages should use parameter names that match other methods packages in the same domain wherever possible. Consistency across package API’s will enhance interoperability. diff --git a/content/contribute/data/contributing-to-hca.md b/content/contribute/data/contributing-to-hca.md index 7c176b64a..9b4f07a41 100644 --- a/content/contribute/data/contributing-to-hca.md +++ b/content/contribute/data/contributing-to-hca.md @@ -1,8 +1,8 @@ --- -path: "/contribute/data/contributing-to-hca" date: "2018-05-03" -title: "Overview" description: "Contribute to the HCA DCP - the Human Cell Atlas is built with the help of and for the benefit of the scientific community." 
+path: "/contribute/data/contributing-to-hca" +title: "Overview" --- # Help build the Human Cell Atlas - submit data @@ -58,7 +58,7 @@ Learn more about what results will be available for different data types [here]( ## Wrangler Office Hours -If you have questions about data contribution, join the DCP Wranlger office hours on Zoom. +If you have questions about data contribution, join the DCP Wrangler office hours on Zoom. ### Date / Time @@ -88,6 +88,3 @@ Meeting ID: 842 9628 1382 Passcode: 4015409680 Find your local number: https://broadinstitute.zoom.us/u/kd1itjb9pv - - - diff --git a/content/contribute/data/contributing-to-hca/contributing-data-processing-results.md b/content/contribute/data/contributing-to-hca/contributing-data-processing-results.md index 4066afd8b..8a9577915 100644 --- a/content/contribute/data/contributing-to-hca/contributing-data-processing-results.md +++ b/content/contribute/data/contributing-to-hca/contributing-data-processing-results.md @@ -1,14 +1,14 @@ --- -path: "/contribute/data/contributing-to-hca/contributing-data-processing-results" date: "2019-04-29" -title: "Data Processing and Results" description: "Overview of processing of data submissions." +path: "/contribute/data/contributing-to-hca/contributing-data-processing-results" +title: "Data Processing and Results" --- # Data Processing and Results Once submitted to the Human Cell Atlas, your data will be processed and made available in multiple ways. - + ## Searchable in the HCA Data Browser Your raw data and metadata will be searchable in the HCA Data Browser immediately after submission. @@ -17,7 +17,6 @@ Your raw data and metadata will be searchable in the Ready to contribute? Start the conversation by emailing\ > [wrangler-team@data.humancellatlas.org](mailto:wrangler-team@data.humancellatlas.org). 
- diff --git a/content/contribute/data/contributing-vignettes.md b/content/contribute/data/contributing-vignettes.md index bac33a6ae..4bb1deb1f 100644 --- a/content/contribute/data/contributing-vignettes.md +++ b/content/contribute/data/contributing-vignettes.md @@ -1,8 +1,8 @@ --- -path: "/contribute/data/contributing-vignettes" date: "2019-02-21" -title: "Contributing Vignettes" description: "The HCA maintains a repository of Data Consumer Vignettes containing Python scripts, Jupyter notebooks and code snippets showing how to access and use the HCA data and metadata." +path: "/contribute/data/contributing-vignettes" +title: "Contributing Vignettes" --- # Contributing Vignettes diff --git a/content/document/creating-content/content-style-guide.md b/content/document/creating-content/content-style-guide.md index 5c1134cf4..89839cc2f 100644 --- a/content/document/creating-content/content-style-guide.md +++ b/content/document/creating-content/content-style-guide.md @@ -1,8 +1,8 @@ --- -path: "/document/creating-content/content-style-guide" date: "2018-05-30" -title: "Style Guide" description: "Overview of the HCA DCP style guide." +path: "/document/creating-content/content-style-guide" +title: "Style Guide" --- # DCP Content Style Guide @@ -11,11 +11,12 @@ description: "Overview of the HCA DCP style guide." Our goal in building the Human Cell Atlas *Data Portal* is to make data and tools open and easily accessible to scientists around the world. To accomplish this, we not only need a robust, easy-to-use technology platform, but also simple and clear content to help scientists and developers accomplish their goals. The content on the *Data Portal* will come from many writers and in many forms, but we want to create a sense of consistency in our tone and terminology to help users understand and navigate our services. -This document lays out some rules and best practices for writing this content. We will update this guide as our rules evolve. 
All guidelines presented here should be reviewed and approved by the UX, content, and PM teams, and all content published on the data portal or in communications from the DCP should align with these rules. +This document lays out some rules and best practices for writing this content. We will update this guide as our rules evolve. All guidelines presented here should be reviewed and approved by the UX, content, and PM teams, and all content published on the data portal or in communications from the DCP should align with these rules. ## Voice and Tone The voice of the *Data Portal* is: + * Human * Straightforward but not simplistic * Friendly but not overly casual @@ -25,10 +26,11 @@ The voice of the *Data Portal* is: * Professional but not stodgy * Polite but not formal -Just like a person, the *Data Portal* should have a cohesive voice, but can have a varied tone depending on context. +Just like a person, the *Data Portal* should have a cohesive voice, but can have a varied tone depending on context. As users navigate our site, or interact with us via Zendesk or email, may have different needs, and the tone of the content should adapt to fit those needs. The tone of API documentation might be different than the tone of the help page, for example. ## General Rules + ### Language The language of the Data Portal is **US English**, consistent with the language of the HCA website. @@ -39,99 +41,102 @@ The language of the Data Portal is **US English**, consistent with the language * On each page or document, introduce acronyms before using. For first use, spell out the terms and then give the acronym in parentheses. All other mentions on the page or document can use the acronym. For example: 1st use - "Human Cell Atlas (HCA)" - + 2nd use - "HCA" -* Commonly used acronyms can be used without introduction (e.g., RNA, API). +* Commonly used acronyms can be used without introduction (e.g., RNA, API). 
* Do not use casual abbreviations like “info.” ### Slang, Jargon, and technical terms * Avoid jargon if a simpler, plain English word exists. Keep in mind that DCP users will come from various backgrounds, with different knowledge sets. Even words that are fairly common within the scientific or developer communities may be unfamiliar to some users here, and should only be used if needed. * Avoid slang or overly casual terms. The *Data Portal* should feel professional. -* Where a technical term is needed, make sure to introduce it or give it context first. Before using the technical term, do a quick check to make sure it’s commonly used and understood (i.e., Google the term to see whether and how it’s used elsewhere). Avoid using any term that is ambiguous. +* Where a technical term is needed, make sure to introduce it or give it context first. Before using the technical term, do a quick check to make sure it’s commonly used and understood (i.e., Google the term to see whether and how it’s used elsewhere). Avoid using any term that is ambiguous. ### Passive vs. Active Voice -Use active voice whenever possible. - * Active voice - Access the data manifest by clicking *Download*. - * Passive voice - A data manifest can be accessed by clicking *Download*. +Use active voice whenever possible. + + * Active voice - Access the data manifest by clicking *Download*. + * Passive voice - A data manifest can be accessed by clicking *Download*. Exception: There are a few cases where the action is emphasized over the subject, especially when the actor in the sentence is unknown/ambiguous or the action was not intentional. Use your best judgement in these cases. For example: + * "The download was paused." vs. "You paused the download." * "This data set has been flagged for removal." vs. "We flagged the dataset for removal." ### Politeness + * Our voice should always be polite but not overly formal. 
* Reserve "please" or "thank you" for cases where we are asking the user to do a task that is particularly difficult or doesn't align with their primary goal for being on the site. For example: - * YES: "Click here to download a manifest.' - * NO: "Please click here to download a manifest." - - * YES: "Thank you for completing our survey." - * NO: "Thank you for visiting our site." + + * YES: "Click here to download a manifest." + * NO: "Please click here to download a manifest." + * YES: "Thank you for completing our survey." + * NO: "Thank you for visiting our site." ### Contractions -Commonly used contractions like “don’t”, “it’s”, “isn’t”, “doesn’t”, “we’d”, etc. can be used in order to make our content a bit more conversational and human. However, for documentation we recommend NOT contracting the 2 words to help emphasize the point; for example, ‘does not’ (or ‘does NOT’) is likely to be more clear to a reader than ‘doesn’t’, especially if the person is not a native English speaker. Use your judgement on this issue -- you don’t want to sound excessively formal but you do want your documentation to be clear to any reader. + +Commonly used contractions like “don’t”, “it’s”, “isn’t”, “doesn’t”, “we’d”, etc. can be used in order to make our content a bit more conversational and human. However, for documentation we recommend NOT contracting the 2 words to help emphasize the point; for example, ‘does not’ (or ‘does NOT’) is likely to be more clear to a reader than ‘doesn’t’, especially if the person is not a native English speaker. Use your judgement on this issue -- you don’t want to sound excessively formal but you do want your documentation to be clear to any reader. ### Text Formatting + * Use italics and capitalization to refer to site elements within the *Data Portal*, and specific datasets. For example: + * Visit *Explore* to view available datasets. * The *Census of Immune Cells* dataset contains about 530,000 cells. 
- * Capitalize, but DON'T italicize our services, like Ingest Service, Data Store, etc. - * Capitalize the words in titles, for example: Use Title Caps, not Use title caps +* Capitalize two hyphenated words in title. For example: -* Capitalize two hyphenated words in title -For example: * Contributor-Generated Matrices * DCP-Generated Matrices - -* Spell out numbers when they begin a sentence or for the numbers zero through five, unless referring to a measurement or when comparing to other numbers. Otherwise, use the numeral. For example: - * There are four main sections in the *Data Portal*. - * These 2 visualization portals are better suited to this use case than the other 10 on the site. - * This project has 24 specimens. - * Download up to 1.5 Tb of data at a time. +* Spell out numbers when they begin a sentence or for the numbers zero through five, unless referring to a measurement or when comparing to other numbers. Otherwise, use the numeral. For example: + * There are four main sections in the *Data Portal*. + * These 2 visualization portals are better suited to this use case than the other 10 on the site. + * This project has 24 specimens. + * Download up to 1.5 Tb of data at a time. * When referring to a file type, use all uppercase (e.g., PDF, TXT, or "Download the TXT file on this page."). When referring to a specific file, use all lowercase (e.g., brain.pdf). +* When referring to DCP or contributor matrix types, use a hyphen followed by lowercase (unless using in title). For example: -* When referring to DCP or contributor matrix types, use a hyphen followed by lowercase (unless using in title) -For example: - * DCP-generated matrix - * Contributor-generated matrix - + * DCP-generated matrix + * Contributor-generated matrix * Each sentence, even a sentence fragment, should start with a capital letter and end with a period. - * Keep paragraph size to 3-4 sentences OR LESS. Add subheadings to break up longer paragraphs, for ease of reading on the web. 
### Graphics and Color -* Make sure that the colors in your graphics are color-blind friendly. There are many helpful articles online about choosing the best colors; for example, [How to Optimize Charts For Color Blind Readers Using Color Blind Friendly Palettes](https://venngage.com/blog/color-blind-friendly-palette/) - + +* Make sure that the colors in your graphics are color-blind friendly. There are many helpful articles online about choosing the best colors; for example, [How to Optimize Charts For Color Blind Readers Using Color Blind Friendly Palettes](https://venngage.com/blog/color-blind-friendly-palette/) ### How to refer to users and team members -* While our team’s name is officially, the “Data Coordination Platform team” or DCP team, this overly technical to surface to Data Portal users. Instead, refer to the DCP team as “the HCA Portal team”. - * Use “we”, “us”, “our” when writing about our team to give the writing a more human feel. - + +* While our team’s name is officially the “Data Coordination Platform team” or DCP team, this is overly technical to surface to Data Portal users. Instead, refer to the DCP team as “the HCA Portal team”. + + * Use “we”, “us”, “our” when writing about our team to give the writing a more human feel. * Write to users in the second person, using “you”, “yours”. - * How to access your data. - * AVOID using third person to describe user actions, such as "A researcher can.." or "One may...", as it's overly stiff for our voice. + + * How to access your data. + * AVOID using third person to describe user actions, such as "A researcher can..." or "One may...", as it's overly stiff for our voice. ### Word list + [Word list decision doc](https://docs.google.com/document/d/1tEjnSrhOs_FoiSO9hDOKS9z8Qbk4kV_f9HoM-i5WCkQ/edit) Words: + * **Data Portal** - the name of our site, and the generic name for user interactions with the site. * For example: Contribute your data to the HCA *Data Portal*. 
- * Always caplitalized + * Always capitalized * **Data Coordination Platform/DCP** - the name of our platform, used only in more technical documents. * For example: Learn more about how you can reuse portions of our Data Coordination Platform. * Always capitalized - * ALways introduce before using the acronym + * Always introduce before using the acronym * **Dataset** - not data set * **Homepage** - not Home page -* **Data Processing Pipelines** should replace "Secondary Analysis" and "Secondary Analysis Service" +* **Data Processing Pipelines** should replace "Secondary Analysis" and "Secondary Analysis Service" * **Analysis Applications** should replace "Tertiary Analysis" Wording for a section about Our Values can be found at the end of [this document](https://docs.google.com/document/d/1xzUXnozewMhmePqYmFzfFpwoI-UdZd6ds3EFx1Cq7EQ/edit#heading=h.yrochflczb49). diff --git a/content/document/creating-content/creating-a-new-page.md b/content/document/creating-content/creating-a-new-page.md index f07007121..f83183573 100644 --- a/content/document/creating-content/creating-a-new-page.md +++ b/content/document/creating-content/creating-a-new-page.md @@ -1,8 +1,8 @@ --- -path: "/document/creating-content/creating-a-new-page" date: "2018-05-30" -title: "Creating a New Page" description: "How to create a new page in the HCA Data Portal." +path: "/document/creating-content/creating-a-new-page" +title: "Creating a New Page" --- # Creating a New Page @@ -14,27 +14,24 @@ To create a new page, we need to: 1. Add the page content. 1. Add the page to the site-map.yaml file in the Data Portal Repository. - ## Adding the new .md File. -You can create the new .md file using the github interface. +You can create the new .md file using the GitHub interface. Fist navigate to the parent directory and then use the `Create New File` button as shown below. -Typically the folder structure mirrors the structure or the the site. 
This is not strictly necessary as the URL to a page is defined by the site map and not the folder structures. However, it does make pages much easier to find when they need to be updated. +Typically the folder structure mirrors the structure of the site. This is not strictly necessary as the URL to a page is defined by the site map and not the folder structures. However, it does make pages much easier to find when they need to be updated. ![Create File](../_images/create-new-file.png) ->####TIP ->Note that you can not create an empty directory using the Github interface. However you can create a directory while adding a file by adding the new directory name before the file name then hitting the "/" character +>#### TIP +>Note that you can not create an empty directory using the GitHub interface. However you can create a directory while adding a file by adding the new directory name before the file name then hitting the "/" character ![Create Folder](../_images/create-folder.gif) - ## Create the "Front Matter" Metadata describing the page and indicating its publication date, title ane URL are held in the pages "Front Matter". Front matter is a yaml section that lists key values pairs of configuration for the page. - ``` --- path: "/document/creating-content/example-page" @@ -42,15 +39,15 @@ Metadata describing the page and indicating its publication date, title ane URL title: "Example Page" --- ``` - - >####TIP - >The front matter goes at the very top or each new page. - - ## Add the Page Content - - With the front matter created its time to add the page content in mardown. - See on of the many [markdown guides](https://github.com/adam-p/markdown-here/wiki/Markdown-Cheatsheet) on the internet - - ## Add the page URL to the site-map.yaml - To get your page added to the site-map and deployed, please open a pull request for for your new page in the [Github Project](https://github.com/HumanCellAtlas/data-portal-content/pulls) for the data portal repository. 
- + +>#### TIP +>The front matter goes at the very top of each new page. + +## Add the Page Content + +With the front matter created it's time to add the page content in markdown. +See one of the many [markdown guides](https://github.com/adam-p/markdown-here/wiki/Markdown-Cheatsheet) on the internet. + +## Add the page URL to the site-map.yaml + +To get your page added to the site-map and deployed, please open a pull request for your new page in the [GitHub Project](https://github.com/HumanCellAtlas/data-portal-content/pulls) for the data portal repository. diff --git a/content/document/creating-content/creating-links.md b/content/document/creating-content/creating-links.md index 8295406a1..0e7fd6c54 100644 --- a/content/document/creating-content/creating-links.md +++ b/content/document/creating-content/creating-links.md @@ -1,49 +1,43 @@ --- -path: "/document/creating-content/creating-links" date: "2018-05-30" -title: "Creating Links" description: "How to create links in the HCA Data Portal." +path: "/document/creating-content/creating-links" +title: "Creating Links" --- # Creating Links in Markdown -Markdown syntax for a hyperlink is square brackets followed by parentheses. The square brackets hold the text, the parentheses hold the link. +Markdown syntax for a hyperlink is square brackets followed by parentheses. The square brackets hold the text, the parentheses hold the link. ``` [Link text Here] (https://link-url-here.org) - ``` - -## Internal Lniks +## Internal Links For internal links (links to other data portal pages) we need to follow a few rules: 1. Use the "relative path" to the page omitting the protocol and domain name. For example use `/document/creating-content/creating-links` instead of `https://dev.data.humancellatlas.org`. 1. For the link address use the `path` of the page in the site regardless of the location of the file in the repository. -1. Do not use the `.md` suffix. +1. Do not use the `.md` suffix. 1. 
Don't forget to start the path with a forward slash: `/`. - Putting this all together an internal link looks like: ``` [An Internal Link](/document/creating-content/editing-an-existing-page) - ``` This renders as: [An Internal Link](/document/creating-content/editing-an-existing-page) ->####Tip: +> #### TIP > Don't forget the leading slash "/" in the internal link paths. - ### Links to a Page Heading -It is possible to link directly to any heading on an internal page as the page headings each have an anchor. +It is possible to link directly to any heading on an internal page as the page headings each have an anchor. - -You can find out the link to a page heading by clicking on link icon that appears when you hover over a heading. After you click on the link symbol, the url to that heading will be in your browsers address bar. Copy the link and stip off the method and domain to make a relative url. +You can find out the link to a page heading by clicking on the link icon that appears when you hover over a heading. After you click on the link symbol, the url to that heading will be in your browser's address bar. Copy the link and strip off the method and domain to make a relative url. ![Link Icon](../_images/internal-link.png) @@ -51,7 +45,6 @@ Then use the path to create a link like so: ``` [An Internal Link to a Section Heading](/document/creating-content/editing-an-existing-page#editpreview-the-markdown) - ``` This renders like: @@ -61,17 +54,11 @@ This renders like: ## External Links The markdown for external links is the same for internal links except we use the full url. 
- + ``` [This is an external Lnik] (https://www.humancellatlas.org/) - ``` This displays as: -[This is an external Lnik] (https://www.humancellatlas.org/) - - - - - +[This is an external Link] (https://www.humancellatlas.org/) diff --git a/content/document/creating-content/editing-an-existing-page.md b/content/document/creating-content/editing-an-existing-page.md index d648c0cdc..793286b63 100644 --- a/content/document/creating-content/editing-an-existing-page.md +++ b/content/document/creating-content/editing-an-existing-page.md @@ -1,35 +1,37 @@ --- -path: "/document/creating-content/editing-an-existing-page" date: "2018-05-30" -title: "Editing an Existing Page" description: "How to edit an existing page in the HCP Data Portal." +path: "/document/creating-content/editing-an-existing-page" +title: "Editing an Existing Page" --- -# Editing an Existing Page Directly in Github -The HCA Data Portal pages are written in [markdown](https://guides.github.com/features/mastering-markdown/) and stored in the [HumanCellAtlas/data-portal-content](https://github.com/HumanCellAtlas/data-portal-content) repository on github. +# Editing an Existing Page Directly in GitHub + +The HCA Data Portal pages are written in [markdown](https://guides.github.com/features/mastering-markdown/) and stored in the [HumanCellAtlas/data-portal-content](https://github.com/HumanCellAtlas/data-portal-content) repository on GitHub. -## Find the Page Source in Github -To edit an HCA Data Portal content page, first find the page in the content directory of the data-portal-content github repository, located here: https://github.com/HumanCellAtlas/data-portal-content/tree/master/content. - - Pages are generally in a folder structure that follows the site outline. It is possible, however, for a page to be anwhere in the conntent folder as the folder structure does not drive the site outline. 
+## Find the Page Source in GitHub -## Enter Edit Mode on the Github Markdown Editor -Once you have found the page, you can edit and do a basic preview of the page in github. To do this, click on the edit icon on the right hand side of the github markdown page. +To edit an HCA Data Portal content page, first find the page in the content directory of the data-portal-content GitHub repository, located here: https://github.com/HumanCellAtlas/data-portal-content/tree/master/content. -Selecting the edit button will open the file in the github web editor and let you make changes to the page content. The page content is in standard markdown. +Pages are generally in a folder structure that follows the site outline. It is possible, however, for a page to be anywhere in the content folder as the folder structure does not drive the site outline. + +## Enter Edit Mode on the GitHub Markdown Editor + +Once you have found the page, you can edit and do a basic preview of the page in GitHub. To do this, click on the edit icon on the right-hand side of the GitHub markdown page. + +Selecting the edit button will open the file in the GitHub web editor and let you make changes to the page content. The page content is in standard markdown. - ![Edit an Existing Page](../_images/edit-existing-page.png "Edit an Existing Page") ## Edit/Preview the Markdown -Once in the editor, you can update content as you like and even preview the markdown by selecing the "Preview Changes" tab. -The preview will be styled for the github site, not for the HCA site, but you can see if your image links work and check general formatting of the document. +Once in the editor, you can update content as you like and even preview the markdown by selecting the "Preview Changes" tab. ->Note that the github editor does not highlight spell check errors so be vigilant. 
+The preview will be styled for the GitHub site, not for the HCA site, but you can see if your image links work and check general formatting of the document. +> Note that the GitHub editor does not highlight spell check errors so be vigilant. -![Edit in Github](../_images/edit-mode.png "Edit an Existing Page") +![Edit in GitHub](../_images/edit-mode.png "Edit an Existing Page") ### Front Matter @@ -37,21 +39,19 @@ At the top of the file you will see a "Front Matter" sections between the `---` ``` --- - path: "/document/creating-content/example-page" date: "2018-05-30" + path: "/document/creating-content/example-page" title: "Example Page" --- ``` ->####Tip ->Note that any changes to the `path:` section need a corresponding change to a `site-map.yaml` file in the data-data portal repository. Please create [Github/Zenhub](https://app.zenhub.com/workspace/o/humancellatlas/data-portal-content/boards?repos=130759918) ticket for updating the stiemap. +> #### TIP +> Note that any changes to the `path:` section need a corresponding change to a `site-map.yaml` file in the data-portal repository. Please create a [GitHub/ZenHub](https://app.zenhub.com/workspace/o/humancellatlas/data-portal-content/boards?repos=130759918) ticket for updating the sitemap. ## Create a Pull Request -Once you are ready, request a review from a peer, by creating a pull request in github using the pull reqest UI at the bottom of the edit or preview mode editor. +Once you are ready, request a review from a peer, by creating a pull request in GitHub using the pull request UI at the bottom of the edit or preview mode editor. Once your pull request is merged it will be deployed to the site. 
-![Create a Pull Reqeust](../_images/edit-mode.png "Edit an Existing Page") - - +![Create a Pull Request](../_images/edit-mode.png "Edit an Existing Page") diff --git a/content/document/creating-content/example-page.md b/content/document/creating-content/example-page.md index 784070118..72e1ed011 100644 --- a/content/document/creating-content/example-page.md +++ b/content/document/creating-content/example-page.md @@ -9,39 +9,39 @@ description: "An example page in the HCA Data Portal." The HCA data portal content is written by community members in markdown. -Markdown is lightweight markup language with plain text formatting syntax. It is designed to be converetd to HTML by markdown prosessing tools. There are many markdown guides and cheatsheets available on the internet for example here: +Markdown is a lightweight markup language with plain text formatting syntax. It is designed to be converted to HTML by markdown processing tools. There are many markdown guides and cheatsheets available on the internet for example here: https://github.com/adam-p/markdown-here/wiki/Markdown-Cheatsheet#links -Most of what you see in the general guides will work on ths site. Below we give specific examples of basic use caes in the context of the data portal. +Most of what you see in the general guides will work on this site. Below we give specific examples of basic use cases in the context of the data portal. -Here for example is a demo of adding an immage to a page: +Here for example is a demo of adding an image to a page: -![Data Portal](../_images/data-portal.png "Data Potal Learn Section") +![Data Portal](../_images/data-portal.png "Data Portal Learn Section") First upload the image to the _images folder at the same level as your source document. 
-Then, link to the the image using something like: +Then, link to the image using something like: ```![Data Portal](../_images/data-portal.png "Data Potal Learn Section")``` -## Viewing the Source Markdown for this page on Github +## Viewing the Source Markdown for this page on GitHub The raw source for this page is located at: https://raw.githubusercontent.com/HumanCellAtlas/data-portal-content/master/content/document/creating-content/example-page.md -## Viewing a Preview of this page on Github +## Viewing a Preview of this page on GitHub The preview source for this page is located at: https://github.com/HumanCellAtlas/data-portal-content/blob/master/content/document/creating-content/example-page.md ->####Tip ->I am a blockquote with only one paragrah +>#### TIP +>I am a blockquote with only one paragraph >I am a blockquote with no tip heading >But I have a second paragraph ->####Tip +>#### TIP >Here is a blockquote with two paragraphs and >This is the second one diff --git a/content/document/creating-content/overview.md b/content/document/creating-content/overview.md index 2361a3c78..046a46519 100644 --- a/content/document/creating-content/overview.md +++ b/content/document/creating-content/overview.md @@ -1,8 +1,8 @@ --- -path: "/document/creating-content/overview" date: "2018-05-30" -title: "Overview" description: "Overview of how to create content in the HCA Data Portal." +path: "/document/creating-content/overview" +title: "Overview" --- # Overview @@ -17,12 +17,13 @@ In addition to generating static pages optimized for download, the Data Portal w # Content -The site content is written in [markdown](https://en.wikipedia.org/wiki/Markdown) and stored in the `/content` directory of the [Data Portal Content Repository](https://github.com/HumanCellAtlas/data-portal-content) in Github. 
+The site content is written in [markdown](https://en.wikipedia.org/wiki/Markdown) and stored in the `/content` directory of the [Data Portal Content Repository](https://github.com/HumanCellAtlas/data-portal-content) in GitHub. # Site Organization ## Sections -The site is layed out in a 4 level hierarchy with the top level sections being: + +The site is laid out in a 4 level hierarchy with the top level sections being: 1. Explore (links to the Data Browser) 1. Analyze @@ -30,33 +31,18 @@ The site is layed out in a 4 level hierarchy with the top level sections being: 1. Learn 1. About - ## Sub Sections Each section may have zero to 3 sub sections. - ## Left Nav Level 1 -Each sub section may have a left hand nav of 0 or more documents. +Each sub section may have a left-hand nav of 0 or more documents. ## Left Nav Level 2 + Each top level left nav document may have 0 or more child documents. #Site Map -The site-map is controlled by a site-map.yaml file in the Data Portal Repository. This file is a json file that defines the structure of website. Reach out on the [Content Team Slack Channel](https://slack.com/app_redirect?channel=CA53K2C3A&team=T2EQJFTMJ) for help getting modifications made to the site map. - - - - - - - - - - - - - - +The site-map is controlled by a site-map.yaml file in the Data Portal Repository. This file is a json file that defines the structure of website. Reach out on the [Content Team Slack Channel](https://slack.com/app_redirect?channel=CA53K2C3A&team=T2EQJFTMJ) for help getting modifications made to the site map. diff --git a/content/document/creating-content/using-images.md b/content/document/creating-content/using-images.md index e7c490365..af194bb70 100644 --- a/content/document/creating-content/using-images.md +++ b/content/document/creating-content/using-images.md @@ -1,8 +1,8 @@ --- -path: "/document/creating-content/using-images" date: "2018-05-30" -title: "Using Images" description: "How to use images in the HCA Data Portal." 
+path: "/document/creating-content/using-images" +title: "Using Images" --- # Using Images @@ -11,9 +11,9 @@ To include an image in your page: ## Upload the Image to an "_images" Folder -Using the github web interface navigate to the parent folder of your page and add the image an `_images` folder. +Using the GitHub web interface navigate to the parent folder of your page and add the image to an `_images` folder. -In the github editor you can not create an empty folder so if the folder does not exist it will be crated during the upload step. +In the GitHub editor you can not create an empty folder so if the folder does not exist it will be created during the upload step. ## Reference the Image in the Markdown @@ -29,15 +29,9 @@ This renders like: ![Cell Image](../_images/jumbotron-cell-mobile.png "That Cell!") ->####TIP +>#### TIP >You can check if the image link is correct by using the preview link in the markdown editor. ## Controlling Image Size Currently images all expand to the same size in the page. - - - - - - diff --git a/content/feedback/feedback/feedback.md b/content/feedback/feedback/feedback.md index 4816dff09..b5b6723df 100644 --- a/content/feedback/feedback/feedback.md +++ b/content/feedback/feedback/feedback.md @@ -1,14 +1,12 @@ --- -path: "/feedback" date: "2018-05-03" -title: "Feedback" description: "The Human Cell Atlas Data Coordination Platform team needs your feedback to help us improve and refine the DCP." +path: "/feedback" +title: "Feedback" --- # HCA DCP Feedback The Human Cell Atlas Data Coordination Platform team needs your feedback to help improve and refine the DCP. - - -Please email us at [data-help@humancellatlas.org](mailto:data-help@humancellatlas.org) to ask questions or report issues. +Please email us at [data-help@humancellatlas.org](mailto:data-help@humancellatlas.org) to ask questions or report issues. 
diff --git a/content/guides/userguides/consumer-vignettes/export-to-terra.md b/content/guides/userguides/consumer-vignettes/export-to-terra.md index 80f8315d4..edfa3ab5d 100644 --- a/content/guides/userguides/consumer-vignettes/export-to-terra.md +++ b/content/guides/userguides/consumer-vignettes/export-to-terra.md @@ -1,65 +1,48 @@ --- -path: "/guides/userguides/consumer-vignettes/export-to-terra" date: "2019-09-17" -title: "Exporting HCA Data to Terra" -subTitle: "" description: "This tutorial will walk you through exporting search results from the HCA Data Explorer to Terra." +path: "/guides/userguides/consumer-vignettes/export-to-terra" +subTitle: "" +title: "Exporting HCA Data to Terra" --- # Exporting Search Results from the HCA Data Explorer to Terra +In this tutorial, you will learn how to send search results from the HCA Data Explorer to Terra and how to run a basic workflow with that data. -In this tutorial, you will learn how to send search results from the HCA Data -Explorer to Terra and how to run a basic workflow with that data. - -This tutorial assumes some familiarity with the aforementioned tools. If you are -not familiar with Terra, see the [Overview of Terra](#overview-of-terra) section -below. +This tutorial assumes some familiarity with the aforementioned tools. If you are not familiar with Terra, see the [Overview of Terra](#overview-of-terra) section below. You should also be acquainted with the content in this tutorial: -- [Accessing HCA Data and Metadata](../quick-start-guide) +- [Accessing HCA Data and Metadata](../quick-start-guide) -Terra [recommends the Google Chrome browser](https://support.terra.bio/hc/en-us/articles/360028235911), which we -follow in this tutorial. +Terra [recommends the Google Chrome browser](https://support.terra.bio/hc/en-us/articles/360028235911), which we follow in this tutorial. 
-Overview of Terra ------------------ +## Overview of Terra -[Terra](https://app.terra.bio/) is a scalable cloud platform for biomedical research. Terra offers the -ability to use data, tools, and workflows to do interactive analysis in the -cloud. +[Terra](https://app.terra.bio/) is a scalable cloud platform for biomedical research. Terra offers the ability to use data, tools, and workflows to do interactive analysis in the cloud. Visit [Terra Support](https://support.terra.bio/hc/en-us) to learn how to [register for a Terra account](https://support.terra.bio/hc/en-us/articles/360028235911) and [get started](https://support.terra.bio/hc/en-us/sections/360006866192) with analyzing data in the cloud. After registering, you can view multiple workspaces dedicated to using HCA data, such as the: + * [Optimus workspace](https://app.terra.bio/#workspaces/featured-workspaces-hca/HCA_Optimus_Pipeline) for processing 10x data with the [Optimus Pipeline](/pipelines/optimus-workflow). * [Smart-seq2 workspace](https://app.terra.bio/#workspaces/featured-workspaces-hca/HCA%20Smart-seq2%20Multi%20Sample%20Pipeline) for processing Smart-seq2 data with the [Smart-seq2 Multi-Sample Pipeline](/pipelines/smart-seq2-workflow). * [Intro-to-HCA-data-on-Terra workspace](https://app.terra.bio/#workspaces/featured-workspaces-hca/Intro-to-HCA-data-on-Terra) for exporting HCA data and analyzing it with community tools like [Seurat](https://satijalab.org/seurat/index.html), [Scanpy](https://scanpy-tutorials.readthedocs.io/en/latest/index.html), [Cumulus](https://cumulus.readthedocs.io/en/latest/index.html), and [Pegasus](https://pegasus.readthedocs.io/en/stable/#). 
+## Overview of Dockstore -Overview of Dockstore ---------------------- - -[Dockstore](https://dockstore.org/) is a platform for sharing bioscience tools by wrapping them in Docker -containers and describing their use with high-level workflow languages like the Common Workflow -Language (CWL) and the Workflow Description Language (WDL). +[Dockstore](https://dockstore.org/) is a platform for sharing bioscience tools by wrapping them in Docker containers and describing their use with high-level workflow languages like the Common Workflow Language (CWL) and the Workflow Description Language (WDL). For more information about how to use the Dockstore, see the [Dockstore documentation](https://docs.dockstore.org/en/develop/). -Step one: finding BAM files with the HCA Data Explorer ------------------------------------------------------- +## Step one: finding BAM files with the HCA Data Explorer You can use the HCA Data Explorer to find data to export to Terra. -The Data Explorer lists projects with data available for download from the Data -Store and lets you filter the data for a number of attributes. +The Data Explorer lists projects with data available for download from the Data Store and lets you filter the data for a number of attributes. -Using the Data Explorer, select some data that you are interested in. Choose anything -that looks interesting - we will be running a really simple workflow that -generates MD5 checksums of files, so the type of data is not important. -When you have found a data set of interest, click on the big blue *Export -Selected Data* button at the top right of the page. You will see something like -this: +Using the Data Explorer, select some data that you are interested in. Choose anything that looks interesting - we will be running a really simple workflow that generates MD5 checksums of files, so the type of data is not important. 
+When you have found a data set of interest, click on the big blue *Export Selected Data* button at the top right of the page. You will see something like this: @@ -67,8 +50,7 @@ this: -Click on the *Export to Terra* button. You will then see a page like this where -you can select what kind of data to export: +Click on the *Export to Terra* button. You will then see a page like this where you can select what kind of data to export: @@ -78,14 +60,11 @@ you can select what kind of data to export: Again, choose anything that looks interesting. -When you click the *Request Export* button, the Data Explorer will process your -request, and you will be redirected to Terra. +When you click the *Request Export* button, the Data Explorer will process your request, and you will be redirected to Terra. -Step two: importing data to Terra and finding a workflow in Dockstore ---------------------------------------------------------------------- +## Step two: importing data to Terra and finding a workflow in Dockstore -Select a Terra workspace to import your selected data into. Once you have selected the -workspace, you will see a page like this, showing the data you just exported: +Select a Terra workspace to import your selected data into. Once you have selected the workspace, you will see a page like this, showing the data you just exported: @@ -93,11 +72,7 @@ workspace, you will see a page like this, showing the data you just exported: -Next, we find a workflow to run with the data we've just exported. For this -tutorial, we are looking for *dockstore-wdl-workflow-md5sum*, which will -generate an MD5 checksum for a file (or files) that we provide. We will need -to import this workflow from Dockstore. To do that, click on the *Workflows* -tab at the top of the page, then on the big square *Find a Workflow* button. +Next, we find a workflow to run with the data we've just exported. 
For this tutorial, we are looking for *dockstore-wdl-workflow-md5sum*, which will generate an MD5 checksum for a file (or files) that we provide. We will need to import this workflow from Dockstore. To do that, click on the *Workflows* tab at the top of the page, then on the big square *Find a Workflow* button. It will look something like this: @@ -106,10 +81,7 @@ It will look something like this: -Click on the *Dockstore* link at the bottom of the pop-up. Dockstore is a -workflow repository where we will find the workflow we want to run. Once -Dockstore has loaded, search for `md5sum`. The search box is on the left -side of the page. Results should load instantly. Look for a workflow named +Click on the *Dockstore* link at the bottom of the pop-up. Dockstore is a workflow repository where we will find the workflow we want to run. Once Dockstore has loaded, search for `md5sum`. The search box is on the left side of the page. Results should load instantly. Look for a workflow named `briandoconnor/dockstore-workflow-md5sum/dockstore-wdl-workflow-md5sum`. Once you find it, click on it. You will see this: @@ -119,10 +91,7 @@ Once you find it, click on it. You will see this: -Note the blue *Terra* button at the bottom left which will let us load this -workflow in Terra. Click on the button and load the workflow into your -workspace. Once you have, Terra will ask you to select an input to this -workflow: +Note the blue *Terra* button at the bottom left which will let us load this workflow in Terra. Click on the button and load the workflow into your workspace. Once you have, Terra will ask you to select an input to this workflow: @@ -130,26 +99,15 @@ workflow: -Step three: running the workflow in Terra ------------------------------------------ +## Step three: running the workflow in Terra -On this screen, we want to select a single file from the data that we exported -and find the MD5 checksum of that file. 
Make sure that the *Process multiple -workflows* radio button is selected, then choose a single file to process by -navigating to *Select Data* > *Choose specific rows to process*. +On this screen, we want to select a single file from the data that we exported and find the MD5 checksum of that file. Make sure that the *Process multiple workflows* radio button is selected, then choose a single file to process by navigating to *Select Data* > *Choose specific rows to process*. -Next, tell the workflow how to find the file you selected by setting the -*inputFile* variable. Click on the *Attribute* field (red box in the -screenshot above). +Next, tell the workflow how to find the file you selected by setting the *inputFile* variable. Click on the *Attribute* field (red box in the screenshot above). -Select the DRS URL attribute (something like `this.__bam__.drs_url`). Once -you're done, click *Save*. You will see a blue *Run Analysis* button pop up. -Click that one, and confirm your input when prompted. Terra's running the -workflow now - walk away for a few minutes, grab a coffee, stretch. You -deserve it. +Select the DRS URL attribute (something like `this.__bam__.drs_url`). Once you're done, click *Save*. You will see a blue *Run Analysis* button pop up. Click that one, and confirm your input when prompted. Terra's running the workflow now - walk away for a few minutes, grab a coffee, stretch. You deserve it. -When you come back, refresh the page. Hopefully, your workflow will be done -running. If it is, you will seem something like this: +When you come back, refresh the page. Hopefully, your workflow will be done running. If it is, you will see something like this: @@ -159,8 +117,4 @@ running. If it is, you will seem something like this: Note the green checkmark in the *Status* column. -Congrats!
If you want to see the results of this workflow execution, click -on the workflow ID (the UUID on the right of the page), which will show the -data generated by this workflow execution. - - +Congrats! If you want to see the results of this workflow execution, click on the workflow ID (the UUID on the right of the page), which will show the data generated by this workflow execution. diff --git a/content/guides/userguides/data-lifecycle.md b/content/guides/userguides/data-lifecycle.md index c978bf61a..6d2259e6a 100644 --- a/content/guides/userguides/data-lifecycle.md +++ b/content/guides/userguides/data-lifecycle.md @@ -1,8 +1,8 @@ --- -path: "/guides/userguides/data-lifecycle" date: "2018-05-03" -title: "Data Lifecycle" description: "A description of the general process of data flow through the components of the DCP." +path: "/guides/userguides/data-lifecycle" +title: "Data Lifecycle" --- # Data Lifecycle @@ -50,8 +50,9 @@ When raw data moves into the Data Store a notification is triggered and sent to See the *Pipelines* Section for [detailed information](/pipelines) about the DCP Data Processing Pipelines; [guides for developing pipelines](/pipelines/pipeline-processing-development-guides) are also in *Pipelines*. ## Data Access by End Users + Access to the Data Store is supported with REST API (and associated CLI) using the Data Store's Consumer API. In addition, we have developed a Data Browser, accessible from the *Explore* section, that enables extensive browsing of the data through this Data Portal. Data will also be accessible through tools and portals developed by the community. 
## Data Use Policy -For information regarding data sharing and data use, please see our [Data Release Policy](https://www.humancellatlas.org/data-release-policy/) +For information regarding data sharing and data use, please see our [Data Release Policy](https://www.humancellatlas.org/data-release-policy/) diff --git a/content/guides/userguides/exploring-projects.md b/content/guides/userguides/exploring-projects.md index fe9e996e1..e121929e7 100644 --- a/content/guides/userguides/exploring-projects.md +++ b/content/guides/userguides/exploring-projects.md @@ -1,9 +1,9 @@ --- -path: "/guides/userguides/exploring-projects" date: "2018-05-30" -title: "Exploring Projects" -draft: false description: "Overview of exploring projects in the HCA Data Browser." +draft: false +path: "/guides/userguides/exploring-projects" +title: "Exploring Projects" --- # Exploring Projects @@ -14,9 +14,8 @@ Projects are a basic unit of data organization in the Data Coordination Platform 1. the [processes](/metadata/dictionary/process/analysis_process) and [protocols](/metadata/dictionary/protocol/aggregate_generation_protocol) used to collect and process the cells prior to sequencing 1. the [sequencing](/metadata/dictionary/protocol/sequencing_protocol) methods used 1. details about the [project](/metadata/dictionary/project/project) contributors and their institutions - -This [Metadata](/metadata/dictionary/process/analysis_process) is included in the project's Metadata Manifest (TSV file). When the DCP [processes](/pipelines) the contributor's raw data with uniform pipelines, this processing information is also added to the Metadata Manifest. +This [Metadata](/metadata/dictionary/process/analysis_process) is included in the project's Metadata Manifest (TSV file). When the DCP [processes](/pipelines) the contributor's raw data with uniform pipelines, this processing information is also added to the Metadata Manifest. 
## Finding a Project of Interest @@ -24,7 +23,6 @@ The Data Portal Explore page lists all projects by title along with key project ![Browsing Projects in the Data Explorer](../_images/explore_dcp_2.png "Exploring Projects") - ### Choosing the DCP 2.0 View or the DCP 1.0 View We are incrementally adding new projects and data for the DCP 2.0 (see the DCP [Updates](/dcp-updates) for details). @@ -85,7 +83,8 @@ Scroll to identify the relevant matrix and then select the download icon. DCP-generated matrices in the DCP 2.0 View are available in Loom format only (see the [Loom documentation](http://loompy.org/) for details on loading and viewing Loom files), whereas matrices in the DCP 1.0 View are available in MTX, CSV, Loom formats. > **DCP 1.0 Matrix Deprecation Notice:** -The DCP is reprocessing DCP 1.0 data and will deprecate all DCP 1.0 matrices (MTX, CSV, and Loom files) once reprocessing is complete. All reprocessed and newly processed projects in the DCP 2.0 will have matrices in Loom format only. +> +> The DCP is reprocessing DCP 1.0 data and will deprecate all DCP 1.0 matrices (MTX, CSV, and Loom files) once reprocessing is complete. All reprocessed and newly processed projects in the DCP 2.0 will have matrices in Loom format only. The type of count available in the DCP-generated matrix depends on the pipeline used for data processing. DCP 2.0 matrices generated with Optimus (10x data) include raw counts whereas matrices generated with Smart-seq2 include TPMs and estimated counts. @@ -100,5 +99,3 @@ To download the contributor-generated matrix, select the **"Project Matrices"** Scroll to the **Contributor-Generated Matrices** section and select the download icon. 
![Contributor Matrices](../_images/contributor_matrices.png "Contributor Matrices") - - diff --git a/content/guides/userguides/matrices.md b/content/guides/userguides/matrices.md index fb1d6528a..152d111d2 100644 --- a/content/guides/userguides/matrices.md +++ b/content/guides/userguides/matrices.md @@ -1,22 +1,24 @@ --- -path: "/guides/userguides/matrices" date: "2020-11-13" -title: "Matrices" description: "An overview of the available matrices" +path: "/guides/userguides/matrices" +title: "Matrices" --- -# DCP 2.0 Data Matrix Overview +# DCP 2.0 Data Matrix Overview + Cell-by-gene matrices (commonly referred to as "count matrices" or "expression matrices") are files that contain a measure of gene expression for every gene in every cell in your single-cell sample(s). These matrices can be used for downstream analyses like filtering, clustering, differential expression testing, and annotating cell types. This overview describes the Data Coordination Platform (DCP) 2.0 matrix types, how to download them, and how to link them back to the HCA metadata. Overall, three types of matrices are currently available for DCP 2.0 data: + - DCP-generated matrices (Loom file format) for projects - DCP-generated matrices (Loom file format) for individual library preparations within a project - Contributor-generated matrices (variable file format) provided by the project-contributor - ## DCP-Generated Matrices + Each DCP 2.0 project that is processed with [uniform pipelines](/pipelines) has two types of DCP-generated matrices available for download: - [project-level matrices](#dcp-project-level-matrices) @@ -24,39 +26,39 @@ Each DCP 2.0 project that is processed with [uniform pipelines](/pipelines) has Both matrix types are in Loom file format, and contain standard [metrics](/pipelines/hca-pipelines/data-processing-pipelines/qc-metrics) and counts that are specific to the data processing pipeline used to generate the file. 
- > For the most up-to-date information on counts and metrics, see the Matrix Overviews for the **[Smart-seq2 Pipeline](https://broadinstitute.github.io/warp/docs/Pipelines/Smart-seq2_Multi_Sample_Pipeline/Loom_schema)** and the **[Optimus Pipeline](https://broadinstitute.github.io/warp/docs/Pipelines/Optimus_Pipeline/Loom_schema) (10x data)**. - DCP-generated Loom matrices have three types of attributes containing metadata and metrics: -- **global**: information that applies to all data in the Loom (i.e. pipeline version, etc.) +- **global**: information that applies to all data in the Loom (i.e. pipeline version, etc.) - **row**: gene-specific information and metrics (one row = one gene) - - **column**: cell-specific information and metrics (one column = one cell) For more information on working with Loom attributes and format, see the [Loom documentation](http://linnarssonlab.org/loompy/index.html#). Loom files are compatible with multiple downstream community tools, including [Seurat](https://satijalab.org/seurat/index.html), [Scanpy](https://scanpy-tutorials.readthedocs.io/en/latest/index.html), [Cumulus](https://cumulus.readthedocs.io/en/latest/index.html), and [Pegasus](https://pegasus.readthedocs.io/en/stable/#). -> Step-by-step Jupyter Notebook tutorials for analyzing Loom matrices with community tools are available in the cloud-based platform [Terra](https://app.terra.bio/). After registering, get started by navigating to the [Intro-to-HCA-data-on-Terra workspace](https://app.terra.bio/#workspaces/featured-workspaces-hca/Intro-to-HCA-data-on-Terra). +> Step-by-step Jupyter Notebook tutorials for analyzing Loom matrices with community tools are available in the cloud-based platform [Terra](https://app.terra.bio/). After registering, get started by navigating to the [Intro-to-HCA-data-on-Terra workspace](https://app.terra.bio/#workspaces/featured-workspaces-hca/Intro-to-HCA-data-on-Terra). 
#### DCP-Generated Matrix Filenames + Both project matrices and library-level matrices have unique filenames. -* Project matrices have filenames in the format `---`. - * Example: the project-level matrix for "Dissecting the human liver cellular landscape by single cell RNA-seq reveals novel intrahepatic monocyte/ macrophage populations" has the filename `sc-landscape-human-liver-10XV2.loom`. + +* Project matrices have filenames in the format `---`. + * Example: the project-level matrix for "Dissecting the human liver cellular landscape by single cell RNA-seq reveals novel intrahepatic monocyte/ macrophage populations" has the filename `sc-landscape-human-liver-10XV2.loom`. ![Project Matrices Filenames](../_images/project_matrix_name.png "Matrix Name") * Library-level matrices have filenames matching the numerical ID in the HCA metadata field `sequencing_process.provenance.document_id`. - - #### DCP Project-Level Matrix Overview + Project-level matrices are Loom files that contain standardized cell-by-gene measures and metrics for all the data in a project that are of the same species, organ, and sequencing method. + * Example: If a project contains both human and mouse data, it will have one project matrix for human and one for mouse. The gene measures in project matrices vary based on the pipeline used for analysis. + * Matrices produced with the Optimus Pipeline (10x data) will have UMI-aware counts. * Matrices produced with the Smart-seq2 pipeline will have TPMs and estimated counts. * 10x matrices are minimally filtered by UMIs (only cells with 100 molecules or more are retained). @@ -69,7 +71,7 @@ Read more about each metadata field in the [Metadata Dictionary](/metadata/). 
| --- | --- | | `donor_organism.genus_species` | Species information; human or mouse | | `library_preparation_protocol.library_construction_approach` | Technology used for library preparation, i.e 10x or Smart-seq2 | -| `specimen_from_organism.organ` | Organ | +| `specimen_from_organism.organ` | Organ | | `project.project_core.project_name` | Project name | | `project.provenance.document_id` | Project id | | `input_id` | Metadata values for `sequencing_process.provenance.document_id` | @@ -77,22 +79,24 @@ Read more about each metadata field in the [Metadata Dictionary](/metadata/). More information about DCP post-processing for the project-level matrices can be found in the Matrix Overview for the [Optimus Pipeline](https://broadinstitute.github.io/warp/docs/Pipelines/Optimus_Pipeline/Loom_schema#hca-data-coordination-platform-matrix-processing) and the [Smart-seq2 Pipeline](https://broadinstitute.github.io/warp/docs/Pipelines/Smart-seq2_Multi_Sample_Pipeline/Loom_schema#table-2-column-attributes-cell-metrics) (in development). - #### DCP Library-Level Matrix Overview + Library-level matrices (also Loom files) are cell-by-gene matrices for each individual library preparation in a project. Overall, library-level matrices: + * Contain the same standardized gene (row) metrics, cell (column) metrics, and counts as the project-level matrices. * Are separated by the metadata field for library preparation, `sequencing_process.provenance.document_id`, allowing you to only use a sub-sampling of all the project's data. * Are **not filtered** by UMIs to remove cells with low numbers of molecules. * Only contain the metadata for `input_id` and `input_name` (described in the table above). * They do not contain all the metadata for species, organ, and sequencing method in the matrix global attributes. 
- ## Contributor-Generated Matrix Overview + Contributor-generated matrices are optionally provided by the data contributors and can be useful when trying to annotate cell types or when comparing results back to a contributor’s published results. When these contributor-generated matrices are available, you can download them from the individual Project page. They will vary in file format and content across projects. For questions about the Contributor-generated matrix, reach out to the contributors listed in the Project page Contact section. ## Downloading Matrices + DCP-generated project-level matrices and contributor-generated matrices may be downloaded from the "Matrices" column of the DCP Data Browser (see image below) or alternatively, from the individual Project page. ![Browsing Projects in the Data Explorer](../_images/explore_dcp_2_matrices.png "Exploring Projects") @@ -100,27 +104,24 @@ DCP-generated project-level matrices and contributor-generated matrices may be d You can also download all matrices (including library-level matrices) using a curl command as described in the [Accessing HCA Data and Metadata](../quick-start-guide) guide, or export matrices to [Terra](https://app.terra.bio/), a cloud-based platform for bioinformatic analysis (see the [Exporting to Terra](/guides/consumer-vignettes/export-to-terra) guide). ## Linking DCP-Generated Matrices to the Data Manifest (Metadata) + DCP 2.0 project-level matrices only contain some of the available project metadata (species, organs, library methods, etc.). However, there are several metadata facets in the Metadata Manifest, such as disease state or donor information, that you might want to link back to the DCP-generated cell-by-gene matrix. To link a metadata field in the Metadata Manifest back to an individual sample in a DCP- generated matrix, use the matrix `input_id` field. 
This field includes all the values for the metadata `sequencing_process.provenance.document_id`, the ID used to demarcate each library preparation. - ## Matrix Normalization and Batch Correction + Data normalization and batch correction account for technical noise introduced during sample processing, as well as differences between datasets generated from different contributors or at different times. Both techniques are crucial for identifying differentially expressed genes. Normalization and batch correction techniques vary between processing methods and individual data contributors, and may not be consistent across the matrices available from the Data Portal. - ### Normalization and Batch Correction for DCP-Generated Matrices -- Data processed with the [uniform pipelines](/pipelines) are **not** normalized across nor within projects. +- Data processed with the [uniform pipelines](/pipelines) are **not** normalized across nor within projects. - 10x matrices produced with the [Optimus Pipeline](/pipelines/optimus-workflow) only contain raw counts whereas [Smart-seq2 Pipeline](/pipelines/smart-seq2-workflow) matrices contain raw counts as well as normalized TPMs. These TPMs are calculated per individual cell (library preparation) and not across all cells within a project. - - No batch correction is performed for DCP-generated matrices. ### Normalization and Batch Correction for Contributor-Generated Matrices -- For contributor-generated matrices, normalization and/or batch correction techniques are used at the discretion of the project contributor and vary between projects. +- For contributor-generated matrices, normalization and/or batch correction techniques are used at the discretion of the project contributor and vary between projects. - To learn more about the techniques used for a particular matrix, please reach out to the Contact listed on the Project's Information page. 
- - diff --git a/content/guides/userguides/quick-start-guide.md b/content/guides/userguides/quick-start-guide.md index 08716aa96..d2036ae85 100644 --- a/content/guides/userguides/quick-start-guide.md +++ b/content/guides/userguides/quick-start-guide.md @@ -1,20 +1,22 @@ --- -path: "/guides/userguides/quick-start-guide" date: "2018-05-30" -title: "Quick Start Guide" description: "A quick start guide on accessing HCA data and metadata." +path: "/guides/userguides/quick-start-guide" +title: "Quick Start Guide" --- - # Accessing HCA Data and Metadata + This section briefly reviews how to find and download cross-project data and associated metadata using the Data Browser and curl commands. ## Finding Data + The **Explore** section of the Data Portal provides an interactive data browser. You can design a unique cohort, or data subset, by selecting various facets in the Browser's Organ, Method, Donor, Specimen sections. The Specimen's tab shows you how many specimens have been selected. It also gives an estimate of the size of the data set if the entire list were downloaded. ## Preparing Data for Export + After you identify a cohort of interest, you download the raw data, analysis files, and metadata by clicking the blue **Export Selected Data** icon on the right of the page. @@ -24,38 +26,37 @@ After you identify a cohort of interest, you download the raw data, analysis fil This will open a new page giving you the option to: - 1) Download Selected Data Using "curl" - 2) Download a File Manifest with Metadata for the Selected Data - 3) Export to Terra +1) Download Selected Data Using "curl" +2) Download a File Manifest with Metadata for the Selected Data +3) Export to Terra ## Downloading Data with a Curl Command -To download the raw and processed data: -1. Go to **Download Selected Data Using 'curl'** and select **Start** +To download the raw and processed data: -![Export Data](../_images/Export_selected_data.png "Export Selected Data") +1. 
Go to **Download Selected Data Using 'curl'** and select **Start** -2. Select the files to include in the download- the download dialog box gives you the option to further refine the types of files + ![Export Data](../_images/Export_selected_data.png "Export Selected Data") -![Select Files](../_images/select_file_types.png "Select Files") +2. Select the files to include in the download- the download dialog box gives you the option to further refine the types of files + ![Select Files](../_images/select_file_types.png "Select Files") 3. Select **Request curl Command** -After a few seconds, a new window with a curl command will open. - - -![curl command](../_images/curl_command.png "curl command") - - -Paste this curl command in your local or cloud-based terminal to download the data. - -After downloading the data files, return to the **Export Selected Data** page using the back icon to download the metadata (see step-by-step instructions below). - -![Back icon](../_images/back_icon.png "back icon") + After a few seconds, a new window with a curl command will open. + + ![curl command](../_images/curl_command.png "curl command") + + Paste this curl command in your local or cloud-based terminal to download the data. + + After downloading the data files, return to the **Export Selected Data** page using the back icon to download the metadata (see step-by-step instructions below). + + ![Back icon](../_images/back_icon.png "back icon") ## Downloading Metadata in a Data Manifest + Once you have downloaded the selected data files, you can download all the metadata associated with the cross-project data files. This metadata, also called a "Data Manifest" is in TSV file format and lists all the details about your selected data such as donor information and disease-state; however, the manifest is not the actual data file itself. 
@@ -63,18 +64,15 @@ This metadata, also called a "Data Manifest" is in TSV file format and lists all To download the metadata from the **Export Selected Data** page: 1. Go to **Download a File Manifest with Metadata for the Selected Data** and select **Start** -![Export Manifest](../_images/Export_selected_manifest.png "Export Manifest") + + ![Export Manifest](../_images/Export_selected_manifest.png "Export Manifest") 2. Select the file types to include in the manifest; the default selection will be the same as what you selected for the data download -![Prepare Manifest](../_images/prepare_manifest.png "Prepare Manifest") + ![Prepare Manifest](../_images/prepare_manifest.png "Prepare Manifest") 3. Select **Prepare Manifest** - When selecting file types for the metadata manifest, note that the listed **File Sizes** are for the actual data files and not for the manifest itself. The format of the manifest file (TSV) is a simple tab-separated text file, with the first line representing the header title for each column. It is OK to remove rows for unwanted files but the header row must remain, and the columns should remain the same. - - - diff --git a/content/help/help/help-and-faq.md b/content/help/help/help-and-faq.md index 1ac6d50bb..f902d074d 100644 --- a/content/help/help/help-and-faq.md +++ b/content/help/help/help-and-faq.md @@ -1,8 +1,8 @@ --- -path: "/help/help/help-and-faq" date: "2018-05-03" -title: "Get Help" description: "If you have questions or issues to report please email data-help@humancellatlas.org." +path: "/help/help/help-and-faq" +title: "Get Help" --- # Get Help @@ -21,8 +21,7 @@ The HCA is a collaborative effort by an international group of scientists to cre The HCA data will eventually accept all types of single-cell data, but it currently contains single-cell and single-nuclei RNA-sequencing data from Smart-seq2 and 10x v2 and v3 assays. 
While we are focused on curating a collection of data from healthy human specimens, as we grow we are accepting data from a number of sources, including model organisms and organoids, and both healthy and diseased samples. Stay tuned, the HCA is constantly evolving! -Currently, both Smart-seq2 and 10x processed data can be downloaded as expression or count matrices, respectively, using the Data Browser or by navigating to the Porject page - +Currently, both Smart-seq2 and 10x processed data can be downloaded as expression or count matrices, respectively, using the Data Browser or by navigating to the Project page. ### How do I get more details about projects, like protocols or publications? @@ -30,7 +29,7 @@ You can learn more about each project by visiting the Projects tab in the *Explo ### Can I use this data for a publication? -Yes, data from the HCA can be used for publication. Learn more about our [data use agreement](/about/data-use-agreement). +Yes, data from the HCA can be used for publication. Learn more about our [data use agreement](/about/data-use-agreement). ### How do I cite HCA data? @@ -42,7 +41,6 @@ The URL for the HCA DCP *Data Portal*, https://data.humancellatlas.org/, can be The user guides in the *Intro* section of the data portal contain detailed information about [how to access the data](/guides). - ### How do I visualize or analyze the data in the HCA? Single-cell sequencing analysis methods are constantly changing, and there are many tools for visualizing or analyzing the data in the HCA. Popular tools for analyzing single-cell RNA-seq data include Seurat (R) and ScanPy (Python),though many other excellent tools exist. We offer a list of community-built analysis applications in the *Analyze* section on the *Data Portal*. @@ -57,6 +55,4 @@ Visit the *Contact* page to learn how to collaborate with us and to reach us wit ### How can I reuse parts of the DCP? 
-One of our goals is to make the HCA Data Coordination Platform code open and reusable to the community. For now, contact us at data-help@humancellatlas.org with questions about reusing our code. Stay tuned for more information about reusing code in the *Intro* section of the *Data Portal*. - - +One of our goals is to make the HCA Data Coordination Platform code open and reusable to the community. For now, contact us at data-help@humancellatlas.org with questions about reusing our code. Stay tuned for more information about reusing code in the *Intro* section of the *Data Portal*. diff --git a/content/metadata/metadata-overview/modules.md b/content/metadata/metadata-overview/modules.md index ab38b99cf..dc8b1a23f 100644 --- a/content/metadata/metadata-overview/modules.md +++ b/content/metadata/metadata-overview/modules.md @@ -14,18 +14,19 @@ The use of modules helps to keep the HCA metadata schema both compact and self-c An overview of the different kinds of modules is given below. Please see the [metadata structure][metadata-structure] or [metadata design](/metadata/design) documentation for additional detail. - ## Core Modules + Every metadata type includes a single [core module][biomaterial-core] which defines the type’s super class. For example, all of the biomaterials (e.g. Cell Line, Cell Suspension, Donor Organism, etc. ) include the [Biomaterial Core][biomaterial-core] module. The Biomaterial Core module contributes Biomaterial ID, among other properties to each of these types. Similarly, all file types include the File Core module, all process types include the Process Core module and so on. ## Entity Modules + [Entity modules][8] are the main, general class of module. Entity modules model concepts such as [Funder][funder], [Channel][channel], and [Probe][probe]. Entity modules are also used to represent the various ontologies used by the HCA metadata schema such as [Cell cycle ontology][cell-cycle-ontology] . 
## System Modules -[System modules][9] are created and populated by the Data Coordination Platform during data ingest and processing. For example, the main System module is [Provenance][provenance] which models an entity's Document ID, Submission Date, and Submitter ID among other fields. +[System modules][9] are created and populated by the Data Coordination Platform during data ingest and processing. For example, the main System module is [Provenance][provenance] which models an entity's Document ID, Submission Date, and Submitter ID among other fields. [1]: /metadata/dictionary/biomaterial/specimen_from_organism [2]: /metadata/dictionary/process/process @@ -44,4 +45,3 @@ Similarly, all file types include the File Core module, all process types includ [probe]: /metadata/dictionary/protocol/probe [provenance]: /metadata/dictionary/system/provenance [types]: /metadata - diff --git a/content/metadata/metadata-overview/types.md b/content/metadata/metadata-overview/types.md index 52678441f..8d563ec90 100644 --- a/content/metadata/metadata-overview/types.md +++ b/content/metadata/metadata-overview/types.md @@ -7,8 +7,8 @@ title: "Metadata Overview - Types" # Metadata Types In the HCA metadata schema, **types** represent the different parts of an experiment. The schema currently defines five core types (Biomaterial, Process, Protocol, Project, File). These core types are extended and refined by one or more subtypes. - - For example, a **biomaterial** (e.g. a [tissue sample][1]) can undergo a **process** (e.g. [dissociation][2]) to produce another biomaterial (e.g. a [sample of dissociated cells][3]) or a set of data **files** (e.g. [10X fastq files][4]). The process that was actually executed follows a specific **protocol** (e.g. a [10X protocol][5]). All of these parts together make up the overall [project][6] (e.g. Understanding cell types in the human heart). + +For example, a **biomaterial** (e.g. a [tissue sample][1]) can undergo a **process** (e.g. 
[dissociation][2]) to produce another biomaterial (e.g. a [sample of dissociated cells][3]) or a set of data **files** (e.g. [10X fastq files][4]). The process that was actually executed follows a specific **protocol** (e.g. a [10X protocol][5]). All of these parts together make up the overall [project][6] (e.g. Understanding cell types in the human heart). An overview of each of current HCA metadata schema types is given below. Please see the [metadata structure](/metadata/structure) or [metadata design](/metadata/design) documentation for additional details and examples of how the schema is structured. @@ -24,7 +24,6 @@ Process types represent information relevant to how a biomaterial or file was co - ## Protocols Protocol types represent information about an intended protocol that was followed in a process. @@ -38,6 +37,7 @@ File types represent information about files produced from any process. ## Projects + The project type specifies information about a project that contributes to the HCA DCP. diff --git a/content/pipelines/hca-pipelines/data-processing-pipelines/data-processing-pipelines-user-guides.md b/content/pipelines/hca-pipelines/data-processing-pipelines/data-processing-pipelines-user-guides.md index af504a2fc..d5e754906 100644 --- a/content/pipelines/hca-pipelines/data-processing-pipelines/data-processing-pipelines-user-guides.md +++ b/content/pipelines/hca-pipelines/data-processing-pipelines/data-processing-pipelines-user-guides.md @@ -1,15 +1,17 @@ --- -path: "/pipelines/hca-pipelines/data-processing-pipelines/data-processing-pipelines-user-guides" date: "2018-05-03" -title: "Overview of Data Processing Pipelines" description: "Overview of the data processing pipelines in the HCA DCP." +path: "/pipelines/hca-pipelines/data-processing-pipelines/data-processing-pipelines-user-guides" +title: "Overview of Data Processing Pipelines" --- # Overview of Data Processing Pipelines ## What is Data Processing? 
-Data processing refers to the use of a computational pipeline to analyze raw experimental data from a specific assay. Processing data submitted to the Data Coordination Platform (DCP) produces collections of quality metrics and features that can be used for further analysis. For example, the processing of single-cell RNA-Seq data produces aligned, QC’d reads, a matrix of gene expression, and a matrix of quality control metrics describing the data. + +Data processing refers to the use of a computational pipeline to analyze raw experimental data from a specific assay. Processing data submitted to the Data Coordination Platform (DCP) produces collections of quality metrics and features that can be used for further analysis. For example, the processing of single-cell RNA-Seq data produces aligned, QC’d reads, a matrix of gene expression, and a matrix of quality control metrics describing the data. ## What is the Data Processing Pipeline Service? + The Data Processing Pipeline Service consists of analysis pipelines and execution infrastructure that move raw data through analysis, producing measurements that are available for download by the community from the Data Portal. These include both the submitted raw data and data resulting from data processing. As new single-cell technologies and analysis methods are developed and accepted by the research community, we will implement new data processing pipelines and make both the pipelines and the data publicly available. Data processing pipelines are each bespoke to the characteristics of the data they process. These pipelines can attempt to address the quality of the measurements, detecting false positives or negatives, optimal processing (such as aligning, collapsing UMIs, or segmenting images into accurate features), and many other concerns. Please see the details about each of our pipelines and send us your feedback! 
@@ -21,16 +23,8 @@ The following are pipelines in development or in production in this platform: | Smart-seq2 | Full transcript single-cell transcriptomics, paired or single- end, plate- or fluidigm-based | This pipeline currently supports the Smart-seq2 protocol as described [here](https://www.nature.com/articles/nprot.2014.006). Read more about the pipeline [here](/pipelines/smart-seq2-workflow). | Aligned BAM with tagsCounts Matrix (genes); QC Matrix | | Optimus | 3’ capture single-cell and single-nuclei transcriptomics | This pipeline supports 3’ scRNA-Seq data from the 10x v2 and v3 assay. Read more about the pipeline [here](/pipelines/optimus-workflow). | Aligned BAM with tagsCounts Matrix (genes); QC Matrix | - > Pipeline code and detailed documentation are hosted in the [WDL Analysis Research Pipelines (WARP)](https://github.com/broadinstitute/warp) repository on GitHub. ## Access to Pipeline Outputs -Matrices are publicly available and can be accessed through the DCP Data Browser or from the individual Project page. - - - - - - - +Matrices are publicly available and can be accessed through the DCP Data Browser or from the individual Project page. diff --git a/content/pipelines/hca-pipelines/data-processing-pipelines/file-formats.md b/content/pipelines/hca-pipelines/data-processing-pipelines/file-formats.md index 0062fa2dd..be50b2a12 100644 --- a/content/pipelines/hca-pipelines/data-processing-pipelines/file-formats.md +++ b/content/pipelines/hca-pipelines/data-processing-pipelines/file-formats.md @@ -1,16 +1,17 @@ --- -path: "/pipelines/hca-pipelines/data-processing-pipelines/file-formats" date: "2018-05-03" -title: "Data Processing Pipelines File Formats" description: "Overview of the file formats used by the data processing pipelines of the HCA DCP." 
+path: "/pipelines/hca-pipelines/data-processing-pipelines/file-formats" +title: "Data Processing Pipelines File Formats" --- # File Formats of the Data Processing Pipelines Service ## DCP Matrix Download File Format -> **MTX and CSV Matrix Deprecation Notice:** -DCP 1.0 matrices are deprecated in the DCP 2.0. Loom files are now the default format. +>**MTX and CSV Matrix Deprecation Notice:** +> +>DCP 1.0 matrices are deprecated in the DCP 2.0. Loom files are now the default format. Cell by gene count matrices are provided in [Loom](http://loompy.org/) file format and can be downloaded through the Data Coordination Platform's (DCP) Data Portal. From the Portal's Data Browser, you can make a multifaceted search to download matrices for multiple projects. Alternatively, you can explore the matrices available for download on the individual Project pages. @@ -18,6 +19,6 @@ Cell by gene count matrices are provided in [Loom](http://loompy.org/) file form Loom files can be explored using multiple Python- and R-supported downstream analysis tools, including [Loompy](http://loompy.org/), [SCANPY](https://github.com/theislab/scanpy), and [Pegasus](https://pegasus.readthedocs.io/en/latest/). You can also visualize Loom files using Bioconductor's [LoomExperiment](https://www.bioconductor.org/packages/release/bioc/html/LoomExperiment.html). - ## Contributor-generated Matrix File Format + When available, contributor-generated count matrices will be provided on individual Project pages. These matrices will vary in file format and content. To learn more about a specific contributor-generated matrix file, reach out to the Contacts listed on the Project page. 
diff --git a/content/pipelines/hca-pipelines/data-processing-pipelines/optimus-workflow.md b/content/pipelines/hca-pipelines/data-processing-pipelines/optimus-workflow.md index fc5615bed..729ca0cf5 100644 --- a/content/pipelines/hca-pipelines/data-processing-pipelines/optimus-workflow.md +++ b/content/pipelines/hca-pipelines/data-processing-pipelines/optimus-workflow.md @@ -1,8 +1,8 @@ --- -path: "/pipelines/hca-pipelines/data-processing-pipelines/optimus-workflow" date: "2018-05-03" -title: "Optimus" description: "Overview of the HCA DCP Optimus analysis workflow." +path: "/pipelines/hca-pipelines/data-processing-pipelines/optimus-workflow" +title: "Optimus" --- # Introduction to the Optimus Workflow @@ -11,10 +11,8 @@ The long-term goal of the Optimus workflow is to support any 3 prime single-cell The workflow supports the [10x v2 and v3 gene expression assay](https://www.10xgenomics.com/solutions/single-cell/) and has been validated for analyzing single-cell and [single-nucleus](https://docs.google.com/document/d/1rv2M7vfpOzIOsMnMfNyKB4HV18lQ9dnOGHK2tPikiH0/edit) from both [human](https://github.com/broadinstitute/warp/blob/master/pipelines/skylab/optimus/benchmarking/v1_Apr2019/optimus_report.rst) and [mouse](https://docs.google.com/document/d/1_3oO0ZQSrwEoe6D3GgKdSmAQ9qkzH_7wrE7x6_deL10/edit) data sets. - > View the open-source workflow code in the [WARP repository](https://github.com/broadinstitute/warp/tree/master/pipelines/skylab/optimus) on GitHub or in [Dockstore](https://dockstore.org/workflows/github.com/broadinstitute/warp/Optimus:Optimus_v4.2.2?tab=info). Read WARP's [Optimus Overview](https://broadinstitute.github.io/warp/docs/Pipelines/Optimus_Pipeline/README) for the latest pipeline details. 
- ## Commonalities Among Sequencing Assays The introduction of droplet-based technologies such as inDrop ([Klein, et al., 2015](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4441768/)) and Drop-seq ([Macosko, et al., 2015](https://www.sciencedirect.com/science/article/pii/S0092867415005498)) moved the throughput of a single-cell RNA sequencing experiment from hundreds to thousands of cells. Technology developed by [10x Genomics](https://www.10xgenomics.com) further increased throughput to hundreds of thousands of cells and has opened up the possibility of creating datasets for millions of cells. Common among many of the single-cell transcriptomics high-throughput technologies is the use of: @@ -28,15 +26,15 @@ The bead-specific barcodes and UMIs are encoded on sequencing primers that also ## Quick Start Table | Pipeline Features | Description | Source | -|-------------------|---------------------------------------------------------------|-----------------------| -| Assay Type | 10x Single Cell/Nucleus Expression (v2 and v3) |[10x Genomics](https://www.10xgenomics.com) -| Overall Workflow |Quality control module and transcriptome quantification module | Code available from [Github](https://github.com/broadinstitute/warp/blob/master/pipelines/skylab/optimus/Optimus.wdl) | +| --- | --- | --- | +| Assay Type | 10x Single Cell/Nucleus Expression (v2 and v3) | [10x Genomics](https://www.10xgenomics.com) | +| Overall Workflow | Quality control module and transcriptome quantification module | Code available from [Github](https://github.com/broadinstitute/warp/blob/master/pipelines/skylab/optimus/Optimus.wdl) | | Workflow Language | WDL | [openWDL](https://github.com/openwdl/wdl) | -| Genomic Reference Sequence| GRCh38 human genome primary sequence and M21 (GRCm38.p6) mouse genome primary sequence | GENCODE [Human](https://www.gencodegenes.org/human/release_27.html) and [Mouse](https://www.gencodegenes.org/mouse/release_M21.html) | +| Genomic Reference Sequence | GRCh38 
human genome primary sequence and M21 (GRCm38.p6) mouse genome primary sequence | GENCODE [Human](https://www.gencodegenes.org/human/release_27.html) and [Mouse](https://www.gencodegenes.org/mouse/release_M21.html) | | Transcriptomic Reference Annotation | V27 GenCode human transcriptome and M21 mouse transcriptome | GENCODE [Human](ftp://ftp.ebi.ac.uk/pub/databases/gencode/Gencode_human/release_27/gencode.v27.annotation.gtf.gz) and [Mouse](ftp://ftp.ebi.ac.uk/pub/databases/gencode/Gencode_mouse/release_M21/gencode.vM21.annotation.gff3.gz) | -| Aligner | STAR | [Dobin, et al.,2013](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3530905/) | -| Transcript Quantification |Utilities for processing large-scale single cell datasets |[sctools](https://github.com/HumanCellAtlas/sctools) | -|Data Input File Format |File format in which sequencing data is provided |[FASTQ](https://academic.oup.com/nar/article/38/6/1767/3112533) | +| Aligner | STAR | [Dobin, et al.,2013](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3530905/) | +| Transcript Quantification | Utilities for processing large-scale single cell datasets | [sctools](https://github.com/HumanCellAtlas/sctools) | +| Data Input File Format | File format in which sequencing data is provided | [FASTQ](https://academic.oup.com/nar/article/38/6/1767/3112533) | | Data Output File Format | File formats in which Optimus output is provided | [BAM](http://samtools.github.io/hts-specs/), [Loom version 3](http://loompy.org/) | ## Optimus Summary @@ -46,6 +44,7 @@ Here we describe the modules of Optimus; [the code](https://github.com/broadinst The workflow runs in two modes: single-cell (`sc_rna`) or single-nucleus (`sn_rna`). When appropriate, differences between the modes are noted. Overall, the workflow: + 1. corrects cell barcodes and Unique Molecular Identifiers (UMIs) 2. aligns reads to the genome 3. 
generates an expression count matrix in a UMI-aware fashion @@ -67,7 +66,7 @@ Each 10x v2 and v3 3’ sequencing experiment generates triplets of FASTQ files: 2. reverse reads (R2), which is the alignable genomic information from the mRNA transcript 3. an index FASTQ file that contains the sample barcodes, when provided by the sequencing facility -Because the pipeline processing steps require a BAM file format, the first step of Optimus is to [convert](https://github.com/broadinstitute/warp/blob/develop/tasks/skylab/FastqProcessing.wdl) the R2 FAST files, containing the alignable genomic information, to BAM files. Next, the [FastqProcessing](https://github.com/broadinstitute/warp/blob/develop/tasks/skylab/FastqProcessing.wdl) step appends the UMI and Cell Barcode sequences from R1 to the corresponding R2 sequence as tags, in order to properly label the genomic information for alignment. +Because the pipeline processing steps require a BAM file format, the first step of Optimus is to [convert](https://github.com/broadinstitute/warp/blob/develop/tasks/skylab/FastqProcessing.wdl) the R2 FASTQ files, containing the alignable genomic information, to BAM files. Next, the [FastqProcessing](https://github.com/broadinstitute/warp/blob/develop/tasks/skylab/FastqProcessing.wdl) step appends the UMI and Cell Barcode sequences from R1 to the corresponding R2 sequence as tags, in order to properly label the genomic information for alignment. ### Cell Barcode Correction @@ -75,7 +74,6 @@ Although the function of the cell barcodes is to identify unique cells, barcode The output BAM files contain the reads with correct barcodes, including barcodes that came within one edit distance ([Levenshtein distance](http://www.levenshtein.net/)) of matching the whitelist of barcode sequences and were corrected by this tool. Correct barcodes are assigned a “CB” tag. Uncorrected barcodes (with more than one error) are preserved and given a “CR” (Cell barcode Raw) tag. 
Cell barcode quality scores are also preserved in the file under the “CY” tag. - ## Alignment The [STAR alignment](https://github.com/broadinstitute/warp/blob/master/tasks/skylab/StarAlignBamSingleEnd.wdl) software ([Dobin, et al., 2013](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3530905/)) is used to map barcoded reads in the BAM files to the human genome primary assembly reference. STAR (Spliced Transcripts Alignment to a Reference) is widely used for RNA-seq alignment and identifies the best matching location(s) on the reference for each sequencing read. @@ -89,6 +87,7 @@ The [TagGeneExon](https://github.com/broadinstitute/warp/tree/master/tasks/skyla Annotations include INTERGENIC, INTRONIC, UTR and CODING (EXONIC), and are stored using the 'XF' BAM tag. In cases where the gene corresponds to an **exon or UTR**, the name of the gene that overlaps the alignment is associated with the read and stored using the GE BAM tag. #### Single-nucleus mode + Annotations include INTERGENIC, INTRONIC, UTR and CODING (EXONIC), and are stored using the 'XF' BAM tag. In cases where the gene corresponds to an **exon, UTR, or intron**, the name of the gene that overlaps the alignment is associated with the read and stored using the GE BAM tag. ## UMI Correction @@ -110,6 +109,7 @@ The pipeline outputs a count matrix that contains, for each cell barcode and for ## Pipeline Output Files Outputs of the pipeline include: + 1. Raw count matrix 2. Unfiltered, sorted BAM file (BamTags are used to tag reads that could be filtered downstream) 3. Cell metadata, including cell metrics @@ -124,13 +124,11 @@ Additionally, you can use the public [Intro-to-HCA-data-on-Terra workspace](http For more information on using the Terra platform, please view the [Support Center](https://support.terra.bio/hc/en-us). 
## Versioning + All Optimus workflow versions are detailed in the [Optimus Changelog](https://github.com/broadinstitute/warp/blob/master/pipelines/skylab/optimus/Optimus.changelog.md) in GitHub. > This documentation applies to Optimus v4.1.7 and later. If you are working with data processed with a previous version, please check the [Optimus changelog](https://github.com/broadinstitute/warp/blob/master/pipelines/skylab/optimus/Optimus.changelog.md) for any data processing changes that may be applicable to your data. - ## Learn More About Optimus -For more detailed information about the Optimus pipeline, please see the [Optimus Overview](https://broadinstitute.github.io/warp/docs/Pipelines/Optimus_Pipeline/README) in the WARP repository documentation. - - +For more detailed information about the Optimus pipeline, please see the [Optimus Overview](https://broadinstitute.github.io/warp/docs/Pipelines/Optimus_Pipeline/README) in the WARP repository documentation. diff --git a/content/pipelines/hca-pipelines/data-processing-pipelines/pipeline-best-practices.md b/content/pipelines/hca-pipelines/data-processing-pipelines/pipeline-best-practices.md index 52261e8b1..c6ae47262 100644 --- a/content/pipelines/hca-pipelines/data-processing-pipelines/pipeline-best-practices.md +++ b/content/pipelines/hca-pipelines/data-processing-pipelines/pipeline-best-practices.md @@ -1,8 +1,8 @@ --- -path: "/pipelines/pipeline-development/processing-pipelines/pipeline-best-practices" date: "2018-05-03" -title: "Best Practices for Building Data Processing Pipelines" description: "Overview of best practices for building data processing pipelines in the HCA DCP." 
+path: "/pipelines/pipeline-development/processing-pipelines/pipeline-best-practices" +title: "Best Practices for Building Data Processing Pipelines" --- # Best Practices for Building Data Processing Pipelines @@ -10,33 +10,40 @@ description: "Overview of best practices for building data processing pipelines Each of our Data Coordination Platform (DCP) uniform pipelines are developed using the best practices detailed below and are approved by the [Human Cell Atlas Analysis Working Group](https://www.humancellatlas.org/learn-more/working-groups/). We describe each of these best practices to give insight as to why they are important and we provide examples to give you a sense of how to apply them. Overall, the best pipelines should be: + - automated - easily testable - portable - scalable to their data - easy to maintain - + ## Automation + ### What is Automation? + Automation refers to the ability of a pipeline to run, end-to-end, without human intervention. - + ### Why do we care about automation? + Pipelines cannot scale to large amounts of data, or many runs, if manual steps must be performed within the pipeline. They also cannot be part of an automated system if they in fact are not automated. Manual steps will bottleneck your entire system and can require unmanageable operations. Moreover, manual steps performed by humans will vary, and will promote the production of data that can not be appropriately harmonized. - *Do*: Reduce parameterization to minimal inputs that do not vary for each input data. - *Do*: Remove the need for parameters, replacing them with data-driven settings. - *Do*: Offer defaults that are generally applicable for inputs that cannot be defined in a data-driven manner. - *Do*: Offer the ability to check the status of pipeline runs. -- *Don’t*: Assume any file produced at any step of the pipeline is ok. Always check the status of underlying tools (Eg. check return codes). 
-- *Don’t*: Keep output files produced by steps of the pipeline that errored; people will accidently use them if they exist. (Do keep logs for debugging.) +- *Don’t*: Assume any file produced at any step of the pipeline is ok. Always check the status of underlying tools (e.g. check return codes). +- *Don’t*: Keep output files produced by steps of the pipeline that errored; people will accidentally use them if they exist. (Do keep logs for debugging.) - *Don’t*: Delete outputs from steps that passed when the full pipeline fails, keeping them enables you to pick up where you left off. - *Don’t*: Use tools that are “buggy” or fragile, find alternatives or improve the tools. - + ## Testability + ### What is a testable pipeline? + A testable pipeline is one in which isolated sections or the full pipeline can checked for specified characteristics without modifying the pipeline’s code. Testability requires the existence of appropriate data with which to run the test and a testing checklist that reflects a clear understanding of how the data will be used to evaluate the pipeline. - + ### Why do we care about testabilty? + The availability of test data enables validation that the pipeline can produce the desired outcome. Formulation of a testing checklist allows the developer to clearly define the capabilities of the pipeline and the parameters of its use. - *Do*: Provide example test data with your pipeline/tool. @@ -51,34 +58,40 @@ The availability of test data enables validation that the pipeline can produce t ## Portability ### What is pipeline portability? + Pipeline portability refers to the ability of a pipeline to execute successfully on multiple technical architectures. ### Why do we care about portability? + _Science._ Science is not science if results are not reproducible; the scientific method cannot occur without a repeatable experiment that can be modified. 
Data processing pipelines are an essential part of some scientific inquiry and where they are leveraged they should be repeatable to validate and extend scientific discovery. _Impact._ Pipelines will have greatest impact when they can be leveraged in multiple environments. The more technical requirements for installing and running of a pipeline, the longer it will take for a researcher to have a usable running pipeline. - + _Maintainability._ Over the long term, it is easier to maintain pipelines that can be run in multiple environments. Portability avoids being tied to specific infrastructure and enables ease of deployment to development environments. Within the scope of the HCA project, to ensure that others will be able to use your pipeline, avoid building in assumptions about environments and infrastructures in which it will run. ### Configurability for running on different technical infrastructures. + Code should not change to enable a pipeline to run on a different technical architecture; this change in execution environment should be configurable outside of the pipeline code. - *Do*: Use a workflow language that allows a separation between the code that executes the data processing logic and the logic to run the pipeline on an infrastructure. [WDL](https://software.broadinstitute.org/wdl/documentation) and [CWL](https://www.commonwl.org/user_guide/rec-practices) languages have this feature. - *Don’t*: Put logic to run the pipeline in the same code that executes the logic to process the data. ### Separation between the environment, the execution of the pipeline, and the pipeline itself. + - *Do*: Use a containerization technology, such as [Docker](https://www.docker.com/), to execute software. - *Do*: Incorporate into your testing the execution of the pipeline in multiple execution environments. -- *Don’t*: Put environmental paths in software tools or the workflow language. 
When they must exist they belong in the configuration or (if they refer to the execution environment) in the container’s build instructions (Eg. Dockerfile). +- *Don’t*: Put environmental paths in software tools or the workflow language. When they must exist they belong in the configuration or (if they refer to the execution environment) in the container’s build instructions (e.g. Dockerfile). ## Scaling Characteristics ### What do we mean by scaling characteristics? + Scaling characteristics describe the performance of the pipeline given a certain amount of data. This is often described with Big O notation when describing algorithms. This answers the question: As the size of the data for the pipeline increases, how many additional computes are needed to process that data? One would want to avoid algorithms or tools that scale poorly, or improve this relationship to be linear (or better). ### Why care about scalability? + If you have poor scaling characteristics, it may take an exponential amount of time to process more data. This will eventually require unreasonable amounts of time (and money if running in the cloud) and generally reduce the applicability of the pipeline. - *Do*: Measure the relationship between the size of your input (or something equivalent) and resources needed to successfully complete the pipeline. @@ -87,19 +100,24 @@ If you have poor scaling characteristics, it may take an exponential amount of t ## Maintainability ### What is a maintainable pipeline? + A pipeline that can be easily operated and updated is maintainable. ### Why do we care about maintainability? + The majority of the life of code involves maintenance and updates. Design and initial implementation require vastly shorter amounts of time compared to the typical time period over which the code is operated and updated. This is generally true in many areas of software engineering. 
Thus it is important to engineer software so that the maintenance phase is manageable and does not burden new software development or operations. ### Readability + Software is a living document that should be easily read and understood, regardless of who is the reader or author of the code. -- *Do*: Work in a space where the code is easy to access and navigate (Eg. [GitHub](https://github.com/)) + +- *Do*: Work in a space where the code is easy to access and navigate (e.g. [GitHub](https://github.com/)) - *Do*: Use common software package structure and idioms to aid the navigation of the software. - *Do*: Use automated documentation for all technical documents as much as possible. - *Don’t*: Write a large amount of documentation that does not live beside or within the code itself (it will become out of date). ### Modularity + Modularity enables small units of code to be independently benchmarked, updated, validated, and exchanged as science or technology changes. Using these small units enables more rapid updates and better adaptation to innovation. - *Do*: Save progress by creating intermediate output between modules as they successfully complete. @@ -107,24 +125,27 @@ Modularity enables small units of code to be independently benchmarked, updated, - *Don’t*: Break every functionality of a pipeline into a separate module. (This contrasts with not making monolithic tasks; there is an optimum between monolithic tasks and highly resolved modularity that is the goal. One can use benchmarking, the tendency for functionality to be updated, and how dependent functionalities are to get a sense of what should be separate and what can be combined.) ### Leveraging Standards + We recommend using standard file formats and interfaces. In computational biology, [GA4GH](https://www.ga4gh.org/genomic-data-toolkit/) is a great source of these standards. 
In cases where new formats are needed, we recommend working with a standards group like [GA4GH](https://www.ga4gh.org/) if possible. - *Do*: When using containerization technologies, follow best practices to assure associated images do not update without explicit updates. - *Do*: Make both the images and the build files (Dockerfile) available to document the environment. [More on Dockerfiles](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/). ## Versioning + Versioning pipelines and associated Docker images allows you to determine when and how data is created (provenance). As you make improvements and changes to your pipeline, it is important to know which version of the pipeline and software you used to create a given dataset so that it can be easily reproduced. This not only facilitates scientific reproducibility for the greater community, it also allows you to verify that new pipeline changes produce consistent results. We recommend choosing a consistent versioning system (for example, the [semantic system](https://semver.org/)) and tracking pipeline changes in a [changelog](https://keepachangelog.com/en/1.0.0/). ## Licensing ### What is licensing? -According to Wikipedia "A software license is a legal instrument (usually by way of contract law, with or without printed material) governing the use or redistribution of software.” (see [this Wikipedia article](https://en.wikipedia.org/wiki/Software_license) for details). + +According to Wikipedia "A software license is a legal instrument (usually by way of contract law, with or without printed material) governing the use or redistribution of software." (see [this Wikipedia article](https://en.wikipedia.org/wiki/Software_license) for details). ### Why do we care about licensing? + Note: this section is opinion and is NOT legal advice. Licenses sometimes legally bind you as to how you use tools, and sometimes the terms of the license transfer to the software and data that is produced. 
This can restrict the potential for leveraging the pipeline and may require additional work. -- *Do*: Select tools that are openly licensed to run in your pipelines to avoid the possibility that legal requirements will restrict execution where technical requirements do not. +- *Do*: Select tools that are openly licensed to run in your pipelines to avoid the possibility that legal requirements will restrict execution where technical requirements do not. - *Don’t*: Create software tools or libraries without licenses, clear guidance on your intent for use is important. - diff --git a/content/pipelines/hca-pipelines/data-processing-pipelines/qc-metrics.md b/content/pipelines/hca-pipelines/data-processing-pipelines/qc-metrics.md index f4047dfbc..8a2b8501a 100644 --- a/content/pipelines/hca-pipelines/data-processing-pipelines/qc-metrics.md +++ b/content/pipelines/hca-pipelines/data-processing-pipelines/qc-metrics.md @@ -1,8 +1,8 @@ --- -path: "/pipelines/hca-pipelines/data-processing-pipelines/qc-metrics" date: "2018-05-03" -title: "Data Processing Pipelines QC Metrics" description: "Overview of the quality control metrics used to evaluate data quality for each data processing pipeline in the HCA DCP." +path: "/pipelines/hca-pipelines/data-processing-pipelines/qc-metrics" +title: "Data Processing Pipelines QC Metrics" --- # Data Processing Pipelines QC Metrics @@ -15,93 +15,91 @@ Here we provide the quality control metrics used to evaluate data quality for ea The Smart-seq2 pipeline processes data generated from plate-based Smart-seq2 scRNA sequencing protocols (full transcript). The metrics below are generated from the quality control module of the pipeline. 
-| Metric | Program |Details | -|-------------------------------|------------- |------------------------| -|`RnaSeqMetrics`|[CollectRnaSeqMetrics](https://broadinstitute.github.io/picard/command-line-overview.html#CollectRnaSeqMetrics)|[Metrics Definitions](http://broadinstitute.github.io/picard/picard-metric-definitions.html#RnaSeqMetrics) | -|`DuplicationMetrics`|[MarkDuplicates](https://broadinstitute.github.io/picard/command-line-overview.html#MarkDuplicates) |[Metrics Definitions](http://broadinstitute.github.io/picard/picard-metric-definitions.html#DuplicationMetrics) | -|`AlignmentSummaryMetrics`|[CollectMultipleMetrics](https://broadinstitute.github.io/picard/command-line-overview.html#CollectMultipleMetrics) |[Metrics Definitions](https://broadinstitute.github.io/picard/picard-metric-definitions.html#AlignmentSummaryMetrics) | -|`InsertSizeMetrics`|[CollectMultipleMetrics](https://broadinstitute.github.io/picard/command-line-overview.html#CollectMultipleMetrics) |[Metrics Definitions](https://broadinstitute.github.io/picard/picard-metric-definitions.html#InsertSizeMetrics) | -|`GcBiasMetrics,GcBiasDetailMetrics`|[CollectMultipleMetrics](https://broadinstitute.github.io/picard/command-line-overview.html#CollectMultipleMetrics) |[Metrics Definitions](https://broadinstitute.github.io/picard/picard-metric-definitions.html#GcBiasDetailMetrics) | -|`QualityYieldMetrics`| [CollectMultipleMetrics](https://broadinstitute.github.io/picard/command-line-overview.html#CollectMultipleMetrics) |[Metrics Definitions](https://broadinstitute.github.io/picard/picard-metric-definitions.html#CollectQualityYieldMetrics.QualityYieldMetrics) | -|`SequencingArtifactMetrics`| [CollectMultipleMetrics](https://broadinstitute.github.io/picard/command-line-overview.html#CollectMultipleMetrics) |[Metrics Definitions](https://broadinstitute.github.io/picard/picard-metric-definitions.html#ErrorSummaryMetrics) | -|`HISAT2 Metrics` | 
[HISAT2](https://ccb.jhu.edu/software/hisat2/manual.shtml#alignment-summary) | HISAT2 alignment summary metrics | +| Metric | Program |Details | +| --- | --- | --- | +| `RnaSeqMetrics` | [CollectRnaSeqMetrics](https://broadinstitute.github.io/picard/command-line-overview.html#CollectRnaSeqMetrics) | [Metrics Definitions](http://broadinstitute.github.io/picard/picard-metric-definitions.html#RnaSeqMetrics) | +| `DuplicationMetrics` | [MarkDuplicates](https://broadinstitute.github.io/picard/command-line-overview.html#MarkDuplicates) | [Metrics Definitions](http://broadinstitute.github.io/picard/picard-metric-definitions.html#DuplicationMetrics) | +| `AlignmentSummaryMetrics` | [CollectMultipleMetrics](https://broadinstitute.github.io/picard/command-line-overview.html#CollectMultipleMetrics) | [Metrics Definitions](https://broadinstitute.github.io/picard/picard-metric-definitions.html#AlignmentSummaryMetrics) | +| `InsertSizeMetrics` | [CollectMultipleMetrics](https://broadinstitute.github.io/picard/command-line-overview.html#CollectMultipleMetrics) | [Metrics Definitions](https://broadinstitute.github.io/picard/picard-metric-definitions.html#InsertSizeMetrics) | +| `GcBiasMetrics,GcBiasDetailMetrics` | [CollectMultipleMetrics](https://broadinstitute.github.io/picard/command-line-overview.html#CollectMultipleMetrics) | [Metrics Definitions](https://broadinstitute.github.io/picard/picard-metric-definitions.html#GcBiasDetailMetrics) | +| `QualityYieldMetrics` | [CollectMultipleMetrics](https://broadinstitute.github.io/picard/command-line-overview.html#CollectMultipleMetrics) | [Metrics Definitions](https://broadinstitute.github.io/picard/picard-metric-definitions.html#CollectQualityYieldMetrics.QualityYieldMetrics) | +| `SequencingArtifactMetrics` | [CollectMultipleMetrics](https://broadinstitute.github.io/picard/command-line-overview.html#CollectMultipleMetrics) | [Metrics 
Definitions](https://broadinstitute.github.io/picard/picard-metric-definitions.html#ErrorSummaryMetrics) | +| `HISAT2 Metrics` | [HISAT2](https://ccb.jhu.edu/software/hisat2/manual.shtml#alignment-summary) | HISAT2 alignment summary metrics | | `RSEM Metrics` | [RSEM](https://github.com/deweylab/RSEM/blob/master/cnt_file_description.txt) | Metrics from the RSEM cnt file | ## Optimus Pipeline Metrics -This pipeline processes genomic data generated from the [10x Genomics](https://www.10xgenomics.com/solutions/single-cell/) 3 prime v2 (and v3) assay. The metrics below are detected using Single Cell Tools ([sctools](https://github.com/HumanCellAtlas/sctools)). +This pipeline processes genomic data generated from the [10x Genomics](https://www.10xgenomics.com/solutions/single-cell/) 3 prime v2 (and v3) assay. The metrics below are detected using Single Cell Tools ([sctools](https://github.com/HumanCellAtlas/sctools)). | Cell Metrics | Program | Details | -|:---|:---:|:---| -|`n_reads`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of reads associated with this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_reads)| -|`noise_reads`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| Number of reads that are categorized by 10x Genomics Cell Ranger as "noise". Refers to long polymers, or reads with high numbers of N (ambiguous) nucleotides. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.noise_reads)| -|`perfect_molecule_barcodes`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of reads with molecule barcodes that have no errors. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.perfect_molecule_barcodes)| -|`n_mitochondrial_genes`| [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of mitochondrial genes detected by this cell. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_mitochondrial_genes)| -|`n_mitochondrial_molecules`| [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of molecules from mitochondrial genes detected for this cell. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_mitochondrial_molecules)| -|`pct_mitochondrial_molecules`| [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The percentage of molecules from mitochondrial genes detected for this cell. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.pct_mitochondrial_molecules)| -|`reads_mapped_exonic`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of reads for this entity that are mapped to exons. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_exonic)| -|`reads_mapped_intronic`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of reads for this entity that are mapped to introns. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_intronic)| -|`reads_mapped_utr`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of reads for this entity that are mapped to 3' untranslated regions (UTRs). 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_utr)| -|`reads_mapped_uniquely`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of reads mapped to a single unambiguous location in the genome. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_uniquely)| -|`reads_mapped_multiple`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of reads mapped to multiple genomic positions with equal confidence. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_multiple)| -|`duplicate_reads`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of reads that are duplicates (see README.md for definition of a duplicate). [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.duplicate_reads)| -|`spliced_reads`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of reads that overlap splicing junctions. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.spliced_reads)| -|`antisense_reads`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of reads that are mapped to the antisense strand instead of the transcribed strand. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.antisense_reads)| -|`molecule_barcode_fraction_bases_above_30_mean`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The average fraction of bases in molecule barcodes that receive quality scores greater than 30 across the reads of this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.molecule_barcode_fraction_bases_above_30_mean)| -|`molecule_barcode_fraction_bases_above_30_variance`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The variance in the fraction of bases in molecule barcodes that receive quality scores greater than 30 across the reads of this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.molecule_barcode_fraction_bases_above_30_variance)| -|`genomic_reads_fraction_bases_quality_above_30_mean`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The average fraction of bases in the genomic read that receive quality scores greater than 30 across the reads of this entity (included for 10x Cell Ranger count comparison). [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_reads_fraction_bases_quality_above_30_mean)| -|`genomic_reads_fraction_bases_quality_above_30_variance`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The variance in the fraction of bases in the genomic read that receive quality scores greater than 30 across the reads of this entity (included for 10x Cell Ranger count comparison). 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_reads_fraction_bases_quality_above_30_variance)| -|`genomic_read_quality_mean`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| Average quality of Illumina base calls in the genomic reads corresponding to this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_read_quality_mean)| -|`genomic_read_quality_variance`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|Variance in quality of Illumina base calls in the genomic reads corresponding to this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_read_quality_variance)| -|`n_molecules`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| Number of molecules corresponding to this entity. See README.md for the definition of a Molecule. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_molecules)| -|`n_fragments`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| Number of fragments corresponding to this entity. See README.md for the definition of a Fragment. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_fragments)| -|`reads_per_fragment`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The average number of reads associated with each fragment in this entity. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_per_fragment)| -|`fragments_per_molecule`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The average number of fragments associated with each molecule in this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.fragments_per_molecule)| -|`fragments_with_single_read_evidence`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of fragments associated with this entity that are observed by only one read. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.fragments_with_single_read_evidence)| -|`molecules_with_single_read_evidence`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of molecules associated with this entity that are observed by only one read. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.molecules_with_single_read_evidence)| -|`perfect_cell_barcodes`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of reads whose cell barcodes contain no error. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.perfect_cell_barcodes)| -|`reads_mapped_intergenic`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of reads mapped to an intergenic region for this cell. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_intergenic)| -|`reads_mapped_too_many_loci`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of reads that were mapped to too many loci across the genome and as a consequence, are reported unmapped by the aligner. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_too_many_loci)| -|`cell_barcode_fraction_bases_above_30_variance`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The variance of the fraction of Illumina base calls for the cell barcode sequence that are greater than 30, across molecules. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.cell_barcode_fraction_bases_above_30_variance)| -|`cell_barcode_fraction_bases_above_30_mean`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The average fraction of Illumina base calls for the cell barcode sequences that are greater than 30, across molecules. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.cell_barcode_fraction_bases_above_30_mean)| -|`n_genes`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of genes detected by this cell. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_genes)| -|`genes_detected_multiple_observations`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)| The number of genes that are observed by more than one read in this cell. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genes_detected_multiple_observations)| -| `reads_unmapped`| [SC Tools](https://github.com/HumanCellAtlas/sctools/blob/master/src/sctools/metrics/aggregator.py) | Reads that are non-transcriptomic | +| :--- | :---: | :--- | +| `n_reads` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads associated with this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_reads) | +| `noise_reads` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | Number of reads that are categorized by 10x Genomics Cell Ranger as "noise". Refers to long polymers, or reads with high numbers of N (ambiguous) nucleotides. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.noise_reads) | +| `perfect_molecule_barcodes` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads with molecule barcodes that have no errors. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.perfect_molecule_barcodes) | +| `n_mitochondrial_genes` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of mitochondrial genes detected by this cell. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_mitochondrial_genes) | +| `n_mitochondrial_molecules` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of molecules from mitochondrial genes detected for this cell. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_mitochondrial_molecules) | +| `pct_mitochondrial_molecules` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The percentage of molecules from mitochondrial genes detected for this cell. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.pct_mitochondrial_molecules) | +| `reads_mapped_exonic` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads for this entity that are mapped to exons. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_exonic) | +| `reads_mapped_intronic` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads for this entity that are mapped to introns. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_intronic) | +| `reads_mapped_utr` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads for this entity that are mapped to 3' untranslated regions (UTRs). [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_utr) | +| `reads_mapped_uniquely` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads mapped to a single unambiguous location in the genome. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_uniquely) | +| `reads_mapped_multiple` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads mapped to multiple genomic positions with equal confidence. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_multiple) | +| `duplicate_reads` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads that are duplicates (see README.md for definition of a duplicate). [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.duplicate_reads) | +| `spliced_reads` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads that overlap splicing junctions. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.spliced_reads) | +| `antisense_reads` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads that are mapped to the antisense strand instead of the transcribed strand. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.antisense_reads) | +| `molecule_barcode_fraction_bases_above_30_mean` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The average fraction of bases in molecule barcodes that receive quality scores greater than 30 across the reads of this entity. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.molecule_barcode_fraction_bases_above_30_mean) | +| `molecule_barcode_fraction_bases_above_30_variance` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The variance in the fraction of bases in molecule barcodes that receive quality scores greater than 30 across the reads of this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.molecule_barcode_fraction_bases_above_30_variance) | +| `genomic_reads_fraction_bases_quality_above_30_mean` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The average fraction of bases in the genomic read that receive quality scores greater than 30 across the reads of this entity (included for 10x Cell Ranger count comparison). [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_reads_fraction_bases_quality_above_30_mean) | +| `genomic_reads_fraction_bases_quality_above_30_variance` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The variance in the fraction of bases in the genomic read that receive quality scores greater than 30 across the reads of this entity (included for 10x Cell Ranger count comparison). [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_reads_fraction_bases_quality_above_30_variance) | +| `genomic_read_quality_mean` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | Average quality of Illumina base calls in the genomic reads corresponding to this entity. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_read_quality_mean) | +| `genomic_read_quality_variance` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) |Variance in quality of Illumina base calls in the genomic reads corresponding to this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_read_quality_variance) | +| `n_molecules` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | Number of molecules corresponding to this entity. See README.md for the definition of a Molecule. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_molecules) | +| `n_fragments` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | Number of fragments corresponding to this entity. See README.md for the definition of a Fragment. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_fragments) | +| `reads_per_fragment` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The average number of reads associated with each fragment in this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_per_fragment) | +| `fragments_per_molecule` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The average number of fragments associated with each molecule in this entity. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.fragments_per_molecule) | +| `fragments_with_single_read_evidence` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of fragments associated with this entity that are observed by only one read. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.fragments_with_single_read_evidence) | +| `molecules_with_single_read_evidence` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of molecules associated with this entity that are observed by only one read. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.molecules_with_single_read_evidence) | +| `perfect_cell_barcodes` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads whose cell barcodes contain no error. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.perfect_cell_barcodes) | +| `reads_mapped_intergenic` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads mapped to an intergenic region for this cell. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_intergenic) | +| `reads_mapped_too_many_loci` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads that were mapped to too many loci across the genome and as a consequence, are reported unmapped by the aligner. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_too_many_loci) | +| `cell_barcode_fraction_bases_above_30_variance` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The variance of the fraction of Illumina base calls for the cell barcode sequence that are greater than 30, across molecules. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.cell_barcode_fraction_bases_above_30_variance) | +| `cell_barcode_fraction_bases_above_30_mean` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The average fraction of Illumina base calls for the cell barcode sequences that are greater than 30, across molecules. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.cell_barcode_fraction_bases_above_30_mean) | +| `n_genes` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of genes detected by this cell. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_genes) | +| `genes_detected_multiple_observations` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of genes that are observed by more than one read in this cell. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genes_detected_multiple_observations) | +| `reads_unmapped` | [SC Tools](https://github.com/HumanCellAtlas/sctools/blob/master/src/sctools/metrics/aggregator.py) | Reads that are non-transcriptomic | | `emptydrops_FDR` | [dropletUtils](https://bioconductor.org/packages/release/bioc/html/DropletUtils.html) | False Discovery Rate (FDR) for being a non-empty droplet; not included when running in single-nuclei mode | | `emptydrops_IsCell` | [dropletUtils](https://bioconductor.org/packages/release/bioc/html/DropletUtils.html) | Binarized call of cell/background based on predefined FDR cutoff; not included when running in single-nuclei mode | | `emptydrops_Limited` | [dropletUtils](https://bioconductor.org/packages/release/bioc/html/DropletUtils.html) | Indicates whether a lower p-value could be obtained by increasing the number of iterations; not included when running in single-nuclei mode | |`emptydrops_LogProb` | [dropletUtils](https://bioconductor.org/packages/release/bioc/html/DropletUtils.html) | The log-probability of observing the barcode’s count vector under the null model; not included when running in single-nuclei mode | -| `emptydrops_PValue` | [dropletUtils](https://bioconductor.org/packages/release/bioc/html/DropletUtils.html) | Numeric, the Monte Carlo p-value against the null model; not included when running in single-nuclei mode | +| `emptydrops_PValue` | [dropletUtils](https://bioconductor.org/packages/release/bioc/html/DropletUtils.html) | Numeric, the Monte Carlo p-value against the null model; not included when running in single-nuclei mode | | `emptydrops_Total` | [dropletUtils](https://bioconductor.org/packages/release/bioc/html/DropletUtils.html) | Numeric, the total read counts for each barcode; not included when running in single-nuclei mode | -| Gene Metrics | Program |Details | 
-|-------------------------------|--------------------|------------------------| -|`n_reads`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of reads associated with this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_reads)| -|`noise_reads`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|Number of reads that are categorized by 10x Genomics Cell Ranger as "noise". Refers to long polymers, or reads with high numbers of N (ambiguous) nucleotides. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.noise_reads)| -|`perfect_molecule_barcodes`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of reads with molecule barcodes that have no errors. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.perfect_molecule_barcodes)| -|`reads_mapped_exonic`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of reads for this entity that are mapped to exons. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_exonic)| -|`reads_mapped_intronic`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of reads for this entity that are mapped to introns. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_intronic)| -|`reads_mapped_utr`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of reads for this entity that are mapped to 3' untranslated regions (UTRs). 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_utr)| -|`reads_mapped_uniquely`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of reads mapped to a single unambiguous location in the genome. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_uniquely)| -|`reads_mapped_multiple`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of reads mapped to multiple genomic positions with equal confidence. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_multiple)| -|`duplicate_reads`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of reads that are duplicates (see README.md for definition of a duplicate). [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.duplicate_reads)| -|`spliced_reads`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of reads that overlap splicing junctions. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.spliced_reads)| -|`antisense_reads`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of reads that are mapped to the antisense strand instead of the transcribed strand. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.antisense_reads)| -|`molecule_barcode_fraction_bases_above_30_mean`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The average fraction of bases in molecule barcodes that receive quality scores greater than 30 across the reads of this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.molecule_barcode_fraction_bases_above_30_mean)| -|`molecule_barcode_fraction_bases_above_30_variance`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The variance in the fraction of bases in molecule barcodes that receive quality scores greater than 30 across the reads of this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.molecule_barcode_fraction_bases_above_30_variance)| -|`genomic_reads_fraction_bases_quality_above_30_mean`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The average fraction of bases in the genomic read that receive quality scores greater than 30 across the reads of this entity (included for 10x Cell Ranger count comparison). [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_reads_fraction_bases_quality_above_30_mean)| -|`genomic_reads_fraction_bases_quality_above_30_variance`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The variance in the fraction of bases in the genomic read that receive quality scores greater than 30 across the reads of this entity (included for 10x Cell Ranger count comparison). 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_reads_fraction_bases_quality_above_30_variance)| -|`genomic_read_quality_mean`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|Average quality of Illumina base calls in the genomic reads corresponding to this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_read_quality_mean)| -|`genomic_read_quality_variance`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|Variance in quality of Illumina base calls in the genomic reads corresponding to this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_read_quality_variance)| -|`n_molecules`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|Number of molecules corresponding to this entity. See README.md for the definition of a Molecule. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_molecules)| -|`n_fragments`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|Number of fragments corresponding to this entity. See README.md for the definition of a Fragment. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_fragments)| -|`reads_per_molecule`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The average number of reads associated with each molecule in this entity. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_per_molecule)| -|`reads_per_fragment`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The average number of reads associated with each fragment in this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_per_fragment)| -|`fragments_per_molecule`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The average number of fragments associated with each molecule in this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.fragments_per_molecule)| -|`fragments_with_single_read_evidence`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of fragments associated with this entity that are observed by only one read. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.fragments_with_single_read_evidence)| -|`molecules_with_single_read_evidence`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of molecules associated with this entity that are observed by only one read. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.molecules_with_single_read_evidence)| -|`number_cells_detected_multiple`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of cells which observe more than one read of this gene. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.GeneMetrics.number_cells_detected_multiple)| -|`number_cells_expressing`|[SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics)|The number of cells that detect this gene. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.GeneMetrics.number_cells_expressing)| - - +| Gene Metrics | Program | Details | +| --- | --- | --- | +| `n_reads` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads associated with this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_reads) | +| `noise_reads` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | Number of reads that are categorized by 10x Genomics Cell Ranger as "noise". Refers to long polymers, or reads with high numbers of N (ambiguous) nucleotides. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.noise_reads) | +| `perfect_molecule_barcodes` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads with molecule barcodes that have no errors. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.perfect_molecule_barcodes) | +| `reads_mapped_exonic` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads for this entity that are mapped to exons. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_exonic) | +| `reads_mapped_intronic` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads for this entity that are mapped to introns. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_intronic) | +| `reads_mapped_utr` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads for this entity that are mapped to 3' untranslated regions (UTRs). [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_utr) | +| `reads_mapped_uniquely` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads mapped to a single unambiguous location in the genome. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_uniquely) | +| `reads_mapped_multiple` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads mapped to multiple genomic positions with equal confidence. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_mapped_multiple) | +| `duplicate_reads` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads that are duplicates (see README.md for definition of a duplicate). 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.duplicate_reads) | +| `spliced_reads` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads that overlap splicing junctions. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.spliced_reads) | +| `antisense_reads` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of reads that are mapped to the antisense strand instead of the transcribed strand. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.antisense_reads) | +| `molecule_barcode_fraction_bases_above_30_mean` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The average fraction of bases in molecule barcodes that receive quality scores greater than 30 across the reads of this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.molecule_barcode_fraction_bases_above_30_mean) | +| `molecule_barcode_fraction_bases_above_30_variance` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The variance in the fraction of bases in molecule barcodes that receive quality scores greater than 30 across the reads of this entity. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.molecule_barcode_fraction_bases_above_30_variance) | +| `genomic_reads_fraction_bases_quality_above_30_mean` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The average fraction of bases in the genomic read that receive quality scores greater than 30 across the reads of this entity (included for 10x Cell Ranger count comparison). [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_reads_fraction_bases_quality_above_30_mean) | +| `genomic_reads_fraction_bases_quality_above_30_variance` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The variance in the fraction of bases in the genomic read that receive quality scores greater than 30 across the reads of this entity (included for 10x Cell Ranger count comparison). [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_reads_fraction_bases_quality_above_30_variance) | +| `genomic_read_quality_mean` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | Average quality of Illumina base calls in the genomic reads corresponding to this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_read_quality_mean) | +| `genomic_read_quality_variance` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | Variance in quality of Illumina base calls in the genomic reads corresponding to this entity. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.genomic_read_quality_variance) | +| `n_molecules` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | Number of molecules corresponding to this entity. See README.md for the definition of a Molecule. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_molecules) | +| `n_fragments` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | Number of fragments corresponding to this entity. See README.md for the definition of a Fragment. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.n_fragments) | +| `reads_per_molecule` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The average number of reads associated with each molecule in this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_per_molecule) | +| `reads_per_fragment` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The average number of reads associated with each fragment in this entity. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.reads_per_fragment) | +| `fragments_per_molecule` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The average number of fragments associated with each molecule in this entity. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.fragments_per_molecule) | +| `fragments_with_single_read_evidence` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of fragments associated with this entity that are observed by only one read. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.fragments_with_single_read_evidence) | +| `molecules_with_single_read_evidence` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of molecules associated with this entity that are observed by only one read. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.CellMetrics.molecules_with_single_read_evidence) | +| `number_cells_detected_multiple` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of cells which observe more than one read of this gene. [Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.GeneMetrics.number_cells_detected_multiple) | +| `number_cells_expressing` | [SC Tools](https://github.com/HumanCellAtlas/sctools/tree/master/src/sctools/metrics) | The number of cells that detect this gene. 
[Metrics Definitions](https://sctools.readthedocs.io/en/latest/sctools.metrics.html#sctools.metrics.aggregator.GeneMetrics.number_cells_expressing) | diff --git a/content/pipelines/hca-pipelines/data-processing-pipelines/smart-seq2-workflow.md b/content/pipelines/hca-pipelines/data-processing-pipelines/smart-seq2-workflow.md index 32a2bb0f8..98ce3d5ab 100644 --- a/content/pipelines/hca-pipelines/data-processing-pipelines/smart-seq2-workflow.md +++ b/content/pipelines/hca-pipelines/data-processing-pipelines/smart-seq2-workflow.md @@ -1,8 +1,8 @@ --- -path: "/pipelines/hca-pipelines/data-processing-pipelines/smart-seq2-workflow" date: "2018-05-03" -title: "Smart-seq2" description: "Overview of Smart-seq2 scRNA sequencing in the HCA DCP." +path: "/pipelines/hca-pipelines/data-processing-pipelines/smart-seq2-workflow" +title: "Smart-seq2" --- # Description of Smart-seq2 scRNA Sequencing @@ -15,7 +15,6 @@ Currently the most technically and economically feasible methodology for single- The SMART acronym, which stands for Switching Mechanism At the end of the 5’-end of the RNA Transcript, describes a key property of the reverse transcriptase enzyme from Maloney Murine Leukemia Virus (MMLV). During reverse transcription this enzyme adds a few nucleotides, generally 2-5 cytosines, when it reaches the 5’ end of the RNA template. These extra nucleotides on the newly-synthesized cDNA act as a docking site for a complementary oligonucleotide (termed a TSO or Template Switching Oligonucleotide) that carries 3 riboguanosines at its 3’ end. The reverse transcriptase is then able to switch templates and synthesize the complementary cDNA strand using the TSO as a primer. Overall, optimization of this technique has improved both the yield and the length of transcripts from single-cell cDNA libraries. These features, coupled with reasonable cost, have made Smart-seq2 a widely used method for single-cell RNA sequencing. 
- ## Overview of the Pipeline The Smart-seq2 pipeline processes data generated from plate- or fluidigm-based Smart-seq2 scRNA sequencing protocols. The pipeline is comprised of two modules: a quality control module, which generates post-alignment quality control metrics, and a transcriptome quantification module, which aligns reads to the transcriptome and estimates transcript expression levels. @@ -25,42 +24,43 @@ The Smart-seq2 pipeline processes data generated from plate- or fluidigm-based S ## Quick Start Table | Pipeline Features | Description | Source | -|-------------------|---------------------------------------------------------------|-----------------------| -|Assay Type |Paired- or single-end, plate- or fluidigm-based Smart-seq2 |Validation reports for human and [mouse](https://docs.google.com/document/d/12zGTFROrcXEByt9z0h06qjSqb9vWutn28Tx6YiND1Ds/edit) [single-end](https://docs.google.com/document/d/1MonsTG8UnROHZ_XpulrSZNTxO988KEH6T6h45plFYQg/edit#heading=h.ixoqmhbabdvh) and [fluidigm](https://docs.google.com/document/d/1FEg86Tlu657j9Kjw_v3keFQRXcBIs8gOqCwLbPSP-C0/edit#heading=h.sun21prlcwz3) datasets | -| Overall workflow |Quality control module and transcriptome quantification module | [Code available from GitHub](https://github.com/broadinstitute/warp/blob/master/pipelines/skylab/smartseq2_single_sample/SmartSeq2SingleSample.wdl) | -| Workflow language |WDL |[openWDL](https://github.com/openwdl/wdl)| -| Genomic reference sequence|GRCh38 human genome primary sequence|[GENCODE](https://www.gencodegenes.org/human/release_27.html)| -|Gene Model |GENCODE v27 PRI GTF and FASTA files |[GENCODE](https://www.gencodegenes.org/human/release_27.html)| -| Aligner |HISAT2 |[Kim, et al.,2015](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4655817/); [HISAT2 tool](https://ccb.jhu.edu/software/hisat2/manual.shtml)| -|QC |Metrics determined using Picard command line tools |[Picard Tools](https://broadinstitute.github.io/picard/) | -| Estimation of gene expression 
|RSEM ([rsem-calculate-expression](http://deweylab.biostat.wisc.edu/rsem/rsem-calculate-expression.html)) is used to estimate the gene expression profile. The input of RSEM is a BAM file aligned by HISAT2. | [Li and Dewey, 2011](https://bmcbioinformatics.biomedcentral.com/articles/10.1186/1471-2105-12-323)| -|Data Input File Format |File format in which sequencing data is provided |[FASTQ](https://academic.oup.com/nar/article/38/6/1767/3112533) | -| Data Output File Format | File formats in which Smart-seq2 pipeline output is provided |[BAM](http://samtools.github.io/hts-specs/), [Loom version 3](http://loompy.org/)| +| --- | --- | --- | +| Assay Type | Paired- or single-end, plate- or fluidigm-based Smart-seq2 | Validation reports for human and [mouse](https://docs.google.com/document/d/12zGTFROrcXEByt9z0h06qjSqb9vWutn28Tx6YiND1Ds/edit) [single-end](https://docs.google.com/document/d/1MonsTG8UnROHZ_XpulrSZNTxO988KEH6T6h45plFYQg/edit#heading=h.ixoqmhbabdvh) and [fluidigm](https://docs.google.com/document/d/1FEg86Tlu657j9Kjw_v3keFQRXcBIs8gOqCwLbPSP-C0/edit#heading=h.sun21prlcwz3) datasets | +| Overall workflow | Quality control module and transcriptome quantification module | [Code available from GitHub](https://github.com/broadinstitute/warp/blob/master/pipelines/skylab/smartseq2_single_sample/SmartSeq2SingleSample.wdl) | +| Workflow language | WDL | [openWDL](https://github.com/openwdl/wdl) | +| Genomic reference sequence | GRCh38 human genome primary sequence | [GENCODE](https://www.gencodegenes.org/human/release_27.html) | +| Gene Model | GENCODE v27 PRI GTF and FASTA files | [GENCODE](https://www.gencodegenes.org/human/release_27.html) | +| Aligner | HISAT2 | [Kim, et al., 2015](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4655817/); [HISAT2 tool](https://ccb.jhu.edu/software/hisat2/manual.shtml) | +| QC | Metrics determined using Picard command line tools | [Picard Tools](https://broadinstitute.github.io/picard/) | +| Estimation of gene expression | RSEM 
([rsem-calculate-expression](http://deweylab.biostat.wisc.edu/rsem/rsem-calculate-expression.html)) is used to estimate the gene expression profile. The input of RSEM is a BAM file aligned by HISAT2. | [Li and Dewey, 2011](https://bmcbioinformatics.biomedcentral.com/articles/10.1186/1471-2105-12-323) | +| Data Input File Format | File format in which sequencing data is provided | [FASTQ](https://academic.oup.com/nar/article/38/6/1767/3112533) | +| Data Output File Format | File formats in which Smart-seq2 pipeline output is provided | [BAM](http://samtools.github.io/hts-specs/), [Loom version 3](http://loompy.org/) | ## Pipeline Details -Choice of appropriate reference sequences and annotations are critical for optimizing the interpretation of reads as transcriptomic features. Currently this pipeline uses the genomic reference sequence GRCh38 and the transcriptomic reference GenCode Comprehensive Gene Annotation v27. Alignment is performed using HISAT2, a fast-paced, cost-efficient tool; gene expression is quantified using the RSEM algorithm. The overall schematic is shown below. +Choice of appropriate reference sequences and annotations are critical for optimizing the interpretation of reads as transcriptomic features. Currently this pipeline uses the genomic reference sequence GRCh38 and the transcriptomic reference GenCode Comprehensive Gene Annotation v27. Alignment is performed using HISAT2, a fast-paced, cost-efficient tool; gene expression is quantified using the RSEM algorithm. The overall schematic is shown below. ![Smart seq2](_images/smart-seq2-diagram.png) ### Quality Control Module -To assess the quality of the input data, this module uses a pre-constructed index of species reference information: GRCh38, GENCODE Annotation v27, and dbSNP150 ([see code for more details](https://github.com/broadinstitute/warp/blob/master/pipelines/skylab/smartseq2_single_sample/SmartSeq2SingleSample.wdl). 
HISAT2 is used to perform a graph-based alignment of sample data to the reference genome to determine the presence of non-transcript sequences and true transcript sequences, taking into account the presence of single-nucleotide polymorphisms (based on dbSNP150). The output is a BAM file. Quality control measurements are then calculated using [Picard tools](http://broadinstitute.github.io/picard/), command line tools used for working with high-throughput sequencing data. This pipeline uses a number of these tools, but the main modules are listed below. Follow the link for a detailed explanation of each tool; a more detailed table of our QC metrics is in the [QC Metrics](/pipelines/qc-metrics) guide. +To assess the quality of the input data, this module uses a pre-constructed index of species reference information: GRCh38, GENCODE Annotation v27, and dbSNP150 ([see code for more details](https://github.com/broadinstitute/warp/blob/master/pipelines/skylab/smartseq2_single_sample/SmartSeq2SingleSample.wdl)). HISAT2 is used to perform a graph-based alignment of sample data to the reference genome to determine the presence of non-transcript sequences and true transcript sequences, taking into account the presence of single-nucleotide polymorphisms (based on dbSNP150). The output is a BAM file. Quality control measurements are then calculated using [Picard tools](http://broadinstitute.github.io/picard/), command line tools used for working with high-throughput sequencing data. This pipeline uses a number of these tools, but the main modules are listed below. Follow the link for a detailed explanation of each tool; a more detailed table of our QC metrics is in the [QC Metrics](/pipelines/qc-metrics) guide. + * [CollectAlignmentSummaryMetrics](http://broadinstitute.github.io/picard/command-line-overview.html#CollectAlignmentSummaryMetrics) - The quality of the read alignments and the proportion of reads that passed signal-to-noise threshold filters. 
* [CollectRnaSeqMetrics](http://broadinstitute.github.io/picard/command-line-overview.html#CollectRnaSeqMetrics) - Distribution of bases within the transcripts, as well as the median depth, ratio of 5 prime/3 prime biases, and the numbers of reads with correct or incorrect strand designation. * [MarkDuplicates](http://broadinstitute.github.io/picard/command-line-overview.html#MarkDuplicatesWithMateCigar) - Duplicate reads that originate from the same fragment of DNA are identified and tagged. * [InsertSizeMetrics](https://broadinstitute.github.io/picard/picard-metric-definitions.html#InsertSizeMetrics) - Metrics about the insert size distribution of a paired-end library. - ### Transcriptome Quantification Module -This second module uses RSEM (RNA-Seq by Expectation Maximization) to quantify the transcript abundance. RSEM uses a statistical model that accounts for the uncertainties of read mapping, as RNA-Seq reads do not always map uniquely to a single gene. Using a pre-constructed transcriptome index created from GRCh38 and GENCODE Annotation v27, HISAT2 aligns the test data with the reference transcriptome and a BAM file of aligned data is generated. The RSEM program rsem-calculate-expression is then used to estimate gene/isoform expression levels, resulting in an output file including expected_counts, TPM (Transcripts Per Million), or FPKM (Fragments Per Kilobase of transcript per Million mapped reads). +This second module uses RSEM (RNA-Seq by Expectation Maximization) to quantify the transcript abundance. RSEM uses a statistical model that accounts for the uncertainties of read mapping, as RNA-Seq reads do not always map uniquely to a single gene. Using a pre-constructed transcriptome index created from GRCh38 and GENCODE Annotation v27, HISAT2 aligns the test data with the reference transcriptome and a BAM file of aligned data is generated. 
The RSEM program rsem-calculate-expression is then used to estimate gene/isoform expression levels, resulting in an output file including expected_counts, TPM (Transcripts Per Million), or FPKM (Fragments Per Kilobase of transcript per Million mapped reads). -## Try the Smart-seq2 Pipleine in Terra +## Try the Smart-seq2 Pipeline in Terra The Smart-seq2 pipeline is currently available on the cloud-based platform [Terra](https://app.terra.bio). If you have a Terra account, you can access the Featured Workspace using this address: https://app.terra.bio/#workspaces/featured-workspaces-hca/HCA_Optimus_Pipeline. The workspace is preloaded with instructions and sample data. For more information on using the Terra platform, please view the [Support Center](https://support.terra.bio/hc/en-uso). ## Versioning + All Smart-seq2 Multi Sample workflow versions are detailed in the [Smart-seq2 Multi Sample Changelog](https://github.com/broadinstitute/warp/blob/master/pipelines/skylab/smartseq2_multisample/MultiSampleSmartSeq2.changelog.md) in GitHub. > This documentation applies to Smart-seq2 Multi Sample v2.1.4 and later. If you are working with data processed with a previous version, please check the [Smart-seq2 Multi Sample Changelog](https://github.com/broadinstitute/warp/blob/master/pipelines/skylab/smartseq2_multisample/MultiSampleSmartSeq2.changelog.md) for any data processing changes that may be applicable to your data. 
diff --git a/content/privacy/privacy/lungmap-privacy.md b/content/privacy/privacy/lungmap-privacy.md index 574626d53..d7227cab1 100644 --- a/content/privacy/privacy/lungmap-privacy.md +++ b/content/privacy/privacy/lungmap-privacy.md @@ -5,7 +5,7 @@ path: "/privacy/privacy/lungmap-privacy" title: "Privacy" --- -#Privacy Notice for LungMAP Data Browser Public Website +# Privacy Notice for LungMAP Data Browser Public Website This Privacy Notice explains what personal data is collected by the specific service you are requesting, for what purposes, how it is processed, and how we keep it secure. Note that this service collects personal data directly provided by the user, and also collects personal data from users that are provided by other organizations. @@ -13,18 +13,18 @@ This service is operated by the University of California, Santa Cruz in collabor The privacy statement of the University of California, Santa Cruz is listed below. Please see https://app.terra.bio/#privacy for the Broad Institute’s privacy statement. - - ## The University of California Statement of Privacy Practices + General Data Protection Regulation LungMAP - Data Browser ### 1. Transparency Regarding the Use of Your Personal Data -As part of our commitment to protecting your privacy, this statement is designed to provide you with information regarding how the LungMAP Data Browser, part of UCSC Genomics Institute, collects and processes the information you share when you use our website located at , and each of its associated domains (together, the "Sites"), utilize the services of ours which include API, GUI, and CLI, or when you otherwise communicate with LungMAP Data Browser (“LungMAP Data Browser] Services”). 
+As part of our commitment to protecting your privacy, this statement is designed to provide you with information regarding how the LungMAP Data Browser, part of UCSC Genomics Institute, collects and processes the information you share when you use our website located at , and each of its associated domains (together, the "Sites"), utilize the services of ours which include API, GUI, and CLI, or when you otherwise communicate with LungMAP Data Browser (“LungMAP Data Browser Services”). This statement is applicable to individuals using LungMAP Data Browser Services who are located in the European Economic Area (“EEA”). ### 2. General Data Protection Regulation (“GDPR”) + For purposes of the General Data Protection Regulation (“GDPR”), the data controller is the Regents of the University of California, with a location at 1156 High Street, Santa Cruz, CA 95064. ### 3. Your Personal Data We Use @@ -137,12 +137,10 @@ UC may be obligated to retain your Personal Data as required by U.S. federal or If you wish to exercise your rights, you can contact the UC Privacy Official identified below. -You may choose not to visit or use UC Sites or participate in LungMAP Data Browser Services. If you choose not to share your Personal Data with UC or UC-approved third parties for LungMAP Data Browser Services your site usage will not be tracked and you will not be able to log in to view controlled-access data. You will still be able to view and access open-access data. You may choose to set your web browser to refuse cookies or to alert you when cookies are being sent. If cookies are turned off the portal and browser will continue to function however Google Analytics tracking will not function. +You may choose not to visit or use UC Sites or participate in LungMAP Data Browser Services. 
If you choose not to share your Personal Data with UC or UC-approved third parties for LungMAP Data Browser Services your site usage will not be tracked and you will not be able to log in to view controlled-access data. You will still be able to view and access open-access data. You may choose to set your web browser to refuse cookies or to alert you when cookies are being sent. If cookies are turned off the portal and browser will continue to function however Google Analytics tracking will not function. ### 11. Questions and Complaints; UC Privacy Official If you have questions or complaints about our treatment of your Personal Data, or about our privacy practices more generally, please feel free to contact the UCSC Privacy Official: or . -**Effective Date**: This statement is effective as of 07/01/2021. - - +**Effective Date**: This statement is effective as of 07/01/2021. diff --git a/content/privacy/privacy/privacy.md b/content/privacy/privacy/privacy.md index 27e5711fd..d9e9a7593 100644 --- a/content/privacy/privacy/privacy.md +++ b/content/privacy/privacy/privacy.md @@ -5,7 +5,7 @@ path: "/privacy/privacy/privacy" title: "Privacy" --- -#Privacy Notice for Human Cell Atlas Data Portal Public Website +# Privacy Notice for Human Cell Atlas Data Portal Public Website This Privacy Notice explains what personal data is collected by the specific service you are requesting, for what purposes, how it is processed, and how we keep it secure. Note that this service collects personal data directly provided by the user, and also collects personal data from users that is provided by other organizations. @@ -13,18 +13,18 @@ This service is operated by the University of California, Santa Cruz in collabor The privacy statement of the University of California, Santa Cruz is listed below. Please see https://app.terra.bio/#privacy for the Broad Institute’s privacy statement. 
- - ## The University of California Statement of Privacy Practices - General Data Protection Regulation Human Cell Atlas (HCA) - Data Portal + +General Data Protection Regulation Human Cell Atlas (HCA) - Data Portal ### 1. Transparency Regarding the Use of Your Personal Data -As part of our commitment to protecting your privacy, this statement is designed to provide you with information regarding how the HCA Data Portal, part of UCSC Genomics Institute, collects and processes the information you share when you use our website located at , and each of its associated domains (together, the "Sites"), utilize the services of ours which include API, GUI and CLI, or when you otherwise communicate with HCA Data Portal (“HCA Data Portal] Services”). - - This statement is applicable to individuals using HCA Data Portal Services who are located in the European Economic Area (“EEA”). +As part of our commitment to protecting your privacy, this statement is designed to provide you with information regarding how the HCA Data Portal, part of UCSC Genomics Institute, collects and processes the information you share when you use our website located at , and each of its associated domains (together, the "Sites"), utilize the services of ours which include API, GUI and CLI, or when you otherwise communicate with HCA Data Portal (“HCA Data Portal Services”). + +This statement is applicable to individuals using HCA Data Portal Services who are located in the European Economic Area (“EEA”). ### 2. General Data Protection Regulation (“GDPR”) + For purposes of the General Data Protection Regulation (“GDPR”), the data controller is the Regents of the University of California, with a location at 1156 High Street, Santa Cruz, CA 95064. ### 3. 
Your Personal Data We Use @@ -58,10 +58,10 @@ If users login to the service we also collect: We also collect more sensitive information about you, with your explicit consent, where the processing is necessary to meet a legal or regulatory obligation, the processing is in connection with UC establishing, exercising or defending legal claims, or is otherwise expressly permitted by GDPR. This sensitive information includes Aggregated transcriptomic and metadata, as well as individual-level transcriptomic and metadata [donor age, biological sex, disease, and sampled organ]. **Log, Cookie and Device Data**: We also collect log data, which is information collected whenever you visit a website. This log data includes your Internet Protocol address, device type, operating system, browser type and some settings, unique device identifiers, crash data, the date and time of your request, and information about how you used the Service. - - Depending on how you are accessing the Services, we may also use “cookies” (small text files stored by your computer when you visit our website) or similar technologies. We use Google Analytics. Google Analytics uses cookies to help track the users visit to the site. In addition to log and cookie data, we also collect information about the device you’re using to access the Services, including what type of device it is, what operating system you are using, device settings, unique device identifiers and crash data. - - Whether we collect some or all of this information often depends on what type of device you are using and its settings. For example, different types of information are available depending on whether you are using a Mac or a PC, or an iPhone or Android phone. To learn more about what information your device makes available to us, please also check the policies of your device manufacturer or software provider. 
+ +Depending on how you are accessing the Services, we may also use “cookies” (small text files stored by your computer when you visit our website) or similar technologies. We use Google Analytics. Google Analytics uses cookies to help track the users visit to the site. In addition to log and cookie data, we also collect information about the device you’re using to access the Services, including what type of device it is, what operating system you are using, device settings, unique device identifiers and crash data. + +Whether we collect some or all of this information often depends on what type of device you are using and its settings. For example, different types of information are available depending on whether you are using a Mac or a PC, or an iPhone or Android phone. To learn more about what information your device makes available to us, please also check the policies of your device manufacturer or software provider. **Information from Other Sources**: We do not obtain information about you from other sources and we do not combine that information with information we collect from you directly. @@ -114,8 +114,8 @@ Automated decisions are defined as decisions about individuals that are based so HCA Data Portal does not make use of automated decisions or utilizes profiling for any purpose. In certain instances, UC may be required to obtain your consent to make automated decisions or profile. In these instances, UC will inform you of the automated decision-making or profiling, and will request that you affirmatively indicate that you consent to the intended use of your Personal Data for that purpose, prior to the automated decision-making or profiling. - - Where automated decisions are made or profiling is used, affected persons will be given an opportunity to express their views on the automated decision in question and instructions on how such persons can object to, or opt-out of such processing. 
+ +Where automated decisions are made or profiling is used, affected persons will be given an opportunity to express their views on the automated decision in question and instructions on how such persons can object to, or opt-out of such processing. We do not allow others to serve advertisements on our behalf across the Internet and to provide analytics services. @@ -137,11 +137,10 @@ UC may be obligated to retain your Personal Data as required by U.S. federal or If you wish to exercise your rights, you can contact the UC Privacy Official identified below. -You may choose not to visit or use UC Sites or participate in HCA Data Portal Services. If you choose not to share your Personal Data with UC or UC-approved third parties for HCA Data Portal Services your site usage will not be tracked and you will not be able to login to view controlled-access data. You will still be able to view and access open-access data.. You may choose to set your web browser to refuse cookies or to alert you when cookies are being sent. If cookies are turned off the portal and browser will continue to function however Google Analytics tracking will not function. +You may choose not to visit or use UC Sites or participate in HCA Data Portal Services. If you choose not to share your Personal Data with UC or UC-approved third parties for HCA Data Portal Services your site usage will not be tracked and you will not be able to login to view controlled-access data. You will still be able to view and access open-access data. You may choose to set your web browser to refuse cookies or to alert you when cookies are being sent. If cookies are turned off the portal and browser will continue to function however Google Analytics tracking will not function. ### 11. Questions and Complaints; UC Privacy Official If you have questions or complaints about our treatment of your Personal Data, or about our privacy practices more generally, please feel free to contact the UCSC Privacy Official: or . 
-**Effective Date**: This statement is effective as of 09/14/2020. - +**Effective Date**: This statement is effective as of 09/14/2020. diff --git a/content/releases/documentation/2020-mar/acknowledgements.md b/content/releases/documentation/2020-mar/acknowledgements.md index bac80b62c..11818a01d 100644 --- a/content/releases/documentation/2020-mar/acknowledgements.md +++ b/content/releases/documentation/2020-mar/acknowledgements.md @@ -1,9 +1,9 @@ --- -path: "/releases/documentation/2020-mar/acknowledgements" date: "2020-03-31" -title: "Acknowledgements" description: "This release represents the work of many people, the dataset contributors who provided both the raw data and the cell type annotation, the release working group who assembled the release data, ran the clustering pipelines and produced the DCP interfaces and the DCP implementation team who built the DCP." draft: true +path: "/releases/documentation/2020-mar/acknowledgements" +title: "Acknowledgements" --- # Acknowledgements @@ -11,7 +11,9 @@ draft: true This release represents the work of many people, the dataset contributors who provided both the raw data and the cell type annotation, the release working group who assembled the release data, ran the clustering pipelines and produced the DCP interfaces and the DCP implementation team who built the DCP. 
## Dataset Contributors + ### A single-cell transcriptome atlas of the adult human retina + Lyujie Fang, Centre for Eye Research Australia (Experimental Scientist)\ Mark C Gillies, University of Sydney (Principal Investigator)\ Ulrike Grünert, University of Sydney (Experimental Scientist)\ @@ -37,8 +39,8 @@ Raymond CB Wong, Centre for Eye Research Australia (Principal Investigator)\ Ting Zhang, University of Sydney (Experimental Scientist)\ Ling Zhu, University of Sydney (Experimental Scientist) - ### Dissecting the human liver cellular landscape by single cell RNA-seq reveals novel intrahepatic monocyte/ macrophage populations + Oyedele Adeyi, University Health Network (Pathologist)\ Gary D Bader, University of Toronto (Principal Investigator)\ Agata M Bartczak, University Health Network (Experimental Scientist)\ @@ -74,8 +76,8 @@ William G Sullivan, University of California Santa Cruz (Data Curator)\ Michael D Wilson, Hospital for Sick Children (Co-investigator)\ Neil Winegarden, University Health Network (Co-investigator) - ### A single-cell reference map of transcriptional states for human blood and tissue T cell activation + Erin C Bush, Columbia University Irving Medical Center\ Yim Ling Cheng, Columbia University Irving Medical Center\ Pranay Dogra, Columbia University Irving Medical Center\ @@ -90,26 +92,26 @@ Puspa Thapa, Columbia University Irving Medical Center\ Chris Villarreal, University of California, Santa Cruz (Data Curator)\ Jinzhou Yuan, Columbia University Irving Medical Center - ### Spatio-temporal immune zonation of the human kidney + Sam Behjati, Wellcome Sanger Institute (Principal Investigator)\ -Menna Clatworthy, Department of Medicine, University of Cambridge (Principal Investigator)\ -John Ferdinand, Department of Medicine, University of Cambridge (Computational Scientist)\ +Menna Clatworthy, Department of Medicine, University of Cambridge (Principal Investigator)\ +John Ferdinand, Department of Medicine, University of Cambridge 
(Computational Scientist)\ Muzlifah Haniffa, University of Newcastle (Principal Investigator)\ Tom Mitchell, Wellcome Sanger Institute (Clinician)\ Enrique Sapena Ventura, European Bioinformatics Institute (Data Curator)\ -Benjamin Stewart, Department of Medicine, University of Cambridge (Computational Scientist)\ +Benjamin Stewart, Department of Medicine, University of Cambridge (Computational Scientist)\ Sarah Teichmann, Wellcome Sanger Institute (Principal Investigator)\ Matthew Young, Wellcome Sanger Institute (Computational Scientist) - ### Single cell transcriptome analysis of human pancreas reveals transcriptional signatures of aging and somatic mutation patterns. + Martin Enge, Stanford University\ Matthew Green, EMBL-EBI European Bioinformatics Institute (Data Curator)\ Laura Huerta, EMBL-EBI (Data Curator) - ### Structural Remodeling of the Human Colonic Mesenchyme in Inflammatory Bowel Disease + David Ahern, University of Oxford\ Agne Antanaviciute, University of Oxford\ Neil Ashley, University of Oxford\ @@ -132,15 +134,15 @@ Eshita Sharma, University of Oxford\ Alison Simmons, University of Oxford\ Quin Wills, Novo Nordisk Research Centre Oxford - ### Profiling of CD34+ cells from human bone marrow to understand hematopoiesis + Vaidotas Kiseliovas, Sloan Kettering Institute\ Parisa Nejad, University of California, Santa Cruz (Data Curator)\ Dana Pe'er, Sloan Kettering Institute (Principal Investigator)\ Manu Setty, Sloan Kettering Institute +### Single-cell RNA-seq analysis throughout a 125-day differentiation protocol that converted H1 human embryonic stem cells to a variety of ventrally-derived cell types. -### Single-cell RNA-seq analysis throughout a 125-day differentiation protocol that converted H1 human embryonic stem cells to a variety of ventrally-derived cell types. 
Trygve Bakken, The Allen Institute for Brain Science\ Susan Bort, The Allen Institute for Brain Science\ Jennie Close, The Allen Institute for Brain Science\ @@ -168,8 +170,8 @@ Jonathan Ting, The Allen Institute for Brain Science\ Abigail Wall, The Allen Institute for Brain Science\ Zizhen Yao, The Allen Institute for Brain Science - ### Census of Immune Cells + Orr Ashenberg, Broad Institute\ Danielle Dionne, Broad Institute\ Mallory Ann Freeberg, EMBL-EBI (Data Curator)\ @@ -185,8 +187,8 @@ Timothy Tickle, Broad Institute\ Julia Waldman, Broad Institute\ Danielle Welter, EMBL-EBI (Data Curator) - ### Ischaemic sensitivity of human tissue by single cell RNA seq + Tracey Andrew, Wellcome Sanger Institute (Administrator)\ Liam Bolt, Wellcome Sanger Institute (Experimental Scientist)\ John R Ferdinand, University of Cambridge (Experimental Scientist)\ @@ -213,8 +215,8 @@ Anthi Tsingene, Wellcome Sanger Institute (Experimental Scientist)\ Danielle Welter, EMBL-EBI (Data Curator)\ Anna Wilbrey-Clark, Wellcome Sanger Institute (Experimental Scientist) - ### Reconstructing the human first trimester fetal-maternal interface using single cell transcriptomics + Zinaida A Perova, EMBL-EBI (Data Curator)\ Mirjana Efremova, Wellcome Trust Sanger Institute\ Mallory Ann Freeberg, EMBL-EBI (Data Curator)\ @@ -225,25 +227,25 @@ Ashley Moffett, University of Cambridge (Co-investigator)\ Sarah A Teichmann, Wellcome Trust Sanger Institute (Principal Investigator)\ Roser Vento-Tormo, Wellcome Trust Sanger Institute - ### Single cell profiling of human induced dendritic cells generated by direct reprogramming of embryonic fibroblasts + Ilia Kurochkin, Skolkovo Institute of Science and Technology (Computational Scientist)\ Filipe Pereira, Lund University (Principal Investigator)\ Cristiana Pires, Lund University (Experimental Scientist)\ Fábio Rosa, Lund University (Experimental Scientist)\ Enrique Sapena Ventura, European Bioinformatics Institute (Data Curator) - - ## DCP Release 
Working Group + Nick Barkas2, Laura Clarke1, Kylee Degatano2, Trevor Heathorn3, Elizabeth Kiernan2, Bo Li4, John Marioni1, Nikelle Petrillo1, Zinaida Perova1, Dave Rogers3, Hannes Schmidt3, Marion Shadbol1, Kathleen Tibbetts2, Timothy Tickle2, Yiming Yang4, Galabina Yordanova1, Jing Zhu3 ### Single Cell Portal Team + Eno-Abasi Augustine-Akpan2, Jonathan Bistline2, Devon Bush2, Jean Chang2, Vicky Li Horst2, Christine Loreth2, Jared Nedzel2, Eric Weitz2 ### DCP Implementation team -Alegria Aclan1, Rhian Anthony2, Galt Barber3, Michael Baumann2, David Bernick2, Lon Blauvelt3, Jesse Brennan3, Tony Burdett1, Ambrose Carr6, Jonathan Casper3, Abraham Chavez3, T.J. Chen6, J. Michael Cherry5, Jenny Chien6, Justin Clark-Casey2, Jonah Cool6, Ami Day1, Mark Diekhans3, Albano Drazhi2, Madison Dunitz6, Saman Ehsan2, Oihane Fano Bilbao1, Rolando Fernandez1, Henry Ferrara2, Javier Ferrer Gomez1, Clay Fischer3, Mallory Freeberg1, Jeremy Freeman6, Deep Ganguli6, Rodrey Mark Goite1, Maximillian Haeussler3, Genevieve Halliburton6, Brian Hannafious3, Miriam Hastie3, Jason Hilton5, Jodi Hirschman2, Amar Jandu3, Simon Jupp1, Janki Kaneria2, Jim Kent3, Marcus Kinsella6, Andrey Kislyuk6, Kishori Konwar2, Lila Lamar2, Arathi Mani6, Bruce Martin6, Cara Mason2, Fran McDade3, Norman Morrison1, Marcio Von Muhlen6, Paris Nejad3, Calvin Nhieu6, Brian O'Connor2, Kevin Osborn3, David Osumi-Sutherland1, Helen Parkinson1, Benedict Paten1, Geryl Pelayo3, Anthony Philippakis2, Sam Pierson6, Brian Raymor6, Charles Reid3, Jason Rose2, Enrique Sapena Ventura1, Mark Sarcevicz2, Samantha Scovanner6, Gavin Schuette2, Parth Shah6, Phil Shapiro2, David Shiga2, Trent Smith6, Daniel Sotirhos2, Matthew Speir2, Alexie Staffer1, Ray Stefancsik1, J. 
Seth Strattan5, William Sullivan2, Adrienne Sussman6, Sarah Tahiri2, Prabhat Totoo1, Fabien Traquet2, Tony Tung6, Chris Villarreal3, Chengchen (Rex) Wang2, Matthew Weiden6, Dani Welter1, Jishu Xu2, Jennifer Zamanian5 +Alegria Aclan1, Rhian Anthony2, Galt Barber3, Michael Baumann2, David Bernick2, Lon Blauvelt3, Jesse Brennan3, Tony Burdett1, Ambrose Carr6, Jonathan Casper3, Abraham Chavez3, T.J. Chen6, J. Michael Cherry5, Jenny Chien6, Justin Clark-Casey2, Jonah Cool6, Ami Day1, Mark Diekhans3, Albano Drazhi2, Madison Dunitz6, Saman Ehsan2, Oihane Fano Bilbao1, Rolando Fernandez1, Henry Ferrara2, Javier Ferrer Gomez1, Clay Fischer3, Mallory Freeberg1, Jeremy Freeman6, Deep Ganguli6, Rodrey Mark Goite1, Maximillian Haeussler3, Genevieve Halliburton6, Brian Hannafious3, Miriam Hastie3, Jason Hilton5, Jodi Hirschman2, Amar Jandu3, Simon Jupp1, Janki Kaneria2, Jim Kent3, Marcus Kinsella6, Andrey Kislyuk6, Kishori Konwar2, Lila Lamar2, Arathi Mani6, Bruce Martin6, Cara Mason2, Fran McDade3, Norman Morrison1, Marcio Von Muhlen6, Paris Nejad3, Calvin Nhieu6, Brian O'Connor2, Kevin Osborn3, David Osumi-Sutherland1, Helen Parkinson1, Benedict Paten1, Geryl Pelayo3, Anthony Philippakis2, Sam Pierson6, Brian Raymor6, Charles Reid3, Jason Rose2, Enrique Sapena Ventura1, Mark Sarcevicz2, Samantha Scovanner6, Gavin Schuette2, Parth Shah6, Phil Shapiro2, David Shiga2, Trent Smith6, Daniel Sotirhos2, Matthew Speir2, Alexie Staffer1, Ray Stefancsik1, J. 
Seth Strattan5, William Sullivan2, Adrienne Sussman6, Sarah Tahiri2, Prabhat Totoo1, Fabien Traquet2, Tony Tung6, Chris Villarreal3, Chengchen (Rex) Wang2, Matthew Weiden6, Dani Welter1, Jishu Xu2, Jennifer Zamanian5 ### Affiliations 1 European Bioinformatics Institute diff --git a/content/releases/documentation/2020-mar/feedback.md b/content/releases/documentation/2020-mar/feedback.md index ee85cd4ea..adcc90060 100644 --- a/content/releases/documentation/2020-mar/feedback.md +++ b/content/releases/documentation/2020-mar/feedback.md @@ -1,9 +1,9 @@ --- -path: "/releases/documentation/2020-mar/feedback" date: "2020-03-31" -title: "Feedback" description: "The Human Cell Atlas Data Coordination Platform team needs your feedback to help us improve and refine the DCP data release processes and content." draft: true +path: "/releases/documentation/2020-mar/feedback" +title: "Feedback" --- # Release Feedback @@ -11,4 +11,3 @@ draft: true The Human Cell Atlas Data Coordination Platform team needs your feedback to help us improve and refine the DCP data release processes and content. Please email us at [data-help@humancellatlas.org](mailto:data-help@humancellatlas.org) to ask questions or report issues. - diff --git a/content/releases/documentation/2020-mar/methods.md b/content/releases/documentation/2020-mar/methods.md index cbdb52f43..5baff5d32 100644 --- a/content/releases/documentation/2020-mar/methods.md +++ b/content/releases/documentation/2020-mar/methods.md @@ -1,18 +1,19 @@ --- -path: "/releases/documentation/2020-mar/methods" date: "2020-03-31" -title: "Methods" description: "This document details the Human Cell Atlas (HCA) Data Coordination Platform’s (DCP) methods for cell clustering, differential expression analyses, and data visualization used in the HCA March 2020 Data Release." 
draft: true +path: "/releases/documentation/2020-mar/methods" +title: "Methods" --- # March 2020 Release Methods -## Overview -This document details the Human Cell Atlas (HCA) Data Coordination Platform’s (DCP) methods for cell clustering, differential expression analyses, and data visualization used in the HCA March 2020 Data Release. Overall, 12 individual DCP projects were stratified into 23 datasets by organ, developmental stage and sample processing technology. Gene matrices for each dataset were uploaded into the cloud-based platform [Terra](https://app.terra.bio/) and analyzed using Cumulus (v0.13.0), a single-cell analysis workflow ([Li et al. 2019](https://www.biorxiv.org/content/10.1101/823682v1)). All Release files are available for download on the main March 2020 Release page. +## Overview +This document details the Human Cell Atlas (HCA) Data Coordination Platform’s (DCP) methods for cell clustering, differential expression analyses, and data visualization used in the HCA March 2020 Data Release. Overall, 12 individual DCP projects were stratified into 23 datasets by organ, developmental stage and sample processing technology. Gene matrices for each dataset were uploaded into the cloud-based platform [Terra](https://app.terra.bio/) and analyzed using Cumulus (v0.13.0), a single-cell analysis workflow ([Li et al. 2019](https://www.biorxiv.org/content/10.1101/823682v1)). All Release files are available for download on the main March 2020 Release page. ## Project stratification and gene matrix preparation + The March 2020 Release includes all human DCP projects that were processed with DCP standardized pipelines (Optimus or Smart-seq2). Each project was stratified into individual datasets by organ and when applicable, by developmental stage (adult or fetal) or by sample processing technology (10x or Smart-seq2). 
For each Release dataset, gene matrices in loom format were obtained by filtering projects on the DCP Data Browser using the stratification criteria above. | What is in the gene matrix? | @@ -20,88 +21,91 @@ The March 2020 Release includes all human DCP projects that were processed with | The content of the gene matrix depends on the sequencing technology of the dataset. The gene matrix for datasets processed with the 10x technology contain gene counts. The matrix for datasets processed with Smart-seq2 technology contain RSEM TPMs. All matrices also include important metadata, such as sample processing and organ information. | #### Gene matrix corrections -The metadata in each gene matrix file was modified to include new ontology labels and corrections to existing ontology labels. Additionally, due to a processing error, all EmptyDrops output was removed from files produced with the Optimus pipeline. This has been corrected and EmptyDrops will be available in future releases. All updated gene matrix files (loom format) used for the March 2020 Release are available for download under the Release Files column of the March 2020 Release -page. -**Please note that the March 2020 Release datasets were not corrected for batch effects.** Discrepancies may exist between published datasets and the March 2020 Release datasets. +The metadata in each gene matrix file was modified to include new ontology labels and corrections to existing ontology labels. Additionally, due to a processing error, all EmptyDrops output was removed from files produced with the Optimus pipeline. This has been corrected and EmptyDrops will be available in future releases. All updated gene matrix files (loom format) used for the March 2020 Release are available for download under the Release Files column of the March 2020 Release page. 
+**Please note that the March 2020 Release datasets were not corrected for batch effects.** Discrepancies may exist between published datasets and the March 2020 Release datasets. ## Dataset IDs + Each dataset was given a unique ID with a “2020-Mar...” prefix. All Dataset IDs are listed on the March 2020 Release page in the “Dataset” column. This Dataset ID was used to name all input and output files relevant to each dataset. ## Terra workspace preparation + Each Release dataset was analyzed in individual workspaces in the cloud-based platform [Terra](https://app.terra.bio). The Cumulus workflow ([Snapshot 14](https://portal.firecloud.org/#methods/cumulus/cumulus/14/wdl)) was imported from the Broad Methods Repository into each Terra workspace. Each workspace links to a workspace-specific Google bucket (WORKSPACE_BUCKET); each dataset’s gene matrix (loom format) was uploaded to the Google bucket. Throughout the Cumulus workflow, the cloud path to the Google bucket was used to specify the name of each dataset’s input and output files (see an example in the [Global Inputs section](#global-inputs)). ## Cumulus workflow + The Cumulus workflow was used to perform cell clustering, differential expression analyses, and plotting using each dataset’s gene matrix (loom format) as input. More information about Cumulus can be found in the [main documentation](https://cumulus.readthedocs.io/en/0.13.0/cumulus.html). Additionally, you can view the Cumulus workflow used for these analyses in the [Broad Methods Repository](https://portal.firecloud.org/#methods/cumulus/cumulus/14/wdl) or on [GitHub](https://github.com/klarman-cell-observatory/cumulus/blob/c937a832718aacbe75a0fdbca9cde682c48e2407/workflows/cumulus/cumulus.wdl). ### Parameters + All parameters are detailed in the [Cumulus documentation](https://cumulus.readthedocs.io/en/latest/cumulus.html#run-cumulus-analysis). 
Any unspecified Cumulus parameters (not listed in the tables below) were set to default attributes listed in the documentation. To run the Cumulus workflow in Terra, these parameters were specified using a workspace configuration file (JSON format) which was uploaded directly to each workspace (see an [example JSON](_downloads/Example.JSON) file used for the March 2020 Release). #### Global inputs + The table below details the attributes for Cumulus input files, output files, and the CPUs that were used for analyses in Terra. -| Input name | Description | Attribute | +| Input name | Description | Attribute | | --- | --- | --- | | `Input_file` | String location of the google cloud bucket hosting the DCP dataset gene matrix (loom format) | `“gs://WORKSPACE_BUCKET”` | | `Num_cpu` | Number of CPUs recommended for the analysis | 8 | -| `Output_name` | String describing cloud path to an outpath folder | `"gs://WORKSPACE_BUCKET/output/Dataset ID”` |\ - - +| `Output_name` | String describing cloud path to an outpath folder | `"gs://WORKSPACE_BUCKET/output/Dataset ID”` | #### Clustering + The Cumulus workflow was set to cluster cells using the Louvain method, a modularity-based community detection algorithm ([Li et al. 2019]( https://www.biorxiv.org/content/10.1101/823682v1.full)). The following table lists all the Cumulus workflow parameters used for cell clustering and dimensionality reduction. -| Input name | Description | Attribute | +| Input name | Description | Attribute | | --- | --- | --- | | `channel` | Specifies the sample ID in the input dataset | `“cell_suspension.provenance.document_id”` | | `considered_refs` | Will read all groups from reference genome` | GRCh38 | | `output_filtration_results` | Output the filtration results | true | | `output_loom` | Output the loom file | true | | `output_seurat_compatible` | Generate Seurat-compatible h5ad file. Caution: File size might be large, do not turn this option on for large data sets. 
| true | -| `plot_filtration_results` | Plot the filtration results | true -| `run_diffmap` | Run a diffusion map for visualization | true | +| `plot_filtration_results` | Plot the filtration results | true | +| `run_diffmap` | Run a diffusion map for visualization | true | | `run_fitsne` | Run a FFT-accelerated Interpolation-based t-SNE (FItSNE) for visualization | true | | `run_fle` | Run force-directed layout embedding (FLE) for visualization | true | | `run_louvain` | Run Louvain clustering algorithm | true | | `run_umap` | Run umap for visualization | true | -| `max_genes` | Only keep cells with less than of genes. This is set higher than the default parameter to avoid filtering cells | 15,000 | -| `percent_mito` | Only keep cells with mitochondrial ratio less than % of total counts. This is set lower than default parameters to avoid filtering cells. | 5.0 | \ - +| `max_genes` | Only keep cells with less than of genes. This is set higher than the default parameter to avoid filtering cells | 15,000 | +| `percent_mito` | Only keep cells with mitochondrial ratio less than % of total counts. This is set lower than default parameters to avoid filtering cells. | 5.0 | #### Differential expression + Differential expression analyses were carried out using the statistical tests specified in the table below. False Discovery Rates were calculated using the Benjamini-Hochberg procedure with a default alpha set to 0.05. For each test, gene expression within a specified louvain cluster was compared to the average of all other clusters. 
-| Input name | Description | Attribute | +| Input name | Description | Attribute | | --- | --- | --- | | `auc` | Calculates area under the curve | true | | `Fisher` | Calculate fisher exact test | true | | `mwu` | Calculate Mann-Whitney U | true | | `perform_de_analysis` | Perform differential expression analyses | true | -| `t_test` | Calculate Welch’s t-test | true |\ +| `t_test` | Calculate Welch’s t-test | true | #### Visualization (plotting) + To visualize cell clusters, multiple low-dimension embeddings were generated using the Cumulus parameters below. The attributes for visualization depended on sequencing technology (10x vs. Smart-seq2), as described in the table. | Input name | Description | 10x Attributes | Smart-seq2 Attributes | | --- | --- | --- | --- | -| `plot_fitsne` | Create a FFT-accelerated Interpolation-based t-SNE (FItSNE)-like plot according to “attribute, “attribute...” | “louvain_labels,Channel” | “louvain_labels” | -| `plot_fle` | Create a Force-directed Layout Embedding (FLE)-like plot according to “attribute, “attribute...” | “louvain_labels,Channel” | “louvain_labels” | -| `plot_umap` | Create a uniform manifold approximation and projection (UMAP)-like plot | "louvain_labels,Channel” | "louvain_labels” |\ - +| `plot_fitsne` | Create a FFT-accelerated Interpolation-based t-SNE (FItSNE)-like plot according to “attribute, “attribute...” | “louvain_labels,Channel” | “louvain_labels” | +| `plot_fle` | Create a Force-directed Layout Embedding (FLE)-like plot according to “attribute, “attribute...” | “louvain_labels,Channel” | “louvain_labels” | +| `plot_umap` | Create a uniform manifold approximation and projection (UMAP)-like plot | "louvain_labels,Channel” | "louvain_labels” | +#### Generating Single Cell Portal compatible outputs: -#### Generating Single Cell Portal compatible outputs: Single Cell Portal compatible outputs were generated with the following parameters. 
The resulting files were used to create interactive [Single Cell Portal studies](https://singlecell.broadinstitute.org/single_cell?scpbr=human-cell-atlas-march-2020-release). -| Input name | Description | Attribute | +| Input name | Description | Attribute | | --- | --- | --- | -| `generate_scp_outputs` | Generate outputs compatible with Single-Cell Portal | true | -| `output_dense` | Boolean describing if outputs should be in dense format | false |\ - +| `generate_scp_outputs` | Generate outputs compatible with Single-Cell Portal | true | +| `output_dense` | Boolean describing if outputs should be in dense format | false | ## Cumulus output files + The following table describes all Cumulus output files, including unannotated, normalized expression matrices. Files with a “.scp” demarcation are only needed to create Single Cell Portal studies and can be found on the Single Cell Portal study page (see the [Single Cell Portal HCA Release Page](https://singlecell.broadinstitute.org/single_cell?scpbr=human-cell-atlas-march-2020-release)). @@ -110,21 +114,20 @@ All output file names start with the Dataset ID, the unique ID given to each rel *Important Note*: These files include unannotated expression matrices and are not the final Release files. For descriptions of the final Release files, please see the section [Final March 2020 Release files](#final-march-2020-release-files). 
-| File name | Description | Format | +| File name | Description | Format | | --- | --- | --- | -| `Dataset_ID.de.xlsx` | Output file containing differential expression with correction | XLSX | -| `Dataset_ID.filt.xlsx` | Output file containing filtering information | XLSX | -| `Output/Dataset_ID.loom` | Expression matrix; contains clustering information and log-transformed gene expression | Loom | +| `Dataset_ID.de.xlsx` | Output file containing differential expression with correction | XLSX | +| `Dataset_ID.filt.xlsx` | Output file containing filtering information | XLSX | +| `Output/Dataset_ID.loom` | Expression matrix; contains clustering information and log-transformed gene expression | Loom | | `Output/Dataset_ID.seurat.h5ad` | Seurat-compatible expression matrix; contains clustering information and log-transformed gene expression | h5ad | -| `Dataset_ID.scp.X_diffmap_pca.coords.txt` | Diffusion map coordinates for Single Cell Portal | TXT | -| `Dataset_ID.scp.X_fitsne.coords.txt` | FIt-SNE coordinates for Single Cell Portal | TXT | -| `Dataset_ID.scp.X_fle.coords.txt` | fle cluster coordinates for Single Cell Portal | TXT | -| `Dataset_ID.scp.X_umap.coords.txt` | UMAP cluster coordinates for Single Cel Portal| TXT | -| `Dataset_ID.scp.barcodes.tsv` | 10x compatible barcodes file for Single Cell Portal | TSV | -| `Dataset_ID.scp.features.tsv` | 10x compatible features (genes) file for Single Cell Portal | TSV | +| `Dataset_ID.scp.X_diffmap_pca.coords.txt` | Diffusion map coordinates for Single Cell Portal | TXT | +| `Dataset_ID.scp.X_fitsne.coords.txt` | FIt-SNE coordinates for Single Cell Portal | TXT | +| `Dataset_ID.scp.X_fle.coords.txt` | fle cluster coordinates for Single Cell Portal | TXT | +| `Dataset_ID.scp.X_umap.coords.txt` | UMAP cluster coordinates for Single Cel Portal | TXT | +| `Dataset_ID.scp.barcodes.tsv` | 10x compatible barcodes file for Single Cell Portal | TSV | +| `Dataset_ID.scp.features.tsv` | 10x compatible features (genes) file for 
Single Cell Portal | TSV | | `Dataset_ID.scp.matrix.mtx` | 10x compatible mtx expression file for Single Cell Portal | mtx | -| `Dataset_ID.scp.metadata.txt` | Metadata matrix for Single Cell Portal | TXT |\ - +| `Dataset_ID.scp.metadata.txt` | Metadata matrix for Single Cell Portal | TXT | For more information regarding how cell clustering information is stored in the normalized expression matrix (h5ad and loom format files), please read the [Cumulus Documentation](https://cumulus.readthedocs.io/en/0.13.0/cumulus.html#cluster-outputs). You can also read more about the available differential expression outputs in the [Cumulus DE Outputs documentation](https://cumulus.readthedocs.io/en/0.13.0/cumulus.html#de-analysis-outputs). @@ -137,58 +140,45 @@ Cell type annotations were obtained from the publications listed on each dataset Annotations were added to normalized expression matrices in loom and h5ad formats using [LoomPy](http://loompy.org/) and [SCANPY](https://icb-scanpy.readthedocs-hosted.com/en/stable/), respectively. #### Annotation metadata + Three column attributes were added to each Cumulus output expression matrix (loom and h5ad): + - `annotated_cell_identity.text`: the original cell type labels provided by the project contributor - `annotated_cell_identity.ontology`: the ontology ID - `annotated_cell_identity.ontology_label`: the harmonized cell type label obtained using the specified ontology For visualization in Single Cell Portal, these columns were also added to the file named `Dataset_ID_annotated_v1.scp.metadata.txt`. - ## Final March 2020 Release files + The following table describes the final Release files available in the DCP Release page and in interactive portals. Files with a “.scp” demarcation are only needed to create Single Cell Portal studies and can be found on the Single Cell Portal study page (see the [Single Cell Portal HCA Release Page](https://singlecell.broadinstitute.org/single_cell?scpbr=human-cell-atlas-march-2020-release)). 
All output file names start with the Dataset ID, the unique ID given to each release dataset and is listed on the DCP Release page. -| File name | File location: DCP and/or SCP | Description | Format | +| File name | File location: DCP and/or SCP | Description | Format | | --- | --- | --- | --- | -| `Dataset_ID.loom` | DCP/SCP | Gene matrix file generated with DCP standardized pipelines (Optimus and Smart-seq2) and used as Cumulus input. | Loom | -| `Dataset_ID.de.xlsx` | DCP/SCP | Cumulus output file containing differential expression with correction | XLSX | -| `Dataset_ID.de.CSV.zip` | DCP/SCP | zip of CSV files containing differential expression analyses | CSV | -| `Dataset_ID.filt.xlsx` | DCP/SCP | Cumulus output file containing filtering information | XLSX | -| `Dataset_ID_annoated_v1.loom` | DCP/SCP | Expression matrix generated by Cumulus and annotated using harmonized cell types; contains clustering information, cell annotations, and log-transformed gene expression | Loom | +| `Dataset_ID.loom` | DCP/SCP | Gene matrix file generated with DCP standardized pipelines (Optimus and Smart-seq2) and used as Cumulus input. 
| Loom | +| `Dataset_ID.de.xlsx` | DCP/SCP | Cumulus output file containing differential expression with correction | XLSX | +| `Dataset_ID.de.CSV.zip` | DCP/SCP | zip of CSV files containing differential expression analyses | CSV | +| `Dataset_ID.filt.xlsx` | DCP/SCP | Cumulus output file containing filtering information | XLSX | +| `Dataset_ID_annoated_v1.loom` | DCP/SCP | Expression matrix generated by Cumulus and annotated using harmonized cell types; contains clustering information, cell annotations, and log-transformed gene expression | Loom | | `Dataset_ID.seurat_annotated_v1.h5ad` | DCP/SCP | Seurat compatible expression matrix generated by Cumulus and annotated using harmonized cell types; contains clustering information, cell annotations, and log-transformed gene expression | h5ad | -| `Dataset_ID.scp.X_diffmap_pca.coords.txt` | SCP | Diffusion map coordinates for Single Cell Portal | TXT | -| `Dataset_ID.scp.X_fitsne.coords.txt` | SCP | FIt-SNE coordinates for Single Cell Portal | TXT | -| `Dataset_ID.scp.X_fle.coords.txt` | SCP | fle cluster coordinates for Single Cell Portal | TXT | -| `Dataset_ID.scp.X_umap.coords.txt` | SCP | UMAP cluster coordinates for Single Cel Portal| TXT | -| `Dataset_ID.scp.barcodes.tsv` | SCP | 10x compatible barcodes file for Single Cell Portal | TSV | -| `Dataset_ID.scp.features.tsv` | SCP | 10x compatible features (genes) file for Single Cell Portal | TSV | +| `Dataset_ID.scp.X_diffmap_pca.coords.txt` | SCP | Diffusion map coordinates for Single Cell Portal | TXT | +| `Dataset_ID.scp.X_fitsne.coords.txt` | SCP | FIt-SNE coordinates for Single Cell Portal | TXT | +| `Dataset_ID.scp.X_fle.coords.txt` | SCP | fle cluster coordinates for Single Cell Portal | TXT | +| `Dataset_ID.scp.X_umap.coords.txt` | SCP | UMAP cluster coordinates for Single Cel Portal| TXT | +| `Dataset_ID.scp.barcodes.tsv` | SCP | 10x compatible barcodes file for Single Cell Portal | TSV | +| `Dataset_ID.scp.features.tsv` | SCP | 10x compatible features 
(genes) file for Single Cell Portal | TSV | | `Dataset_ID.scp.matrix.mtx` | SCP | 10x compatible mtx expression file for Single Cell Portal | mtx | -| `Dataset_ID.scp.metadata.txt` | SCP | Metadata matrix for Single Cell Portal | TXT | -`Dataset_ID_annotated_v1.scp.metadata.txt` | SCP | Annotated metadata matrix file for Single Cell Portal | TXT |\ +| `Dataset_ID.scp.metadata.txt` | SCP | Metadata matrix for Single Cell Portal | TXT | +| `Dataset_ID_annotated_v1.scp.metadata.txt` | SCP | Annotated metadata matrix file for Single Cell Portal | TXT | *DCP = Data Coordination Platform; SCP = Single Cell Portal* - -| Note about CSV files | +| Note about CSV files | | :-- | -| The CSV files contain differential expression data. These were generated from the `Dataset_ID.de.xlsx` to enable easier viewing with R or Python. Detailed steps for using these files are listed in the [Working with Release Files](working-with-release-files.md) guide). |\ - +| The CSV files contain differential expression data. These were generated from the `Dataset_ID.de.xlsx` to enable easier viewing with R or Python. Detailed steps for using these files are listed in the [Working with Release Files](working-with-release-files.md) guide. | ## Want to learn more? Techniques for uploading loom and h5ad files into common analysis software are described in the [Working with Release Files](working-with-release-files.md) guide. You can also get hands-on experience with these methods by following the [Replicating the Release Analysis](replicating-the-release-analysis.md) tutorial. For additional details about each individual dataset, visit the March 2020 Release page. 
- - - - - - - - - - - - - diff --git a/content/releases/documentation/2020-mar/overview.md b/content/releases/documentation/2020-mar/overview.md index e28884b80..1c786d76e 100644 --- a/content/releases/documentation/2020-mar/overview.md +++ b/content/releases/documentation/2020-mar/overview.md @@ -1,27 +1,23 @@ --- -path: "/releases/documentation/2020-mar/overview" date: "2020-03-31" -title: "Overview" description: "The March 2020 Data Release includes datasets from all human DCP projects that have been processed using the DCP's standardized pipelines (Optimus or Smart-seq2)." draft: true +path: "/releases/documentation/2020-mar/overview" +title: "Overview" --- # 2020 March Data Release - ## Release Datasets -The March 2020 Data Release -includes datasets from all human DCP projects that have been processed using the DCP's standardized pipelines ([Optimus](/pipelines/optimus-workflow.md) or [Smart-seq2](/pipelines/smart-seq2-workflow.md)). +The March 2020 Data Release includes datasets from all human DCP projects that have been processed using the DCP's standardized pipelines ([Optimus](/pipelines/optimus-workflow.md) or [Smart-seq2](/pipelines/smart-seq2-workflow.md)). Projects were stratified into individual datasets by organ, developmental stage, and sequencing technology, and were further processed using [Cumulus](https://cumulus.readthedocs.io/en/latest/cumulus.html#), a tool for cell clustering, differential expression analyses, and visualization. - + Cell type annotations were taken from publications or project contributor feedback, were harmonized, and then added to analyzed files. To learn more about these processes and get started working with the release, please review the [Methods](/releases/2020-mar/methods.md) documentation, take a [tutorial on how to replicate these analyses](/releases/2020-mar/replicating-the-release-analysis.md) or view tips for [working with release files](/releases/2020-mar/working-with-release-files.md). 
- - ## Release Feedback The HCA DCP is actively seeking feedback for improving the utility of the March 2020 Release data, the documentation and the overall Release presentation. Please see our March 2020 Release [feedback](/releases/2020-mar/feedback.md) page for information on how to contact the HCA DCP team with questions, concerns, or ways we can improve. diff --git a/content/releases/documentation/2020-mar/replicating-the-release-analysis.md b/content/releases/documentation/2020-mar/replicating-the-release-analysis.md index ddb734d32..57c4a4272 100644 --- a/content/releases/documentation/2020-mar/replicating-the-release-analysis.md +++ b/content/releases/documentation/2020-mar/replicating-the-release-analysis.md @@ -1,26 +1,33 @@ --- -path: "/releases/documentation/2020-mar/replicating-the-release-analysis" date: "2018-05-03" -title: "Replicating the Release Analysis" description: "This tutorial covers how to process a gene matrix (in loom format) for clustering, differential expression, and visualization using the Cumulus workflow in a Terra workspace." draft: true +path: "/releases/documentation/2020-mar/replicating-the-release-analysis" +title: "Replicating the Release Analysis" --- -# Replicating the March 2020 Release Analyses +# Replicating the March 2020 Release Analyses + This tutorial covers how to process a gene matrix (in loom format) for clustering, differential expression, and visualization using the Cumulus workflow in a Terra workspace. It uses the same [methods](methods.md) outlined for the DCP March 2020 Release. All projects for the release were divided into individual datasets which are listed on the DCP March 2020 Release page. For this tutorial, we use the sample dataset 2020-Mar-Landscape-Adult-Liver-10x derived from the project "Dissecting the human liver cellular landscape by single cell RNA-seq reveals novel intrahepatic monocyte/ macrophage populations". 
You can explore other datasets and repeat these analyses on additional matrix files. ## Downloading the gene matrix (loom format) for the example dataset + To start your analyses, you will need to download the gene matrix from the DCP. For each dataset, this matrix was generated using a standardized DCP pipeline (Optimus or Smart-seq2). The matrix contains either gene counts (10x samples) or RSEM TPMS (Smart-seq2 datasets), in addition to important metadata, such as specimen and sample processing information. This file will be used as input for the Cumulus workflow. #### 1. Navigate to the DCP March 2020 Release. + #### 2. Find the sample dataset 2020-Mar-Landscape-Adult-Liver-10x in the Liver section. + #### 3. Select “View Files”. + #### 4. Download the gene matrix (2020-Mar-Landscape-Adult-Liver-10x.loom file). ## Creating a Terra workspace + [Terra](https://app.terra.bio/) is a cloud-based platform for bioinformatic analyses. To use Terra, you will need to set up a Google account and Billing project. If you are new to working with Terra, we recommend the following relevant links for getting started: + - [Navigating in Terra](https://support.terra.bio/hc/en-us/articles/360022704371) - [Importing a workflow from Dockstore](https://support.terra.bio/hc/en-us/articles/360039827191) (Coming Soon!) - [Billing](https://support.terra.bio/hc/en-us/articles/360026182251) @@ -28,51 +35,72 @@ To start your analyses, you will need to download the gene matrix from the DCP. If you already have a Terra account, follow these step-by-step instructions for setting up a workspace: #### 1. Go to app.terra.bio and select the View Workspaces card. + #### 2. Select the Create a New Workspace card. -![image](../../_images/01_new_workspace_card_v2.png) +![image](../../_images/01_new_workspace_card_v2.png) #### 3. Fill out a unique workspace name and billing information. + You can optionally add a description of the workspace purpose. 
You can also optionally fill out the authorization domain. If you are unsure about the authorization domain field, you can read more information in [this article](https://support.terra.bio/hc/en-us/articles/360026775691). ![image](../../_images/02_new_workspace_info.png) - + #### 4. Upload the gene matrix (2020-Mar-Landscape-Adult-Liver-10x.loom file) to the workspace Google bucket. + Each Terra workspace you create comes with its own Google bucket. + - 4.1 Go to the Data tab. - 4.2 In the “Other Data” section, select the Files option. -![image](../../_images/Sample.png) + + ![image](../../_images/Sample.png) + - 4.3 Click the plus icon to upload the gene matrix file (2020-Mar-Landscape-Adult-Liver-10x.loom file) to the workspace Google bucket. + If you are already familiar with Terra and cloud computing, you can also transfer files to the workspace Google Bucket using gsutils (see description [here](https://support.terra.bio/hc/en-us/articles/360024056512)). ## Uploading the Cumulus workflow to a Terra workspace + #### 1. Go to the Workflows tab of the Terra workspace. + #### 2. Select the Find a Workflow card. + #### 3. Under “Find Additional Workflows”, select the Broad Methods Repository. + ![image](../../_images/04_Find_Addnl_Workflows.png) - + This will redirect you to the Broad Methods (you will no longer be in Terra). + #### 4. In the search bar, type “Cumulus” to search for the Cumulus workflow. + ![image](../../_images/05_Find_Cumulus_v2.png) + #### 5. Select the “cumulus/cumulus” option. + #### 6. Change the Snapshot to 14. + ![image](../../_images/07_snapshot14_export_v2.png) - + #### 7. Select Export to Workspace. + #### 8. Select Use Blank Configuration. + A new page will appear with a Destination Workspace drop-down menu. + #### 9. Select your destination workspace from this menu. + An option will appear to go to your workspace. You will be redirected back into the Terra workflow configuration page. 
## Importing a workflow configuration file (JSON format) + The sequencing data for all March 2020 Release datasets were generated using either 10x or Smart-seq2 technology. There are two Cumulus workflow configuration files available for Terra import depending on which sequencing technology (10x or Smart-seq2) was used for the dataset of interest. The technology will be specified next to the dataset on the March 2020 Release page. The 2020-Mar-Landscape-Adult-Liver-10x dataset uses the 10x configuration. For all datasets, you can download the 10x configuration (JSON) [here](_downloads/10x_json.JSON) or the Smart-seq2 configuration [here](_downloads/SS2_json.JSON). After downloading the 10x_json.JSON (to be used with the 2020-Mar-Landscape-Adult-Liver-10x dataset), do the following: #### 1. Select the “upload json” link on the Workflow configuration page and choose the appropriate configuration file. + ![image](../../_images/08_json_upload.png) - #### 2. Click Save. @@ -83,16 +111,19 @@ After downloading the 10x_json.JSON (to be used with the 2020-Mar-Landscape-Adul ## Specifying workflow input and output files #### 1. Examine the `input_file` attribute of the workflow configuration page. + Notice the field requires a “File” input. Specify the Google bucket location for the gene matrix (loom) file. To do this, select the folder icon in the `input_file` attribute box. Choose the 2020-Mar-Landscape-Adult-Liver-10x.loom. ![image](../../_images/09_input_file.png) - ##### 2. Examine the `output_name` attribute. + This attribute requires a “String” input. Specify a string in quotes that includes the workspace Google bucket location, an output folder, and a prefix you would like to give all analysis output files. ![image](../../_images/10_output_name.png) + - The Google bucket location can be found on the right side of the workspace Dashboard tab ![image](../../_images/11_Dashboard_google_bucket.png). 
- The output folder can have any name, but for this example, we choose “output”. - The filename prefix can also be any name to identify the dataset, but we used the dataset ID 2020-Mar-Landscape-Adult-Liver-10x. + The final string should look similar to: `“gs://GOOGLE_BUCKET/output/2020-Mar-Landscape-Adult-Liver-10x”.` #### 3. Select Save on the configuration page. @@ -100,14 +131,21 @@ The final string should look similar to: `“gs://GOOGLE_BUCKET/output/2020-Mar- ## Running the workflow #### 1. On the Workflows configuration page, select the radial button next to “Run workflow with inputs defined by file paths”. + #### 2. Select Save. + #### 3. Select Run Analysis and then Launch. + ![image](../../_images/12_launch_workflow.png) + #### 4. You can check the status of your run in the workspace Job History tab. ## What you should see after running Cumulus + #### 1. Return to the Terra workspace Data tab. + #### 2. In the Files section, select the “output” folder. + ![image](../../_images/16_data_tab_output.png) #### 3. You should now see the processed data files. @@ -117,13 +155,11 @@ The processed files include the normalized expression matrices (in loom and h5ad **Note:** Although the normalized expression files (loom and h5ad) contain cell clustering, they do not yet contain cell type annotations. ## Annotating Release files + The March2020 Release includes normalized expression matrices that have been annotated with the guidance of project contributors. This tutorial does not cover how to add these annotations to expression matrices, but there are multiple tools you can use to add annotations, including [LoomPy](http://loompy.org/) and [SCANPY](https://icb-scanpy.readthedocs-hosted.com/en/stable/). Additionally, you can add annotations using [Single Cell Portal](https://singlecell.broadinstitute.org/single_cell?scpbr=human-cell-atlas-march-2020-release). 
See the [Working with Release Files](working-with-release-files.md) guide for details about creating your own Single Cell Portal study. ## Next steps + You can export Cumulus workflow output files from the Terra workspace to a Single Cell Portal study. There, you can visualize cell clusters and make annotations. Please see the [Working with Release Files](working-with-release-files.md) documentation for available tools (including Single Cell Portal) you can use to interact with Release data. If you have any suggestions for this tutorial or questions, please see the [Community Feedback page](feedback.md). - - - - diff --git a/content/releases/documentation/2020-mar/working-with-release-files.md b/content/releases/documentation/2020-mar/working-with-release-files.md index bd7a531d9..d3e2521f3 100644 --- a/content/releases/documentation/2020-mar/working-with-release-files.md +++ b/content/releases/documentation/2020-mar/working-with-release-files.md @@ -1,12 +1,11 @@ --- -path: "/releases/documentation/2020-mar/working-with-release-files" date: "2018-05-03" -title: "Working with Release Files" description: "This guide focuses on importing release files into Python and R-supported software such as Pegasus, Seurat, and SCANPY, in addition to visualizing and annotating files in Single Cell Portal." draft: true +path: "/releases/documentation/2020-mar/working-with-release-files" +title: "Working with Release Files" --- - # Working with Release Files There are multiple tools to view and manipulate the March 2020 Release files. This guide focuses on importing release files into Python and R-supported software such as Pegasus, Seurat, and SCANPY, in addition to visualizing and annotating files in Single Cell Portal. When applicable, we use file names from the 2020-Mar-Landscape-Adult-Liver-10x dataset as an example, but the following techniques will work for files from any Release dataset. 
@@ -16,31 +15,31 @@ There are multiple tools to view and manipulate the March 2020 Release files. Th If you would like to know more about how these release files were generated, please see the [March 2020 Release Methods](methods.md) or take the [analysis tutorial](replicating-the-release-analysis.md) which demonstrates how to replicate Release analyses using the 2020-Mar-Landscape-Adult-Liver-10x Release dataset. ## What are the release files? + Descriptions of all the March 2020 Release files can be found in the table below. Each Release dataset has a unique Dataset ID which is listed on the March 2020 Release page in the “Dataset” column. This ID is used as a prefix for all Release dataset files. Some March 2020 Release files are used for interactive visualization in Single Cell Portal (any file with a .scp demarcation). These files are only available in [Single Cell Portal](https://singlecell.broadinstitute.org/single_cell?scpbr=human-cell-atlas-march-2020-release) (SCP) and not on the HCA [Data Coordination Platform](/) (DCP). -| File name | File location: DCP and/or SCP | Description | Format | +| File name | File location: DCP and/or SCP | Description | Format | | --- | --- | --- | --- | -| `Dataset_ID.loom` | DCP/SCP | Gene matrix file generated with DCP standardized pipelines (Optimus and Smart-seq2) and used as Cumulus input. 
| Loom | -| `Dataset_ID.de.xlsx` | DCP/SCP | Cumulus output file containing differential expression with correction | XLSX | -| `Dataset_ID.de.CSV.zip` | DCP/SCP | Zip of CSV files containing differential expression analyses | CSV | -| `Dataset_ID.filt.xlsx` | DCP/SCP | Cumulus output file containing filtering information | XLSX | -| `Dataset_ID_annoated_v1.loom` | DCP/SCP | Expression matrix generated by Cumulus and annotated using harmonized cell types; contains clustering information, cell annotations, and log-transformed gene expression | Loom | +| `Dataset_ID.loom` | DCP/SCP | Gene matrix file generated with DCP standardized pipelines (Optimus and Smart-seq2) and used as Cumulus input. | Loom | +| `Dataset_ID.de.xlsx` | DCP/SCP | Cumulus output file containing differential expression with correction | XLSX | +| `Dataset_ID.de.CSV.zip` | DCP/SCP | Zip of CSV files containing differential expression analyses | CSV | +| `Dataset_ID.filt.xlsx` | DCP/SCP | Cumulus output file containing filtering information | XLSX | +| `Dataset_ID_annoated_v1.loom` | DCP/SCP | Expression matrix generated by Cumulus and annotated using harmonized cell types; contains clustering information, cell annotations, and log-transformed gene expression | Loom | | `Dataset_ID.seurat_annotated_v1.h5ad` | DCP/SCP | Seurat compatible expression matrix generated by Cumulus and annotated using harmonized cell types; contains clustering information, cell annotations, and log-transformed gene expression | h5ad | -| `Dataset_ID.scp.X_diffmap_pca.coords.txt` | SCP | Diffusion map coordinates for Single Cell Portal | TXT | -| `Dataset_ID.scp.X_fitsne.coords.txt` | SCP | FIt-SNE coordinates for Single Cell Portal | TXT | -| `Dataset_ID.scp.X_fle.coords.txt` | SCP | fle cluster coordinates for Single Cell Portal | TXT | -| `Dataset_ID.scp.X_umap.coords.txt` | SCP | UMAP cluster coordinates for Single Cel Portal| TXT | -| `Dataset_ID.scp.barcodes.tsv` | SCP | 10x compatible barcodes file for Single Cell 
Portal | TSV | -| `Dataset_ID.scp.features.tsv` | SCP | 10x compatible features (genes) file for Single Cell Portal | TSV | +| `Dataset_ID.scp.X_diffmap_pca.coords.txt` | SCP | Diffusion map coordinates for Single Cell Portal | TXT | +| `Dataset_ID.scp.X_fitsne.coords.txt` | SCP | FIt-SNE coordinates for Single Cell Portal | TXT | +| `Dataset_ID.scp.X_fle.coords.txt` | SCP | fle cluster coordinates for Single Cell Portal | TXT | +| `Dataset_ID.scp.X_umap.coords.txt` | SCP | UMAP cluster coordinates for Single Cel Portal| TXT | +| `Dataset_ID.scp.barcodes.tsv` | SCP | 10x compatible barcodes file for Single Cell Portal | TSV | +| `Dataset_ID.scp.features.tsv` | SCP | 10x compatible features (genes) file for Single Cell Portal | TSV | | `Dataset_ID.scp.matrix.mtx` | SCP | 10x compatible mtx expression file for Single Cell Portal | mtx | -| `Dataset_ID.scp.metadata.txt` | SCP | Metadata matrix for Single Cell Portal | TXT | -`Dataset_ID_annotated_v1.scp.metadata.txt` | SCP | Annotated metadata matrix file for Single Cell Portal | TXT |\ +| `Dataset_ID.scp.metadata.txt` | SCP | Metadata matrix for Single Cell Portal | TXT | +| `Dataset_ID_annotated_v1.scp.metadata.txt` | SCP | Annotated metadata matrix file for Single Cell Portal | TXT | *DCP = Data Coordination Platform; SCP = Single Cell Portal* - ## Importing annotated March 2020 Release files into R and Python visualization resources Each Release dataset has gene expression matrices containing cell clustering information and annotated cell types. This annotated expression matrix is provided in two file formats: a loom and a h5ad. Both file types are named with an `“_annotated_v1”` suffix and can be found under Release Files on the March 2020 Release page. The normalized matrices were generated using the Cumulus workflow and annotated using published cell types (see more details in the [Methods](methods.md)). 
@@ -48,6 +47,7 @@ Each Release dataset has gene expression matrices containing cell clustering inf Use the suggested code below to view annotated loom and h5ad files in [Pegasus](https://pegasus.readthedocs.io/en/latest/), [Seurat](https://satijalab.org/seurat/), and [SCANPY](https://github.com/theislab/scanpy). You can view the [Cumulus documentation](https://cumulus.readthedocs.io/en/latest/cumulus.html#load-cumulus-results-into-pegasus) to learn more about importing the annotated expression matrices into these Python and R resources. For the examples below, we use files from the 2020-Mar-Landscape-Adult-Liver-10x dataset. ### Pegasus + [Pegasus](https://pegasus.readthedocs.io/en/latest/) is a Python package used by Cumulus for analyzing very large single-cell transcriptomes. The following example files were tested using Python v3.7.5 and Pegasus v0.16.11. In Python, load the annotated h5ad file using: @@ -63,8 +63,10 @@ Load the annotated loom file using: import pegasus as pg data = pg.read_input("2020-Mar-Landscape-Adult-Liver-10x_annotated_v1.loom", genome = "GRCh38") ``` + ### Seurat - [Seurat](https://satijalab.org/seurat/) is an R package used for single-cell data quality control, analysis, and exploration. The following example files were tested using R v3.6.3, Python v3.7.5, Seurat v3.1.2, and LoomR v0.2.1. + +[Seurat](https://satijalab.org/seurat/) is an R package used for single-cell data quality control, analysis, and exploration. The following example files were tested using R v3.6.3, Python v3.7.5, Seurat v3.1.2, and LoomR v0.2.1. To load the annotated h5ad file, you will need to have [anndata](https://anndata.readthedocs.io/en/latest/index.html) and [reticulate](https://cran.r-project.org/web/packages/reticulate/index.html) installed. 
Then use: @@ -74,19 +76,23 @@ ad <- import("anndata", convert = FALSE) test_ad <- ad$read_h5ad("2020-Mar-Landscape-Adult-Liver-10x_annotated_v1.seurat.h5ad") result <- convert_h5ad_to_seurat(test_ad) ``` + To load the annotated loom file, you must first install the LoomR package: ```R install.package("devtools") devtools::install_github("mojaveazure/loomR", ref = "develop") ``` + Next, load the loom file using: ```R source("https://raw.githubusercontent.com/klarman-cell-observatory/cumulus/master/workflows/cumulus/loom2seurat.R") result <- convert_loom_to_seurat("2020-Mar-Landscape-Adult-Liver-10x_annotated_v1.loom") ``` + ### SCANPY + [SCANPY](https://github.com/theislab/scanpy) is a Python-based analysis toolkit for single-cell expression data built with [anndata](https://anndata.readthedocs.io/en/stable/). The following example files were tested using Python v3.7.5, SCANPY v1.4.5.1, and LoomPy v3.0.6. Load the annotated h5ad output file using: @@ -95,6 +101,7 @@ Load the annotated h5ad output file using: import scanpy as sc adata = sc.read_h5ad("2020-Mar-Landscape-Adult-Liver-10x_annotated_v1.seurat.h5ad")’ ``` + Load the annotated loom file using: ```Python @@ -103,11 +110,13 @@ adata = sc.read_loom("2020-Mar-Landscape-Adult-Liver-10x_annotated_v1.loom") ``` ## Loading differential expression results in R or Python + Differential expression analyses were performed on the louvain cell clusters identified with the Cumulus workflow (see the [Methods page](methods.md)). For each Release dataset, there are two files containing differential expression results: -- a .de.xlsx -- a .CSV.zip -Each louvain cluster in the .de.xlsx file has one excel sheet for upregulated genes and one for downregulated genes. To make viewing these results easier in R and Python, we converted all excel sheets into a zip of individual CSV files. 
You can download the zip file from the Release Files column on the March 2020 Release page, unzip it into your directory of choice, and view the CSVs in R or Python using the instructions below. +- a .de.xlsx +- a .CSV.zip + +Each louvain cluster in the .de.xlsx file has one Excel sheet for upregulated genes and one for downregulated genes. To make viewing these results easier in R and Python, we converted all Excel sheets into a zip of individual CSV files. You can download the zip file from the Release Files column on the March 2020 Release page, unzip it into your directory of choice, and view the CSVs in R or Python using the instructions below. For both R and Python instructions, you will need to list the name of the directory containing the CSV files (specified below with `“DIRECTORY_NAME”`). @@ -120,7 +129,9 @@ files_to_load <- list.files(input_dir, full.names=TRUE) names(files_to_load) <- unlist(lapply(strsplit(basename(files_to_load),'.',fixed=T),'[',1)) de <- lapply(files_to_load, read.csv) ``` + ### Instructions for Python + These instructions use [pandas software](https://pandas.pydata.org/) to read the CSV files. You will need to have pandas installed. To begin these steps, use Terminal to navigate to the directory containing your output folder (`DIRECTORY_NAME`). The output folder should contain all CSV files. ```Python @@ -135,6 +146,7 @@ data[f] ``` ## Creating a Single Cell Portal study page and importing Cumulus results from Terra + You can visualize or annotate DCP Release files by either using the existing March 2020 Release studies in [Single Cell Portal](https://singlecell.broadinstitute.org/single_cell?scpbr=human-cell-atlas-march-2020-release) or by creating a new Single Cell Portal study. Detailed instructions for getting started with Single Cell Portal can be found on the [Single Cell Portal wiki]( https://github.com/broadinstitute/single_cell_portal/wiki/Synchronizing-Study-Data). You will need a Google compatible email account to login. 
You may also use your institutional account if it is backed by Google. **The following instructions are a continuation from the [replicating release analyses tutorial](replicating-the-release-analysis.md) and focus on importing Cumulus output files from an existing Terra workspace. These steps use the 2020-Mar-Landscape-Adult-Liver-10x example files derived using the tutorial techniques.** @@ -144,9 +156,11 @@ You can visualize or annotate DCP Release files by either using the existing Mar #### 1. Navigate to [Single Cell Portal](https://singlecell.broadinstitute.org/single_cell) and login. #### 2. Go to the profile drop-down menu and select Add Study. + ![image](../../_images/13_SCP_add_study.png) #### 3. Select a unique name for the study page and billing information. + If you want to import files from an existing Terra workspace, the billing information must match the billing information used on Terra. ![image](../../_images/14_SCP_new_study.png) @@ -154,9 +168,11 @@ If you want to import files from an existing Terra workspace, the billing inform #### 4. If you wish to import Terra workspace files, Select Yes for using an existing Terra workspace. #### 5. Enter the Terra workspace name in the “Existing Terra Workspace” field. + For the 2020-Mar-Landscape-Adult-Liver-10x example dataset used in the [analysis tutorial](replicating-the-release-analysis.md), you would type the name “2020-Mar-Landscape-Adult-Liver-10x”. #### 6. Scroll to the bottom of the page and select Create Study. + ![image](../../_images/15_Create_study.png) #### 7. If using an existing Terra workspace, you will be prompted to select the files you would like to import. @@ -164,55 +180,67 @@ For the 2020-Mar-Landscape-Adult-Liver-10x example dataset used in the [analysis The following table describes the recommended Cumulus output files (using the 2020-Mar-Landscape-Adult-Liver-10x dataset files as examples) to import into a Single Cell Portal study. 
Note that some files are necessary to interactively visualize cells in Single Cell Portal (all files marked with a .scp). When importing from Terra, the order of import can be important. When applicable, we have noted this order in the table “Notes” column, along with additional import specifications. | File Name | Description | File type to specify | Notes | -|---|---|---|---| +| --- | --- | --- | --- | | `2020-Mar-Landscape-Adult-Liver-10x.loom` | Gene matrix file generated with DCP standardized pipelines (Optimus and Smart-seq2) and used as Cumulus input. | Other | --- | -| `output/2020-Mar-Landscape-Adult-Liver-10x.de.xlsx` | Cumulus output file containing differential expression with correction | Other | --- | -| `output/2020-Mar-Landscape-Adult-Liver-10x.filt.xlsx` | File describing cumulus filtering information | Other | --- | -| `output/2020-Mar-Landscape-Adult-Liver-10x.loom` | Expression matrix generated by Cumulus; contains clustering information and log-transformed gene expression (Pegasus, SCANPY, and Seurat compatible) | Other | This is file does not contain cell type annotations | -| `output/2020-Mar-Landscape-Adult-Liver-10x.seurat.h5ad` | Seurat-compatible, normalized expression matrix generated by Cumulus; contains clustering information and log-transformed gene expression | Other | This file does not contain cell type annotations | +| `output/2020-Mar-Landscape-Adult-Liver-10x.de.xlsx` | Cumulus output file containing differential expression with correction | Other | --- | +| `output/2020-Mar-Landscape-Adult-Liver-10x.filt.xlsx` | File describing cumulus filtering information | Other | --- | +| `output/2020-Mar-Landscape-Adult-Liver-10x.loom` | Expression matrix generated by Cumulus; contains clustering information and log-transformed gene expression (Pegasus, SCANPY, and Seurat compatible) | Other | This is file does not contain cell type annotations | +| `output/2020-Mar-Landscape-Adult-Liver-10x.seurat.h5ad` | Seurat-compatible, normalized 
expression matrix generated by Cumulus; contains clustering information and log-transformed gene expression | Other | This file does not contain cell type annotations | | `output/2020-Mar-Landscape-Adult-Liver-10x.scp.X_diffmap_pca.coords.txt` | Diffusion map coordinates | Cluster | This file is necessary Single Cell Portal visualization; Specify 3 axis labels as “Diffmap n” where n=1,2,3 | | `output/2020-Mar-Landscape-Adult-Liver-10x.scp.X_fitsne.coords.txt` | FIt-SNE coordinates | Cluster | This file is necessary for Single Cell Portal visualization; Specify 2 axis labels as “FIt-SNE n”, where n=1,2 | | `output/2020-Mar-Landscape-Adult-Liver-10x.scp.X_fle.coords.txt` | fle cluster coordinates | Cluster | This file is necessary for Single Cell Portal visualization; Specify 2 axis labels as ”Fle n”, where n=1,2 | -| `output/2020-Mar-Landscape-Adult-Liver-10x.scp.X_umap.coords.txt` | UMAP cluster coordinates | Cluster | This file is necessary for Single Cell Portal visualization; Specify 2 axis labels as “UMAP n” where n=1,2 | +| `output/2020-Mar-Landscape-Adult-Liver-10x.scp.X_umap.coords.txt` | UMAP cluster coordinates | Cluster | This file is necessary for Single Cell Portal visualization; Specify 2 axis labels as “UMAP n” where n=1,2 | | `output/2020-Mar-Landscape-Adult-Liver-10x.scp.matrix.mtx` | 10x compatible mtx expression file | MM Coordinate Matrix | This file is necessary for Single Cell Portal. 
Must import before the two 10x compatible import files (Taxon is Human) | | `output/2020-Mar-Landscape-Adult-Liver-10x.scp.barcodes.tsv` | 10x compatible barcodes file | 10x compatible barcodes file | This file is necessary for Single Cell Portal; Must import after .matrix.mtx file | | `output/2020-Mar-Landscape-Adult-Liver-10x.scp.features.tsv` | 10x compatible features (genes) file | 10x Genes Files | This file is necessary for Single Cell Portal; Must import after .matrix.mtx file | -| `output/2020-Mar-Landscape-Adult-Liver-10x.scp.metadata.txt` | Metadata matrix | Metadata | --- | +| `output/2020-Mar-Landscape-Adult-Liver-10x.scp.metadata.txt` | Metadata matrix | Metadata | --- | Any Cumulus files generated using the techniques outlined in the Replicating Release Analyses tutorial do not yet contain cell type annotations. After importing these files from Terra to a Single Cell Portal study, you can create your own annotations using the guide suggested in the next section. ## Annotating cell types in Single Cell Portal + The March 2020 Release used publications and project contributor feedback for cell type annotations. However, we encourage researchers to explore and create new annotations. Get started by viewing this Single Cell Portal [cell annotation guide](https://github.com/broadinstitute/single_cell_portal/wiki/Annotations). ## Viewing and interacting with annotated cells in Single Cell Portal + Each March 2020 Release dataset has a Single Cell Portal study page where you can view annotated cells. You can find a link to these pages on the DCP March 2020 Release or on the [Single Cell Portal HCA Release page ](https://singlecell.broadinstitute.org/single_cell?scpbr=human-cell-atlas-march-2020-release). To visualize the annotated cells for a Release dataset, go to the Explore tab of the Single Cell Portal study and follow the instructions below: #### 1. Select the View Options gear icon in the upper right corner. 
+ ![image](../../_images/17_SCP_explore_view_options.png) #### 2. Open the Select Annotations drop-down menu. + ![image](../../_images/18_SCP_Select_annotation_v2.png) #### 3. Choose either `annotated_cell_identity.text` or `annotated_cell_identity.ontology_label`. + ![image](../../_images/19_cell_type_annotations.png). The `annotated_cell_identity.text` option will list the cell types defined by the project contributors. The `annotated_cell_identity.ontology_label` will list cell types that have been harmonized using ontology services. ## Searching for genes across March 2020 Release datasets using Global Gene Search + The March 2020 Release offers a Global Gene Search option using Single Cell Portal. + #### 1. Go to the [Single Cell Portal HCA Release](https://singlecell.broadinstitute.org/single_cell?scpbr=human-cell-atlas-march-2020-release) page. + When you enter the page, you will be defaulted to the Search Studies tab of the landing page. + #### 2. Select the Search Genes tab. + ![image](../../_images/20_SCP_Release_page.png) + #### 3. Type in the gene of interest. + ![image](../../_images/21_tmem119_search.png) + At the top of the search, the number of datasets that have a positive match for the gene will appear. Below the search, a graphical display of cell clusters containing the gene of interest will display for all datasets containing the gene. ## Next steps + You can replicate the release analyses using your own files by [taking this tutorial](replicating-the-release-analysis.md). If you have any suggestions for the Working with Release Files guide or questions, please see the [Community Feedback page](feedback.md). - -