Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore(gatsby): disable loki stuff without dropping it #23646

Merged
merged 3 commits into the base branch from the source branch [branch names lost in page extraction]
May 1, 2020
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 9 additions & 9 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ aliases:
- <<: *attach_to_bootstrap
- run: yarn list react
- run: node --max-old-space-size=2048 ./node_modules/.bin/jest -w 1 --ci
- run: GATSBY_DB_NODES=loki node --max-old-space-size=2048 ./node_modules/.bin/jest -w 1 --ci
# - run: GATSBY_DB_NODES=loki node --max-old-space-size=2048 ./node_modules/.bin/jest -w 1 --ci

e2e-test-workflow: &e2e-test-workflow
filters:
Expand Down Expand Up @@ -249,12 +249,12 @@ jobs:
- e2e-test:
test_path: integration-tests/long-term-caching

integration_tests_cache_resilience:
executor: node
steps:
- e2e-test:
test_path: integration-tests/cache-resilience
run_loki: true
# integration_tests_cache_resilience:
# executor: node
# steps:
# - e2e-test:
# test_path: integration-tests/cache-resilience
# run_loki: true

integration_tests_gatsby_pipeline:
executor: node
Expand Down Expand Up @@ -549,8 +549,8 @@ workflows:
- bootstrap
- integration_tests_long_term_caching:
<<: *e2e-test-workflow
- integration_tests_cache_resilience:
<<: *e2e-test-workflow
# - integration_tests_cache_resilience:
# <<: *e2e-test-workflow
- integration_tests_gatsby_pipeline:
<<: *e2e-test-workflow
- integration_tests_structured_logging:
Expand Down
1 change: 0 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,6 @@
"jest": "jest",
"jest:inspect": "node --inspect node_modules/.bin/jest --runInBand",
"jest:inspect-brk": "node --inspect-brk node_modules/.bin/jest --runInBand",
"jest:loki": "cross-env GATSBY_DB_NODES=loki jest",
"lerna": "lerna",
"lerna-prepare": "lerna run prepare",
"lint": "npm-run-all --continue-on-error -p lint:code lint:docs lint:other",
Expand Down
10 changes: 5 additions & 5 deletions packages/gatsby-telemetry/src/telemetry.js
Original file line number Diff line number Diff line change
Expand Up @@ -196,11 +196,11 @@ module.exports = class AnalyticsTracker {
}

getDbEngine() {
if (process.env.GATSBY_DB_NODES === `loki`) {
return `loki`
} else {
return `redux`
}
// if (process.env.GATSBY_DB_NODES === `loki`) {
// return `loki`
// } else {
return `redux`
// }
}

getMachineId() {
Expand Down
42 changes: 21 additions & 21 deletions packages/gatsby/src/bootstrap/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -282,27 +282,27 @@ module.exports = async (args: BootstrapArgs) => {

activity.end()

if (process.env.GATSBY_DB_NODES === `loki`) {
const loki = require(`../db/loki`)
// Start the nodes database (in memory loki js with interval disk
// saves). If data was saved from a previous build, it will be
// loaded here
activity = report.activityTimer(`start nodes db`, {
parentSpan: bootstrapSpan,
})
activity.start()
const dbSaveFile = `${cacheDirectory}/loki/loki.db`
try {
await loki.start({
saveFile: dbSaveFile,
})
} catch (e) {
report.error(
`Error starting DB. Perhaps try deleting ${path.dirname(dbSaveFile)}`
)
}
activity.end()
}
// if (process.env.GATSBY_DB_NODES === `loki`) {
// const loki = require(`../db/loki`)
// // Start the nodes database (in memory loki js with interval disk
// // saves). If data was saved from a previous build, it will be
// // loaded here
// activity = report.activityTimer(`start nodes db`, {
// parentSpan: bootstrapSpan,
// })
// activity.start()
// const dbSaveFile = `${cacheDirectory}/loki/loki.db`
// try {
// await loki.start({
// saveFile: dbSaveFile,
// })
// } catch (e) {
// report.error(
// `Error starting DB. Perhaps try deleting ${path.dirname(dbSaveFile)}`
// )
// }
// activity.end()
// }

activity = report.activityTimer(`copy gatsby files`, {
parentSpan: bootstrapSpan,
Expand Down
6 changes: 3 additions & 3 deletions packages/gatsby/src/db/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,9 @@ const { emitter } = redux
// Even if we are using loki, we still include redux in the list of
// dbs since it still has pages, config, etc.
const dbs = [redux]
if (process.env.GATSBY_DB_NODES === `loki`) {
dbs.push(require(`./loki`))
}
// if (process.env.GATSBY_DB_NODES === `loki`) {
// dbs.push(require(`./loki`))
// }

// calls `saveState()` on all DBs
let saveInProgress = false
Expand Down
112 changes: 56 additions & 56 deletions packages/gatsby/src/db/loki/__tests__/nodes-query-test.js
Original file line number Diff line number Diff line change
@@ -1,56 +1,56 @@
if (process.env.GATSBY_DB_NODES === `loki`) {
const _ = require(`lodash`)
const { GraphQLObjectType } = require(`graphql`)
const { store } = require(`../../../redux`)
const runQuery = require(`../nodes-query`)
const { getNodeTypeCollection } = require(`../nodes`)
const lokiDb = require(`../index`)

function makeNodes() {
return [
{
id: `1`,
internal: { type: `Test` },
children: [],
foo: `bar`,
},
]
}

async function runQueries(nodes, n) {
for (const node of nodes) {
store.dispatch({ type: `CREATE_NODE`, payload: node })
}
const gqlType = new GraphQLObjectType({
name: `Test`,
fields: {
foo: { type: `String` },
},
})
const queryArgs = { filter: { foo: { eq: `bar` } } }
const args = { gqlType, queryArgs, nodeTypeNames: [gqlType.name] }
return await Promise.all(_.map(new Array(n), () => runQuery(args)))
}

describe(`query indexing`, () => {
beforeEach(async () => {
await lokiDb.start()
store.dispatch({ type: `DELETE_CACHE` })
})
it(`does not create index when query run 1 time`, async () => {
await runQueries(makeNodes(), 1)
const coll = getNodeTypeCollection(`Test`)
expect(coll.binaryIndices.hasOwnProperty(`foo`)).toEqual(false)
})

it(`creates index when query run 5 times`, async () => {
await runQueries(makeNodes(), 5)
const coll = getNodeTypeCollection(`Test`)
expect(coll.binaryIndices.hasOwnProperty(`foo`)).toEqual(true)
})
})
} else {
it(`skipping loki nodes-query-test`, () => {
expect(true).toEqual(true)
})
}
// if (process.env.GATSBY_DB_NODES === `loki`) {
// const _ = require(`lodash`)
// const { GraphQLObjectType } = require(`graphql`)
// const { store } = require(`../../../redux`)
// const runQuery = require(`../nodes-query`)
// const { getNodeTypeCollection } = require(`../nodes`)
// const lokiDb = require(`../index`)
//
// function makeNodes() {
// return [
// {
// id: `1`,
// internal: { type: `Test` },
// children: [],
// foo: `bar`,
// },
// ]
// }
//
// async function runQueries(nodes, n) {
// for (const node of nodes) {
// store.dispatch({ type: `CREATE_NODE`, payload: node })
// }
// const gqlType = new GraphQLObjectType({
// name: `Test`,
// fields: {
// foo: { type: `String` },
// },
// })
// const queryArgs = { filter: { foo: { eq: `bar` } } }
// const args = { gqlType, queryArgs, nodeTypeNames: [gqlType.name] }
// return await Promise.all(_.map(new Array(n), () => runQuery(args)))
// }
//
// describe(`query indexing`, () => {
// beforeEach(async () => {
// await lokiDb.start()
// store.dispatch({ type: `DELETE_CACHE` })
// })
// it(`does not create index when query run 1 time`, async () => {
// await runQueries(makeNodes(), 1)
// const coll = getNodeTypeCollection(`Test`)
// expect(coll.binaryIndices.hasOwnProperty(`foo`)).toEqual(false)
// })
//
// it(`creates index when query run 5 times`, async () => {
// await runQueries(makeNodes(), 5)
// const coll = getNodeTypeCollection(`Test`)
// expect(coll.binaryIndices.hasOwnProperty(`foo`)).toEqual(true)
// })
// })
// } else {
it(`skipping loki nodes-query-test`, () => {
expect(true).toEqual(true)
})
// }
3 changes: 2 additions & 1 deletion packages/gatsby/src/db/nodes.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,8 @@ interface NodeStore {
}) => any | undefined;
}

const backend = process.env.GATSBY_DB_NODES || `redux`
// const backend = process.env.GATSBY_DB_NODES || `redux`
const backend = `redux`
let nodesDb: NodeStore
let runQuery
switch (backend) {
Expand Down
90 changes: 45 additions & 45 deletions packages/gatsby/src/redux/__tests__/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -113,59 +113,59 @@ describe(`redux db`, () => {

// yuck - loki and redux will have different shape of redux state (nodes and nodesByType)
// Note: branched skips will keep snapshots with and without loki env var
if (process.env.GATSBY_DB_NODES === `loki`) {
it.skip(`should write redux cache to disk`, async () => {})
it(`should write loki cache to disk`, async () => {
expect(initialComponentsState).toEqual(new Map())

store.getState().nodes = getFakeNodes()

await saveState()
// if (process.env.GATSBY_DB_NODES === `loki`) {
// it.skip(`should write redux cache to disk`, async () => {})
// it(`should write loki cache to disk`, async () => {
// expect(initialComponentsState).toEqual(new Map())
//
// store.getState().nodes = getFakeNodes()
//
// await saveState()
//
// expect(writeToCache).toBeCalled()
//
// // reset state in memory
// store.dispatch({
// type: `DELETE_CACHE`,
// })
// // make sure store in memory is empty
// expect(store.getState().components).toEqual(initialComponentsState)
//
// // read data that was previously cached
// const data = readState()
//
// // make sure data was read and is not the same as our clean redux state
// expect(data.components).not.toEqual(initialComponentsState)
//
// expect(_.omit(data, [`nodes`, `nodesByType`])).toMatchSnapshot()
// })
// } else {
it.skip(`should write loki cache to disk`, async () => {})
it(`should write redux cache to disk`, async () => {
expect(initialComponentsState).toEqual(new Map())

expect(writeToCache).toBeCalled()
store.getState().nodes = getFakeNodes()

// reset state in memory
store.dispatch({
type: `DELETE_CACHE`,
})
// make sure store in memory is empty
expect(store.getState().components).toEqual(initialComponentsState)

// read data that was previously cached
const data = readState()
await saveState()

// make sure data was read and is not the same as our clean redux state
expect(data.components).not.toEqual(initialComponentsState)
expect(writeToCache).toBeCalled()

expect(_.omit(data, [`nodes`, `nodesByType`])).toMatchSnapshot()
// reset state in memory
store.dispatch({
type: `DELETE_CACHE`,
})
} else {
it.skip(`should write loki cache to disk`, async () => {})
it(`should write redux cache to disk`, async () => {
expect(initialComponentsState).toEqual(new Map())

store.getState().nodes = getFakeNodes()
// make sure store in memory is empty
expect(store.getState().components).toEqual(initialComponentsState)

await saveState()
// read data that was previously cached
const data = readState()

expect(writeToCache).toBeCalled()
// make sure data was read and is not the same as our clean redux state
expect(data.components).not.toEqual(initialComponentsState)

// reset state in memory
store.dispatch({
type: `DELETE_CACHE`,
})
// make sure store in memory is empty
expect(store.getState().components).toEqual(initialComponentsState)

// read data that was previously cached
const data = readState()

// make sure data was read and is not the same as our clean redux state
expect(data.components).not.toEqual(initialComponentsState)

expect(data).toMatchSnapshot()
})
}
expect(data).toMatchSnapshot()
})
// }

it(`should drop legacy file if exists`, async () => {
expect(initialComponentsState).toEqual(new Map())
Expand Down
3 changes: 2 additions & 1 deletion packages/gatsby/src/redux/persist.ts
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,8 @@ export function readFromCache(): ICachedReduxState {

const nodes: [string, IGatsbyNode][] = [].concat(...chunks)

if (!chunks.length && process.env.GATSBY_DB_NODES !== `loki`) {
// if (!chunks.length && process.env.GATSBY_DB_NODES !== `loki`) {
if (!chunks.length) {
report.info(
`Cache exists but contains no nodes. There should be at least some nodes available so it seems the cache was corrupted. Disregarding the cache and proceeding as if there was none.`
)
Expand Down
3 changes: 2 additions & 1 deletion packages/gatsby/src/redux/reducers/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@ import { webpackReducer } from "./webpack"
import { webpackCompilationHashReducer } from "./webpack-compilation-hash"
import { reducer as logReducer } from "gatsby-cli/lib/reporter/redux/reducer"

const backend = process.env.GATSBY_DB_NODES || `redux`
// const backend = process.env.GATSBY_DB_NODES || `redux`
const backend = `redux`

function getNodesReducer() {
let nodesReducer
Expand Down