diff --git a/README.md b/README.md index ca8d113c..7e7795ed 100644 --- a/README.md +++ b/README.md @@ -172,9 +172,10 @@ OPTIONS -o, --output=output [default: ./output] source package specific output - -r, --repo=repo [default: .] git repository location + -r, --repo=repo [default: ./] git repository + location - -s, --source=source [default: .] source folder focus + -s, --source=source [default: ./] source folder focus location related to --repo -t, --to=to [default: HEAD] commit sha to where diff --git a/__mocks__/child_process.js b/__mocks__/child_process.js index c8ae6926..ce0a24fc 100644 --- a/__mocks__/child_process.js +++ b/__mocks__/child_process.js @@ -17,7 +17,16 @@ childProcess.spawn.mockImplementation(() => { this.push(output.pop().join(EOL)) } this.push(null) - mock.emit(error ? 'error' : 'close') + mock.emit('close') + }, + }) + mock.stderr = new Readable({ + read() { + if (error) { + this.push('error') + } + this.push(null) + mock.emit('close') }, }) return mock diff --git a/__tests__/integration/delta.test.js b/__tests__/integration/delta.test.js index 75729a43..06057b68 100644 --- a/__tests__/integration/delta.test.js +++ b/__tests__/integration/delta.test.js @@ -3,10 +3,19 @@ const fs = require('fs') const child_process = require('child_process') const app = require('../../src/main') const { COMMIT_REF_TYPE, GIT_FOLDER } = require('../../src/utils/gitConstants') -const { outputFile } = require('fs-extra') +const { scanExtension } = require('../../src/utils/fsHelper') jest.mock('fs') jest.mock('fs-extra') jest.mock('child_process') +jest.mock('../../src/utils/fsHelper') +scanExtension.mockImplementation(() => ({ + [Symbol.asyncIterator]: () => ({ + next: () => ({ + value: '', + done: () => true, + }), + }), +})) const lines = [ 'D force-app/main/default/objects/Account/fields/deleted.field-meta.xml', @@ -51,7 +60,7 @@ describe(`test if the appli`, () => { expect( await app({ output: 'output', - repo: '.', + repo: './', source: '', to: 'test', from: 
'main', @@ -61,9 +70,6 @@ describe(`test if the appli`, () => { }) test('catch internal warnings', async () => { - outputFile.mockImplementationOnce(() => - Promise.reject(new Error('Not writable')) - ) child_process.__setOutput([ lines, [], diff --git a/__tests__/integration/services.test.js b/__tests__/integration/services.test.js index 640bbdcb..799f4ba5 100644 --- a/__tests__/integration/services.test.js +++ b/__tests__/integration/services.test.js @@ -216,6 +216,16 @@ const testContext = [ ], ], ], + [ + InFolderHandler, + [ + [ + 'dashboards', + 'force-app/main/default/dashboards/folder/file.dashboard-meta.xml', + new Set(['folder/file']), + ], + ], + ], [ InBundleHandler, [ diff --git a/__tests__/unit/lib/post-processor/flowTranslationProcessor.test.js b/__tests__/unit/lib/post-processor/flowTranslationProcessor.test.js index 5cf44e51..47344ff9 100644 --- a/__tests__/unit/lib/post-processor/flowTranslationProcessor.test.js +++ b/__tests__/unit/lib/post-processor/flowTranslationProcessor.test.js @@ -47,7 +47,7 @@ describe('FlowTranslationProcessor', () => { package: new Map(), destructiveChanges: new Map(), }, - config: { source: '.', output: 'output', generateDelta: true }, + config: { source: './', output: 'output', generateDelta: true }, } sut = new FlowTranslationProcessor(work) flap = trueAfter(1) diff --git a/__tests__/unit/lib/utils/childProcessUtils.test.js b/__tests__/unit/lib/utils/childProcessUtils.test.js index fc0f92d1..5b17c2a6 100644 --- a/__tests__/unit/lib/utils/childProcessUtils.test.js +++ b/__tests__/unit/lib/utils/childProcessUtils.test.js @@ -24,6 +24,12 @@ describe('childProcessUtils', () => { stream.emit('close') }, }) + stream.stderr = new Readable({ + read() { + this.push(null) + stream.emit('close') + }, + }) // Act const result = await getStreamContent(stream) @@ -42,7 +48,15 @@ describe('childProcessUtils', () => { stream.stdout = new Readable({ read() { this.push(null) - stream.emit('error') + stream.emit('close') + }, + }) + + 
stream.stderr = new Readable({ + read() { + this.push('error') + this.push(null) + stream.emit('close') }, }) @@ -52,7 +66,7 @@ describe('childProcessUtils', () => { // Assert } catch (error) { - expect(error).toBeDefined() + expect(error.message).toEqual('error') } }) }) diff --git a/__tests__/unit/lib/utils/fsHelper.test.js b/__tests__/unit/lib/utils/fsHelper.test.js index 15bd5cc8..2ec5be51 100644 --- a/__tests__/unit/lib/utils/fsHelper.test.js +++ b/__tests__/unit/lib/utils/fsHelper.test.js @@ -40,7 +40,7 @@ let work beforeEach(() => { work = { config: { - output: '', + output: '.', source: '', repo: '', generateDelta: false, @@ -105,11 +105,11 @@ describe('readPathFromGit', () => { describe('copyFile', () => { describe('when file is already copied', () => { it('should not copy file', async () => { - await copyFiles(work.config, 'source/file', 'output/file') + await copyFiles(work.config, 'source/file') jest.resetAllMocks() // Act - await copyFiles(work.config, 'source/file', 'output/file') + await copyFiles(work.config, 'source/file') // Assert expect(spawn).not.toBeCalled() @@ -119,14 +119,23 @@ describe('copyFile', () => { }) describe('when source location is empty', () => { - it('should not copy file', async () => { + it('should copy file', async () => { + // Arrange + treatPathSep.mockImplementationOnce(() => 'output/source/copyFile') + getStreamContent.mockImplementation(() => + Promise.resolve(Buffer.from('')) + ) + // Act - await copyFiles(work.config, 'source/doNotCopy', 'output/doNotCopy') + await copyFiles(work.config, 'source/doNotCopy') // Assert expect(spawn).toBeCalled() expect(getStreamContent).toBeCalled() - expect(outputFile).not.toBeCalled() + expect(outputFile).toBeCalledWith( + 'output/source/copyFile', + Buffer.from('') + ) }) }) @@ -143,7 +152,7 @@ describe('copyFile', () => { ) // Act - await copyFiles(work.config, 'source/copyDir', 'output/copyDir') + await copyFiles(work.config, 'source/copyDir') // Assert 
expect(spawn).toBeCalledTimes(2) @@ -157,14 +166,15 @@ }) }) describe('when content is not a git location', () => { - it('should ignore the path', async () => { + it('should ignore this path', async () => { // Arrange + const sourcePath = 'source/warning' getStreamContent.mockImplementation(() => - Promise.resolve(Buffer.from('')) + Promise.reject(`fatal: path '${sourcePath}' does not exist in 'HEAD'`) ) // Act - await copyFiles(work.config, 'source/warning', 'output/warning') + await copyFiles(work.config, sourcePath) // Assert expect(spawn).toBeCalled() @@ -178,18 +188,18 @@ getStreamContent.mockImplementation(() => Promise.resolve(Buffer.from('content')) ) - treatPathSep.mockImplementationOnce(() => 'output/copyFile') + treatPathSep.mockImplementationOnce(() => 'output/source/copyFile') }) it('should copy the file', async () => { // Act - await copyFiles(work.config, 'source/copyfile', 'output/copyfile') + await copyFiles(work.config, 'source/copyfile') // Assert expect(spawn).toBeCalled() expect(getStreamContent).toBeCalled() expect(outputFile).toBeCalledTimes(1) expect(outputFile).toHaveBeenCalledWith( - 'output/copyFile', + 'output/source/copyFile', Buffer.from('content') ) expect(treatPathSep).toBeCalledTimes(1) @@ -279,13 +289,12 @@ describe('scan', () => { Promise.reject(new Error('mock')) ) }) - it('should throw', async () => { + it('should not throw', async () => { // Arrange - expect.assertions(1) - const g = scan('dir', work) + const res = await scan('dir', work) // Assert - expect(g.next()).rejects.toEqual(new Error('mock')) + expect(res).toMatchObject({}) }) }) describe('when getStreamContent returns nothing', () => { @@ -498,7 +507,7 @@ describe('isSubDir', () => { // Assert expect(result).toBe(false) }) - it('throws when spawn throws', async () => { + it('does not throw when getStreamContent throws', async () => { expect.assertions(1) // Arrange getStreamContent.mockImplementationOnce(() => 
@@ -506,12 +515,10 @@ describe('isSubDir', () => { ) // Act - try { - await pathExists('path', work.config) - // Assert - } catch (error) { - expect(error.message).toBe('spawn issue') - } + const exist = await pathExists('path', work.config) + + // Assert + expect(exist).toBe(false) }) }) diff --git a/__tests__/unit/lib/utils/repoSetup.test.js b/__tests__/unit/lib/utils/repoSetup.test.js index 794b873d..72396c64 100644 --- a/__tests__/unit/lib/utils/repoSetup.test.js +++ b/__tests__/unit/lib/utils/repoSetup.test.js @@ -6,7 +6,7 @@ const child_process = require('child_process') describe(`test if repoSetup`, () => { describe('repoConfiguration', () => { test('can set core.quotepath to off', async () => { - const config = { repo: '.', from: 'HEAD~1' } + const config = { repo: './', from: 'HEAD~1' } child_process.__setOutput([['']]) const repoSetup = new RepoSetup(config) await repoSetup.repoConfiguration() @@ -17,7 +17,7 @@ describe(`test if repoSetup`, () => { describe('getCommitRefType', () => { test('returns "commit" when commitRef is a commit', async () => { const shaRef = 'HEAD' - const config = { repo: '.', to: shaRef } + const config = { repo: './', to: shaRef } child_process.__setOutput([['commit']]) const repoSetup = new RepoSetup(config) const commitRef = await repoSetup.getCommitRefType(shaRef) @@ -27,7 +27,7 @@ describe(`test if repoSetup`, () => { test('returns "tag" when commitRef is a tag', async () => { const shaRef = 'tag' - const config = { repo: '.', to: shaRef } + const config = { repo: './', to: shaRef } child_process.__setOutput([['tag']]) const repoSetup = new RepoSetup(config) const commitRef = await repoSetup.getCommitRefType(shaRef) @@ -37,7 +37,7 @@ describe(`test if repoSetup`, () => { test('return empty string when commitRef is a not a git sha', async () => { const shaRef = 'wrong sha' - const config = { repo: '.', to: shaRef } + const config = { repo: './', to: shaRef } child_process.__setOutput([['']]) const repoSetup = new 
RepoSetup(config) const commitRef = await repoSetup.getCommitRefType(shaRef) diff --git a/package.json b/package.json index 6f18298c..9331ab17 100644 --- a/package.json +++ b/package.json @@ -100,7 +100,7 @@ "shx": "^0.3.4", "sinon": "^15.0.3", "ts-node": "^10.9.1", - "typescript": "^5.0.2", + "typescript": "^5.0.3", "yarn-upgrade-all": "^0.7.2" }, "oclif": { diff --git a/src/utils/childProcessUtils.js b/src/utils/childProcessUtils.js index 7707ad1d..8bcc00cd 100644 --- a/src/utils/childProcessUtils.js +++ b/src/utils/childProcessUtils.js @@ -38,6 +38,13 @@ const getStreamContent = async stream => { for await (const chunk of stream.stdout) { content.push(chunk) } + const error = [] + for await (const chunk of stream.stderr) { + error.push(chunk) + } + if (error.length > 0) { + throw new Error(error.join('')) + } return Buffer.concat(content) } diff --git a/src/utils/cliHelper.js b/src/utils/cliHelper.js index b501b39c..2d62e8b4 100644 --- a/src/utils/cliHelper.js +++ b/src/utils/cliHelper.js @@ -193,8 +193,7 @@ class CLIHelper { static TO_DEFAULT_VALUE = 'HEAD' static OUTPUT_DEFAULT_VALUE = './output' - static SOURCE_DEFAULT_VALUE = '.' - static REPO_DEFAULT_VALUE = '.' - static IGNORE_DEFAULT_VALUE = '.' 
+ static SOURCE_DEFAULT_VALUE = './' + static REPO_DEFAULT_VALUE = './' } module.exports = CLIHelper diff --git a/src/utils/fsHelper.js b/src/utils/fsHelper.js index 4c7e610d..6def0129 100644 --- a/src/utils/fsHelper.js +++ b/src/utils/fsHelper.js @@ -20,25 +20,26 @@ const copyFiles = async (config, src) => { if (copiedFiles.has(src)) return copiedFiles.add(src) - const bufferData = await readPathFromGitAsBuffer(src, config) - const utf8Data = bufferData?.toString(UTF8_ENCODING) - if (!utf8Data) { - return - } - - if (utf8Data.startsWith(FOLDER)) { - const [header, , ...files] = utf8Data.split(EOLRegex) - const folder = header.split(':')[1] - for (const file of files) { - const fileSrc = join(folder, file) - - await copyFiles(config, fileSrc) + try { + const bufferData = await readPathFromGitAsBuffer(src, config) + const utf8Data = bufferData?.toString(UTF8_ENCODING) + + if (utf8Data.startsWith(FOLDER)) { + const [header, , ...files] = utf8Data.split(EOLRegex) + const folder = header.split(':')[1] + for (const file of files) { + const fileSrc = join(folder, file) + + await copyFiles(config, fileSrc) + } + } else { + const dst = join(config.output, treatPathSep(src)) + // Use Buffer to output the file content + // Let fs implementation detect the encoding ("utf8" or "binary") + await outputFile(dst, bufferData) } - } else { - const dst = join(config.output, treatPathSep(src)) - // Use Buffer to output the file content - // Let fs implementation detect the encoding ("utf8" or "binary") - await outputFile(dst, bufferData) + } catch { + /* best-effort copy: path may not exist in git at this ref */ } } @@ -54,8 +55,13 @@ const readPathFromGitAsBuffer = async (path, { repo, to }) => { } const readPathFromGit = async (path, config) => { - const bufferData = await readPathFromGitAsBuffer(path, config) - const utf8Data = bufferData.toString(UTF8_ENCODING) + let utf8Data = '' + try { + const bufferData = await readPathFromGitAsBuffer(path, config) + utf8Data = bufferData.toString(UTF8_ENCODING) + } catch { + /* unreadable path: fall back to empty string */ + } 
return utf8Data } diff --git a/yarn.lock b/yarn.lock index 6048d986..f825d2e6 100644 --- a/yarn.lock +++ b/yarn.lock @@ -7022,11 +7022,16 @@ typescript@^4.5.4: resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.9.5.tgz#095979f9bcc0d09da324d58d03ce8f8374cbe65a" integrity sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g== -"typescript@^4.6.4 || ^5.0.0", typescript@^5.0.2: +"typescript@^4.6.4 || ^5.0.0": version "5.0.2" resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.0.2.tgz#891e1a90c5189d8506af64b9ef929fca99ba1ee5" integrity sha512-wVORMBGO/FAs/++blGNeAVdbNKtIh1rbBL2EyQ1+J9lClJ93KiiKe8PmFIVdXhHcyv44SL9oglmfeSsndo0jRw== +typescript@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.0.3.tgz#fe976f0c826a88d0a382007681cbb2da44afdedf" + integrity sha512-xv8mOEDnigb/tN9PSMTwSEqAnUvkoXMQlicOb0IUVDBSQCgBSaAAROUZYy2IcUy5qU6XajK5jjjO7TMWqBTKZA== + unbox-primitive@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e"