From 6cec77f10c50888c058cc6d7ec51618220f96d6f Mon Sep 17 00:00:00 2001 From: Gert Hengeveld Date: Wed, 13 Dec 2023 16:19:47 +0100 Subject: [PATCH 1/8] wip --- node-src/lib/upload.ts | 6 ++++-- node-src/lib/utils.ts | 2 -- node-src/tasks/upload.test.ts | 8 ++++---- node-src/tasks/upload.ts | 5 +++-- node-src/types.ts | 1 + node-src/ui/messages/errors/fatalError.stories.ts | 3 ++- node-src/ui/messages/info/storybookPublished.stories.ts | 1 + 7 files changed, 15 insertions(+), 11 deletions(-) diff --git a/node-src/lib/upload.ts b/node-src/lib/upload.ts index f5ad83571..e9d354c93 100644 --- a/node-src/lib/upload.ts +++ b/node-src/lib/upload.ts @@ -84,7 +84,8 @@ export async function uploadBuild( UploadBuildMutation, { buildId: ctx.announcedBuild.id, - files: files.map(({ contentLength, targetPath }) => ({ + files: files.map(({ contentHash, contentLength, targetPath }) => ({ + contentHash, contentLength, filePath: targetPath, })), @@ -178,7 +179,8 @@ export async function uploadMetadata(ctx: Context, files: FileDesc[]) { UploadMetadataMutation, { buildId: ctx.announcedBuild.id, - files: files.map(({ contentLength, targetPath }) => ({ + files: files.map(({ contentHash, contentLength, targetPath }) => ({ + contentHash, contentLength, filePath: targetPath, })), diff --git a/node-src/lib/utils.ts b/node-src/lib/utils.ts index 7b36c0037..dd783734f 100644 --- a/node-src/lib/utils.ts +++ b/node-src/lib/utils.ts @@ -34,8 +34,6 @@ export const activityBar = (n = 0, size = 20) => { return `[${track.join('')}]`; }; -export const baseStorybookUrl = (url: string) => url?.replace(/\/iframe\.html$/, ''); - export const rewriteErrorMessage = (err: Error, message: string) => { try { // DOMException doesn't allow setting the message, so this might fail diff --git a/node-src/tasks/upload.test.ts b/node-src/tasks/upload.test.ts index 95b34985a..4bf979810 100644 --- a/node-src/tasks/upload.test.ts +++ b/node-src/tasks/upload.test.ts @@ -306,8 +306,8 @@ describe('uploadStorybook', () => { expect(client.runQuery).toHaveBeenCalledWith(expect.stringMatching(/UploadBuildMutation/), { buildId: '1', files: [ - { contentLength: 42, filePath: 'iframe.html' }, - { contentLength: 42, filePath: 'index.html' }, + { contentHash: undefined, contentLength: 42, filePath: 'iframe.html' }, + { contentHash: undefined, contentLength: 42, filePath: 'index.html' }, ], }); expect(http.fetch).toHaveBeenCalledWith( @@ -468,8 +468,8 @@ describe('uploadStorybook', () => { expect(client.runQuery).toHaveBeenCalledWith(expect.stringMatching(/UploadBuildMutation/), { buildId: '1', files: [ - { contentLength: 42, filePath: 'iframe.html' }, - { contentLength: 42, filePath: 'index.html' }, + { contentHash: undefined, contentLength: 42, filePath: 'iframe.html' }, + { contentHash: undefined, contentLength: 42, filePath: 'index.html' }, ], zip: true, }); diff --git a/node-src/tasks/upload.ts b/node-src/tasks/upload.ts index 93903c3be..cacb17729 100644 --- a/node-src/tasks/upload.ts +++ b/node-src/tasks/upload.ts @@ -22,7 +22,7 @@ import { success, hashing, } from '../ui/tasks/upload'; -import { Context, Task } from '../types'; +import { Context, FileDesc, Task } from '../types'; import { readStatsFile } from './read-stats-file'; import bailFile from '../ui/messages/warnings/bailFile'; import { findChangedPackageFiles } from '../lib/findChangedPackageFiles'; @@ -205,7 +205,8 @@ export const uploadStorybook = async (ctx: Context, task: Task) => { transitionTo(preparing)(ctx, task); const files = ctx.fileInfo.paths - .map((path) => ({ + .map((path) => ({ 
+ ...(ctx.fileInfo.hashes && { contentHash: ctx.fileInfo.hashes[path] }), contentLength: ctx.fileInfo.lengths.find(({ knownAs }) => knownAs === path).contentLength, localPath: join(ctx.sourceDir, path), targetPath: path, diff --git a/node-src/types.ts b/node-src/types.ts index 5014f3205..4eee98335 100644 --- a/node-src/types.ts +++ b/node-src/types.ts @@ -354,6 +354,7 @@ export interface Stats { } export interface FileDesc { + contentHash?: string; contentLength: number; localPath: string; targetPath: string; diff --git a/node-src/ui/messages/errors/fatalError.stories.ts b/node-src/ui/messages/errors/fatalError.stories.ts index f77db2c3b..77a8a4f6a 100644 --- a/node-src/ui/messages/errors/fatalError.stories.ts +++ b/node-src/ui/messages/errors/fatalError.stories.ts @@ -47,9 +47,10 @@ const context = { build: { id: '5ec5069ae0d35e0022b6a9cc', number: 42, + storybookUrl: 'https://5d67dc0374b2e300209c41e7-pfkaemtlit.chromatic.com/', webUrl: 'https://www.chromatic.com/build?appId=5d67dc0374b2e300209c41e7&number=1400', }, - storybookUrl: 'https://pfkaemtlit.tunnel.chromaticqa.com/', + storybookUrl: 'https://5d67dc0374b2e300209c41e7-pfkaemtlit.chromatic.com/', }; const stack = `Error: Oh no! diff --git a/node-src/ui/messages/info/storybookPublished.stories.ts b/node-src/ui/messages/info/storybookPublished.stories.ts index 9dbee8d3a..72826e373 100644 --- a/node-src/ui/messages/info/storybookPublished.stories.ts +++ b/node-src/ui/messages/info/storybookPublished.stories.ts @@ -19,6 +19,7 @@ export const StorybookPrepared = () => errorCount: undefined, componentCount: 5, specCount: 8, + storybookUrl: 'https://5d67dc0374b2e300209c41e7-pfkaemtlit.chromatic.com/', }, storybookUrl: 'https://5d67dc0374b2e300209c41e7-pfkaemtlit.chromatic.com/', } as any); From 053e7ebd4576d3da4946c770ca17a04823d1e9b4 Mon Sep 17 00:00:00 2001 From: Gert Hengeveld Date: Fri, 15 Dec 2023 13:25:37 +0100 Subject: [PATCH 2/8] Add test for deduping --- node-src/tasks/upload.test.ts | 66 +++++++++++++++++++++++++++++++++++ 1 file changed, 66 insertions(+) diff --git a/node-src/tasks/upload.test.ts b/node-src/tasks/upload.test.ts index 713f31012..ae168a5e4 100644 --- a/node-src/tasks/upload.test.ts +++ b/node-src/tasks/upload.test.ts @@ -412,6 +412,72 @@ describe('uploadStorybook', () => { }); }); + describe('with file hashes', () => { + it('retrieves file upload locations and uploads only returned targets', async () => { + const client = { runQuery: vi.fn() }; + client.runQuery.mockReturnValue({ + uploadBuild: { + info: { + targets: [ + { + contentType: 'text/html', + filePath: 'index.html', + formAction: 'https://s3.amazonaws.com/presigned?index.html', + formFields: {}, + }, + ], + }, + userErrors: [], + }, + }); + + createReadStreamMock.mockReturnValue({ pipe: vi.fn() } as any); + http.fetch.mockReturnValue({ ok: true }); + progress.mockReturnValue({ on: vi.fn() } as any); + + const fileInfo = { + lengths: [ + { knownAs: 'iframe.html', contentLength: 42 }, + { knownAs: 'index.html', contentLength: 42 }, + ], + hashes: { 'iframe.html': 'iframe', 'index.html': 'index' }, + paths: ['iframe.html', 'index.html'], + total: 84, + }; + const ctx = { + client, + env, + log, + http, + sourceDir: '/static/', + options: {}, + fileInfo, + announcedBuild: { id: '1' }, + } as any; + await uploadStorybook(ctx, {} as any); + + expect(client.runQuery).toHaveBeenCalledWith(expect.stringMatching(/UploadBuildMutation/), { + buildId: '1', + files: [ + { contentHash: 'iframe', contentLength: 42, filePath: 'iframe.html' }, + { contentHash: 'index', 
contentLength: 42, filePath: 'index.html' }, + ], + }); + expect(http.fetch).not.toHaveBeenCalledWith( + 'https://s3.amazonaws.com/presigned?iframe.html', + expect.anything(), + expect.anything() + ); + expect(http.fetch).toHaveBeenCalledWith( + 'https://s3.amazonaws.com/presigned?index.html', + expect.objectContaining({ body: expect.any(FormData), method: 'POST' }), + expect.objectContaining({ retries: 0 }) + ); + expect(ctx.uploadedBytes).toBe(42); + expect(ctx.uploadedFiles).toBe(1); + }); + }); + describe('with zip', () => { it('retrieves the upload location, adds the files to an archive and uploads it', async () => { const client = { runQuery: vi.fn() }; From 55dbdb262b7e49e502f2a3ec4a3d14bd69266191 Mon Sep 17 00:00:00 2001 From: Gert Hengeveld Date: Sun, 17 Dec 2023 19:33:51 +0100 Subject: [PATCH 3/8] Show number of skipped files --- node-src/ui/tasks/upload.stories.ts | 10 ++++++++++ node-src/ui/tasks/upload.ts | 7 ++++++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/node-src/ui/tasks/upload.stories.ts b/node-src/ui/tasks/upload.stories.ts index cf66e6368..6547bb9d3 100644 --- a/node-src/ui/tasks/upload.stories.ts +++ b/node-src/ui/tasks/upload.stories.ts @@ -63,6 +63,16 @@ export const Success = () => startedAt: -54321, uploadedBytes: 1234567, uploadedFiles: 42, + fileInfo: { paths: { length: 42 } }, + } as any); + +export const SuccessSkippedFiles = () => + success({ + now: 0, + startedAt: -54321, + uploadedBytes: 1234567, + uploadedFiles: 42, + fileInfo: { paths: { length: 100 } }, } as any); export const SuccessNoFiles = () => success({} as any); diff --git a/node-src/ui/tasks/upload.ts b/node-src/ui/tasks/upload.ts index c9f169e27..8fc016689 100644 --- a/node-src/ui/tasks/upload.ts +++ b/node-src/ui/tasks/upload.ts @@ -102,10 +102,15 @@ export const uploading = ({ percentage }: { percentage: number }) => ({ export const success = (ctx: Context) => { const files = pluralize('file', ctx.uploadedFiles, true); const bytes = filesize(ctx.uploadedBytes || 0); + const skipped = + ctx.fileInfo.paths.length > ctx.uploadedFiles + ? `, skipped ${pluralize('file', ctx.fileInfo.paths.length - ctx.uploadedFiles, true)}` + : ''; + return { status: 'success', title: ctx.uploadedBytes ? `Publish complete in ${getDuration(ctx)}` : `Publish complete`, - output: ctx.uploadedBytes ? `Uploaded ${files} (${bytes})` : 'No new files to upload', + output: ctx.uploadedBytes ? 
`Uploaded ${files} (${bytes})${skipped}` : 'No new files to upload', }; }; From b80c2e0b2f71b6c4b47f3b13e8d56793c7027cac Mon Sep 17 00:00:00 2001 From: Gert Hengeveld Date: Sun, 17 Dec 2023 21:33:23 +0100 Subject: [PATCH 4/8] Fix story data --- node-src/ui/tasks/upload.stories.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/node-src/ui/tasks/upload.stories.ts b/node-src/ui/tasks/upload.stories.ts index 6547bb9d3..b6264d9a2 100644 --- a/node-src/ui/tasks/upload.stories.ts +++ b/node-src/ui/tasks/upload.stories.ts @@ -75,6 +75,11 @@ export const SuccessSkippedFiles = () => fileInfo: { paths: { length: 100 } }, } as any); -export const SuccessNoFiles = () => success({} as any); +export const SuccessNoFiles = () => + success({ + uploadedBytes: 0, + uploadedFiles: 0, + fileInfo: { paths: { length: 100 } }, + } as any); export const Failed = () => failed({ path: 'main.9e3e453142da82719bf4.bundle.js' }); From 4ff08e3d94d81b9c577b16dda4c729a4f2e120c1 Mon Sep 17 00:00:00 2001 From: Gert Hengeveld Date: Mon, 18 Dec 2023 14:48:50 +0100 Subject: [PATCH 5/8] Replace form-data with formdata-node and fix progress tracking --- node-src/lib/FileReaderBlob.ts | 20 ++++++++++++++++ node-src/lib/uploadFiles.ts | 15 ++++-------- node-src/lib/uploadZip.ts | 15 ++++-------- node-src/tasks/upload.test.ts | 42 +++++++++++++--------------------- package.json | 2 +- yarn.lock | 14 ++++-------- 6 files changed, 52 insertions(+), 56 deletions(-) create mode 100644 node-src/lib/FileReaderBlob.ts diff --git a/node-src/lib/FileReaderBlob.ts b/node-src/lib/FileReaderBlob.ts new file mode 100644 index 000000000..85129ae78 --- /dev/null +++ b/node-src/lib/FileReaderBlob.ts @@ -0,0 +1,20 @@ +import { ReadStream, createReadStream } from 'fs'; + +export class FileReaderBlob { + readStream: ReadStream; + size: number; + + constructor(filePath: string, contentLength: number, onProgress: (delta: number) => void) { + this.size = contentLength; + this.readStream = createReadStream(filePath); + this.readStream.on('data', (chunk: Buffer | string) => onProgress(chunk.length)); + } + + stream() { + return this.readStream; + } + + get [Symbol.toStringTag]() { + return 'Blob'; + } +} diff --git a/node-src/lib/uploadFiles.ts b/node-src/lib/uploadFiles.ts index b0e41e465..e30badf19 100644 --- a/node-src/lib/uploadFiles.ts +++ b/node-src/lib/uploadFiles.ts @@ -1,10 +1,9 @@ import retry from 'async-retry'; import { filesize } from 'filesize'; -import FormData from 'form-data'; -import { createReadStream } from 'fs'; +import { FormData } from 'formdata-node'; import pLimit from 'p-limit'; -import progress from 'progress-stream'; import { Context, FileDesc, TargetInfo } from '../types'; +import { FileReaderBlob } from './FileReaderBlob'; export async function uploadFiles( ctx: Context, @@ -28,19 +27,15 @@ export async function uploadFiles( return bail(signal.reason || new Error('Aborted')); } - const progressStream = progress(); - - progressStream.on('progress', ({ delta }) => { - fileProgress += delta; // We upload multiple files so we only care about the delta + const blob = new FileReaderBlob(localPath, contentLength, (delta) => { + fileProgress += delta; totalProgress += delta; onProgress?.(totalProgress); }); const formData = new FormData(); Object.entries(formFields).forEach(([k, v]) => formData.append(k, v)); - formData.append('file', createReadStream(localPath).pipe(progressStream), { - knownLength: contentLength, - }); + formData.append('file', blob); const res = await ctx.http.fetch( formAction, diff --git 
a/node-src/lib/uploadZip.ts b/node-src/lib/uploadZip.ts index 9aff4b712..a8778d139 100644 --- a/node-src/lib/uploadZip.ts +++ b/node-src/lib/uploadZip.ts @@ -1,10 +1,9 @@ import retry from 'async-retry'; import { filesize } from 'filesize'; -import FormData from 'form-data'; -import { createReadStream } from 'fs'; +import { FormData } from 'formdata-node'; import { Response } from 'node-fetch'; -import progress from 'progress-stream'; import { Context, TargetInfo } from '../types'; +import { FileReaderBlob } from './FileReaderBlob'; // A sentinel file is created by a zip-unpack lambda within the Chromatic infrastructure once the // uploaded zip is fully extracted. The contents of this file will consist of 'OK' if the process @@ -28,18 +27,14 @@ export async function uploadZip( return bail(signal.reason || new Error('Aborted')); } - const progressStream = progress(); - - progressStream.on('progress', ({ delta }) => { + const blob = new FileReaderBlob(localPath, contentLength, (delta) => { totalProgress += delta; - onProgress(totalProgress); + onProgress?.(totalProgress); }); const formData = new FormData(); Object.entries(formFields).forEach(([k, v]) => formData.append(k, v)); - formData.append('file', createReadStream(localPath).pipe(progressStream), { - knownLength: contentLength, - }); + formData.append('file', blob); const res = await ctx.http.fetch( formAction, diff --git a/node-src/tasks/upload.test.ts b/node-src/tasks/upload.test.ts index ae168a5e4..792ddd66f 100644 --- a/node-src/tasks/upload.test.ts +++ b/node-src/tasks/upload.test.ts @@ -1,6 +1,5 @@ -import FormData from 'form-data'; +import { FormData } from 'formdata-node'; import { createReadStream, readdirSync, readFileSync, statSync } from 'fs'; -import progressStream from 'progress-stream'; import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; import { default as compress } from '../lib/compress'; @@ -11,13 +10,21 @@ import { calculateFileHashes, validateFiles, traceChangedFiles, uploadStorybook vi.mock('form-data'); vi.mock('fs'); -vi.mock('progress-stream'); vi.mock('../lib/compress'); vi.mock('../lib/getDependentStoryFiles'); vi.mock('../lib/findChangedDependencies'); vi.mock('../lib/findChangedPackageFiles'); vi.mock('./read-stats-file'); +vi.mock('../lib/FileReaderBlob', () => ({ + FileReaderBlob: class { + constructor(path: string, length: number, onProgress: (delta: number) => void) { + onProgress(length / 2); + onProgress(length / 2); + } + }, +})); + vi.mock('../lib/getFileHashes', () => ({ getFileHashes: (files: string[]) => Promise.resolve(Object.fromEntries(files.map((f) => [f, 'hash']))), @@ -31,7 +38,6 @@ const createReadStreamMock = vi.mocked(createReadStream); const readdirSyncMock = vi.mocked(readdirSync); const readFileSyncMock = vi.mocked(readFileSync); const statSyncMock = vi.mocked(statSync); -const progress = vi.mocked(progressStream); const env = { CHROMATIC_RETRIES: 2, CHROMATIC_OUTPUT_INTERVAL: 0 }; const log = { info: vi.fn(), warn: vi.fn(), debug: vi.fn() }; @@ -286,7 +292,6 @@ describe('uploadStorybook', () => { createReadStreamMock.mockReturnValue({ pipe: vi.fn() } as any); http.fetch.mockReturnValue({ ok: true }); - progress.mockReturnValue({ on: vi.fn() } as any); const fileInfo = { lengths: [ @@ -318,18 +323,18 @@ describe('uploadStorybook', () => { expect(http.fetch).toHaveBeenCalledWith( 'https://s3.amazonaws.com/presigned?iframe.html', expect.objectContaining({ body: expect.any(FormData), method: 'POST' }), - expect.objectContaining({ retries: 0 }) + { retries: 0 } ); 
expect(http.fetch).toHaveBeenCalledWith( 'https://s3.amazonaws.com/presigned?index.html', expect.objectContaining({ body: expect.any(FormData), method: 'POST' }), - expect.objectContaining({ retries: 0 }) + { retries: 0 } ); expect(ctx.uploadedBytes).toBe(84); expect(ctx.uploadedFiles).toBe(2); }); - it.skip('calls experimental_onTaskProgress with progress', async () => { + it('calls experimental_onTaskProgress with progress', async () => { const client = { runQuery: vi.fn() }; client.runQuery.mockReturnValue({ uploadBuild: { @@ -354,20 +359,7 @@ describe('uploadStorybook', () => { }); createReadStreamMock.mockReturnValue({ pipe: vi.fn((x) => x) } as any); - progress.mockImplementation((() => { - let progressCb; - return { - on: vi.fn((name, cb) => { - progressCb = cb; - }), - sendProgress: (delta: number) => progressCb({ delta }), - }; - }) as any); - http.fetch.mockReset().mockImplementation(async (url, { body }) => { - // How to update progress? - console.log(body); - return { ok: true }; - }); + http.fetch.mockReturnValue({ ok: true }); const fileInfo = { lengths: [ @@ -433,7 +425,6 @@ describe('uploadStorybook', () => { createReadStreamMock.mockReturnValue({ pipe: vi.fn() } as any); http.fetch.mockReturnValue({ ok: true }); - progress.mockReturnValue({ on: vi.fn() } as any); const fileInfo = { lengths: [ @@ -471,7 +462,7 @@ describe('uploadStorybook', () => { expect(http.fetch).toHaveBeenCalledWith( 'https://s3.amazonaws.com/presigned?index.html', expect.objectContaining({ body: expect.any(FormData), method: 'POST' }), - expect.objectContaining({ retries: 0 }) + { retries: 0 } ); expect(ctx.uploadedBytes).toBe(42); expect(ctx.uploadedFiles).toBe(1); @@ -513,7 +504,6 @@ describe('uploadStorybook', () => { makeZipFile.mockReturnValue(Promise.resolve({ path: 'storybook.zip', size: 80 })); createReadStreamMock.mockReturnValue({ pipe: vi.fn() } as any); http.fetch.mockReturnValue({ ok: true, text: () => Promise.resolve('OK') }); - progress.mockReturnValue({ on: vi.fn() } as any); const fileInfo = { lengths: [ @@ -546,7 +536,7 @@ describe('uploadStorybook', () => { expect(http.fetch).toHaveBeenCalledWith( 'https://s3.amazonaws.com/presigned?storybook.zip', expect.objectContaining({ body: expect.any(FormData), method: 'POST' }), - expect.objectContaining({ retries: 0 }) + { retries: 0 } ); expect(http.fetch).not.toHaveBeenCalledWith( 'https://s3.amazonaws.com/presigned?iframe.html', diff --git a/package.json b/package.json index cc1902607..568b1e9f2 100644 --- a/package.json +++ b/package.json @@ -149,7 +149,7 @@ "execa": "^7.2.0", "fake-tag": "^2.0.0", "filesize": "^10.1.0", - "form-data": "^4.0.0", + "formdata-node": "^6.0.3", "fs-extra": "^10.0.0", "https-proxy-agent": "^7.0.2", "husky": "^7.0.0", diff --git a/yarn.lock b/yarn.lock index 54b6b1dea..d8bfa55d8 100644 --- a/yarn.lock +++ b/yarn.lock @@ -7893,20 +7893,16 @@ form-data@^3.0.0: combined-stream "^1.0.8" mime-types "^2.1.12" -form-data@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" - integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.8" - mime-types "^2.1.12" - format@^0.2.0: version "0.2.2" resolved "https://registry.yarnpkg.com/format/-/format-0.2.2.tgz#d6170107e9efdc4ed30c9dc39016df942b5cb58b" integrity sha1-1hcBB+nv3E7TDJ3DkBbflCtctYs= +formdata-node@^6.0.3: + version "6.0.3" + resolved 
"https://registry.yarnpkg.com/formdata-node/-/formdata-node-6.0.3.tgz#48f8e2206ae2befded82af621ef015f08168dc6d" + integrity sha512-8e1++BCiTzUno9v5IZ2J6bv4RU+3UKDmqWUQD0MIMVCd9AdhWkO1gw57oo1mNEX1dMq2EGI+FbWz4B92pscSQg== + formdata-polyfill@^4.0.10: version "4.0.10" resolved "https://registry.yarnpkg.com/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz#24807c31c9d402e002ab3d8c720144ceb8848423" From 04988850062de45d8d4074b190b7aedd07fe0291 Mon Sep 17 00:00:00 2001 From: Gert Hengeveld Date: Mon, 18 Dec 2023 14:57:40 +0100 Subject: [PATCH 6/8] Drop unused dependencies --- package.json | 2 -- yarn.lock | 22 +--------------------- 2 files changed, 1 insertion(+), 23 deletions(-) diff --git a/package.json b/package.json index 568b1e9f2..a3d1327a6 100644 --- a/package.json +++ b/package.json @@ -124,7 +124,6 @@ "@types/listr": "^0.14.4", "@types/node": "18.x", "@types/picomatch": "^2.3.0", - "@types/progress-stream": "^2.0.2", "@types/semver": "^7.3.9", "@typescript-eslint/eslint-plugin": "^6.8.0", "@typescript-eslint/parser": "^6.8.0", @@ -170,7 +169,6 @@ "pkg-up": "^3.1.0", "pluralize": "^8.0.0", "prettier": "^2.3.2", - "progress-stream": "^2.0.0", "prop-types": "^15.7.2", "react": "^17.0.2", "react-dom": "^17.0.2", diff --git a/yarn.lock b/yarn.lock index d8bfa55d8..d2dd8ecfe 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3488,13 +3488,6 @@ resolved "https://registry.yarnpkg.com/@types/pretty-hrtime/-/pretty-hrtime-1.0.1.tgz#72a26101dc567b0d68fd956cf42314556e42d601" integrity sha512-VjID5MJb1eGKthz2qUerWT8+R4b9N+CHvGCzg9fn4kWZgaF9AhdYikQio3R7wV8YY1NsQKPaCwKz1Yff+aHNUQ== -"@types/progress-stream@^2.0.2": - version "2.0.2" - resolved "https://registry.yarnpkg.com/@types/progress-stream/-/progress-stream-2.0.2.tgz#443afb2c29cfde0e6240805364b7715bc5bd03a8" - integrity sha512-u9N40mYX/Nx/Pmt847+G2N72s5QL2jwgXrVKCIcxgOdMBdIzY+e/m3m1gQBNPmgvQQBO79EisLAcVJ/p0qKuvA== - dependencies: - "@types/node" "*" - "@types/prop-types@*": version "15.7.4" resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.4.tgz#fcf7205c25dff795ee79af1e30da2c9790808f11" @@ -12198,14 +12191,6 @@ process@^0.11.10: resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= -progress-stream@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/progress-stream/-/progress-stream-2.0.0.tgz#fac63a0b3d11deacbb0969abcc93b214bce19ed5" - integrity sha1-+sY6Cz0R3qy7CWmrzJOyFLzhntU= - dependencies: - speedometer "~1.0.0" - through2 "~2.0.3" - progress@^2.0.0: version "2.0.3" resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" @@ -13860,11 +13845,6 @@ spdx-license-ids@^3.0.0: resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.10.tgz#0d9becccde7003d6c658d487dd48a32f0bf3014b" integrity sha512-oie3/+gKf7QtpitB0LYLETe+k8SifzsX4KixvpOsbI6S0kRiRQ5MKOio8eMSAKQ17N06+wdEOXRiId+zOxo0hA== -speedometer@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/speedometer/-/speedometer-1.0.0.tgz#cd671cb06752c22bca3370e2f334440be4fc62e2" - integrity sha1-zWccsGdSwivKM3Di8zREC+T8YuI= - split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" @@ -14519,7 +14499,7 @@ thenify-all@^1.0.0: dependencies: any-promise "^1.0.0" -through2@^2.0.0, through2@~2.0.3: +through2@^2.0.0: version "2.0.5" resolved 
"https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== From 96834e655e6fe0aed0b660de9dc38132fc085711 Mon Sep 17 00:00:00 2001 From: Gert Hengeveld Date: Mon, 18 Dec 2023 14:57:49 +0100 Subject: [PATCH 7/8] Fix tests --- node-src/index.test.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/node-src/index.test.ts b/node-src/index.test.ts index 6f29249a7..072a9c5ef 100644 --- a/node-src/index.test.ts +++ b/node-src/index.test.ts @@ -469,6 +469,7 @@ it('calls out to npm build script passed and uploads files', async () => { expect.any(Object), [ { + contentHash: 'hash', contentLength: 42, contentType: 'text/html', fileKey: '', @@ -479,6 +480,7 @@ it('calls out to npm build script passed and uploads files', async () => { targetPath: 'iframe.html', }, { + contentHash: 'hash', contentLength: 42, contentType: 'text/html', fileKey: '', @@ -502,6 +504,7 @@ it('skips building and uploads directly with storybook-build-dir', async () => { expect.any(Object), [ { + contentHash: 'hash', contentLength: 42, contentType: 'text/html', fileKey: '', @@ -512,6 +515,7 @@ it('skips building and uploads directly with storybook-build-dir', async () => { targetPath: 'iframe.html', }, { + contentHash: 'hash', contentLength: 42, contentType: 'text/html', fileKey: '', From c99cfb45f18a6028c543f6372de91b5962dceeff Mon Sep 17 00:00:00 2001 From: Gert Hengeveld Date: Tue, 19 Dec 2023 11:40:31 +0100 Subject: [PATCH 8/8] Retrieve sentinelUrls from uploadBuild and wait for all of them before finishing upload task --- node-src/lib/upload.ts | 20 +++++++------ node-src/lib/uploadZip.ts | 45 +---------------------------- node-src/lib/waitForSentinel.ts | 45 +++++++++++++++++++++++++++++ node-src/tasks/upload.ts | 13 ++++++++- node-src/types.ts | 1 + node-src/ui/tasks/upload.stories.ts | 3 ++ node-src/ui/tasks/upload.ts | 6 ++++ 7 files changed, 79 insertions(+), 54 deletions(-) create mode 100644 node-src/lib/waitForSentinel.ts diff --git a/node-src/lib/upload.ts b/node-src/lib/upload.ts index e9d354c93..1e43e3260 100644 --- a/node-src/lib/upload.ts +++ b/node-src/lib/upload.ts @@ -1,7 +1,7 @@ import makeZipFile from './compress'; -import { Context, FileDesc, TargetInfo } from '../types'; -import { uploadZip, waitForUnpack } from './uploadZip'; +import { uploadZip } from './uploadZip'; import { uploadFiles } from './uploadFiles'; +import { Context, FileDesc, TargetInfo } from '../types'; import { maxFileCountExceeded } from '../ui/messages/errors/maxFileCountExceeded'; import { maxFileSizeExceeded } from '../ui/messages/errors/maxFileSizeExceeded'; @@ -9,6 +9,7 @@ const UploadBuildMutation = ` mutation UploadBuildMutation($buildId: ObjID!, $files: [FileUploadInput!]!, $zip: Boolean) { uploadBuild(buildId: $buildId, files: $files, zip: $zip) { info { + sentinelUrls targets { contentType fileKey @@ -22,7 +23,6 @@ const UploadBuildMutation = ` filePath formAction formFields - sentinelUrl } } userErrors { @@ -46,8 +46,9 @@ const UploadBuildMutation = ` interface UploadBuildMutationResult { uploadBuild: { info?: { + sentinelUrls: string[]; targets: TargetInfo[]; - zipTarget?: TargetInfo & { sentinelUrl: string }; + zipTarget?: TargetInfo; }; userErrors: ( | { @@ -76,7 +77,7 @@ export async function uploadBuild( options: { onStart?: () => void; onProgress?: (progress: number, total: number) => void; - onComplete?: (uploadedBytes: number, uploadedFiles: number) => void; + 
onComplete?: (uploadedBytes: number, uploadedFiles: number, sentinelUrls: string[]) => void; onError?: (error: Error, path?: string) => void; } = {} ) { @@ -106,6 +107,8 @@ export async function uploadBuild( return options.onError?.(new Error('Upload rejected due to user error')); } + const { sentinelUrls } = uploadBuild.info; + const targets = uploadBuild.info.targets.map((target) => { const file = files.find((f) => f.targetPath === target.filePath); return { ...file, ...target }; @@ -113,7 +116,7 @@ export async function uploadBuild( if (!targets.length) { ctx.log.debug('No new files to upload, continuing'); - return options.onComplete?.(0, 0); + return options.onComplete?.(0, 0, sentinelUrls); } options.onStart?.(); @@ -127,8 +130,7 @@ export async function uploadBuild( const target = { ...uploadBuild.info.zipTarget, contentLength: size, localPath: path }; await uploadZip(ctx, target, (progress) => options.onProgress?.(progress, size)); - await waitForUnpack(ctx, target.sentinelUrl); - return options.onComplete?.(size, targets.length); + return options.onComplete?.(size, targets.length, sentinelUrls); } catch (err) { ctx.log.debug({ err }, 'Error uploading zip, falling back to uploading individual files'); } @@ -136,7 +138,7 @@ export async function uploadBuild( try { await uploadFiles(ctx, targets, (progress) => options.onProgress?.(progress, total)); - return options.onComplete?.(total, targets.length); + return options.onComplete?.(total, targets.length, sentinelUrls); } catch (e) { return options.onError?.(e, files.some((f) => f.localPath === e.message) && e.message); } diff --git a/node-src/lib/uploadZip.ts b/node-src/lib/uploadZip.ts index a8778d139..be1527238 100644 --- a/node-src/lib/uploadZip.ts +++ b/node-src/lib/uploadZip.ts @@ -1,18 +1,12 @@ import retry from 'async-retry'; import { filesize } from 'filesize'; import { FormData } from 'formdata-node'; -import { Response } from 'node-fetch'; import { Context, TargetInfo } from '../types'; import { FileReaderBlob } from './FileReaderBlob'; -// A sentinel file is created by a zip-unpack lambda within the Chromatic infrastructure once the -// uploaded zip is fully extracted. The contents of this file will consist of 'OK' if the process -// completed successfully and 'ERROR' if an error occurred. 
-const SENTINEL_SUCCESS_VALUE = 'OK'; - export async function uploadZip( ctx: Context, - target: TargetInfo & { contentLength: number; localPath: string; sentinelUrl: string }, + target: TargetInfo & { contentLength: number; localPath: string }, onProgress: (progress: number) => void ) { const { experimental_abortSignal: signal } = ctx.options; @@ -58,40 +52,3 @@ export async function uploadZip( } ); } - -export async function waitForUnpack(ctx: Context, url: string) { - const { experimental_abortSignal: signal } = ctx.options; - - ctx.log.debug(`Waiting for zip unpack sentinel file to appear at '${url}'`); - - return retry( - async (bail) => { - if (signal?.aborted) { - return bail(signal.reason || new Error('Aborted')); - } - - let res: Response; - try { - res = await ctx.http.fetch(url, { signal }, { retries: 0, noLogErrorBody: true }); - } catch (e) { - const { response = {} } = e; - if (response.status === 403) { - return bail(new Error('Provided signature expired.')); - } - throw new Error('Sentinel file not present.'); - } - - const result = await res.text(); - if (result !== SENTINEL_SUCCESS_VALUE) { - return bail(new Error('Zip file failed to unpack remotely.')); - } else { - ctx.log.debug(`Sentinel file present, continuing.`); - } - }, - { - retries: 185, // 3 minutes and some change (matches the lambda timeout with some extra buffer) - minTimeout: 1000, - maxTimeout: 1000, - } - ); -} diff --git a/node-src/lib/waitForSentinel.ts b/node-src/lib/waitForSentinel.ts new file mode 100644 index 000000000..2c7df6ca1 --- /dev/null +++ b/node-src/lib/waitForSentinel.ts @@ -0,0 +1,45 @@ +import retry from 'async-retry'; +import { Response } from 'node-fetch'; +import { Context } from '../types'; + +// A sentinel file is created by a zip-unpack lambda within the Chromatic infrastructure once the +// uploaded zip is fully extracted. The contents of this file will consist of 'OK' if the process +// completed successfully and 'ERROR' if an error occurred. 
+const SENTINEL_SUCCESS_VALUE = 'OK'; + +export async function waitForSentinel(ctx: Context, url: string) { + const { experimental_abortSignal: signal } = ctx.options; + + ctx.log.debug(`Waiting for sentinel file to appear at ${url}`); + + return retry( + async (bail) => { + if (signal?.aborted) { + return bail(signal.reason || new Error('Aborted')); + } + + let res: Response; + try { + res = await ctx.http.fetch(url, { signal }, { retries: 0, noLogErrorBody: true }); + } catch (e) { + const { response = {} } = e; + if (response.status === 403) { + return bail(new Error('Provided signature expired.')); + } + throw new Error('Sentinel file not present.'); + } + + const result = await res.text(); + if (result !== SENTINEL_SUCCESS_VALUE) { + ctx.log.debug(`Sentinel file not OK, got ${result}`); + return bail(new Error('Sentinel file error.')); + } + ctx.log.debug(`Sentinel file OK.`); + }, + { + retries: 185, // 3 minutes and some change (matches the lambda timeout with some extra buffer) + minTimeout: 1000, + maxTimeout: 1000, + } + ); +} diff --git a/node-src/tasks/upload.ts b/node-src/tasks/upload.ts index cacb17729..c1271c476 100644 --- a/node-src/tasks/upload.ts +++ b/node-src/tasks/upload.ts @@ -21,6 +21,7 @@ import { uploading, success, hashing, + finalizing, } from '../ui/tasks/upload'; import { Context, FileDesc, Task } from '../types'; import { readStatsFile } from './read-stats-file'; @@ -29,6 +30,7 @@ import { findChangedPackageFiles } from '../lib/findChangedPackageFiles'; import { findChangedDependencies } from '../lib/findChangedDependencies'; import { uploadBuild } from '../lib/upload'; import { getFileHashes } from '../lib/getFileHashes'; +import { waitForSentinel } from '../lib/waitForSentinel'; interface PathSpec { pathname: string; @@ -225,7 +227,8 @@ export const uploadStorybook = async (ctx: Context, task: Task) => { // Avoid spamming the logs with progress updates in non-interactive mode ctx.options.interactive ? 
100 : ctx.env.CHROMATIC_OUTPUT_INTERVAL ), - onComplete: (uploadedBytes: number, uploadedFiles: number) => { + onComplete: (uploadedBytes: number, uploadedFiles: number, sentinelUrls: string[]) => { + ctx.sentinelUrls = sentinelUrls; ctx.uploadedBytes = uploadedBytes; ctx.uploadedFiles = uploadedFiles; }, @@ -235,6 +238,13 @@ export const uploadStorybook = async (ctx: Context, task: Task) => { }); }; +export const waitForSentinels = async (ctx: Context, task: Task) => { + if (ctx.skip || !ctx.sentinelUrls?.length) return; + transitionTo(finalizing)(ctx, task); + + await Promise.all(ctx.sentinelUrls.map((url) => waitForSentinel(ctx, url))); +}; + export default createTask({ name: 'upload', title: initial.title, @@ -249,6 +259,7 @@ export default createTask({ traceChangedFiles, calculateFileHashes, uploadStorybook, + waitForSentinels, transitionTo(success, true), ], }); diff --git a/node-src/types.ts b/node-src/types.ts index 4eee98335..7b3d6b285 100644 --- a/node-src/types.ts +++ b/node-src/types.ts @@ -302,6 +302,7 @@ export interface Context { }[]; total: number; }; + sentinelUrls?: string[]; uploadedBytes?: number; uploadedFiles?: number; turboSnap?: Partial<{ diff --git a/node-src/ui/tasks/upload.stories.ts b/node-src/ui/tasks/upload.stories.ts index b6264d9a2..44a8da5d8 100644 --- a/node-src/ui/tasks/upload.stories.ts +++ b/node-src/ui/tasks/upload.stories.ts @@ -3,6 +3,7 @@ import { bailed, dryRun, failed, + finalizing, hashing, initial, invalid, @@ -57,6 +58,8 @@ export const Starting = () => starting(); export const Uploading = () => uploading({ percentage: 42 }); +export const Finalizing = () => finalizing(); + export const Success = () => success({ now: 0, diff --git a/node-src/ui/tasks/upload.ts b/node-src/ui/tasks/upload.ts index 8fc016689..64c6c4ece 100644 --- a/node-src/ui/tasks/upload.ts +++ b/node-src/ui/tasks/upload.ts @@ -99,6 +99,12 @@ export const uploading = ({ percentage }: { percentage: number }) => ({ output: `${progressBar(percentage)} ${percentage}%`, }); +export const finalizing = () => ({ + status: 'pending', + title: 'Publishing your built Storybook', + output: `Finalizing upload`, +}); + export const success = (ctx: Context) => { const files = pluralize('file', ctx.uploadedFiles, true); const bytes = filesize(ctx.uploadedBytes || 0);
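
For reference, the progress-tracking approach introduced in PATCH 5/8 (FileReaderBlob streamed through formdata-node instead of form-data + progress-stream) can be exercised in isolation roughly as follows. This is a minimal sketch and not part of the patches above: the local file path, form field, and presigned endpoint are invented for illustration, and error handling/retries are omitted.

    import { statSync } from 'fs';
    import { FormData } from 'formdata-node';
    import { FileReaderBlob } from './node-src/lib/FileReaderBlob'; // path assumed relative to the repo root

    // Hypothetical file to upload; in the CLI this comes from ctx.fileInfo.
    const localPath = './storybook-static/iframe.html';
    const contentLength = statSync(localPath).size;

    // FileReaderBlob reports each chunk read from disk, which is what the
    // uploadFiles/uploadZip callbacks use to drive the progress bar.
    let uploaded = 0;
    const blob = new FileReaderBlob(localPath, contentLength, (delta) => {
      uploaded += delta;
      console.log(`read ${uploaded}/${contentLength} bytes`);
    });

    const formData = new FormData();
    formData.append('key', 'some-presigned-form-field'); // hypothetical presigned form field
    formData.append('file', blob);

    // The multipart body would then be POSTed to the presigned URL, e.g.:
    // await ctx.http.fetch('https://s3.amazonaws.com/presigned?iframe.html',
    //   { method: 'POST', body: formData }, { retries: 0 });

The sketch leans on the same property the patch does: FileReaderBlob only needs to expose stream(), size, and a 'Blob' Symbol.toStringTag for formdata-node to serialize it as a file part, which is what allows the per-chunk 'data' listener to replace the progress-stream pipe.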