Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Retrieve sentinelUrls from uploadBuild and wait for all of them before finishing upload task #878

4 changes: 4 additions & 0 deletions node-src/index.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -469,6 +469,7 @@ it('calls out to npm build script passed and uploads files', async () => {
expect.any(Object),
[
{
contentHash: 'hash',
contentLength: 42,
contentType: 'text/html',
fileKey: '',
Expand All @@ -479,6 +480,7 @@ it('calls out to npm build script passed and uploads files', async () => {
targetPath: 'iframe.html',
},
{
contentHash: 'hash',
contentLength: 42,
contentType: 'text/html',
fileKey: '',
Expand All @@ -502,6 +504,7 @@ it('skips building and uploads directly with storybook-build-dir', async () => {
expect.any(Object),
[
{
contentHash: 'hash',
contentLength: 42,
contentType: 'text/html',
fileKey: '',
Expand All @@ -512,6 +515,7 @@ it('skips building and uploads directly with storybook-build-dir', async () => {
targetPath: 'iframe.html',
},
{
contentHash: 'hash',
contentLength: 42,
contentType: 'text/html',
fileKey: '',
Expand Down
20 changes: 20 additions & 0 deletions node-src/lib/FileReaderBlob.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
import { ReadStream, createReadStream } from 'fs';

/**
 * Blob-like wrapper around a file read stream so a file can be appended to a
 * `FormData` body without buffering the entire file in memory.
 */
export class FileReaderBlob {
  // Underlying file stream; exposed so callers can attach listeners or destroy it.
  readStream: ReadStream;
  // Total byte size of the file, reported as the blob's `size` to FormData consumers.
  size: number;

  /**
   * @param filePath Path of the file to stream.
   * @param contentLength Size of the file in bytes.
   * @param onProgress Invoked with the byte count of each chunk as it is read.
   */
  constructor(filePath: string, contentLength: number, onProgress: (delta: number) => void) {
    this.size = contentLength;
    this.readStream = createReadStream(filePath);
    this.readStream.on('data', (chunk: Buffer | string) => {
      // Report byte length, not character count, in case the stream ever emits strings
      // (e.g. if an encoding is set); for Buffers the two are identical.
      onProgress(typeof chunk === 'string' ? Buffer.byteLength(chunk) : chunk.length);
    });
  }

  stream() {
    return this.readStream;
  }

  // Makes `Object.prototype.toString` report '[object Blob]' so FormData
  // implementations recognize this object as a Blob.
  get [Symbol.toStringTag]() {
    return 'Blob';
  }
}
26 changes: 15 additions & 11 deletions node-src/lib/upload.ts
Original file line number Diff line number Diff line change
@@ -1,14 +1,15 @@
import makeZipFile from './compress';
import { Context, FileDesc, TargetInfo } from '../types';
import { uploadZip, waitForUnpack } from './uploadZip';
import { uploadZip } from './uploadZip';
import { uploadFiles } from './uploadFiles';
import { Context, FileDesc, TargetInfo } from '../types';
import { maxFileCountExceeded } from '../ui/messages/errors/maxFileCountExceeded';
import { maxFileSizeExceeded } from '../ui/messages/errors/maxFileSizeExceeded';

const UploadBuildMutation = `
mutation UploadBuildMutation($buildId: ObjID!, $files: [FileUploadInput!]!, $zip: Boolean) {
uploadBuild(buildId: $buildId, files: $files, zip: $zip) {
info {
sentinelUrls
targets {
contentType
fileKey
Expand All @@ -22,7 +23,6 @@ const UploadBuildMutation = `
filePath
formAction
formFields
sentinelUrl
}
}
userErrors {
Expand All @@ -46,8 +46,9 @@ const UploadBuildMutation = `
interface UploadBuildMutationResult {
uploadBuild: {
info?: {
sentinelUrls: string[];
targets: TargetInfo[];
zipTarget?: TargetInfo & { sentinelUrl: string };
zipTarget?: TargetInfo;
};
userErrors: (
| {
Expand Down Expand Up @@ -76,15 +77,16 @@ export async function uploadBuild(
options: {
onStart?: () => void;
onProgress?: (progress: number, total: number) => void;
onComplete?: (uploadedBytes: number, uploadedFiles: number) => void;
onComplete?: (uploadedBytes: number, uploadedFiles: number, sentinelUrls: string[]) => void;
onError?: (error: Error, path?: string) => void;
} = {}
) {
const { uploadBuild } = await ctx.client.runQuery<UploadBuildMutationResult>(
UploadBuildMutation,
{
buildId: ctx.announcedBuild.id,
files: files.map(({ contentLength, targetPath }) => ({
files: files.map(({ contentHash, contentLength, targetPath }) => ({
contentHash,
contentLength,
filePath: targetPath,
})),
Expand All @@ -105,14 +107,16 @@ export async function uploadBuild(
return options.onError?.(new Error('Upload rejected due to user error'));
}

const { sentinelUrls } = uploadBuild.info;

const targets = uploadBuild.info.targets.map((target) => {
const file = files.find((f) => f.targetPath === target.filePath);
return { ...file, ...target };
});

if (!targets.length) {
ctx.log.debug('No new files to upload, continuing');
return options.onComplete?.(0, 0);
return options.onComplete?.(0, 0, sentinelUrls);
}

options.onStart?.();
Expand All @@ -126,16 +130,15 @@ export async function uploadBuild(

const target = { ...uploadBuild.info.zipTarget, contentLength: size, localPath: path };
await uploadZip(ctx, target, (progress) => options.onProgress?.(progress, size));
await waitForUnpack(ctx, target.sentinelUrl);
return options.onComplete?.(size, targets.length);
return options.onComplete?.(size, targets.length, sentinelUrls);
} catch (err) {
ctx.log.debug({ err }, 'Error uploading zip, falling back to uploading individual files');
}
}

try {
await uploadFiles(ctx, targets, (progress) => options.onProgress?.(progress, total));
return options.onComplete?.(total, targets.length);
return options.onComplete?.(total, targets.length, sentinelUrls);
} catch (e) {
return options.onError?.(e, files.some((f) => f.localPath === e.message) && e.message);
}
Expand Down Expand Up @@ -178,7 +181,8 @@ export async function uploadMetadata(ctx: Context, files: FileDesc[]) {
UploadMetadataMutation,
{
buildId: ctx.announcedBuild.id,
files: files.map(({ contentLength, targetPath }) => ({
files: files.map(({ contentHash, contentLength, targetPath }) => ({
contentHash,
contentLength,
filePath: targetPath,
})),
Expand Down
15 changes: 5 additions & 10 deletions node-src/lib/uploadFiles.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
import retry from 'async-retry';
import { filesize } from 'filesize';
import FormData from 'form-data';
import { createReadStream } from 'fs';
import { FormData } from 'formdata-node';
import pLimit from 'p-limit';
import progress from 'progress-stream';
import { Context, FileDesc, TargetInfo } from '../types';
import { FileReaderBlob } from './FileReaderBlob';

export async function uploadFiles(
ctx: Context,
Expand All @@ -28,19 +27,15 @@ export async function uploadFiles(
return bail(signal.reason || new Error('Aborted'));
}

const progressStream = progress();

progressStream.on('progress', ({ delta }) => {
fileProgress += delta; // We upload multiple files so we only care about the delta
const blob = new FileReaderBlob(localPath, contentLength, (delta) => {
fileProgress += delta;
totalProgress += delta;
onProgress?.(totalProgress);
});

const formData = new FormData();
Object.entries(formFields).forEach(([k, v]) => formData.append(k, v));
formData.append('file', createReadStream(localPath).pipe(progressStream), {
knownLength: contentLength,
});
formData.append('file', blob);

const res = await ctx.http.fetch(
formAction,
Expand Down
60 changes: 6 additions & 54 deletions node-src/lib/uploadZip.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,12 @@
import retry from 'async-retry';
import { filesize } from 'filesize';
import FormData from 'form-data';
import { createReadStream } from 'fs';
import { Response } from 'node-fetch';
import progress from 'progress-stream';
import { FormData } from 'formdata-node';
import { Context, TargetInfo } from '../types';

// A sentinel file is created by a zip-unpack lambda within the Chromatic infrastructure once the
// uploaded zip is fully extracted. The contents of this file will consist of 'OK' if the process
// completed successfully and 'ERROR' if an error occurred.
const SENTINEL_SUCCESS_VALUE = 'OK';
import { FileReaderBlob } from './FileReaderBlob';

export async function uploadZip(
ctx: Context,
target: TargetInfo & { contentLength: number; localPath: string; sentinelUrl: string },
target: TargetInfo & { contentLength: number; localPath: string },
onProgress: (progress: number) => void
) {
const { experimental_abortSignal: signal } = ctx.options;
Expand All @@ -28,18 +21,14 @@ export async function uploadZip(
return bail(signal.reason || new Error('Aborted'));
}

const progressStream = progress();

progressStream.on('progress', ({ delta }) => {
const blob = new FileReaderBlob(localPath, contentLength, (delta) => {
totalProgress += delta;
onProgress(totalProgress);
onProgress?.(totalProgress);
});

const formData = new FormData();
Object.entries(formFields).forEach(([k, v]) => formData.append(k, v));
formData.append('file', createReadStream(localPath).pipe(progressStream), {
knownLength: contentLength,
});
formData.append('file', blob);

const res = await ctx.http.fetch(
formAction,
Expand All @@ -63,40 +52,3 @@ export async function uploadZip(
}
);
}

export async function waitForUnpack(ctx: Context, url: string) {
const { experimental_abortSignal: signal } = ctx.options;

ctx.log.debug(`Waiting for zip unpack sentinel file to appear at '${url}'`);

return retry(
async (bail) => {
if (signal?.aborted) {
return bail(signal.reason || new Error('Aborted'));
}

let res: Response;
try {
res = await ctx.http.fetch(url, { signal }, { retries: 0, noLogErrorBody: true });
} catch (e) {
const { response = {} } = e;
if (response.status === 403) {
return bail(new Error('Provided signature expired.'));
}
throw new Error('Sentinel file not present.');
}

const result = await res.text();
if (result !== SENTINEL_SUCCESS_VALUE) {
return bail(new Error('Zip file failed to unpack remotely.'));
} else {
ctx.log.debug(`Sentinel file present, continuing.`);
}
},
{
retries: 185, // 3 minutes and some change (matches the lambda timeout with some extra buffer)
minTimeout: 1000,
maxTimeout: 1000,
}
);
}
2 changes: 0 additions & 2 deletions node-src/lib/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,6 @@ export const activityBar = (n = 0, size = 20) => {
return `[${track.join('')}]`;
};

export const baseStorybookUrl = (url: string) => url?.replace(/\/iframe\.html$/, '');

export const rewriteErrorMessage = (err: Error, message: string) => {
try {
// DOMException doesn't allow setting the message, so this might fail
Expand Down
45 changes: 45 additions & 0 deletions node-src/lib/waitForSentinel.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
import retry from 'async-retry';
import { Response } from 'node-fetch';
import { Context } from '../types';

// A sentinel file is created by a zip-unpack lambda within the Chromatic infrastructure once the
// uploaded zip is fully extracted. The contents of this file will consist of 'OK' if the process
// completed successfully and 'ERROR' if an error occurred.
const SENTINEL_SUCCESS_VALUE = 'OK';

/**
 * Polls the given sentinel URL until the sentinel file appears and reports success.
 * Retries roughly every second for up to ~3 minutes (matching the lambda timeout),
 * bailing early on abort, an expired signature (403), or an error sentinel value.
 */
export async function waitForSentinel(ctx: Context, url: string) {
  const { experimental_abortSignal: signal } = ctx.options;

  ctx.log.debug(`Waiting for sentinel file to appear at ${url}`);

  const retryOptions = {
    retries: 185, // 3 minutes and some change (matches the lambda timeout with some extra buffer)
    minTimeout: 1000,
    maxTimeout: 1000,
  };

  return retry(async (bail) => {
    if (signal?.aborted) {
      return bail(signal.reason || new Error('Aborted'));
    }

    let response: Response;
    try {
      response = await ctx.http.fetch(url, { signal }, { retries: 0, noLogErrorBody: true });
    } catch (err) {
      const { response: errorResponse = {} } = err;
      // A 403 means the presigned URL is no longer valid; retrying cannot succeed.
      if (errorResponse.status === 403) {
        return bail(new Error('Provided signature expired.'));
      }
      // Any other failure: the file likely isn't there yet, so throw to retry.
      throw new Error('Sentinel file not present.');
    }

    const body = await response.text();
    if (body === SENTINEL_SUCCESS_VALUE) {
      ctx.log.debug(`Sentinel file OK.`);
      return;
    }
    ctx.log.debug(`Sentinel file not OK, got ${body}`);
    return bail(new Error('Sentinel file error.'));
  }, retryOptions);
}
Loading
Loading