From 06b200be10efe4e28cc9a0fbd659d8a76171a8fe Mon Sep 17 00:00:00 2001 From: Mokhtar Naamani Date: Wed, 13 Dec 2023 17:03:25 +0200 Subject: [PATCH 1/6] storage-node: temp folder refactor --- storage-node/package.json | 3 ++ storage-node/src/commands/server.ts | 51 +++++++++++-------- .../src/commands/{dev => util}/cleanup.ts | 8 ++- .../{dev/sync.ts => util/fetch-bucket.ts} | 37 +++++++------- .../src/commands/{dev => util}/multihash.ts | 7 ++- .../commands/{dev => util}/verify-bag-id.ts | 5 +- .../src/services/caching/localDataObjects.ts | 24 +++------ storage-node/src/services/sync/tasks.ts | 2 +- 8 files changed, 67 insertions(+), 70 deletions(-) rename storage-node/src/commands/{dev => util}/cleanup.ts (90%) rename storage-node/src/commands/{dev/sync.ts => util/fetch-bucket.ts} (63%) rename storage-node/src/commands/{dev => util}/multihash.ts (80%) rename storage-node/src/commands/{dev => util}/verify-bag-id.ts (80%) diff --git a/storage-node/package.json b/storage-node/package.json index feda8925db..4fae3846c1 100644 --- a/storage-node/package.json +++ b/storage-node/package.json @@ -132,6 +132,9 @@ }, "operator": { "description": "Storage provider(operator) commands." + }, + "util": { + "description": "Useful utility commands." 
} } }, diff --git a/storage-node/src/commands/server.ts b/storage-node/src/commands/server.ts index 880566d7b6..3997ba4fce 100644 --- a/storage-node/src/commands/server.ts +++ b/storage-node/src/commands/server.ts @@ -1,14 +1,11 @@ import { flags } from '@oclif/command' import { ApiPromise } from '@polkadot/api' -import { KeyringPair } from '@polkadot/keyring/types' -import { PalletStorageStorageBucketRecord } from '@polkadot/types/lookup' -import fs from 'fs' +import sleep from 'sleep-promise' import _ from 'lodash' import path from 'path' -import rimraf from 'rimraf' -import sleep from 'sleep-promise' -import { promisify } from 'util' -import ApiCommandBase from '../command-base/ApiCommandBase' +import fs from 'fs' +import { PalletStorageStorageBucketRecord } from '@polkadot/types/lookup' +import { KeyringPair } from '@polkadot/keyring/types' import { customFlags } from '../command-base/CustomFlags' import { loadDataObjectIdCache } from '../services/caching/localDataObjects' import logger, { DatePatternByFrequency, Frequency, initNewLogger } from '../services/logger' @@ -23,6 +20,8 @@ import { getStorageBucketIdsByWorkerId } from '../services/sync/storageObligatio import { PendingDirName, performSync, TempDirName } from '../services/sync/synchronizer' import { createApp } from '../services/webApi/app' import ExitCodes from './../command-base/ExitCodes' +import ApiCommandBase from '../command-base/ApiCommandBase' + const fsPromises = fs.promises /** @@ -52,6 +51,10 @@ export default class Server extends ApiCommandBase { required: true, description: 'Data uploading directory (absolute path).', }), + tempFolder: flags.string({ + description: + 'Directory to store tempory files during sync and upload (absolute path).\n,Temporary directory (absolute path). If not specified a subfolder under the uploads directory will be used.', + }), port: flags.integer({ char: 'o', required: true, @@ -219,10 +222,21 @@ Supported values: warn, error, debug, info. 
Default:debug`, const enableUploadingAuth = false const operatorRoleKey = undefined - await recreateTempDirectory(flags.uploads, TempDirName) + const tempFolder = flags.tempFolder || path.join(flags.uploads, TempDirName) + + // Prevent tempFolder and uploadsFolder being at the same location. This is a simple check + // and doesn't deal with possibility that different path can point to the same location. eg. symlinks or + // a volume being mounted on multiple paths + if (tempFolder === flags.uploads) { + this.error('Please use unique paths for temp and uploads folder paths.') + } + + // TODO: Check that uploads and temp folders are writeable + + await createTempDirectory(tempFolder) if (fs.existsSync(flags.uploads)) { - await loadDataObjectIdCache(flags.uploads, TempDirName, PendingDirName) + await loadDataObjectIdCache(flags.uploads) } if (flags.dev) { @@ -256,7 +270,7 @@ Supported values: warn, error, debug, info. Default:debug`, selectedBuckets, qnApi, flags.uploads, - TempDirName, + tempFolder, flags.syncWorkersNumber, flags.syncWorkersTimeout, flags.syncInterval, @@ -292,7 +306,6 @@ Supported values: warn, error, debug, info. Default:debug`, try { const port = flags.port const maxFileSize = await api.consts.storage.maxDataObjectSize.toNumber() - const tempFileUploadingDir = path.join(flags.uploads, TempDirName) logger.debug(`Max file size runtime parameter: ${maxFileSize}`) const app = await createApp({ @@ -303,7 +316,7 @@ Supported values: warn, error, debug, info. Default:debug`, workerId, maxFileSize, uploadsDir: flags.uploads, - tempFileUploadingDir, + tempFileUploadingDir: tempFolder, pendingDataObjectsDir, acceptPendingObjectsService, process: this.config, @@ -418,23 +431,17 @@ async function runCleanupWithInterval( } /** - * Removes and recreates the temporary directory from the uploading directory. - * All files in the temp directory are deleted. + * Creates the temporary directory. + * If folder exists, all files with extension `.temp` are deleted. 
* * @param uploadsDirectory - data uploading directory * @param tempDirName - temporary directory name within the uploading directory * @returns void promise. */ -async function recreateTempDirectory(uploadsDirectory: string, tempDirName: string): Promise { +async function createTempDirectory(tempDirName: string): Promise { try { - const tempFileUploadingDir = path.join(uploadsDirectory, tempDirName) - - logger.info(`Removing temp directory ...`) - const rimrafAsync = promisify(rimraf) - await rimrafAsync(tempFileUploadingDir) - logger.info(`Creating temp directory ...`) - await fsPromises.mkdir(tempFileUploadingDir) + await fsPromises.mkdir(tempDirName) } catch (err) { logger.error(`Temp directory IO error: ${err}`) } diff --git a/storage-node/src/commands/dev/cleanup.ts b/storage-node/src/commands/util/cleanup.ts similarity index 90% rename from storage-node/src/commands/dev/cleanup.ts rename to storage-node/src/commands/util/cleanup.ts index 377685b276..2a46cd8b7f 100644 --- a/storage-node/src/commands/dev/cleanup.ts +++ b/storage-node/src/commands/util/cleanup.ts @@ -7,13 +7,11 @@ import { performCleanup } from '../../services/sync/cleanupService' /** * CLI command: * Prunes outdated data objects: removes all the local stored data objects that the operator is no longer obliged to store. - * storage. * * @remarks - * Should be run only during the development. - * Shell command: "dev:cleanup" + * Shell command: "util:cleanup" */ -export default class DevCleanup extends Command { +export default class Cleanup extends Command { static description = `Runs the data objects cleanup/pruning workflow. 
It removes all the local stored data objects that the operator is no longer obliged to store` static flags = { @@ -48,7 +46,7 @@ export default class DevCleanup extends Command { } async run(): Promise { - const { flags } = this.parse(DevCleanup) + const { flags } = this.parse(Cleanup) const bucketId = flags.bucketId.toString() const qnApi = new QueryNodeApi(flags.queryNodeEndpoint) logger.info('Cleanup...') diff --git a/storage-node/src/commands/dev/sync.ts b/storage-node/src/commands/util/fetch-bucket.ts similarity index 63% rename from storage-node/src/commands/dev/sync.ts rename to storage-node/src/commands/util/fetch-bucket.ts index 63f33d2fe4..31242bd9c7 100644 --- a/storage-node/src/commands/dev/sync.ts +++ b/storage-node/src/commands/util/fetch-bucket.ts @@ -1,21 +1,20 @@ import { Command, flags } from '@oclif/command' -import stringify from 'fast-safe-stringify' -import logger from '../../services/logger' -import { QueryNodeApi } from '../../services/queryNode/api' import { performSync } from '../../services/sync/synchronizer' +import { QueryNodeApi } from '../../services/queryNode/api' +import logger from '../../services/logger' +import stringify from 'fast-safe-stringify' +import path from 'path' /** * CLI command: - * Synchronizes data: fixes the difference between node obligations and local - * storage. + * Fetch all data objects from a bucket into local store. * * @remarks - * Should be run only during the development. - * Shell command: "dev:sync" + * Should not be executed while server is running. + * Shell command: "util:fetch-bucket" */ -export default class DevSync extends Command { - static description = - 'Synchronizes the data - it fixes the differences between local data folder and worker ID obligations from the runtime.' +export default class FetchBucket extends Command { + static description = 'Downloads all data objects of the specified bucket that match worker ID obligations.' 
 static flags = { help: flags.help({ char: 'h' }), @@ -27,10 +26,10 @@ export default class DevSync extends Command { bucketId: flags.integer({ char: 'b', required: true, - description: 'The buckerId to sync', + description: 'The bucketId to fetch', }), syncWorkersNumber: flags.integer({ - char: 'p', + char: 'n', required: false, description: 'Sync workers number (max async operations in progress).', default: 20, @@ -44,27 +43,30 @@ queryNodeEndpoint: flags.string({ char: 'q', required: false, - default: 'http://localhost:8081/graphql', - description: 'Query node endpoint (e.g.: http://some.com:8081/graphql)', + default: 'https://query.joystream.org/graphql', + description: 'Query node endpoint (e.g.: https://query.joystream.org/graphql)', }), dataSourceOperatorUrl: flags.string({ char: 'o', required: false, description: 'Storage node url base (e.g.: http://some.com:3333) to get data from.', - default: 'http://localhost:3333', }), uploads: flags.string({ char: 'd', required: true, description: 'Data uploading directory (absolute path).', }), + tempFolder: flags.string({ description: 'Directory to store temporary files during sync and upload (absolute path).\nIf not specified a subfolder under the uploads directory will be used.', }), } async run(): Promise { - const { flags } = this.parse(DevSync) + const { flags } = this.parse(FetchBucket) const bucketId = flags.bucketId.toString() const qnApi = new QueryNodeApi(flags.queryNodeEndpoint) - logger.info('Syncing...') + logger.info('Fetching bucket...') try { await performSync( @@ -75,6 +77,7 @@ flags.syncWorkersTimeout, qnApi, flags.uploads, + flags.tempFolder ? 
flags.tempFolder : path.join(flags.uploads, 'temp'), flags.dataSourceOperatorUrl ) } catch (err) { diff --git a/storage-node/src/commands/dev/multihash.ts b/storage-node/src/commands/util/multihash.ts similarity index 80% rename from storage-node/src/commands/dev/multihash.ts rename to storage-node/src/commands/util/multihash.ts index a76b8fc196..a1c4fa515e 100644 --- a/storage-node/src/commands/dev/multihash.ts +++ b/storage-node/src/commands/util/multihash.ts @@ -9,10 +9,9 @@ import { print } from '../../services/helpers/stdout' * format. * * @remarks - * Should be run only during the development. - * Shell command: "dev:multihash" + * Shell command: "util:multihash" */ -export default class DevMultihash extends Command { +export default class Multihash extends Command { static description = 'Creates a multihash (blake3) for a file.' static flags = { @@ -25,7 +24,7 @@ export default class DevMultihash extends Command { } async run(): Promise { - const { flags } = this.parse(DevMultihash) + const { flags } = this.parse(Multihash) logger.info(`Hashing ${flags.file} ....`) diff --git a/storage-node/src/commands/dev/verify-bag-id.ts b/storage-node/src/commands/util/verify-bag-id.ts similarity index 80% rename from storage-node/src/commands/dev/verify-bag-id.ts rename to storage-node/src/commands/util/verify-bag-id.ts index 71033e0a58..5bd2721a67 100644 --- a/storage-node/src/commands/dev/verify-bag-id.ts +++ b/storage-node/src/commands/util/verify-bag-id.ts @@ -6,10 +6,9 @@ import { customFlags } from '../../command-base/CustomFlags' * Verifies supported bag ID types in the string format. * * @remarks - * Should be run only during the development. * Shell command: "dev:verify-bag-id" */ -export default class DevVerifyBagId extends Command { +export default class VerifyBagId extends Command { static description = 'The command verifies bag id supported by the storage node. Requires chain connection.' 
static flags = { @@ -21,7 +20,7 @@ export default class DevVerifyBagId extends Command { } async run(): Promise { - const { flags } = this.parse(DevVerifyBagId) + const { flags } = this.parse(VerifyBagId) logger.info(`Parsed: ${flags.bagId}`) } diff --git a/storage-node/src/services/caching/localDataObjects.ts b/storage-node/src/services/caching/localDataObjects.ts index 407b8b063c..9ea5cf381d 100644 --- a/storage-node/src/services/caching/localDataObjects.ts +++ b/storage-node/src/services/caching/localDataObjects.ts @@ -1,6 +1,5 @@ import AwaitLock from 'await-lock' import fs from 'fs' -import path from 'path' import logger from '../logger' const fsPromises = fs.promises @@ -32,23 +31,11 @@ export async function getDataObjectIDs(): Promise { * @returns empty promise. * * @param uploadDir - uploading directory - * @param tempDirName - temp directory name */ -export async function loadDataObjectIdCache( - uploadDir: string, - tempDirName: string, - pendingDirName: string -): Promise { +export async function loadDataObjectIdCache(uploadDir: string): Promise { await lock.acquireAsync() - const localIds = await getLocalFileNames(uploadDir) - // Filter temporary & pending directory name. - const tempDirectoryName = path.parse(tempDirName).name - const pendingDirectoryName = path.parse(pendingDirName).name - const ids = localIds.filter( - (dataObjectId) => dataObjectId !== tempDirectoryName && dataObjectId !== pendingDirectoryName - ) - + const ids = await getLocalFileNames(uploadDir) ids.forEach((id) => idCache.set(id, 0)) logger.debug(`Local ID cache loaded.`) @@ -132,10 +119,11 @@ export async function getDataObjectIdFromCache( } /** - * Returns file names from the local directory. + * Returns file names from the local directory, ignoring subfolders. 
* * @param directory - local directory to get file names from */ -function getLocalFileNames(directory: string): Promise { - return fsPromises.readdir(directory) +async function getLocalFileNames(directory: string): Promise { + const result = await fsPromises.readdir(directory, { withFileTypes: true }) + return result.filter((entry) => entry.isFile()).map((entry) => entry.name) } diff --git a/storage-node/src/services/sync/tasks.ts b/storage-node/src/services/sync/tasks.ts index b09eb9dce0..e2ab8c5fbd 100644 --- a/storage-node/src/services/sync/tasks.ts +++ b/storage-node/src/services/sync/tasks.ts @@ -108,7 +108,7 @@ export class DownloadFileTask implements SyncTask { const filepath = path.join(this.uploadsDirectory, this.dataObjectId) // We create tempfile first to mitigate partial downloads on app (or remote node) crash. // This partial downloads will be cleaned up during the next sync iteration. - const tempFilePath = path.join(this.uploadsDirectory, this.tempDirectory, uuidv4()) + const tempFilePath = path.join(this.tempDirectory, uuidv4()) try { const timeoutMs = this.downloadTimeout * 60 * 1000 // Casting because of: From cb1412aa3029a7baf389f6ac0efcd42495cbc0db Mon Sep 17 00:00:00 2001 From: Mokhtar Naamani Date: Fri, 15 Dec 2023 23:22:02 +0200 Subject: [PATCH 2/6] improve initializing uploads and temp directories --- storage-node/src/commands/server.ts | 37 +++++++------------ .../src/services/caching/localDataObjects.ts | 13 ++++++- 2 files changed, 24 insertions(+), 26 deletions(-) diff --git a/storage-node/src/commands/server.ts b/storage-node/src/commands/server.ts index 3997ba4fce..4597f59179 100644 --- a/storage-node/src/commands/server.ts +++ b/storage-node/src/commands/server.ts @@ -160,11 +160,9 @@ Supported values: warn, error, debug, info. 
 Default:debug`, async run(): Promise { const { flags } = this.parse(Server) - const logSource = `StorageProvider_${flags.worker}` - if (!_.isEmpty(flags.elasticSearchEndpoint) || !_.isEmpty(flags.logFilePath)) { initNewLogger({ - elasticSearchlogSource: logSource, + elasticSearchlogSource: `StorageProvider_${flags.worker}`, elasticSearchEndpoint: flags.elasticSearchEndpoint, elasticSearchIndexPrefix: flags.elasticSearchIndexPrefix, elasticSearchUser: flags.elasticSearchUser, @@ -180,6 +178,10 @@ Supported values: warn, error, debug, info. Default:debug`, const api = await this.getApi() + if (flags.dev) { + await this.ensureDevelopmentChain() + } + const workerId = flags.worker if (!(await verifyWorkerId(api, workerId))) { @@ -231,17 +233,10 @@ Supported values: warn, error, debug, info. Default:debug`, this.error('Please use unique paths for temp and uploads folder paths.') } - // TODO: Check that uploads and temp folders are writeable - - await createTempDirectory(tempFolder) + await createDirectory(flags.uploads) + await loadDataObjectIdCache(flags.uploads) - if (fs.existsSync(flags.uploads)) { - await loadDataObjectIdCache(flags.uploads) - } - - if (flags.dev) { - await this.ensureDevelopmentChain() - } + await createDirectory(tempFolder) const pendingDataObjectsDir = path.join(flags.uploads, PendingDirName) @@ -431,20 +426,14 @@ async function runCleanupWithInterval( } /** - * Creates the temporary directory. - * If folder exists, all files with extension `.temp` are deleted. + * Creates a directory recursively. Like `mkdir -p` * - * @param uploadsDirectory - data uploading directory - * @param tempDirName - temporary directory name within the uploading directory + * @param dirName - full path to temporary directory * @returns void promise. 
*/ -async function createTempDirectory(tempDirName: string): Promise { - try { - logger.info(`Creating temp directory ...`) - await fsPromises.mkdir(tempDirName) - } catch (err) { - logger.error(`Temp directory IO error: ${err}`) - } +async function createDirectory(dirName: string): Promise { + logger.info(`Creating directory ${dirName}`) + await fsPromises.mkdir(dirName, { recursive: true }) } async function verifyWorkerId(api: ApiPromise, workerId: number): Promise { diff --git a/storage-node/src/services/caching/localDataObjects.ts b/storage-node/src/services/caching/localDataObjects.ts index 9ea5cf381d..f7b36896ec 100644 --- a/storage-node/src/services/caching/localDataObjects.ts +++ b/storage-node/src/services/caching/localDataObjects.ts @@ -35,8 +35,17 @@ export async function getDataObjectIDs(): Promise { export async function loadDataObjectIdCache(uploadDir: string): Promise { await lock.acquireAsync() - const ids = await getLocalFileNames(uploadDir) - ids.forEach((id) => idCache.set(id, 0)) + const names = await getLocalFileNames(uploadDir) + + names + .filter((name) => { + // Just incase the directory is polluted with other files, + // filter out filenames that do not match with an objectid (number) + const num = Number(name) + return Number.isInteger(num) + }) + .forEach((id) => idCache.set(id, 0)) + logger.debug(`Local ID cache loaded.`) lock.release() From 967ee7e462901b243bd748d39a066a95fddbd477 Mon Sep 17 00:00:00 2001 From: Mokhtar Naamani Date: Mon, 18 Dec 2023 23:01:52 +0200 Subject: [PATCH 3/6] storage-node: cleaner code --- storage-node/src/services/caching/localDataObjects.ts | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/storage-node/src/services/caching/localDataObjects.ts b/storage-node/src/services/caching/localDataObjects.ts index f7b36896ec..9e906208aa 100644 --- a/storage-node/src/services/caching/localDataObjects.ts +++ b/storage-node/src/services/caching/localDataObjects.ts @@ -38,12 +38,9 @@ export async 
function loadDataObjectIdCache(uploadDir: string): Promise { const names = await getLocalFileNames(uploadDir) names - .filter((name) => { - // Just incase the directory is polluted with other files, - // filter out filenames that do not match with an objectid (number) - const num = Number(name) - return Number.isInteger(num) - }) + // Just incase the directory is polluted with other files, + // filter out filenames that do not match with an objectid (number) + .filter((name) => Number.isInteger(Number(name))) .forEach((id) => idCache.set(id, 0)) logger.debug(`Local ID cache loaded.`) From e4903cedd7cd495ef3d587bbf097e5725d169201 Mon Sep 17 00:00:00 2001 From: Mokhtar Naamani Date: Thu, 21 Dec 2023 17:37:51 +0400 Subject: [PATCH 4/6] storage-node: ensure logs folder is not same as uploads folder --- storage-node/src/commands/server.ts | 31 +++++++++++++++++++---------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/storage-node/src/commands/server.ts b/storage-node/src/commands/server.ts index 4597f59179..dc051abcbc 100644 --- a/storage-node/src/commands/server.ts +++ b/storage-node/src/commands/server.ts @@ -160,6 +160,16 @@ Supported values: warn, error, debug, info. Default:debug`, async run(): Promise { const { flags } = this.parse(Server) + const api = await this.getApi() + + if (flags.dev) { + await this.ensureDevelopmentChain() + } + + if (flags.logFilePath && path.relative(flags.logFilePath, flags.uploads) === '') { + this.error('Paths for logs and uploads must be unique.') + } + if (!_.isEmpty(flags.elasticSearchEndpoint) || !_.isEmpty(flags.logFilePath)) { initNewLogger({ elasticSearchlogSource: `StorageProvider_${flags.worker}`, @@ -176,12 +186,6 @@ Supported values: warn, error, debug, info. 
Default:debug`, logger.info(`Query node endpoint set: ${flags.queryNodeEndpoint}`) - const api = await this.getApi() - - if (flags.dev) { - await this.ensureDevelopmentChain() - } - const workerId = flags.worker if (!(await verifyWorkerId(api, workerId))) { @@ -224,13 +228,18 @@ Supported values: warn, error, debug, info. Default:debug`, const enableUploadingAuth = false const operatorRoleKey = undefined + if (!flags.tempFolder) { + logger.warn( + 'It is recommended to specify a unique file path for temporary files.' + + 'For now a temp folder under the uploads folder will be used.' + + 'In future this will be warning will become and error!' + ) + } + const tempFolder = flags.tempFolder || path.join(flags.uploads, TempDirName) - // Prevent tempFolder and uploadsFolder being at the same location. This is a simple check - // and doesn't deal with possibility that different path can point to the same location. eg. symlinks or - // a volume being mounted on multiple paths - if (tempFolder === flags.uploads) { - this.error('Please use unique paths for temp and uploads folder paths.') + if (path.relative(tempFolder, flags.uploads) === '') { + this.error('Paths for temporary and uploads folders must be unique.') } await createDirectory(flags.uploads) From a287fdc229c42642bb89b838ae0fe795f8c21ffe Mon Sep 17 00:00:00 2001 From: Mokhtar Naamani Date: Sat, 23 Dec 2023 23:32:59 +0400 Subject: [PATCH 5/6] storage-node: update warning and argument description for tempFolder --- storage-node/src/commands/server.ts | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/storage-node/src/commands/server.ts b/storage-node/src/commands/server.ts index a41dd11865..f64c67bc1d 100644 --- a/storage-node/src/commands/server.ts +++ b/storage-node/src/commands/server.ts @@ -53,7 +53,7 @@ export default class Server extends ApiCommandBase { }), tempFolder: flags.string({ description: - 'Directory to store tempory files during sync and upload (absolute path).\n,Temporary 
 directory (absolute path). If not specified a subfolder under the uploads directory will be used.', + 'Directory to store temporary files during sync and upload (absolute path).\nIf not specified a subfolder under the uploads directory will be used.', }), port: flags.integer({ char: 'o', @@ -230,9 +230,10 @@ Supported values: warn, error, debug, info. Default:debug`, if (!flags.tempFolder) { logger.warn( - 'It is recommended to specify a unique file path for temporary files.' + - 'For now a temp folder under the uploads folder will be used.' + - 'In future this will be warning will become and error!' + 'You did not specify a path to the temporary directory. ' + + 'A temp folder under the uploads folder will be used. ' + + 'In a future release passing an absolute path to a temporary directory with the ' + + '"tempFolder" argument will be required.' ) } From 3dd8e0aa733430733d5a23d9b23cee3ade388fb2 Mon Sep 17 00:00:00 2001 From: Mokhtar Naamani Date: Sat, 23 Dec 2023 23:33:35 +0400 Subject: [PATCH 6/6] storage-node: use tempFolder arg in docker-compose --- docker-compose-no-bind-volumes.yml | 10 ++++++---- docker-compose.yml | 10 ++++++---- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/docker-compose-no-bind-volumes.yml b/docker-compose-no-bind-volumes.yml index 87f6d0d7d8..f03d006785 100644 --- a/docker-compose-no-bind-volumes.yml +++ b/docker-compose-no-bind-volumes.yml @@ -36,11 +36,12 @@ services: # - OTEL_RESOURCE_ATTRIBUTES=service.name=colossus-1,deployment.environment=production entrypoint: ['yarn'] command: [ - 'start', '--worker=${COLOSSUS_1_WORKER_ID}', '--port=3333', '--uploads=/data', + 'start', '--worker=${COLOSSUS_1_WORKER_ID}', '--port=3333', '--uploads=/data/uploads', '--sync', '--syncInterval=1', '--queryNodeEndpoint=${COLOSSUS_QUERY_NODE_URL}', '--apiUrl=${JOYSTREAM_NODE_WS}', '--logFilePath=/logs', '--tempFolder=/data/temp/' ] distributor-1: @@ -103,11 +104,12 @@ services: - 
ACCOUNT_URI=${COLOSSUS_2_TRANSACTOR_URI} entrypoint: ['yarn', 'storage-node'] command: [ - 'server', '--worker=${COLOSSUS_2_WORKER_ID}', '--port=3333', '--uploads=/data', + 'server', '--worker=${COLOSSUS_2_WORKER_ID}', '--port=3333', '--uploads=/data/uploads', '--sync', '--syncInterval=1', '--queryNodeEndpoint=${COLOSSUS_QUERY_NODE_URL}', '--apiUrl=${JOYSTREAM_NODE_WS}', - '--logFilePath=/logs' + '--logFilePath=/logs', + '--tempFolder=/data/temp/' ] distributor-2: diff --git a/docker-compose.yml b/docker-compose.yml index dd3f6e9348..b5cd89d338 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -36,11 +36,12 @@ services: - OTEL_RESOURCE_ATTRIBUTES=service.name=colossus-1,deployment.environment=production entrypoint: ['/joystream/entrypoints/storage.sh'] command: [ - 'server', '--worker=${COLOSSUS_1_WORKER_ID}', '--port=3333', '--uploads=/data', + 'server', '--worker=${COLOSSUS_1_WORKER_ID}', '--port=3333', '--uploads=/data/uploads/', '--sync', '--syncInterval=1', '--queryNodeEndpoint=${COLOSSUS_QUERY_NODE_URL}', '--apiUrl=${JOYSTREAM_NODE_WS}', - '--logFilePath=/logs' + '--logFilePath=/logs', + '--tempFolder=/data/temp/' ] distributor-1: @@ -106,11 +107,12 @@ services: - ACCOUNT_URI=${COLOSSUS_2_TRANSACTOR_URI} entrypoint: ['yarn', 'storage-node'] command: [ - 'server', '--worker=${COLOSSUS_2_WORKER_ID}', '--port=3333', '--uploads=/data', + 'server', '--worker=${COLOSSUS_2_WORKER_ID}', '--port=3333', '--uploads=/data/uploads', '--sync', '--syncInterval=1', '--queryNodeEndpoint=${COLOSSUS_QUERY_NODE_URL}', '--apiUrl=${JOYSTREAM_NODE_WS}', - '--logFilePath=/logs' + '--logFilePath=/logs', + '--tempFolder=/data/temp/' ] distributor-2: