diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 0fc45c252..c2c68bcc2 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -105,7 +105,7 @@ jobs: ACCESS_GROUPS_STATIC_VALUES: "ess" CREATE_DATASET_WITH_PID_GROUPS: "group2,group3" DATASET_CREATION_VALIDATION_ENABLED: true - DATASET_CREATION_VALIDATION_REGEX: "^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$" + DATASET_CREATION_VALIDATION_REGEX: "^[0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-4[0-9A-Fa-f]{3}-[89ABab][0-9A-Fa-f]{3}-[0-9A-Fa-f]{12}$" # Start mongo container and app before running api tests run: | diff --git a/src/common/dto/ownable.dto.ts b/src/common/dto/ownable.dto.ts index 6ba440fb7..db3a5eafc 100644 --- a/src/common/dto/ownable.dto.ts +++ b/src/common/dto/ownable.dto.ts @@ -1,15 +1,31 @@ +import { ApiProperty } from "@nestjs/swagger"; import { IsOptional, IsString } from "class-validator"; export class OwnableDto { + @ApiProperty({ + type: String, + required: true, + description: "Name of the group owning this item.", + }) @IsString() readonly ownerGroup: string; + @ApiProperty({ + type: [String], + required: false, + description: "List of groups which have access to this item.", + }) @IsOptional() @IsString({ each: true, }) readonly accessGroups?: string[]; + @ApiProperty({ + type: String, + required: false, + description: "Group of the instrument which this item was acquired on.", + }) @IsOptional() @IsString() readonly instrumentGroup?: string; diff --git a/src/datasets/datasets.controller.ts b/src/datasets/datasets.controller.ts index 2a1e82265..99d6812a0 100644 --- a/src/datasets/datasets.controller.ts +++ b/src/datasets/datasets.controller.ts @@ -756,7 +756,7 @@ export class DatasetsController { } case "origdatablocks": { dataset.origdatablocks = await this.origDatablocksService.findAll( - { datasetId: dataset.pid }, + { where: { datasetId: dataset.pid } }, ); break; } @@ -1388,7 +1388,7 @@ export class DatasetsController { ): Promise<OrigDatablock[]> { await this.checkPermissionsForDataset(request, id); - return this.origDatablocksService.findAll({ datasetId: id }); + return this.origDatablocksService.findAll({ where: { datasetId: id } }); } // PATCH /datasets/:id/origdatablocks/:fk @@ -1500,7 +1500,7 @@ export class DatasetsController { }); // all the remaining orig datablocks for this dataset const odb = await this.origDatablocksService.findAll({ - datasetId: datasetId, + where: { datasetId: datasetId }, }); // update dataset size and files number const updateDatasetDto: PartialUpdateDatasetDto = { diff --git a/src/datasets/datasets.module.ts b/src/datasets/datasets.module.ts index b66e04a4a..0272f17d5 100644 --- a/src/datasets/datasets.module.ts +++ b/src/datasets/datasets.module.ts @@ -48,6 +48,7 @@ import { PoliciesModule } from "src/policies/policies.module"; this.accessGroups, this.ownerEmail ??
"", av, + this.createdBy, ); } this.classification = `IN=medium,AV=${av},CO=low`; diff --git a/src/origdatablocks/dto/create-origdatablock.dto.ts b/src/origdatablocks/dto/create-origdatablock.dto.ts index fb513d5e6..ae5fe56ee 100644 --- a/src/origdatablocks/dto/create-origdatablock.dto.ts +++ b/src/origdatablocks/dto/create-origdatablock.dto.ts @@ -35,7 +35,7 @@ export class CreateOrigDatablockDto extends OwnableDto { type: String, required: false, description: - "Name of the hasing algorithm used to compute the hash for each file.", + "Name of the hashing algorithm used to compute the hash for each file.", }) @IsOptional() @IsString() @@ -46,12 +46,22 @@ export class CreateOrigDatablockDto extends OwnableDto { type: "array", items: { $ref: getSchemaPath(DataFile) }, required: true, - description: - "Name of the hasing algorithm used to compute the hash for each file.", + description: "List of the files contained in this orig datablock.", }) @IsArray() @ArrayNotEmpty() @ValidateNested({ each: true }) @Type(() => DataFileDto) readonly dataFileList: DataFile[]; + + @ApiProperty({ + type: String, + required: false, + description: + "Name of the group owning this item. If it is not specified, the datasets owner group is used.", + }) + @IsOptional() + @IsString() + @IsNotEmpty() + readonly ownerGroup: string; } diff --git a/src/origdatablocks/origdatablocks.controller.ts b/src/origdatablocks/origdatablocks.controller.ts index 72f84c7de..3f8dd1f9d 100644 --- a/src/origdatablocks/origdatablocks.controller.ts +++ b/src/origdatablocks/origdatablocks.controller.ts @@ -20,6 +20,7 @@ import { ApiBearerAuth, ApiBody, ApiOperation, + ApiParam, ApiQuery, ApiResponse, ApiTags, @@ -38,6 +39,8 @@ import { AllowAny } from "src/auth/decorators/allow-any.decorator"; import { plainToInstance } from "class-transformer"; import { validate, ValidationError } from "class-validator"; import { DatasetsService } from "src/datasets/datasets.service"; +import { PartialUpdateDatasetDto } from "src/datasets/dto/update-dataset.dto"; +import { filterDescription, filterExample } from "src/common/utils"; @ApiBearerAuth() @ApiTags("origdatablocks") @@ -53,7 +56,7 @@ export class OrigDatablocksController { @CheckPolicies((ability: AppAbility) => ability.can(Action.Create, OrigDatablock), ) - @HttpCode(HttpStatus.OK) + @HttpCode(HttpStatus.CREATED) @Post() @ApiOperation({ summary: "It creates a new orig datablock for the specified dataset.", @@ -68,7 +71,8 @@ export class OrigDatablocksController { @ApiResponse({ status: 201, type: OrigDatablock, - description: "Create a new dataset and return its representation in SciCat", + description: + "Create a new origdataset and return its representation in SciCat", }) async create( @Body() createOrigDatablockDto: CreateOrigDatablockDto, @@ -79,7 +83,47 @@ export class OrigDatablocksController { if (!dataset) { throw new BadRequestException("Invalid datasetId"); } - return this.origDatablocksService.create(createOrigDatablockDto); + + createOrigDatablockDto = { + ...createOrigDatablockDto, + ownerGroup: createOrigDatablockDto.ownerGroup + ? createOrigDatablockDto.ownerGroup + : dataset.ownerGroup, + accessGroups: createOrigDatablockDto.accessGroups + ? createOrigDatablockDto.accessGroups + : JSON.parse(JSON.stringify(dataset.accessGroups)), + instrumentGroup: createOrigDatablockDto.instrumentGroup + ? 
createOrigDatablockDto.instrumentGroup + : dataset.instrumentGroup, + }; + + const origdatablock = await this.origDatablocksService.create( + createOrigDatablockDto, + ); + + await this.updateDatasetSizeAndFiles(dataset.pid); + + return origdatablock; + } + + async updateDatasetSizeAndFiles(pid: string) { + // updates the dataset size and number of files + const parsedFilters: IFilters<OrigDatablockDocument, IOrigDatablockFields> = + { where: { datasetId: pid } }; + const datasetOrigdatablocks = await this.origDatablocksService.findAll( + parsedFilters, + ); + + const updateDatasetDto: PartialUpdateDatasetDto = { + size: datasetOrigdatablocks + .map((od) => od.size) + .reduce((ps, a) => ps + a, 0), + numberOfFiles: datasetOrigdatablocks + .map((od) => od.dataFileList.length) + .reduce((ps, a) => ps + a, 0), + }; + + await this.datasetsService.findByIdAndUpdate(pid, updateDatasetDto); } @AllowAny() @@ -129,9 +173,13 @@ "It returns a list of orig datablocks. The list returned can be modified by providing a filter.", }) @ApiQuery({ - name: "filters", - description: "Database filters to apply when retrieving all origdatablocks", + name: "filter", + description: + "Database filters to apply when retrieving all origdatablocks\n" + + filterDescription, required: false, + type: String, + example: filterExample, }) @ApiResponse({ status: 200, @@ -139,9 +187,9 @@ isArray: true, description: "Return the orig datablocks requested", }) - async findAll(@Query("filters") filters?: string): Promise<OrigDatablock[]> { + async findAll(@Query("filter") filter?: string): Promise<OrigDatablock[]> { const parsedFilters: IFilters<OrigDatablockDocument, IOrigDatablockFields> = - JSON.parse(filters ?? "{}"); + JSON.parse(filter ?? "{}"); return this.origDatablocksService.findAll(parsedFilters); } @@ -252,6 +300,20 @@ ability.can(Action.Read, OrigDatablock), ) @Get("/:id") + @ApiOperation({ + summary: "It retrieves the origdatablock.", + description: "It retrieves the original datablock with the specified id.", + }) + @ApiParam({ + name: "id", + description: "Id of the origdatablock to be retrieved", + type: String, + }) + @ApiResponse({ + status: 200, + description: "The origdatablock requested", + type: OrigDatablock, + }) async findById(@Param("id") id: string): Promise<OrigDatablock | null> { return this.origDatablocksService.findOne({ _id: id }); } @@ -262,23 +324,67 @@ ability.can(Action.Update, OrigDatablock), ) @Patch("/:id") + @ApiOperation({ + summary: "It updates the origdatablock.", + description: "It updates the original datablock with the specified id.", + }) + @ApiParam({ + name: "id", + description: "Id of the origdatablock to be updated", + type: String, + }) + @ApiBody({ + description: + "OrigDatablock object that needs to be updated.
Only the origdatablock fields that need to be updated should be passed in.", + required: true, + type: UpdateOrigDatablockDto, + }) + @ApiResponse({ + status: 200, + description: "The updated origdatablock", + type: OrigDatablock, + }) async update( @Param("id") id: string, @Body() updateOrigDatablockDto: UpdateOrigDatablockDto, ): Promise<OrigDatablock> { - return this.origDatablocksService.update( + const origdatablock = (await this.origDatablocksService.update( { _id: id }, updateOrigDatablockDto, - ); + )) as OrigDatablock; + + await this.updateDatasetSizeAndFiles(origdatablock.datasetId); + + return origdatablock; } // DELETE /origdatablocks/:id - @UseGuards() + @UseGuards(PoliciesGuard) @CheckPolicies((ability: AppAbility) => ability.can(Action.Delete, OrigDatablock), ) @Delete("/:id") + @ApiOperation({ + summary: "It deletes the origdatablock.", + description: + "It deletes the original datablock with the specified id.", + }) + @ApiParam({ + name: "id", + description: "Id of the origdatablock to be deleted", + type: String, + }) + @ApiResponse({ + status: 200, + description: "No value is returned", + }) async remove(@Param("id") id: string): Promise<unknown> { - return this.origDatablocksService.remove({ _id: id }); + const origdatablock = (await this.origDatablocksService.remove({ + _id: id, + })) as OrigDatablock; + + await this.updateDatasetSizeAndFiles(origdatablock.datasetId); + + return origdatablock; } } diff --git a/src/origdatablocks/origdatablocks.service.ts b/src/origdatablocks/origdatablocks.service.ts index 734c6c5e5..967a4ede0 100644 --- a/src/origdatablocks/origdatablocks.service.ts +++ b/src/origdatablocks/origdatablocks.service.ts @@ -42,7 +42,20 @@ export class OrigDatablocksService { async findAll( filter: FilterQuery<OrigDatablockDocument>, ): Promise<OrigDatablock[]> { - return this.origDatablockModel.find(filter).exec(); + const whereFilter: FilterQuery<OrigDatablockDocument> = filter.where ?? {}; + const fieldsProjection: FilterQuery<OrigDatablockDocument> = + filter.fields ?? {}; + const { limit, skip, sort } = parseLimitFilters(filter.limits); + + const origdatablockPromise = this.origDatablockModel + .find(whereFilter, fieldsProjection) + .limit(limit) + .skip(skip) + .sort(sort); + + const origdatablock = await origdatablockPromise.exec(); + + return origdatablock; } async findOne( diff --git a/src/policies/policies.service.ts b/src/policies/policies.service.ts index ca56c0e1d..d3e43b7b2 100644 --- a/src/policies/policies.service.ts +++ b/src/policies/policies.service.ts @@ -52,22 +52,6 @@ export class PoliciesService implements OnModuleInit { "===================================================", "PoliciesService", ); - Logger.warn( - "===================================================", - "PoliciesService", - ); - Logger.warn( - "===================================================", - "PoliciesService", - ); - Logger.warn( - "===================================================", - "PoliciesService", - ); - Logger.warn( - "===================================================\n", - "PoliciesService", - ); Logger.warn( " Warning: your DB contains old ID format ", "PoliciesService", ); @@ -84,22 +68,6 @@ " on your mongo DB !
\n", "PoliciesService", ); - Logger.warn( - "===================================================", - "PoliciesService", - ); - Logger.warn( - "===================================================", - "PoliciesService", - ); - Logger.warn( - "===================================================", - "PoliciesService", - ); - Logger.warn( - "===================================================", - "PoliciesService", - ); Logger.warn( "===================================================\n", "PoliciesService", @@ -112,8 +80,13 @@ export class PoliciesService implements OnModuleInit { } } - async create(createPolicyDto: CreatePolicyDto): Promise { - const username = (this.request.user as JWTUser)?.username; + async create( + createPolicyDto: CreatePolicyDto, + policyUsername: string | null = null, + ): Promise { + const username = policyUsername + ? policyUsername + : (this.request.user as JWTUser)?.username; if (!username) { throw new UnauthorizedException("User not present in the request"); } @@ -246,6 +219,7 @@ export class PoliciesService implements OnModuleInit { accessGroups: string[], ownerEmail: string, tapeRedundancy: string, + policyUsername: string | null = null, ) { const policy = await this.policyModel.findOne({ ownerGroup }).exec(); @@ -275,7 +249,7 @@ export class PoliciesService implements OnModuleInit { }; try { - await this.create(defaultPolicy); + await this.create(defaultPolicy, policyUsername); } catch (error) { throw new InternalServerErrorException( error, diff --git a/src/users/users.service.ts b/src/users/users.service.ts index ad06a3082..4be7dd30a 100644 --- a/src/users/users.service.ts +++ b/src/users/users.service.ts @@ -2,7 +2,7 @@ import { Injectable, Logger, OnModuleInit } from "@nestjs/common"; import { ConfigService } from "@nestjs/config"; import { InjectModel } from "@nestjs/mongoose"; import { genSalt, hash } from "bcrypt"; -import { FilterQuery, Model } from "mongoose"; +import { FilterQuery, Model, ObjectId } from "mongoose"; import { CreateUserIdentityDto } from "./dto/create-user-identity.dto"; import { CreateUserDto } from "./dto/create-user.dto"; import { RolesService } from "./roles.service"; @@ -52,10 +52,11 @@ export class UsersService implements OnModuleInit { } if (functionalAccounts && functionalAccounts.length > 0) { - functionalAccounts.forEach(async (account) => { + const accountPromises = functionalAccounts.map(async (account) => { const { role, global, ...createAccount } = account; createAccount.authStrategy = "local"; const user = await this.findOrCreate(createAccount); + const roles: Record> = {}; if (user) { const userPayload: UserPayload = { @@ -70,35 +71,17 @@ export class UsersService implements OnModuleInit { if (role) { // add role as access group accessGroups.push(role); - const createRole: CreateRoleDto = { - name: role, - }; - const createdRole = await this.rolesService.findOrCreate( - createRole, - ); - if (createdRole) { - const createUserRole: CreateUserRoleDto = { - userId: user._id, - roleId: createdRole._id, - }; - await this.rolesService.findOrCreateUserRole(createUserRole); + if (!(role in roles)) { + roles[role] = []; } + roles[role].push(user._id.toString()); } if (global) { accessGroups.push("globalaccess"); - const createRole: CreateRoleDto = { - name: "globalaccess", - }; - const createdRole = await this.rolesService.findOrCreate( - createRole, - ); - if (createdRole) { - const createUserRole: CreateUserRoleDto = { - userId: user._id, - roleId: createdRole._id, - }; - await 
this.rolesService.findOrCreateUserRole(createUserRole); + if (!("globalaccess" in roles)) { + roles["globalaccess"] = []; } + roles["globalaccess"].push(user._id.toString()); } // creates user identity to store access groups @@ -126,7 +109,37 @@ export class UsersService implements OnModuleInit { await this.createUserIdentity(createUserIdentity); } } + return roles; }); + + const results = await Promise.all(accountPromises); + const roles = results.reduce((a, b) => { + Object.keys(b).forEach((k) => { + if (k in a) { + a[k] = a[k].concat(b[k]); + } else { + a[k] = b[k]; + } + }); + return a; + }, {}); + if (roles) { + for (const [role, userIds] of Object.entries(roles)) { + const createRole: CreateRoleDto = { + name: role, + }; + const createdRole = await this.rolesService.findOrCreate(createRole); + if (createdRole && userIds) { + userIds.forEach(async (userId) => { + const createUserRole: CreateUserRoleDto = { + userId: userId, + roleId: createdRole._id, + }; + await this.rolesService.findOrCreateUserRole(createUserRole); + }); + } + } + } } } diff --git a/test/Auth.js b/test/Auth.js index 2a334c444..768821199 100644 --- a/test/Auth.js +++ b/test/Auth.js @@ -2,8 +2,8 @@ var accessToken = null; -describe("Authorization functionalities", () => { - it("Ingestor login fails with incorrect credentials", async () => { +describe("0100: Authorization functionalities", () => { + it("0010: Ingestor login fails with incorrect credentials", async () => { return request(appUrl) .post("/api/v3/auth/login") .send({ @@ -16,14 +16,14 @@ describe("Authorization functionalities", () => { }); }); - it("Logout with unauthenticated user should fail", async () => { + it("0020: Logout with unauthenticated user should fail", async () => { return request(appUrl) .post("/api/v3/auth/logout") .set("Accept", "application/json") .expect(401); }); - it("Login should succeed with correct credentials", async () => { + it("0030: Login should succeed with correct credentials", async () => { return request(appUrl) .post("/api/v3/auth/login") .send({ @@ -39,7 +39,7 @@ describe("Authorization functionalities", () => { }); }); - it("Logout with authenticated user should succeed", async () => { + it("0040: Logout with authenticated user should succeed", async () => { return request(appUrl) .post("/api/v3/auth/logout") .set("Accept", "application/json") diff --git a/test/CheckDifferentDatasetTypes.js b/test/CheckDifferentDatasetTypes.js index f2f8d32d1..8dc453cdd 100644 --- a/test/CheckDifferentDatasetTypes.js +++ b/test/CheckDifferentDatasetTypes.js @@ -2,7 +2,7 @@ var utils = require("./LoginUtils"); const { TestData } = require("./TestData"); -describe("CheckDifferentDatasetTypes: Check different dataset types and their inheritance", () => { +describe("0200: CheckDifferentDatasetTypes: Check different dataset types and their inheritance", () => { let countDataset = 0; let countRawDataset = 0; let countDerivedDataset = 0; diff --git a/test/DerivedDataset.js b/test/DerivedDataset.js index f153784f7..11a837c00 100644 --- a/test/DerivedDataset.js +++ b/test/DerivedDataset.js @@ -1,6 +1,7 @@ /* eslint-disable @typescript-eslint/no-var-requires */ "use strict"; +const { Test } = require("mocha"); var utils = require("./LoginUtils"); const { TestData } = require("./TestData"); @@ -12,7 +13,7 @@ var pid = null; var explicitPid = "B69A6239-5FD1-4244-8363-58C2DA1B6915"; var minPid = null; -describe("DerivedDataset: Derived Datasets", () => { +describe("0700: DerivedDataset: Derived Datasets", () => { beforeEach((done) => { 
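+ // refresh the access tokens used by the tests below before each test case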
utils.getToken( appUrl, @@ -58,7 +59,7 @@ describe("DerivedDataset: Derived Datasets", () => { }); // check if dataset is valid - it("check if valid derived dataset is valid", async () => { + it("0100: check if valid derived dataset is valid", async () => { return request(appUrl) .post("/api/v3/Datasets/isValid") .send(TestData.DerivedCorrect) @@ -70,7 +71,7 @@ describe("DerivedDataset: Derived Datasets", () => { }); }); - it("adds a new minimal derived dataset", async () => { + it("0110: adds a new minimal derived dataset", async () => { return request(appUrl) .post("/api/v3/Datasets") .send(TestData.DerivedCorrectMin) @@ -86,7 +87,7 @@ describe("DerivedDataset: Derived Datasets", () => { }); }); - it("adds a new derived dataset", async () => { + it("0120: adds a new derived dataset", async () => { return request(appUrl) .post("/api/v3/Datasets") .send(TestData.DerivedCorrect) @@ -95,31 +96,38 @@ describe("DerivedDataset: Derived Datasets", () => { .expect(200) .expect("Content-Type", /json/) .then((res) => { - res.body.should.have.property("owner").and.be.string; - res.body.should.have.property("type").and.equal("derived"); + res.body.should.have.property("owner") + .and.be.equal(TestData.DerivedCorrect.owner); + res.body.should.have.property("type").and.be.equal("derived"); res.body.should.have.property("pid").and.be.string; pid = res.body["pid"]; }); }); - it("should not be able to add new derived dataset with incorrect explicit pid", async () => { + it("0130: should be able to add new derived dataset with explicit pid", async () => { const derivedDatasetWithExplicitPID = { ...TestData.DerivedCorrect, - pid: "test-pid-1", + pid: "9b1bb7eb-5aae-4d86-8aff-9b034b60e1d8", }; return request(appUrl) .post("/api/v3/Datasets") .send(derivedDatasetWithExplicitPID) .set("Accept", "application/json") .set({ Authorization: `Bearer ${accessToken}` }) - .expect(400) - .expect("Content-Type", /json/); + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have.property("owner").and.be.equal(derivedDatasetWithExplicitPID.owner); + res.body.should.have.property("type").and.be.equal("derived"); + res.body.should.have.property("pid").and.be.equal(derivedDatasetWithExplicitPID.pid); + pid = res.body["pid"]; + }); }); - it("should not be able to add new derived dataset with group that is not part of allowed groups", async () => { + it("0135: should not be able to add new derived dataset with user that is not in create dataset list", async () => { const derivedDatasetWithExplicitPID = { ...TestData.DerivedCorrect, - pid: "test-pid-1", + pid: "fcab185e-4600-49ae-bcd5-41b2f9934d82", }; return request(appUrl) .post("/api/v3/Datasets") @@ -130,7 +138,37 @@ describe("DerivedDataset: Derived Datasets", () => { .expect("Content-Type", /json/); }); - it("should be able to add new derived dataset with group that is part of allowed groups and correct explicit PID", async () => { + it("0140: should not be able to add new derived dataset with group that is not part of allowed groups", async () => { + const derivedDatasetWithExplicitPID = { + ...TestData.DerivedCorrect, + pid: "fcab185e-4600-49ae-bcd5-41b2f9934d82", + ownerGroup: "group1", + }; + return request(appUrl) + .post("/api/v3/Datasets") + .send(derivedDatasetWithExplicitPID) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenUser2}` }) + .expect(403) + .expect("Content-Type", /json/); + }); + + it("0145: should not be able to add new derived dataset with correct group but explicit PID that does not 
pass validation", async () => { + const derivedDatasetWithExplicitPID = { + ...TestData.DerivedCorrect, + ownerGroup: "group2", + pid: "strange-pid", + }; + return request(appUrl) + .post("/api/v3/Datasets") + .send(derivedDatasetWithExplicitPID) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenUser2}` }) + .expect(400) + .expect("Content-Type", /json/); + }); + + it("0150: should be able to add new derived dataset with group that is part of allowed groups and correct explicit PID", async () => { const derivedDatasetWithExplicitPID = { ...TestData.DerivedCorrect, ownerGroup: "group2", @@ -154,7 +192,7 @@ describe("DerivedDataset: Derived Datasets", () => { }); // check if dataset is valid - it("check if invalid derived dataset is valid", async () => { + it("0160: check if invalid derived dataset is valid", async () => { return request(appUrl) .post("/api/v3/Datasets/isValid") .send(TestData.DerivedWrong) @@ -166,7 +204,7 @@ describe("DerivedDataset: Derived Datasets", () => { }); }); - it("tries to add an incomplete derived dataset", async () => { + it("0170: tries to add an incomplete derived dataset", async () => { return request(appUrl) .post("/api/v3/Datasets") .send(TestData.DerivedWrong) @@ -178,7 +216,7 @@ describe("DerivedDataset: Derived Datasets", () => { }); }); - it("should fetch several derived datasets", async () => { + it("0180: should fetch several derived datasets", async () => { const filter = { where: { type: "derived", @@ -200,7 +238,7 @@ describe("DerivedDataset: Derived Datasets", () => { }); }); - it("should fetch this derived dataset", async () => { + it("0190: should fetch this derived dataset", async () => { const filter = { where: { pid: pid, @@ -221,7 +259,7 @@ describe("DerivedDataset: Derived Datasets", () => { }); }); - it("should fetch all derived datasets", async () => { + it("0200: should fetch all derived datasets", async () => { const filter = { where: { type: "derived", @@ -241,7 +279,7 @@ describe("DerivedDataset: Derived Datasets", () => { }); }); - it("should contain an array of facets", async () => { + it("0210: should contain an array of facets", async () => { const filter = { where: { type: "derived", @@ -261,7 +299,7 @@ describe("DerivedDataset: Derived Datasets", () => { }); }); - it("should delete a derived dataset", async () => { + it("0220: should delete a derived dataset", async () => { return request(appUrl) .delete("/api/v3/Datasets/" + encodeURIComponent(pid)) .set("Accept", "application/json") @@ -270,7 +308,7 @@ describe("DerivedDataset: Derived Datasets", () => { .expect("Content-Type", /json/); }); - it("should delete a minimal derived dataset", async () => { + it("0230: should delete a minimal derived dataset", async () => { return request(appUrl) .delete("/api/v3/Datasets/" + minPid) .set("Accept", "application/json") @@ -278,7 +316,8 @@ describe("DerivedDataset: Derived Datasets", () => { .expect(200) .expect("Content-Type", /json/); }); - it("should delete a derived dataset with explicit PID", async () => { + + it("0240: should delete a derived dataset with explicit PID", async () => { return request(appUrl) .delete("/api/v3/Datasets/" + explicitPid) .set("Accept", "application/json") diff --git a/test/DerivedDatasetOrigDatablock.js b/test/DerivedDatasetOrigDatablock.js index 1518d4f15..55ae30ae2 100644 --- a/test/DerivedDatasetOrigDatablock.js +++ b/test/DerivedDatasetOrigDatablock.js @@ -2,7 +2,7 @@ var utils = require("./LoginUtils"); const { TestData } = require("./TestData"); 
-describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to derived Datasets", () => { +describe("0800: DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to derived Datasets", () => { let accessTokenIngestor = null; let accessTokenArchiveManager = null; @@ -35,7 +35,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to ); }); - it("adds a new derived dataset", async () => { + it("0010: adds a new derived dataset", async () => { return request(appUrl) .post("/api/v3/Datasets") .send(TestData.DerivedCorrect) @@ -52,7 +52,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("validate correct origDatablock data used later", async () => { + it("0020: validate correct origDatablock data used later", async () => { return request(appUrl) .post(`/api/v3/datasets/${datasetPid}/origdatablocks/isValid`) .send(TestData.OrigDataBlockCorrect1) @@ -69,7 +69,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("validate wrong origDatablock and expect false", async () => { + it("0030: validate wrong origDatablock and expect false", async () => { return request(appUrl) .post(`/api/v3/datasets/${datasetPid}/origdatablocks/isValid`) .send(TestData.OrigDataBlockWrong) @@ -86,7 +86,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("adds a new origDatablock with wrong account which should fail", async () => { + it("0040: adds a new origDatablock with wrong account which should fail", async () => { return request(appUrl) .post(`/api/v3/datasets/${datasetPid}/OrigDatablocks`) .send(TestData.OrigDataBlockCorrect1) @@ -96,7 +96,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to .expect("Content-Type", /json/); }); - it("adds a new origDatablock with correct account", async () => { + it("0050: adds a new origDatablock with correct account", async () => { return request(appUrl) .post(`/api/v3/datasets/${datasetPid}/OrigDatablocks`) .send(TestData.OrigDataBlockCorrect1) @@ -113,7 +113,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("adds a second origDatablock", async () => { + it("0060: adds a second origDatablock", async () => { return request(appUrl) .post(`/api/v3/datasets/${datasetPid}/OrigDatablocks`) .send(TestData.OrigDataBlockCorrect2) @@ -130,7 +130,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("Should fetch all origdatablocks belonging to the new dataset", async () => { + it("0070: Should fetch all origdatablocks belonging to the new dataset", async () => { return request(appUrl) .get(`/api/v3/Datasets/${datasetPid}/OrigDatablocks`) .set("Accept", "application/json") @@ -144,7 +144,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("The new dataset should be the sum of the size of the origDatablocks", async () => { + it("0080: The new dataset should be the sum of the size of the origDatablocks", async () => { return request(appUrl) .get(`/api/v3/Datasets/${datasetPid}`) .set("Accept", "application/json") @@ -159,7 +159,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("should fetch one dataset including related data", async () => { + it("0090: should fetch one dataset including related data", async () => { const limits = { skip: 0, limit: 10, @@ -200,7 +200,7 @@ 
describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("Should fetch some origDatablock by the full filename and dataset pid", async () => { + it("0100: Should fetch some origDatablock by the full filename and dataset pid", async () => { const fields = { datasetId: decodeURIComponent(datasetPid), "dataFileList.path": "N1039-B410377.tif", @@ -225,7 +225,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("Should fetch some origDatablock by the partial filename and dataset pid", async () => { + it("0110: Should fetch some origDatablock by the partial filename and dataset pid", async () => { const fields = { datasetId: decodeURIComponent(datasetPid), "dataFileList.path": { $regex: "B410" }, @@ -250,7 +250,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("Should fetch no origDatablock using a non existing filename", async () => { + it("0120: Should fetch no origDatablock using a non existing filename", async () => { const fields = { datasetId: decodeURIComponent(datasetPid), "dataFileList.path": "this_file_does_not_exists.txt", @@ -275,7 +275,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("Should fetch one origDatablock using a specific filename and dataset id", async () => { + it("0130: Should fetch one origDatablock using a specific filename and dataset id", async () => { const fields = { datasetId: decodeURIComponent(datasetPid), "dataFileList.path": "this_unique_file.txt", @@ -300,7 +300,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("Fetch origDatablock datafiles should include datasetExist field", async () => { + it("0140: Fetch origDatablock datafiles should include datasetExist field", async () => { const fields = {}; const limits = { skip: 0, @@ -324,7 +324,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("The size and numFiles fields in the dataset should be correctly updated", async () => { + it("0150: The size and numFiles fields in the dataset should be correctly updated", async () => { return request(appUrl) .get("/api/v3/Datasets/" + datasetPid) .set("Accept", "application/json") @@ -347,7 +347,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("should delete first OrigDatablock", async () => { + it("0160: should delete first OrigDatablock", async () => { return request(appUrl) .delete( `/api/v3/datasets/${datasetPid}/OrigDatablocks/${origDatablockId1}`, @@ -357,7 +357,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to .expect(200); }); - it("should delete second OrigDatablock", async () => { + it("0170: should delete second OrigDatablock", async () => { return request(appUrl) .delete( `/api/v3/datasets/${datasetPid}/OrigDatablocks/${origDatablockId2}`, @@ -367,7 +367,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to .expect(200); }); - it("Should fetch no origdatablocks belonging to the new dataset", async () => { + it("0180: Should fetch no origdatablocks belonging to the new dataset", async () => { return request(appUrl) .get(`/api/v3/Datasets/${datasetPid}/OrigDatablocks`) .set("Accept", "application/json") @@ -379,7 +379,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("The size and numFiles fields in the dataset should be 
zero", async () => { + it("0190: The size and numFiles fields in the dataset should be zero", async () => { return request(appUrl) .get("/api/v3/Datasets/" + datasetPid) .set("Accept", "application/json") @@ -392,7 +392,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("add a new origDatablock with invalid pid should fail", async () => { + it("0200: add a new origDatablock with invalid pid should fail", async () => { return request(appUrl) .post(`/api/v3/origdatablocks`) .send({ ...TestData.OrigDataBlockCorrect1, datasetId: "wrong" }) @@ -405,7 +405,7 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }); }); - it("add a new origDatablock with valid pid should success", async () => { + it("0210: add a new origDatablock with valid pid should success", async () => { return request(appUrl) .post(`/api/v3/origdatablocks`) .send({ @@ -415,14 +415,14 @@ describe("DerivedDatasetOrigDatablock: Test OrigDatablocks and their relation to }) .set("Accept", "application/json") .set({ Authorization: `Bearer ${accessTokenIngestor}` }) - .expect(200) + .expect(201) .expect("Content-Type", /json/) .then((res) => { res.body.should.have.property("id").and.be.string; }); }); - it("should delete the newly created dataset", async () => { + it("0220: should delete the newly created dataset", async () => { return request(appUrl) .delete(`/api/v3/Datasets/${datasetPid}`) .set("Accept", "application/json") diff --git a/test/OrigDatablockForRawDataset.js b/test/OrigDatablockForRawDataset.js new file mode 100644 index 000000000..f461b4138 --- /dev/null +++ b/test/OrigDatablockForRawDataset.js @@ -0,0 +1,847 @@ +/* eslint-disable @typescript-eslint/no-var-requires */ +const utils = require("./LoginUtils"); +const { TestData } = require("./TestData"); + +var accessTokenIngestor = null, + accessTokenArchiveManager = null, + datasetPid1 = null, + encodedDatasetPid1 = null, + datasetPid2 = null, + encodedDatasetPid2 = null, + origDatablockId1 = null, + origDatablockId2 = null, + origDatablockId3 = null, + origDatablockData1 = null, + origDatablockData1Modified = null, + origDatablockData2 = null, + origDatablockData3 = null, + origDatablockWithEmptyChkAlg = null, + origDatablockWithValidChkAlg = null; + +describe("OrigDatablockForRawDataset: Test OrigDatablocks and their relation to raw Datasets using origdatablocks endpoint", () => { + + beforeEach((done) => { + utils.getToken( + appUrl, + { + username: "ingestor", + password: "aman", + }, + (tokenVal) => { + accessTokenIngestor = tokenVal; + utils.getToken( + appUrl, + { + username: "archiveManager", + password: "aman", + }, + (tokenVal) => { + accessTokenArchiveManager = tokenVal; + done(); + }, + ); + }, + ); + + origDatablockData1 = { + ...TestData.OrigDataBlockCorrect1, + "datasetId": null + } + const dataFileList = TestData.OrigDataBlockCorrect1.dataFileList.slice(0,-1); + const origDatablocSize = dataFileList + .map(e => e.size) + .reduce((a,v) => { + return a+v; + },0); + origDatablockData1Modified = { + ...TestData.OrigDataBlockCorrect1, + 'dataFileList': dataFileList, + 'size': origDatablocSize + } + origDatablockData2 = { + ...TestData.OrigDataBlockCorrect2, + "datasetId": null + } + origDatablockData3 = { + ...TestData.OrigDataBlockCorrect3, + "datasetId": null + } + + origDatablockWithEmptyChkAlg = { ...TestData.OrigDataBlockWrongChkAlg }; + }); + + it("0010: adds a first new raw dataset (dataset 1)", async () => { + return request(appUrl) + .post("/api/v3/Datasets") + 
.send(TestData.RawCorrect) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have.property("owner").and.be.string; + res.body.should.have.property("type").and.equal("raw"); + res.body.should.have.property("pid").and.be.string; + // store link to this dataset in datablocks + datasetPid1 = res.body["pid"]; + encodedDatasetPid1 = encodeURIComponent(datasetPid1); + }); + }); + + it("0020: adds a second new raw dataset (dataset 2)", async () => { + return request(appUrl) + .post("/api/v3/Datasets") + .send(TestData.RawCorrectRandom) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have.property("owner").and.be.string; + res.body.should.have.property("type").and.equal("raw"); + res.body.should.have.property("pid").and.be.string; + // store link to this dataset in datablocks + datasetPid2 = res.body["pid"]; + encodedDatasetPid2 = encodeURIComponent(datasetPid2); + }); + }); + + it("0030: validate correct origDatablock 1 data used later", async () => { + origDatablockData1.datasetId = datasetPid1; + return request(appUrl) + .post(`/api/v3/origdatablocks/isValid`) + .send(origDatablockData1) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have.property("valid").and.equal(true); + res.body.should.have + .property("errors") + .and.be.instanceof(Array) + .and.to.have.lengthOf(0); + }); + }); + + it("0040: validate wrong origDatablock and expect false", async () => { + return request(appUrl) + .post(`/api/v3/origdatablocks/isValid`) + .send(TestData.OrigDataBlockWrong) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have.property("valid").and.equal(false); + res.body.should.have + .property("errors") + .and.be.instanceof(Array) + .and.to.have.lengthOf.above(0); + }); + }); + + it("0050: adds a new origDatablock with wrong account which should fail", async () => { + origDatablockData1.datasetId = datasetPid1; + return request(appUrl) + .post(`/api/v3/OrigDatablocks`) + .send(origDatablockData1) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenArchiveManager}` }) + .expect(403) + .expect("Content-Type", /json/); + }); + + it("0060: adds a new origDatablock with correct account (origdatablock 1)", async () => { + origDatablockData1.datasetId = datasetPid1; + return request(appUrl) + .post(`/api/v3/OrigDatablocks`) + .send(origDatablockData1) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(201) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.equal(origDatablockData1.size); + res.body.should.have.property("id").and.be.string; + origDatablockId1 = res.body["id"]; + }); + }); + + it("0070: adds a second origDatablock (origdatablock 2)", async () => { + origDatablockData2.datasetId = datasetPid1; + return request(appUrl) + .post(`/api/v3/OrigDatablocks`) + .send(origDatablockData2) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(201) + .expect("Content-Type", /json/) + .then((res) => { + 
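+ // the response should echo the stored origdatablock, including its size and generated id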
res.body.should.have + .property("size") + .and.equal(origDatablockData2.size); + res.body.should.have.property("id").and.be.string; + origDatablockId2 = res.body["id"]; + }); + }); + + it("0080: add a new origDatablock with empty chkAlg should fail", async () => { + return request(appUrl) + .post(`/api/v3/OrigDatablocks`) + .send(origDatablockWithEmptyChkAlg) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(400) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have.property("error"); + }); + }); + + it("0090: add a new origDatablock with valid chkAlg should succeed (origdatablock 3)", async () => { + origDatablockData3.datasetId = datasetPid2; + return request(appUrl) + .post(`/api/v3/OrigDatablocks`) + .send(origDatablockData3) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(201) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.equal(origDatablockData3.size); + res.body.should.have + .property("chkAlg") + .and.equal(origDatablockData3.chkAlg); + res.body.should.have.property("id").and.be.string; + origDatablockId3 = res.body["id"]; + }); + }); + + it("0100: Should fetch all origdatablocks belonging to the new dataset 1", async () => { + const filter= { where: { datasetId: datasetPid1 } }; + + return request(appUrl) + .get(`/api/v3/OrigDatablocks`) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .query({ filter: JSON.stringify(filter) }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.be.instanceof(Array).and.to.have.length(2); + res.body[0]["id"].should.be.oneOf([ + origDatablockId1, + origDatablockId2, + ]); + res.body[1]["id"].should.be.oneOf([ + origDatablockId1, + origDatablockId2, + ]); + }); + }); + + it("0110: Should fetch all origdatablocks belonging to the new dataset 2", async () => { + const filter= { where: { datasetId: datasetPid2 } }; + + return request(appUrl) + .get(`/api/v3/OrigDatablocks`) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .query({ filter: JSON.stringify(filter) }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.be.instanceof(Array).and.to.have.length(1); + res.body[0]["id"].should.be.oneOf([ + origDatablockId3, + ]); + }); + }); + + it("0120: Dataset 1 should be the sum of the size of the origDatablocks 1 and 2", async () => { + return request(appUrl) + .get(`/api/v3/Datasets/${encodedDatasetPid1}`) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body["size"].should.be.equal( + origDatablockData1.size + + origDatablockData2.size, + ); + }); + }); + + it("0130: Dataset 2 should be the size of origDatablock 3", async () => { + return request(appUrl) + .get(`/api/v3/Datasets/${encodedDatasetPid2}`) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body["size"].should.be.equal( + origDatablockData3.size, + ); + }); + }); + + it("0140: should fetch dataset 1 including related data", async () => { + var limits = { + skip: 0, + limit: 10, + }; + var filter = { + where: { + pid: datasetPid1, + }, + include: [ + { + relation: "origdatablocks", + }, + ], + };
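+ // fetch the dataset together with its origdatablocks in one call via the include filter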
return request(appUrl) + .get( + "/api/v3/Datasets/findOne?filter=" + + encodeURIComponent(JSON.stringify(filter)) + + "&limits=" + + encodeURIComponent(JSON.stringify(limits)), + ) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body["pid"].should.be.equal(decodeURIComponent(datasetPid1)); + res.body.origdatablocks.should.be + .instanceof(Array) + .and.to.have.length(2); + res.body.origdatablocks[0].should.have + .property("dataFileList") + .and.be.instanceof(Array); + res.body.origdatablocks[0].dataFileList[0].path + .should.oneOf([ + origDatablockData1.dataFileList[0].path, + origDatablockData2.dataFileList[1].path, + ]); + res.body.origdatablocks[0].dataFileList[0].size + .should.oneOf([ + origDatablockData1.dataFileList[0].size, + origDatablockData2.dataFileList[1].size + ]); + }); + }); + + it("0150: Should fetch some origDatablock by the full filename and dataset pid from dataset 1", async () => { + var fields = { + datasetId: datasetPid1, + "dataFileList.path": "N1039-B410377.tif", + }; + var limits = { + skip: 0, + limit: 20, + }; + return request(appUrl) + .get( + "/api/v3/OrigDatablocks/fullQuery?fields=" + + encodeURIComponent(JSON.stringify(fields)) + + "&limits=" + + encodeURIComponent(JSON.stringify(limits)), + ) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.be.instanceof(Array).and.to.have.length(2); + }); + }); + + it("0160: Should fetch some origDatablock by the partial filename and dataset pid 1", async () => { + var fields = { + datasetId: datasetPid1, + "dataFileList.path": { $regex: "B410" }, + }; + var limits = { + skip: 0, + limit: 20, + }; + return request(appUrl) + .get( + "/api/v3/OrigDatablocks/fullQuery?fields=" + + encodeURIComponent(JSON.stringify(fields)) + + "&limits=" + + encodeURIComponent(JSON.stringify(limits)), + ) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.be.instanceof(Array).and.to.have.length(2); + }); + }); + + it("0170: Should fetch no origDatablock using a non existing filename", async () => { + var fields = { + datasetId: datasetPid1, + "dataFileList.path": "this_file_does_not_exists.txt", + }; + var limits = { + skip: 0, + limit: 20, + }; + return request(appUrl) + .get( + "/api/v3/OrigDatablocks/fullQuery?fields=" + + encodeURIComponent(JSON.stringify(fields)) + + "&limits=" + + encodeURIComponent(JSON.stringify(limits)), + ) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.be.instanceof(Array).and.to.have.length(0); + }); + }); + + it("0190: Should fetch one origDatablock using a specific filename and dataset id", async () => { + var fields = { + datasetId: datasetPid1, + "dataFileList.path": "this_unique_file.txt", + }; + var limits = { + skip: 0, + limit: 20, + }; + return request(appUrl) + .get( + "/api/v3/OrigDatablocks/fullQuery?fields=" + + encodeURIComponent(JSON.stringify(fields)) + + "&limits=" + + encodeURIComponent(JSON.stringify(limits)), + ) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + 
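+ // exactly one origdatablock should list this unique file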
res.body.should.be.instanceof(Array).and.to.have.length(1); + }); + }); + + it("0200: Fetch origDatablock datafiles should include datasetExist field", async () => { + const fields = {}; + const limits = { + skip: 0, + limit: 20, + }; + return request(appUrl) + .get( + "/api/v3/OrigDatablocks/fullQuery/files?fields=" + + encodeURIComponent(JSON.stringify(fields)) + + "&limits=" + + encodeURIComponent(JSON.stringify(limits)), + ) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.forEach((origdatablock) => + origdatablock.should.have.property("datasetExist"), + ); + }); + }); + + it("0210: Verify that size and numFiles fields are correct in the dataset 1, pass 1", async () => { + return request(appUrl) + .get("/api/v3/Datasets/" + encodedDatasetPid1) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.equal( + origDatablockData1.size + + origDatablockData2.size, + ); + res.body.should.have + .property("numberOfFiles") + .and.equal( + origDatablockData1.dataFileList.length + + origDatablockData2.dataFileList.length, + ); + }); + }); + + it("0220: Verify that size and numFiles fields are correct in the dataset 2, pass 1", async () => { + return request(appUrl) + .get("/api/v3/Datasets/" + encodedDatasetPid2) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.equal( + origDatablockData3.size, + ); + res.body.should.have + .property("numberOfFiles") + .and.equal( + origDatablockData3.dataFileList.length, + ); + }); + }); + + it("0230: should update file list and size of the origdatablock 1", async () => { + const origDatablock1Updates = { + 'size' : origDatablockData1Modified.size, + 'dataFileList': origDatablockData1Modified.dataFileList, + } + return request(appUrl) + .patch("/api/v3/origdatablocks/" + origDatablockId1) + .send(origDatablock1Updates) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.be.equal(origDatablockData1Modified.size); + res.body.should.have + .property("dataFileList") + .and.have.length(origDatablockData1Modified.dataFileList.length); + }); + }) + + it("0240: Verify that size and numFiles fields are correct in the dataset 1, pass 2", async () => { + return request(appUrl) + .get("/api/v3/Datasets/" + encodedDatasetPid1) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.equal( + origDatablockData1Modified.size + + origDatablockData2.size, + ); + res.body.should.have + .property("numberOfFiles") + .and.equal( + origDatablockData1Modified.dataFileList.length + + origDatablockData2.dataFileList.length, + ); + }); + }); + + it("0250: Verify that size and numFiles fields are correct in the dataset 2, pass 2", async () => { + return request(appUrl) + .get("/api/v3/Datasets/" + encodedDatasetPid2) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + 
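+ // dataset 2 should be unaffected by the update made to origdatablock 1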
.expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.equal( + origDatablockData3.size, + ); + res.body.should.have + .property("numberOfFiles") + .and.equal( + origDatablockData3.dataFileList.length, + ); + }); + }); + + it("0260: should update file list and size of the origdatablock 1 to original", async () => { + const origDatablock1Updates = { + 'size' : origDatablockData1.size, + 'dataFileList': origDatablockData1.dataFileList, + } + return request(appUrl) + .patch("/api/v3/origdatablocks/" + origDatablockId1) + .send(origDatablock1Updates) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.be.equal(origDatablockData1.size); + res.body.should.have + .property("dataFileList") + .and.have.length(origDatablockData1.dataFileList.length); + }); + }) + + it("0270: Verify that size and numFiles fields are correct in the dataset 1, pass 3", async () => { + return request(appUrl) + .get("/api/v3/Datasets/" + encodedDatasetPid1) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.equal( + origDatablockData1.size + + origDatablockData2.size, + ); + res.body.should.have + .property("numberOfFiles") + .and.equal( + origDatablockData1.dataFileList.length + + origDatablockData2.dataFileList.length, + ); + }); + }); + + it("0280: Verify that size and numFiles fields are correct in the dataset 2, pass 3", async () => { + return request(appUrl) + .get("/api/v3/Datasets/" + encodedDatasetPid2) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.equal( + origDatablockData3.size, + ); + res.body.should.have + .property("numberOfFiles") + .and.equal( + origDatablockData3.dataFileList.length, + ); + }); + }); + + it("0290: should delete OrigDatablock 1", async () => { + return request(appUrl) + .delete( + `/api/v3/OrigDatablocks/${origDatablockId1}`, + ) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenArchiveManager}` }) + .expect(200); + }); + + it("0300: Verify that size and numFiles fields are correct in the dataset 1, pass 4", async () => { + return request(appUrl) + .get("/api/v3/Datasets/" + encodedDatasetPid1) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.equal( + origDatablockData2.size, + ); + res.body.should.have + .property("numberOfFiles") + .and.equal( + origDatablockData2.dataFileList.length, + ); + }); + }); + + it("0310: Verify that size and numFiles fields are correct in the dataset 2, pass 4", async () => { + return request(appUrl) + .get("/api/v3/Datasets/" + encodedDatasetPid2) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.equal( + origDatablockData3.size, + ); + res.body.should.have + .property("numberOfFiles") + .and.equal( + origDatablockData3.dataFileList.length, + ); + }); + }); + + it("0320: 
should delete OrigDatablock 2", async () => { + return request(appUrl) + .delete( + `/api/v3/OrigDatablocks/${origDatablockId2}`, + ) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenArchiveManager}` }) + .expect(200); + }); + + it("0330: Verify that size and numFiles fields are correct in the dataset 1, pass 5", async () => { + return request(appUrl) + .get("/api/v3/Datasets/" + encodedDatasetPid1) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.equal(0); + res.body.should.have + .property("numberOfFiles") + .and.equal(0); + }); + }); + + it("0340: Verify that size and numFiles fields are correct in the dataset 2, pass 5", async () => { + return request(appUrl) + .get("/api/v3/Datasets/" + encodedDatasetPid2) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.equal( + origDatablockData3.size, + ); + res.body.should.have + .property("numberOfFiles") + .and.equal( + origDatablockData3.dataFileList.length, + ); + }); + }); + + it("0350: should delete OrigDatablock 3", async () => { + return request(appUrl) + .delete( + `/api/v3/OrigDatablocks/${origDatablockId3}`, + ) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenArchiveManager}` }) + .expect(200); + }); + + it("0360: Verify that size and numFiles fields are correct in the dataset 1, pass 6", async () => { + return request(appUrl) + .get("/api/v3/Datasets/" + encodedDatasetPid1) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.equal(0); + res.body.should.have + .property("numberOfFiles") + .and.equal(0); + }); + }); + + it("0370: Verify that size and numFiles fields are correct in the dataset 2, pass 6", async () => { + return request(appUrl) + .get("/api/v3/Datasets/" + encodedDatasetPid2) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.have + .property("size") + .and.equal(0); + res.body.should.have + .property("numberOfFiles") + .and.equal(0); + }); + }); + + it("0380: Should fetch no origdatablocks belonging to the dataset 1", async () => { + const filter= { where: { datasetId: datasetPid1 } }; + + return request(appUrl) + .get(`/api/v3/OrigDatablocks`) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .query({ filter: JSON.stringify(filter) }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.be.instanceof(Array).and.to.have.length(0); + }); + }); + + it("0390: Should fetch no origdatablocks belonging to the dataset 2", async () => { + const filter= { where: { datasetId: datasetPid2 } }; + + return request(appUrl) + .get(`/api/v3/OrigDatablocks`) + .set("Accept", "application/json") + .set({ Authorization: `Bearer ${accessTokenIngestor}` }) + .query({ filter: JSON.stringify(filter) }) + .expect(200) + .expect("Content-Type", /json/) + .then((res) => { + res.body.should.be.instanceof(Array).and.to.have.length(0); + }); + }); + + it("0400: add a new origDatablock with 
+
+  it("0400: add a new origDatablock with invalid pid should fail", async () => {
+    return request(appUrl)
+      .post(`/api/v3/origdatablocks`)
+      .send({ ...origDatablockData1, datasetId: "wrong" })
+      .set("Accept", "application/json")
+      .set({ Authorization: `Bearer ${accessTokenIngestor}` })
+      .expect(400)
+      .expect("Content-Type", /json/)
+      .then((res) => {
+        res.body.should.have.property("error");
+      });
+  });
+
+  it("0410: should delete the dataset 1", async () => {
+    return request(appUrl)
+      .delete("/api/v3/Datasets/" + encodedDatasetPid1)
+      .set("Accept", "application/json")
+      .set({ Authorization: `Bearer ${accessTokenArchiveManager}` })
+      .expect(200)
+      .expect("Content-Type", /json/);
+  });
+
+  it("0420: should delete the dataset 2", async () => {
+    return request(appUrl)
+      .delete("/api/v3/Datasets/" + encodedDatasetPid2)
+      .set("Accept", "application/json")
+      .set({ Authorization: `Bearer ${accessTokenArchiveManager}` })
+      .expect(200)
+      .expect("Content-Type", /json/);
+  });
+});
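Aside: the zero-padded test numbers introduced across these suites make single cases addressable by pattern. Assuming a standard mocha setup like the one these files run under, one numbered case can be picked out programmatically (file and pattern below are illustrative only):

// Hypothetical runner: select test 0120 of PublishedData.js by its prefix.
const Mocha = require("mocha");

const mocha = new Mocha().grep(/0120/);
mocha.addFile("test/PublishedData.js");
mocha.run((failures) => {
  process.exitCode = failures ? 1 : 0;
});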
diff --git a/test/PublishedData.js b/test/PublishedData.js
index c7bffdf9a..e4e0580b6 100644
--- a/test/PublishedData.js
+++ b/test/PublishedData.js
@@ -63,7 +63,7 @@ describe("PublishedData: Test of access to published data", () => {
     done();
   });
 
-  it("adds a published data", async () => {
+  it("0010: adds a published data", async () => {
     return request(appUrl)
       .post("/api/v3/PublishedData")
       .send(publishedData)
@@ -76,7 +76,7 @@ describe("PublishedData: Test of access to published data", () => {
     });
   });
 
-  it("should fetch this new published data without authorization", async () => {
+  it("0020: should fetch this new published data without authorization", async () => {
     return request(appUrl)
       .get("/api/v3/PublishedData/" + doi)
       .set("Accept", "application/json")
@@ -90,7 +90,7 @@ describe("PublishedData: Test of access to published data", () => {
     });
   });
 
-  it("should fetch this new published data", async () => {
+  it("0030: should fetch this new published data", async () => {
     return request(appUrl)
       .get("/api/v3/PublishedData/" + doi)
       .set("Accept", "application/json")
@@ -106,7 +106,7 @@ describe("PublishedData: Test of access to published data", () => {
   });
 
   // NOTE: This is added because we need dataset for registering published data
-  it("adds a new raw dataset", async () => {
+  it("0040: adds a new raw dataset", async () => {
     return request(appUrl)
       .post("/api/v3/Datasets")
       .send(testdataset)
@@ -123,7 +123,7 @@ describe("PublishedData: Test of access to published data", () => {
     });
   });
 
-  it("should register this new published data", async () => {
+  it("0050: should register this new published data", async () => {
     return request(appUrl)
       .post("/api/v3/PublishedData/" + doi + "/register")
       .set("Accept", "application/json")
@@ -132,7 +132,7 @@ describe("PublishedData: Test of access to published data", () => {
       .expect("Content-Type", /json/);
   });
 
-  it("should fetch this new published data", async () => {
+  it("0060: should fetch this new published data", async () => {
     return request(appUrl)
       .get("/api/v3/PublishedData/" + doi)
       .set("Accept", "application/json")
@@ -162,7 +162,7 @@ describe("PublishedData: Test of access to published data", () => {
   //     });
   //   });
 
-  it("should fetch this new published data", async () => {
+  it("0070: should fetch this new published data", async () => {
     return request(appUrl)
       .get("/api/v3/PublishedData/" + doi)
       .set("Accept", "application/json")
@@ -171,7 +171,7 @@ describe("PublishedData: Test of access to published data", () => {
       .expect("Content-Type", /json/);
   });
 
-  it("adds a new nonpublic dataset", async () => {
+  it("0080: adds a new nonpublic dataset", async () => {
     return request(appUrl)
       .post("/api/v3/Datasets")
       .send(nonpublictestdataset)
@@ -219,7 +219,7 @@ describe("PublishedData: Test of access to published data", () => {
   //     });
   //   });
 
-  it("should delete this published data", async () => {
+  it("0090: should delete this published data", async () => {
     return request(appUrl)
       .delete("/api/v3/PublishedData/" + doi)
       .set("Accept", "application/json")
@@ -228,7 +228,7 @@ describe("PublishedData: Test of access to published data", () => {
       .expect("Content-Type", /json/);
   });
 
-  it("should fetch this new dataset", async () => {
+  it("0100: should fetch this new dataset", async () => {
     return request(appUrl)
       .get("/api/v3/Datasets/" + pid)
       .set("Accept", "application/json")
@@ -240,7 +240,7 @@ describe("PublishedData: Test of access to published data", () => {
     });
   });
 
-  it("should fetch the non public dataset as ingestor", async () => {
+  it("0110: should fetch the non public dataset as ingestor", async () => {
     return request(appUrl)
       .get("/api/v3/Datasets/" + pidnonpublic)
       .set("Accept", "application/json")
@@ -252,13 +252,13 @@ describe("PublishedData: Test of access to published data", () => {
     });
   });
 
-  it("adds a new origDatablock", async () => {
+  it("0120: adds a new origDatablock", async () => {
     return request(appUrl)
       .post("/api/v3/OrigDatablocks")
       .send(origDataBlock)
       .set("Accept", "application/json")
       .set({ Authorization: `Bearer ${accessToken}` })
-      .expect(200)
+      .expect(201)
       .expect("Content-Type", /json/)
       .then((res) => {
         res.body.should.have.property("size").and.equal(41780189);
@@ -267,7 +267,7 @@ describe("PublishedData: Test of access to published data", () => {
     });
   });
 
-  it("should add a new attachment to this dataset", async () => {
+  it("0130: should add a new attachment to this dataset", async () => {
     const testAttachment = {
       thumbnail: "data/abc123",
       caption: "Some caption",
@@ -305,7 +305,7 @@ describe("PublishedData: Test of access to published data", () => {
   });
 
   // NOTE: Getting dataset attachment by id is missing but we modify the test little bit and check if created attachment is part of the array of attachments returned by /datasets/{id}/attachments
-  it("should fetch this dataset attachment", async () => {
+  it("0140: should fetch this dataset attachment", async () => {
     return request(appUrl)
       .get("/api/v3/Datasets/" + pid + "/attachments")
       .set("Accept", "application/json")
@@ -318,7 +318,7 @@ describe("PublishedData: Test of access to published data", () => {
     });
   });
 
-  it("should fetch some published datasets anonymously", async () => {
+  it("0150: should fetch some published datasets anonymously", async () => {
     var fields = {
       ownerGroup: ["p13388"],
     };
@@ -342,7 +342,7 @@ describe("PublishedData: Test of access to published data", () => {
     });
   });
 
-  it("should fail to fetch non-public dataset anonymously", async () => {
+  it("0160: should fail to fetch non-public dataset anonymously", async () => {
     var fields = {
       ownerGroup: [nonpublictestdataset.ownerGroup],
     };
@@ -366,7 +366,7 @@ describe("PublishedData: Test of access to published data", () => {
     });
   });
 
-  it("should fetch one dataset including related data anonymously", async () => {
+  it("0170: should fetch one dataset including related data anonymously", async () => {
     var limits = {
       skip: 0,
       limit: 2,
@@ -406,7 +406,7 @@ describe("PublishedData: Test of access to published data", () => {
     });
   });
 
-  it("should delete this dataset attachment", async () => {
+  it("0180: should delete this dataset attachment", async () => {
     return request(appUrl)
       .delete("/api/v3/Datasets/" + pid + "/attachments/" + attachmentId)
       .set("Accept", "application/json")
@@ -414,7 +414,7 @@ describe("PublishedData: Test of access to published data", () => {
       .expect(200);
   });
 
-  it("should delete a OrigDatablock", async () => {
+  it("0190: should delete an OrigDatablock", async () => {
     return request(appUrl)
       .delete("/api/v3/OrigDatablocks/" + idOrigDatablock)
       .set("Accept", "application/json")
@@ -426,7 +426,7 @@ describe("PublishedData: Test of access to published data", () => {
     });
   });
 
-  it("should delete the nonpublic dataset", async () => {
+  it("0200: should delete the nonpublic dataset", async () => {
     return request(appUrl)
       .delete("/api/v3/Datasets/" + pidnonpublic)
       .set("Accept", "application/json")
@@ -435,7 +435,7 @@ describe("PublishedData: Test of access to published data", () => {
       .expect("Content-Type", /json/);
   });
 
-  it("should delete this dataset", async () => {
+  it("0210: should delete this dataset", async () => {
     return request(appUrl)
       .delete("/api/v3/Datasets/" + pid)
       .set("Accept", "application/json")
diff --git a/test/RandomizedDatasetPermissions.js b/test/RandomizedDatasetPermissions.js
index 4e29d935a..3415a773f 100644
--- a/test/RandomizedDatasetPermissions.js
+++ b/test/RandomizedDatasetPermissions.js
@@ -80,7 +80,7 @@ function generateRandomDataset() {
         "Number of inter-flats": 0,
       },
     },
-    owner: faker.name.fullName(),
+    owner: faker.person.fullName(),
     ownerEmail: faker.internet.email(),
     orcidOfOwner: faker.database.mongodbObjectId(),
     contactEmail: faker.internet.email(),
diff --git a/test/RawDatasetOrigDatablock.js b/test/RawDatasetOrigDatablock.js
index b749ade9e..c8cbb92f4 100644
--- a/test/RawDatasetOrigDatablock.js
+++ b/test/RawDatasetOrigDatablock.js
@@ -2,7 +2,7 @@
 const utils = require("./LoginUtils");
 const { TestData } = require("./TestData");
 
-describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw Datasets", () => {
+describe("2000: RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw Datasets", () => {
   let accessTokenIngestor = null,
     accessTokenArchiveManager = null,
     datasetPid = null,
@@ -43,7 +43,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw
     origDatablockWithValidChkAlg = { ...TestData.OrigDataBlockCorrect3 };
   });
 
-  it("adds a new raw dataset", async () => {
+  it("0010: adds a new raw dataset", async () => {
     return request(appUrl)
       .post("/api/v3/Datasets")
       .send(TestData.RawCorrect)
@@ -60,7 +60,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw
     });
   });
 
-  it("validate correct origDatablock data used later", async () => {
+  it("0020: validate correct origDatablock data used later", async () => {
     return request(appUrl)
       .post(`/api/v3/datasets/${datasetPid}/origdatablocks/isValid`)
       .send(origDatablockData1)
@@ -77,7 +77,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw
     });
   });
 
-  it("validate wrong origDatablock and expect false", async () => {
+  it("0030: validate wrong origDatablock and expect false", async () => {
     return request(appUrl)
       .post(`/api/v3/datasets/${datasetPid}/origdatablocks/isValid`)
       .send(TestData.OrigDataBlockWrong)
@@ -94,7 +94,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw
     });
   });
 
-  it("adds a new origDatablock with wrong account which should fail", async () => {
+  it("0040: adds a new origDatablock with wrong account which should fail", async () => {
     return request(appUrl)
       .post(`/api/v3/datasets/${datasetPid}/OrigDatablocks`)
       .send(origDatablockData1)
@@ -104,7 +104,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw
       .expect("Content-Type", /json/);
   });
 
-  it("add a new origDatablock with empty chkAlg should fail", async () => {
+  it("0050: add a new origDatablock with empty chkAlg should fail", async () => {
     return request(appUrl)
       .post(`/api/v3/datasets/${datasetPid}/OrigDatablocks`)
       .send(origDatablockWithEmptyChkAlg)
@@ -117,7 +117,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw
     });
   });
 
-  it("add a new origDatablock with valid chkAlg should success", async () => {
+  it("0060: add a new origDatablock with valid chkAlg should succeed", async () => {
     return request(appUrl)
       .post(`/api/v3/datasets/${datasetPid}/OrigDatablocks`)
       .send(origDatablockWithValidChkAlg)
@@ -134,7 +134,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw
     });
   });
 
-  it("adds a new origDatablock with correct account", async () => {
+  it("0070: adds a new origDatablock with correct account", async () => {
     return request(appUrl)
       .post(`/api/v3/datasets/${datasetPid}/OrigDatablocks`)
       .send(origDatablockData1)
@@ -151,7 +151,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw
     });
   });
 
-  it("adds a second origDatablock", async () => {
+  it("0080: adds a second origDatablock", async () => {
     return request(appUrl)
       .post(`/api/v3/datasets/${datasetPid}/OrigDatablocks`)
       .send(origDatablockData2)
@@ -168,7 +168,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw
     });
   });
 
-  it("Should fetch all origdatablocks belonging to the new dataset", async () => {
+  it("0090: Should fetch all origdatablocks belonging to the new dataset", async () => {
     return request(appUrl)
       .get(`/api/v3/Datasets/${datasetPid}/OrigDatablocks`)
       .set("Accept", "application/json")
@@ -195,7 +195,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw
     });
   });
 
-  it("The new dataset should be the sum of the size of the origDatablocks", async () => {
+  it("0100: The new dataset should be the sum of the size of the origDatablocks", async () => {
     return request(appUrl)
       .get(`/api/v3/Datasets/${datasetPid}`)
       .set("Accept", "application/json")
@@ -211,7 +211,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw
     });
   });
 
-  it("should fetch one dataset including related data", async () => {
+  it("0110: should fetch one dataset including related data", async () => {
     var limits = {
       skip: 0,
       limit: 10,
@@ -252,7 +252,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw
     });
   });
 
-  it("Should fetch some origDatablock by the full filename and dataset pid", async () => {
+  it("0130: Should fetch some origDatablock by the full filename and dataset pid", async () => {
     var fields = {
       datasetId: decodeURIComponent(datasetPid),
       "dataFileList.path": "N1039-B410377.tif",
@@ -277,7 +277,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw
     });
   });
 
-  it("Should fetch some origDatablock by the partial filename and dataset pid", async () => {
+  it("0140: Should fetch some origDatablock by the partial filename and dataset pid", async () => {
     var fields = {
       datasetId: decodeURIComponent(datasetPid),
       "dataFileList.path": { $regex: "B410" },
@@ -302,7 +302,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw
     });
   });
 
it("Should fetch no origDatablock using a non existing filename", async () => { + it("0150: Should fetch no origDatablock using a non existing filename", async () => { var fields = { datasetId: decodeURIComponent(datasetPid), "dataFileList.path": "this_file_does_not_exists.txt", @@ -327,7 +327,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw }); }); - it("Should fetch one origDatablock using a specific filename and dataset id", async () => { + it("0160: Should fetch one origDatablock using a specific filename and dataset id", async () => { var fields = { datasetId: decodeURIComponent(datasetPid), "dataFileList.path": "this_unique_file.txt", @@ -352,7 +352,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw }); }); - it("Fetch origDatablock datafiles should include datasetExist field", async () => { + it("0170: Fetch origDatablock datafiles should include datasetExist field", async () => { const fields = {}; const limits = { skip: 0, @@ -376,7 +376,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw }); }); - it("The size and numFiles fields in the dataset should be correctly updated", async () => { + it("0180: The size and numFiles fields in the dataset should be correctly updated", async () => { return request(appUrl) .get("/api/v3/Datasets/" + datasetPid) .set("Accept", "application/json") @@ -401,7 +401,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw }); }); - it("should delete first OrigDatablock", async () => { + it("0190: should delete first OrigDatablock", async () => { return request(appUrl) .delete( `/api/v3/datasets/${datasetPid}/OrigDatablocks/${origDatablockId1}`, @@ -411,7 +411,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw .expect(200); }); - it("should delete second OrigDatablock", async () => { + it("0200: should delete second OrigDatablock", async () => { return request(appUrl) .delete( `/api/v3/datasets/${datasetPid}/OrigDatablocks/${origDatablockId2}`, @@ -421,7 +421,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw .expect(200); }); - it("should delete third OrigDatablock", async () => { + it("0210: should delete third OrigDatablock", async () => { return request(appUrl) .delete( `/api/v3/datasets/${datasetPid}/OrigDatablocks/${origDatablockId3}`, @@ -431,7 +431,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw .expect(200); }); - it("Should fetch no origdatablocks belonging to the new dataset", async () => { + it("0220: Should fetch no origdatablocks belonging to the new dataset", async () => { return request(appUrl) .get(`/api/v3/Datasets/${datasetPid}/OrigDatablocks`) .set("Accept", "application/json") @@ -443,7 +443,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw }); }); - it("The size and numFiles fields in the dataset should be zero", async () => { + it("0230: The size and numFiles fields in the dataset should be zero", async () => { return request(appUrl) .get("/api/v3/Datasets/" + datasetPid) .set("Accept", "application/json") @@ -456,7 +456,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw }); }); - it("add a new origDatablock with invalid pid should fail", async () => { + it("0240: add a new origDatablock with invalid pid should fail", async () => { return request(appUrl) .post(`/api/v3/origdatablocks`) .send({ ...origDatablockData1, 
datasetId: "wrong" }) @@ -469,7 +469,7 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw }); }); - it("add a new origDatablock with valid pid should success", async () => { + it("0250: add a new origDatablock with valid pid should success", async () => { return request(appUrl) .post(`/api/v3/origdatablocks`) .send({ @@ -479,14 +479,14 @@ describe("RawDatasetOrigDatablock: Test OrigDatablocks and their relation to raw }) .set("Accept", "application/json") .set({ Authorization: `Bearer ${accessTokenIngestor}` }) - .expect(200) + .expect(201) .expect("Content-Type", /json/) .then((res) => { res.body.should.have.property("id").and.be.string; }); }); - it("should delete the newly created dataset", async () => { + it("0260: should delete the newly created dataset", async () => { return request(appUrl) .delete(`/api/v3/Datasets/${datasetPid}`) .set("Accept", "application/json") diff --git a/test/TestData.js b/test/TestData.js index cd00b0283..e46f06192 100644 --- a/test/TestData.js +++ b/test/TestData.js @@ -219,7 +219,7 @@ const TestData = { "Number of inter-flats": 0, }, }, - owner: faker.name.fullName(), + owner: faker.person.fullName(), ownerEmail: faker.internet.email(), orcidOfOwner: faker.database.mongodbObjectId(), contactEmail: faker.internet.email(),