diff --git a/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/DataJobGraphQLIT.java b/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/DataJobGraphQLIT.java index 641dfd6f22..f7baa139ca 100644 --- a/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/DataJobGraphQLIT.java +++ b/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/DataJobGraphQLIT.java @@ -199,13 +199,18 @@ public void testGraphQLFields() throws Exception { // Test requesting of fields that are computed String contentAsString = mockMvc.perform(get(String.format("/data-jobs/for-team/%s/jobs", TEST_TEAM_NAME)) .with(user("user")) - .param("query", "query($filter: [Predicate], $search: String, $pageNumber: Int, $pageSize: Int) {" + + .param("query", + "query($filter: [Predicate], $executionFilter: [Predicate], $search: String, $pageNumber: Int, $pageSize: Int) {" + " jobs(pageNumber: $pageNumber, pageSize: $pageSize, filter: $filter, search: $search) {" + " content {" + " jobName" + " deployments {" + " id" + " enabled" + + " executions(pageNumber: 1, pageSize: 5, filter: $executionFilter) {" + + " id" + + " status" + + " }" + " }" + " config {" + " team" + @@ -223,7 +228,13 @@ public void testGraphQLFields() throws Exception { .param("variables", "{" + "\"search\": \"" + TEST_JOB_1 + "\"," + "\"pageNumber\": 1," + - "\"pageSize\": 10" + + "\"pageSize\": 10," + + "\"executionFilter\": [" + + " {" + + " \"sort\": \"DESC\"," + + " \"property\": \"deployments.executions.status\"" + + " }" + + " ]" + "}") .contentType(MediaType.APPLICATION_JSON)) .andExpect(status().isOk()) diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/JobExecutionRepository.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/JobExecutionRepository.java index 4dcdcf2a85..02f39063c9 100644 --- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/JobExecutionRepository.java +++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/JobExecutionRepository.java @@ -8,6 +8,7 @@ import com.vmware.taurus.service.model.DataJobExecution; import com.vmware.taurus.service.model.DataJobExecutionIdAndEndTime; import com.vmware.taurus.service.model.ExecutionStatus; +import org.springframework.data.domain.Pageable; import org.springframework.data.jpa.repository.JpaRepository; import java.time.OffsetDateTime; @@ -30,6 +31,8 @@ public interface JobExecutionRepository extends JpaRepository findDataJobExecutionsByDataJobName(String jobName); + List findDataJobExecutionsByDataJobName(String jobName, Pageable pageable); + List findDataJobExecutionsByDataJobNameAndStatusIn(String jobName, List statuses); List findByDataJobNameAndStatusNotInOrderByEndTime(String jobName, List statuses); diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/ExecutionDataFetcher.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/ExecutionDataFetcher.java new file mode 100644 index 0000000000..119ce2b9ba --- /dev/null +++ 
b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/ExecutionDataFetcher.java @@ -0,0 +1,122 @@ +/* + * Copyright (c) 2021 VMware, Inc. + * SPDX-License-Identifier: Apache-2.0 + */ + +package com.vmware.taurus.service.graphql; + +import com.vmware.taurus.datajobs.ToApiModelConverter; +import com.vmware.taurus.service.JobExecutionRepository; +import com.vmware.taurus.service.graphql.model.V2DataJob; +import com.vmware.taurus.service.graphql.strategy.datajob.JobFieldStrategyBy; +import com.vmware.taurus.service.model.DataJobExecution; +import com.vmware.taurus.service.graphql.model.Filter; +import com.vmware.taurus.service.graphql.model.ExecutionQueryVariables; +import graphql.GraphQLException; +import graphql.schema.DataFetchingEnvironment; +import graphql.schema.SelectedField; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.stereotype.Component; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.stream.Collectors; + +/** + * Data fetcher class for Data Job Executions + * + * Data fetchers are classes that provide the GraphQL API with the data it needs while also modifying the source data: + * given a list of data jobs, the fetcher alters each job and attaches its executions, reading the requested + * information from the GraphQL query to determine how many executions to return, whether they are sorted by a specific field, etc. + * + * Currently, the execution data fetcher does not support filtering by specific fields due + * to its post-pagination loading - the execution data is attached after the requested page has been sliced.
+ */ +@Component +public class ExecutionDataFetcher { + + private final JobExecutionRepository jobsExecutionRepository; + + public ExecutionDataFetcher(JobExecutionRepository jobsExecutionRepository) { + this.jobsExecutionRepository = jobsExecutionRepository; + } + + List populateExecutions(List allDataJob, DataFetchingEnvironment dataFetchingEnvironment) { + final ExecutionQueryVariables queryVariables = fetchQueryVariables(dataFetchingEnvironment); + final Pageable pageable = constructPageable(queryVariables); + allDataJob.forEach(dataJob -> { + if (dataJob.getDeployments() != null) { + List executionsPerJob = jobsExecutionRepository.findDataJobExecutionsByDataJobName(dataJob.getJobName(), pageable); + dataJob.getDeployments() + .stream() + .findFirst() + .ifPresent(deployment -> deployment.setExecutions( + executionsPerJob + .stream() + .map(ToApiModelConverter::jobExecutionToConvert) + .collect(Collectors.toList()))); + } + }); + return allDataJob; + } + + @SuppressWarnings("unchecked") + private ExecutionQueryVariables fetchQueryVariables(DataFetchingEnvironment dataFetchingEnvironment) { + ExecutionQueryVariables queryVariables = new ExecutionQueryVariables(); + SelectedField executionFields = dataFetchingEnvironment + .getSelectionSet().getField(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath()); + + Map execArgs = executionFields.getArguments(); + if (execArgs.get("pageNumber") == null || execArgs.get("pageSize") == null) { + throw new GraphQLException("Executions field must contain pageSize and pageNumber"); + } + queryVariables.setPageNumber((int) execArgs.get("pageNumber")); + queryVariables.setPageSize((int) execArgs.get("pageSize")); + GraphQLUtils.validatePageInput(queryVariables.getPageSize(), queryVariables.getPageNumber()); + queryVariables.setFilters(GraphQLUtils.convertFilters((ArrayList>) execArgs.get("filter"))); + validateFilterInputForExecutions(queryVariables.getFilters()); + + return queryVariables; + } + + /** + * Since filters are received as a custom GraphQL object, this method translates them into a Spring Data Pageable element. + * By default, if no fields are specified, only the paging details are returned. + * If no sorting is provided, the default (ASC) is used; by design at most one sort direction is applied. + * @param queryVar Query variables which hold multiple Filter objects + * @return Pageable element containing page and sort + */ + private Pageable constructPageable(ExecutionQueryVariables queryVar) { + Sort.Direction direction = queryVar.getFilters().stream() + .map(Filter::getSort) + .filter(Objects::nonNull) + .findFirst() + .orElse(Sort.Direction.ASC); + + List order = queryVar.getFilters().stream() + .map(Filter::getProperty) + .filter(Objects::nonNull) + .map(s -> s.replace(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getField() + ".", "")) + .map(s -> new Sort.Order(direction, s)) + .collect(Collectors.toList()); + + PageRequest pageRequest = PageRequest.of(queryVar.getPageNumber() - 1, queryVar.getPageSize()); + return order.isEmpty() ?
pageRequest : pageRequest.withSort(Sort.by(order)); + } + + void validateFilterInputForExecutions(List executionsFilter) { + final Optional filterNotSupported = executionsFilter.stream() + .filter(e -> e.getPattern() != null) + .findAny(); + if (filterNotSupported.isPresent()) { + throw new GraphQLException("Using patterns for execution filtering is currently not supported"); + } + } + +} diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/GraphQLDataFetchers.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/GraphQLDataFetchers.java index f1c1304e36..7866c49442 100644 --- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/GraphQLDataFetchers.java +++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/GraphQLDataFetchers.java @@ -9,21 +9,28 @@ import com.vmware.taurus.service.JobsRepository; import com.vmware.taurus.service.deploy.DeploymentService; import com.vmware.taurus.service.graphql.model.Criteria; +import com.vmware.taurus.service.graphql.model.DataJobQueryVariables; +import com.vmware.taurus.service.graphql.model.Filter; import com.vmware.taurus.service.graphql.model.V2DataJob; import com.vmware.taurus.service.graphql.strategy.FieldStrategy; import com.vmware.taurus.service.graphql.strategy.JobFieldStrategyFactory; import com.vmware.taurus.service.graphql.strategy.datajob.JobFieldStrategyBy; import com.vmware.taurus.service.model.DataJobPage; -import com.vmware.taurus.service.model.Filter; import com.vmware.taurus.service.model.JobDeploymentStatus; -import graphql.GraphQLException; import graphql.GraphqlErrorException; import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; import graphql.schema.DataFetchingFieldSelectionSet; import lombok.AllArgsConstructor; import org.springframework.stereotype.Component; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -39,46 +46,63 @@ public class GraphQLDataFetchers { private final JobFieldStrategyFactory strategyFactory; private final JobsRepository jobsRepository; private final DeploymentService deploymentService; + private final ExecutionDataFetcher executionDataFetcher; public DataFetcher findAllAndBuildDataJobPage() { return dataFetchingEnvironment -> { - int pageNumber = dataFetchingEnvironment.getArgument("pageNumber"); - int pageSize = dataFetchingEnvironment.getArgument("pageSize"); - String search = dataFetchingEnvironment.getArgument("search"); - List filters = convertFilters(dataFetchingEnvironment.getArgument("filter")); - validateInput(pageSize, pageNumber); - + DataJobQueryVariables queryVar = fetchDataJobQueryVariables(dataFetchingEnvironment); List allDataJob = StreamSupport.stream(jobsRepository.findAll().spliterator(), false) .map(ToApiModelConverter::toV2DataJob) .collect(Collectors.toList()); - DataFetchingFieldSelectionSet requestedFields = dataFetchingEnvironment.getSelectionSet(); - final Criteria filterCriteria = populateCriteria(filters); - - List dataJobsFiltered = populateDataJobsByRequestedFields(requestedFields, allDataJob).stream() + final Criteria 
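For illustration, here is a standalone sketch (not part of the diff) of how constructPageable above turns the executions page arguments and filter into a Spring Data Pageable. The class and method names in the sketch are invented for the example, and the printed output is approximate:

import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;

public class ExecutionPageableSketch {

    // Mirrors the logic of constructPageable: GraphQL page numbers are 1-based,
    // while Spring Data pages are 0-based, hence the "- 1".
    static Pageable toPageable(int pageNumber, int pageSize, String property, Sort.Direction direction) {
        PageRequest pageRequest = PageRequest.of(pageNumber - 1, pageSize);
        if (property == null) {
            // No sorting requested - keep only the paging details.
            return pageRequest;
        }
        // Strip the GraphQL path prefix so the remaining name matches the execution entity field.
        String entityField = property.replace("deployments.executions.", "");
        return pageRequest.withSort(Sort.by(new Sort.Order(direction, entityField)));
    }

    public static void main(String[] args) {
        // Same shape as the executionFilter variable used in the integration test above.
        Pageable pageable = toPageable(1, 5, "deployments.executions.status", Sort.Direction.DESC);
        System.out.println(pageable); // approximately: Page request [number: 0, size 5, sort: status: DESC]
    }
}

The 1-to-0 page conversion matters because the resulting Pageable is handed straight to findDataJobExecutionsByDataJobName, and Spring Data treats page 0 as the first page.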
filterCriteria = populateCriteria(queryVar.getFilters()); + List dataJobsFiltered = populateDataJobsByRequestedFields(dataFetchingEnvironment, allDataJob).stream() .filter(filterCriteria.getPredicate()) - .filter(computeSearch(requestedFields, search)) + .filter(computeSearch(dataFetchingEnvironment.getSelectionSet(), queryVar.getSearch())) .sorted(filterCriteria.getComparator()) .collect(Collectors.toList()); int count = dataJobsFiltered.size(); - List resultList = dataJobsFiltered.stream() - .skip((long) (pageNumber - 1) * pageSize) - .limit(pageSize) + List dataJobList = dataJobsFiltered.stream() + .skip((long) (queryVar.getPageNumber() - 1) * queryVar.getPageSize()) + .limit(queryVar.getPageSize()) .collect(Collectors.toList()); - return buildDataJobPage(pageSize, count, resultList); + List resultList = populateDataJobsPostPagination(dataJobList, dataFetchingEnvironment); + + return buildDataJobPage(queryVar.getPageSize(), count, new ArrayList<>(resultList)); }; } + private List populateDataJobsPostPagination(List allDataJob, DataFetchingEnvironment dataFetchingEnvironment) { + if (dataFetchingEnvironment.getSelectionSet().contains(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath())) { + executionDataFetcher.populateExecutions(allDataJob, dataFetchingEnvironment); + } + + return allDataJob; + } + + private DataJobQueryVariables fetchDataJobQueryVariables(DataFetchingEnvironment dataFetchingEnvironment) { + DataJobQueryVariables queryVariables = new DataJobQueryVariables(); + + queryVariables.setPageNumber(dataFetchingEnvironment.getArgument("pageNumber")); + queryVariables.setPageSize(dataFetchingEnvironment.getArgument("pageSize")); + GraphQLUtils.validatePageInput(queryVariables.getPageSize(), queryVariables.getPageNumber()); + queryVariables.setSearch(dataFetchingEnvironment.getArgument("search")); + queryVariables.setFilters(GraphQLUtils.convertFilters(dataFetchingEnvironment.getArgument("filter"))); + + return queryVariables; + } + /** * Alter each data job in order to populate fields that are requested from the GraphQL body - * @param requestedFields Requested fields from GraphQL query, parsed from the env + * @param dataFetchingEnvironment Environment holder of the graphql requests * @param allDataJob List of the data jobs which will be altered * @return Altered data job list */ - private List populateDataJobsByRequestedFields(DataFetchingFieldSelectionSet requestedFields, List allDataJob) { + private List populateDataJobsByRequestedFields(DataFetchingEnvironment dataFetchingEnvironment, List allDataJob) { + DataFetchingFieldSelectionSet requestedFields = dataFetchingEnvironment.getSelectionSet(); if (requestedFields.contains(JobFieldStrategyBy.DEPLOYMENT.getPath())) { populateDeployments(allDataJob); } @@ -90,19 +114,6 @@ private List populateDataJobsByRequestedFields(DataFetchingFieldSelec return allDataJob; } - private List convertFilters(ArrayList> rawFilters) { - List filters = new ArrayList<>(); - if (rawFilters != null && !rawFilters.isEmpty()) { - rawFilters.forEach(map -> { - if (map != null && !map.isEmpty()) { - Filter.Direction direction = map.get("sort") == null ? 
null : Filter.Direction.valueOf(map.get("sort")); - filters.add(new Filter(map.get("property"), map.get("pattern"), direction)); - } - }); - } - return filters; - } - private Criteria populateCriteria(List filterList) { // concurrent result, calculation might be using Fork-Join API to speed-up final AtomicReference> criteriaResult = new AtomicReference<>(JOB_CRITERIA_DEFAULT); @@ -145,23 +156,6 @@ private Predicate computeSearch(DataFetchingFieldSelectionSet request return predicate == null ? Objects::nonNull : predicate; } - private static void validateInput(int pageSize, int pageNumber) { - if (pageSize < 1) { - throw new GraphQLException("Page size cannot be less than 1"); - } - if (pageNumber < 1) { - throw new GraphQLException("Page cannot be less than 1"); - } - } - - private static DataJobPage buildDataJobPage(int pageSize, int count, List pageList) { - var dataJobPage = new DataJobPage(); - dataJobPage.setContent(pageList); - dataJobPage.setTotalPages(((count - 1) / pageSize + 1)); - dataJobPage.setTotalItems(count); - return dataJobPage; - } - private List populateDeployments(List allDataJob) { Map deploymentStatuses = deploymentService.readDeployments() .stream().collect(Collectors.toMap(JobDeploymentStatus::getDataJobName, cronJob -> cronJob)); @@ -176,4 +170,12 @@ private List populateDeployments(List allDataJob) { }); return allDataJob; } + + private static DataJobPage buildDataJobPage(int pageSize, int count, List pageList) { + var dataJobPage = new DataJobPage(); + dataJobPage.setContent(pageList); + dataJobPage.setTotalPages(((count - 1) / pageSize + 1)); + dataJobPage.setTotalItems(count); + return dataJobPage; + } } diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/GraphQLUtils.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/GraphQLUtils.java new file mode 100644 index 0000000000..fa3ee23367 --- /dev/null +++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/GraphQLUtils.java @@ -0,0 +1,46 @@ +package com.vmware.taurus.service.graphql; + +import com.vmware.taurus.service.graphql.model.Filter; +import graphql.GraphQLException; +import lombok.experimental.UtilityClass; +import org.springframework.data.domain.Sort; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; + +@UtilityClass +public class GraphQLUtils { + + /** + * The GraphQL library parses JSON query variables into a List of LinkedHashMaps. To make later + * filtering and sorting by a specific field easier, we convert them to a list of Filters + * @see Filter + * @see GraphQLDataFetchers + * + * @param rawFilters raw filter objects fetched from the GraphQL environment, to be converted + * @return List of converted filters + */ + public static List convertFilters(List> rawFilters) { + List filters = new ArrayList<>(); + if (rawFilters != null && !rawFilters.isEmpty()) { + rawFilters.forEach(map -> { + if (map != null && !map.isEmpty()) { + Sort.Direction direction = map.get("sort") == null ?
null : Sort.Direction.valueOf(map.get("sort")); + filters.add(new Filter(map.get("property"), map.get("pattern"), direction)); + } + }); + } + return filters; + } + + public static void validatePageInput(int pageSize, int pageNumber) { + if (pageSize < 1) { + throw new GraphQLException("Page size cannot be less than 1"); + } + if (pageNumber < 1) { + throw new GraphQLException("Page cannot be less than 1"); + } + } + +} diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/model/DataJobQueryVariables.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/model/DataJobQueryVariables.java new file mode 100644 index 0000000000..91e83e2cfe --- /dev/null +++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/model/DataJobQueryVariables.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2021 VMware, Inc. + * SPDX-License-Identifier: Apache-2.0 + */ + +package com.vmware.taurus.service.graphql.model; + +import lombok.Data; + +import java.util.List; + +@Data +public class DataJobQueryVariables { + private int pageSize; + private int pageNumber; + private String search; + private List filters; +} diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/model/ExecutionQueryVariables.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/model/ExecutionQueryVariables.java new file mode 100644 index 0000000000..16a11d5d41 --- /dev/null +++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/model/ExecutionQueryVariables.java @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2021 VMware, Inc. 
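As a usage illustration for GraphQLUtils.convertFilters above, a standalone sketch (not part of the diff) that assumes access to the GraphQLUtils and Filter classes introduced in this change; the map literal stands in for what the GraphQL library produces when parsing the filter query variable:

import com.vmware.taurus.service.graphql.GraphQLUtils;
import com.vmware.taurus.service.graphql.model.Filter;

import java.util.LinkedHashMap;
import java.util.List;

public class ConvertFiltersSketch {

    public static void main(String[] args) {
        // The "filter" query variable arrives as a list of LinkedHashMaps.
        LinkedHashMap<String, String> rawFilter = new LinkedHashMap<>();
        rawFilter.put("property", "deployments.executions.status");
        rawFilter.put("sort", "DESC"); // no "pattern" key, so the pattern stays null

        List<Filter> filters = GraphQLUtils.convertFilters(List.of(rawFilter));
        Filter filter = filters.get(0);
        System.out.println(filter.getProperty()); // deployments.executions.status
        System.out.println(filter.getSort());     // DESC
        System.out.println(filter.getPattern());  // null
    }
}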
+ * SPDX-License-Identifier: Apache-2.0 + */ + +package com.vmware.taurus.service.graphql.model; + +import lombok.Data; + +import java.util.List; + +@Data +public class ExecutionQueryVariables { + private int pageNumber; + private int pageSize; + private List filters; +} diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/model/Filter.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/model/Filter.java similarity index 58% rename from projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/model/Filter.java rename to projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/model/Filter.java index 3078ff0af8..241ccf9301 100644 --- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/model/Filter.java +++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/model/Filter.java @@ -3,23 +3,21 @@ * SPDX-License-Identifier: Apache-2.0 */ -package com.vmware.taurus.service.model; +package com.vmware.taurus.service.graphql.model; import lombok.AllArgsConstructor; import lombok.Data; +import org.springframework.data.domain.Sort; @Data @AllArgsConstructor public class Filter { private String property; private String pattern; - private Direction sort; + private Sort.Direction sort; - public static Filter of(String property, String pattern, Direction sort) { + public static Filter of(String property, String pattern, Sort.Direction sort) { return new Filter(property, pattern, sort); } - public enum Direction { - ASC, DESC - } } diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/FieldStrategy.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/FieldStrategy.java index c81fffcf69..1a790cb248 100644 --- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/FieldStrategy.java +++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/FieldStrategy.java @@ -7,7 +7,8 @@ import com.vmware.taurus.service.graphql.model.Criteria; import com.vmware.taurus.service.graphql.strategy.datajob.JobFieldStrategyBy; -import com.vmware.taurus.service.model.Filter; +import com.vmware.taurus.service.graphql.model.Filter; +import org.springframework.data.domain.Sort; import org.springframework.lang.NonNull; import java.util.Comparator; @@ -74,8 +75,8 @@ protected boolean sortingProvided(Filter filter) { * @param direction ASC or DESC direction * @return true if DESC, false if ASC */ - protected boolean invertSorting(Filter.Direction direction) { - return Filter.Direction.DESC.equals(direction); + protected boolean invertSorting(Sort.Direction direction) { + return Sort.Direction.DESC.equals(direction); } /** diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyBy.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyBy.java index 0888d3e778..9a1011aa43 100644 --- 
a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyBy.java +++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyBy.java @@ -19,6 +19,7 @@ public enum JobFieldStrategyBy { JOB_NAME("jobName", "content/jobName"), DEPLOYMENT("deployments", "content/deployments"), DEPLOYMENT_ENABLED("deployments.enabled", "content/deployments/enabled"), + DEPLOYMENT_EXECUTIONS("deployments.executions", "content/deployments/executions"), TEAM("config.team", "content/config/team"), DESCRIPTION("config.description", "content/config/description"), SOURCE_URL("config.sourceUrl", "content/config/sourceUrl"), diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDeploymentStatus.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDeploymentStatus.java index 57e16af4c7..6aaa1dfa8a 100644 --- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDeploymentStatus.java +++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDeploymentStatus.java @@ -8,7 +8,7 @@ import com.vmware.taurus.service.graphql.model.Criteria; import com.vmware.taurus.service.graphql.model.V2DataJob; import com.vmware.taurus.service.graphql.strategy.FieldStrategy; -import com.vmware.taurus.service.model.Filter; +import com.vmware.taurus.service.graphql.model.Filter; import org.springframework.lang.NonNull; import org.springframework.stereotype.Component; diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDescription.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDescription.java index 1b739ea10d..e547c3f0ea 100644 --- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDescription.java +++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDescription.java @@ -9,7 +9,7 @@ import com.vmware.taurus.service.graphql.model.V2DataJob; import com.vmware.taurus.service.graphql.model.V2DataJobConfig; import com.vmware.taurus.service.graphql.strategy.FieldStrategy; -import com.vmware.taurus.service.model.Filter; +import com.vmware.taurus.service.graphql.model.Filter; import org.apache.commons.lang3.StringUtils; import org.springframework.lang.NonNull; import org.springframework.stereotype.Component; diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByName.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByName.java index bd448e9b6a..4531feec2b 100644 --- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByName.java +++ 
b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByName.java @@ -8,7 +8,7 @@ import com.vmware.taurus.service.graphql.model.Criteria; import com.vmware.taurus.service.graphql.model.V2DataJob; import com.vmware.taurus.service.graphql.strategy.FieldStrategy; -import com.vmware.taurus.service.model.Filter; +import com.vmware.taurus.service.graphql.model.Filter; import org.apache.commons.lang3.StringUtils; import org.springframework.lang.NonNull; import org.springframework.stereotype.Component; diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByNextRun.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByNextRun.java index 6abfde8ad5..38f1b0d9d4 100644 --- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByNextRun.java +++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByNextRun.java @@ -14,7 +14,7 @@ import com.vmware.taurus.service.graphql.model.V2DataJobConfig; import com.vmware.taurus.service.graphql.model.V2DataJobSchedule; import com.vmware.taurus.service.graphql.strategy.FieldStrategy; -import com.vmware.taurus.service.model.Filter; +import com.vmware.taurus.service.graphql.model.Filter; import graphql.GraphqlErrorException; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByScheduleCron.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByScheduleCron.java index 6ad642cb21..c50de554a1 100644 --- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByScheduleCron.java +++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByScheduleCron.java @@ -8,7 +8,7 @@ import com.vmware.taurus.service.graphql.model.Criteria; import com.vmware.taurus.service.graphql.model.V2DataJob; import com.vmware.taurus.service.graphql.strategy.FieldStrategy; -import com.vmware.taurus.service.model.Filter; +import com.vmware.taurus.service.graphql.model.Filter; import org.apache.commons.lang3.StringUtils; import org.springframework.lang.NonNull; import org.springframework.stereotype.Component; diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyBySourceUrl.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyBySourceUrl.java index 57ba04f92c..b1261f41d0 100644 --- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyBySourceUrl.java +++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyBySourceUrl.java @@ -9,7 +9,7 @@ 
import com.vmware.taurus.service.graphql.model.V2DataJob; import com.vmware.taurus.service.graphql.model.V2DataJobConfig; import com.vmware.taurus.service.graphql.strategy.FieldStrategy; -import com.vmware.taurus.service.model.Filter; +import com.vmware.taurus.service.graphql.model.Filter; import com.vmware.taurus.service.upload.GitWrapper; import org.springframework.beans.factory.annotation.Value; import org.springframework.lang.NonNull; diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByTeam.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByTeam.java index c4a9b08f16..ef5f763a5a 100644 --- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByTeam.java +++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByTeam.java @@ -9,7 +9,7 @@ import com.vmware.taurus.service.graphql.model.V2DataJob; import com.vmware.taurus.service.graphql.model.V2DataJobConfig; import com.vmware.taurus.service.graphql.strategy.FieldStrategy; -import com.vmware.taurus.service.model.Filter; +import com.vmware.taurus.service.graphql.model.Filter; import org.apache.commons.lang3.StringUtils; import org.springframework.lang.NonNull; import org.springframework.stereotype.Component; diff --git a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/JobExecutionRepositoryIT.java b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/JobExecutionRepositoryIT.java index 3495fea9ec..7be900a398 100644 --- a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/JobExecutionRepositoryIT.java +++ b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/JobExecutionRepositoryIT.java @@ -14,6 +14,9 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; import org.springframework.boot.test.context.SpringBootTest; import java.util.List; @@ -104,7 +107,30 @@ public void testFindDataJobExecutionsByDataJobNameAndStatusIn_existingDataJobExe } @Test - public void testFindDataJobExecutionsByDataJobNameAndStatusIn_nonExistingDataJobExecution_shouldReturnEmptyResult() { + void testFindFirst5ByDataJobNameOrderByStartTimeDesc_existingDataJobExecutions_shouldReturnValidResult() { + DataJob actualDataJob = RepositoryUtil.createDataJob(jobsRepository); + + RepositoryUtil.createDataJobExecution(jobExecutionRepository, "test-execution-id-1", actualDataJob, ExecutionStatus.CANCELLED); + RepositoryUtil.createDataJobExecution(jobExecutionRepository, "test-execution-id-2", actualDataJob, ExecutionStatus.RUNNING); + DataJobExecution expectedJobExecution3 = + RepositoryUtil.createDataJobExecution(jobExecutionRepository, "test-execution-id-3", actualDataJob, ExecutionStatus.SUBMITTED); + DataJobExecution expectedJobExecution4 = + RepositoryUtil.createDataJobExecution(jobExecutionRepository, "test-execution-id-4", actualDataJob, ExecutionStatus.FAILED); + + + 
Pageable pageable = PageRequest.of(0, 2, Sort.by(Sort.Order.desc("id"))); + var actualJobExecutions = + jobExecutionRepository.findDataJobExecutionsByDataJobName(actualDataJob.getName(), pageable); + + Assertions.assertNotNull(actualJobExecutions); + Assertions.assertEquals(2, actualJobExecutions.size()); + Assertions.assertEquals(expectedJobExecution4, actualJobExecutions.get(0)); + Assertions.assertEquals(expectedJobExecution3, actualJobExecutions.get(1)); + + } + + @Test + void testFindDataJobExecutionsByDataJobNameAndStatusIn_nonExistingDataJobExecution_shouldReturnEmptyResult() { DataJob actualDataJob = RepositoryUtil.createDataJob(jobsRepository); var actualJobExecutions = diff --git a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/ExecutionDataFetcherTest.java b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/ExecutionDataFetcherTest.java new file mode 100644 index 0000000000..d45c149046 --- /dev/null +++ b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/ExecutionDataFetcherTest.java @@ -0,0 +1,152 @@ +package com.vmware.taurus.service.graphql; + +import com.vmware.taurus.controlplane.model.data.DataJobMode; +import com.vmware.taurus.service.JobExecutionRepository; +import com.vmware.taurus.service.graphql.model.Filter; +import com.vmware.taurus.service.graphql.model.V2DataJob; +import com.vmware.taurus.service.graphql.model.V2DataJobConfig; +import com.vmware.taurus.service.graphql.model.V2DataJobDeployment; +import com.vmware.taurus.service.graphql.model.V2DataJobSchedule; +import com.vmware.taurus.service.graphql.strategy.datajob.JobFieldStrategyBy; +import graphql.GraphQLException; +import graphql.schema.DataFetchingEnvironment; +import graphql.schema.DataFetchingFieldSelectionSet; +import graphql.schema.SelectedField; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; + +import static com.vmware.taurus.service.graphql.GraphQLDataFetchersTest.constructFilter; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +class ExecutionDataFetcherTest { + + private ExecutionDataFetcher executionDataFetcher; + @Mock + private JobExecutionRepository jobExecutionRepository; + @Mock + private DataFetchingEnvironment dataFetchingEnvironment; + @Mock + private DataFetchingFieldSelectionSet dataFetchingFieldSelectionSet; + @Mock + private SelectedField selectedField; + + @BeforeEach + public void init() { + executionDataFetcher = new ExecutionDataFetcher(jobExecutionRepository); + } + + @Test + void testDataFetcherOfJobs_whenInvalidExecutionPageNumberIsProvided_shouldThrowException() { + when(dataFetchingEnvironment.getSelectionSet()).thenReturn(dataFetchingFieldSelectionSet); + 
when(dataFetchingFieldSelectionSet.getField(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath())) + .thenReturn(selectedField); + Map executionArgs = new HashMap<>(); + executionArgs.put("pageNumber", 0); + executionArgs.put("pageSize", 25); + when(selectedField.getArguments()).thenReturn(executionArgs); + List v2DataJobs = mockListOfV2DataJobs(); + + assertThrows(GraphQLException.class, () -> executionDataFetcher.populateExecutions(v2DataJobs, dataFetchingEnvironment)); + } + + @Test + void testDataFetcherOfJobs_whenInvalidExecutionPageSizeIsProvided_shouldThrowException() { + when(dataFetchingEnvironment.getSelectionSet()).thenReturn(dataFetchingFieldSelectionSet); + when(dataFetchingFieldSelectionSet.getField(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath())) + .thenReturn(selectedField); + Map executionArgs = new HashMap<>(); + executionArgs.put("pageNumber", 1); + executionArgs.put("pageSize", 0); + when(selectedField.getArguments()).thenReturn(executionArgs); + List v2DataJobs = mockListOfV2DataJobs(); + + assertThrows(GraphQLException.class, () -> executionDataFetcher.populateExecutions(v2DataJobs, dataFetchingEnvironment)); + } + + @Test + void testDataFetcherOfJobs_whenRequestIncludesExecutions_shouldInvokeExecutions() { + when(dataFetchingEnvironment.getSelectionSet()).thenReturn(dataFetchingFieldSelectionSet); + when(dataFetchingFieldSelectionSet.getField(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath())) + .thenReturn(selectedField); + Map executionArgs = new HashMap<>(); + executionArgs.put("pageNumber", 1); + executionArgs.put("pageSize", 25); + when(selectedField.getArguments()).thenReturn(executionArgs); + List v2DataJobs = mockListOfV2DataJobs(); + + List result = executionDataFetcher.populateExecutions(v2DataJobs, dataFetchingEnvironment); + + assertEquals(3, result.size()); + verify(jobExecutionRepository, times(2)).findDataJobExecutionsByDataJobName(anyString(), any(Pageable.class)); + } + + @Test + void testDataFetcherOfJobs_whenUnsupportedFilterInExecutionOperation_shouldThrowException() { + when(dataFetchingEnvironment.getSelectionSet()).thenReturn(dataFetchingFieldSelectionSet); + when(dataFetchingFieldSelectionSet.getField(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath())) + .thenReturn(selectedField); + Map executionArgs = new HashMap<>(); + executionArgs.put("pageNumber", 1); + executionArgs.put("pageSize", 25); + + executionArgs.put("filter", constructFilter( + Filter.of(UUID.randomUUID().toString(), "anypattern", Sort.Direction.ASC) + )); + when(selectedField.getArguments()).thenReturn(executionArgs); + List v2DataJobs = mockListOfV2DataJobs(); + + assertThrows(GraphQLException.class, () -> executionDataFetcher.populateExecutions(v2DataJobs, dataFetchingEnvironment)); + } + + static List mockListOfV2DataJobs() { + List dataJobs = new ArrayList<>(); + + dataJobs.add(mockSampleDataJob("sample-job-1", "Import SQL", true)); + dataJobs.add(mockSampleDataJob("sample-job-2", "Dump SQL", true)); + dataJobs.add(mockSampleDataJob("sample-job-3", "Delete users", false)); + + return dataJobs; + } + + static V2DataJob mockSampleDataJob(String jobName, String description, boolean includeDeployment) { + V2DataJob dataJob = new V2DataJob(); + V2DataJobConfig jobConfig = new V2DataJobConfig(); + jobConfig.setSchedule(new V2DataJobSchedule()); + jobConfig.setDescription(description); + dataJob.setConfig(jobConfig); + if (includeDeployment) { + dataJob.setDeployments(Collections.singletonList(mockSampleDeployment(jobName))); + } + dataJob.setJobName(jobName); + + return 
dataJob; + } + + static V2DataJobDeployment mockSampleDeployment(String jobName) { + V2DataJobDeployment status = new V2DataJobDeployment(); + status.setEnabled(true); + status.setId(jobName + "-latest"); + status.setMode(DataJobMode.RELEASE); + return status; + } + +} diff --git a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/GraphQLDataFetchersTest.java b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/GraphQLDataFetchersTest.java index 7a25890e8a..1fe5a4cd33 100644 --- a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/GraphQLDataFetchersTest.java +++ b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/GraphQLDataFetchersTest.java @@ -7,37 +7,55 @@ import com.vmware.taurus.service.JobsRepository; import com.vmware.taurus.service.deploy.DeploymentService; +import com.vmware.taurus.service.graphql.model.Filter; import com.vmware.taurus.service.graphql.model.V2DataJob; import com.vmware.taurus.service.graphql.strategy.FieldStrategy; import com.vmware.taurus.service.graphql.strategy.JobFieldStrategyFactory; -import com.vmware.taurus.service.graphql.strategy.datajob.*; +import com.vmware.taurus.service.graphql.strategy.datajob.JobFieldStrategyBy; +import com.vmware.taurus.service.graphql.strategy.datajob.JobFieldStrategyByDescription; +import com.vmware.taurus.service.graphql.strategy.datajob.JobFieldStrategyByName; +import com.vmware.taurus.service.graphql.strategy.datajob.JobFieldStrategyByNextRun; +import com.vmware.taurus.service.graphql.strategy.datajob.JobFieldStrategyByScheduleCron; +import com.vmware.taurus.service.graphql.strategy.datajob.JobFieldStrategyBySourceUrl; +import com.vmware.taurus.service.graphql.strategy.datajob.JobFieldStrategyByTeam; import com.vmware.taurus.service.model.DataJob; import com.vmware.taurus.service.model.DataJobPage; -import com.vmware.taurus.service.model.Filter; import com.vmware.taurus.service.model.JobConfig; +import com.vmware.taurus.service.model.JobDeploymentStatus; import graphql.GraphQLException; -import graphql.GraphqlErrorException; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import graphql.schema.DataFetchingFieldSelectionSet; +import graphql.schema.SelectedField; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; - -import java.util.*; +import org.springframework.data.domain.Sort; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.AdditionalMatchers.not; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.when; @ExtendWith(MockitoExtension.class) class GraphQLDataFetchersTest { - + @Mock + private ExecutionDataFetcher executionDataFetcher; @Mock private JobsRepository jobsRepository; @Mock @@ -46,13 +64,14 @@ class GraphQLDataFetchersTest { 
private DataFetchingEnvironment dataFetchingEnvironment; @Mock private DataFetchingFieldSelectionSet dataFetchingFieldSelectionSet; + @Mock + private SelectedField selectedField; private DataFetcher findDataJobs; - private ArrayList> rawFilters; @BeforeEach public void before() { JobFieldStrategyFactory strategyFactory = new JobFieldStrategyFactory(collectSupportedFieldStrategies()); - GraphQLDataFetchers graphQLDataFetchers = new GraphQLDataFetchers(strategyFactory, jobsRepository, deploymentService); + GraphQLDataFetchers graphQLDataFetchers = new GraphQLDataFetchers(strategyFactory, jobsRepository, deploymentService, executionDataFetcher); findDataJobs = graphQLDataFetchers.findAllAndBuildDataJobPage(); } @@ -62,7 +81,8 @@ void testDataFetcherOfJobs_whenGettingFullList_shouldReturnAllDataJobs() throws when(dataFetchingEnvironment.getArgument("pageSize")).thenReturn(10); when(jobsRepository.findAll()).thenReturn(mockListOfDataJobs()); when(dataFetchingEnvironment.getSelectionSet()).thenReturn(dataFetchingFieldSelectionSet); - when(dataFetchingFieldSelectionSet.contains(anyString())).thenReturn(true); + when(dataFetchingFieldSelectionSet.contains(not(eq(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath())))).thenReturn(true); + when(dataFetchingFieldSelectionSet.contains(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath())).thenReturn(false); DataJobPage dataJobPage = (DataJobPage) findDataJobs.get(dataFetchingEnvironment); @@ -75,7 +95,8 @@ void testDataFetcherOfJobs_whenGettingPagedResult_shouldReturnPagedJobs() throws when(dataFetchingEnvironment.getArgument("pageSize")).thenReturn(2); when(jobsRepository.findAll()).thenReturn(mockListOfDataJobs()); when(dataFetchingEnvironment.getSelectionSet()).thenReturn(dataFetchingFieldSelectionSet); - when(dataFetchingFieldSelectionSet.contains(anyString())).thenReturn(true); + when(dataFetchingFieldSelectionSet.contains(not(eq(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath())))).thenReturn(true); + when(dataFetchingFieldSelectionSet.contains(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath())).thenReturn(false); DataJobPage dataJobPage = (DataJobPage) findDataJobs.get(dataFetchingEnvironment); @@ -92,7 +113,7 @@ void testDataFetcherOfJobs_whenSupportedFieldProvidedWithSorting_shouldReturnJob when(jobsRepository.findAll()).thenReturn(mockListOfDataJobs()); when(dataFetchingEnvironment.getSelectionSet()).thenReturn(dataFetchingFieldSelectionSet); when(dataFetchingEnvironment.getArgument("filter")).thenReturn(constructFilter( - Filter.of("jobName", "sample-job", Filter.Direction.DESC) + Filter.of("jobName", "sample-job", Sort.Direction.DESC) )); DataJobPage dataJobPage = (DataJobPage) findDataJobs.get(dataFetchingEnvironment); @@ -102,21 +123,7 @@ void testDataFetcherOfJobs_whenSupportedFieldProvidedWithSorting_shouldReturnJob assertThat(dataJob.getJobName()).isEqualTo("sample-job-3"); } - @Test - void testDataFetcherOfJobs_whenUnsupportedFieldProvided_shouldThrowException() { - when(dataFetchingEnvironment.getArgument("pageNumber")).thenReturn(2); - when(dataFetchingEnvironment.getArgument("pageSize")).thenReturn(2); - when(jobsRepository.findAll()).thenReturn(mockListOfDataJobs()); - when(dataFetchingEnvironment.getSelectionSet()).thenReturn(dataFetchingFieldSelectionSet); - when(dataFetchingEnvironment.getArgument("search")).thenReturn(null); - when(dataFetchingEnvironment.getArgument("filter")).thenReturn(constructFilter( - Filter.of(UUID.randomUUID().toString(), "sample-job-1", Filter.Direction.ASC) - )); - 
assertThrows(GraphqlErrorException.class, () -> { - findDataJobs.get(dataFetchingEnvironment); - }); - } @Test void testDataFetcherOfJobs_whenSearchingSpecificJob_shouldReturnSearchedJob() throws Exception { @@ -125,7 +132,8 @@ void testDataFetcherOfJobs_whenSearchingSpecificJob_shouldReturnSearchedJob() th when(dataFetchingEnvironment.getArgument("search")).thenReturn("sample-job-2"); when(jobsRepository.findAll()).thenReturn(mockListOfDataJobs()); when(dataFetchingEnvironment.getSelectionSet()).thenReturn(dataFetchingFieldSelectionSet); - when(dataFetchingFieldSelectionSet.contains(anyString())).thenReturn(true); + when(dataFetchingFieldSelectionSet.contains(not(eq(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath())))).thenReturn(true); + when(dataFetchingFieldSelectionSet.contains(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath())).thenReturn(false); DataJobPage dataJobPage = (DataJobPage) findDataJobs.get(dataFetchingEnvironment); @@ -135,13 +143,14 @@ void testDataFetcherOfJobs_whenSearchingSpecificJob_shouldReturnSearchedJob() th } @Test - void testDataFetcherOfJobs_whenSearchingByPattern_shouldReturnMathchingJobs() throws Exception { + void testDataFetcherOfJobs_whenSearchingByPattern_shouldReturnMatchingJobs() throws Exception { when(dataFetchingEnvironment.getArgument("pageNumber")).thenReturn(1); when(dataFetchingEnvironment.getArgument("pageSize")).thenReturn(10); when(dataFetchingEnvironment.getArgument("search")).thenReturn("sample-job-2"); when(jobsRepository.findAll()).thenReturn(mockListOfDataJobs()); when(dataFetchingEnvironment.getSelectionSet()).thenReturn(dataFetchingFieldSelectionSet); - when(dataFetchingFieldSelectionSet.contains(anyString())).thenReturn(true); + when(dataFetchingFieldSelectionSet.contains(not(eq(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath())))).thenReturn(true); + when(dataFetchingFieldSelectionSet.contains(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath())).thenReturn(false); DataJobPage dataJobPage = (DataJobPage) findDataJobs.get(dataFetchingEnvironment); @@ -182,6 +191,16 @@ void testDataFetcherOfJobs_whenValidPageNumberIsProvided_shouldNotThrowException }); } + + private List mockListOfDeployments() { + List jobDeployments = new ArrayList<>(); + + jobDeployments.add(mockSampleDeployment("sample-job-1", true)); + jobDeployments.add(mockSampleDeployment("sample-job-2", false)); + + return jobDeployments; + } + private List mockListOfDataJobs() { List dataJobs = new ArrayList<>(); @@ -203,8 +222,17 @@ private DataJob mockSampleDataJob(String jobName, String description, String sch return dataJob; } - private ArrayList> constructFilter(Filter ... filters ) { - rawFilters = new ArrayList<>(); + private JobDeploymentStatus mockSampleDeployment(String jobName, boolean enabled) { + JobDeploymentStatus status = new JobDeploymentStatus(); + status.setEnabled(enabled); + status.setDataJobName(jobName); + status.setCronJobName(jobName+"-latest"); + status.setMode("release"); + return status; + } + + static ArrayList> constructFilter(Filter ... 
filters ) {
+      ArrayList<LinkedHashMap<String, String>> rawFilters = new ArrayList<>();
       Arrays.stream(filters).forEach(filter -> {
          LinkedHashMap<String, String> map = new LinkedHashMap<>();
diff --git a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDeploymentStatusTest.java b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDeploymentStatusTest.java
index 3890097a75..082869b7ca 100644
--- a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDeploymentStatusTest.java
+++ b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDeploymentStatusTest.java
@@ -8,9 +8,10 @@
 import com.vmware.taurus.service.graphql.model.Criteria;
 import com.vmware.taurus.service.graphql.model.V2DataJob;
 import com.vmware.taurus.service.graphql.model.V2DataJobDeployment;
-import com.vmware.taurus.service.model.Filter;
+import com.vmware.taurus.service.graphql.model.Filter;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
+import org.springframework.data.domain.Sort;
 
 import java.util.Collections;
 import java.util.Comparator;
@@ -41,7 +42,7 @@ void testJobDeploymentStatusStrategy_whenAlteringFieldData_shouldNotModifyState(
    @Test
    void testJobDeploymentStatusStrategy_whenComputingValidCriteriaWithoutFilter_shouldReturnValidCriteria() {
       Criteria<V2DataJob> baseCriteria = new Criteria<>(Objects::nonNull, Comparator.comparing(V2DataJob::getJobName));
-      Filter baseFilter = new Filter("random", null, Filter.Direction.DESC);
+      Filter baseFilter = new Filter("random", null, Sort.Direction.DESC);
 
       V2DataJob a = createDummyJob(JobFieldStrategyByDeploymentStatus.DeploymentStatus.ENABLED);
       V2DataJob b = createDummyJob(JobFieldStrategyByDeploymentStatus.DeploymentStatus.DISABLED);
@@ -59,7 +60,7 @@ void testJobDeploymentStatusStrategy_whenComputingValidCriteriaWithFilter_should
       V2DataJob b = createDummyJob(JobFieldStrategyByDeploymentStatus.DeploymentStatus.DISABLED);
       V2DataJob c = createDummyJob(JobFieldStrategyByDeploymentStatus.DeploymentStatus.NOT_DEPLOYED);
 
-      Filter enabledFilter = new Filter("deployments.status", "enabled", Filter.Direction.ASC);
+      Filter enabledFilter = new Filter("deployments.status", "enabled", Sort.Direction.ASC);
 
       Criteria<V2DataJob> criteriaForEnabledJobs = strategyByDeploymentStatus.computeFilterCriteria(baseCriteria, enabledFilter);
       assertThat(criteriaForEnabledJobs.getPredicate().test(a)).isTrue();
@@ -69,14 +70,14 @@ void testJobDeploymentStatusStrategy_whenComputingValidCriteriaWithFilter_should
       assertThat(criteriaForEnabledJobs.getComparator().compare(c, b)).isPositive();
       assertThat(criteriaForEnabledJobs.getComparator().compare(a, c)).isNegative();
 
-      Filter disabledFilter = new Filter("deployments.status", "disabled", Filter.Direction.ASC);
+      Filter disabledFilter = new Filter("deployments.status", "disabled", Sort.Direction.ASC);
 
       Criteria<V2DataJob> criteriaForDisabledJobs = strategyByDeploymentStatus.computeFilterCriteria(baseCriteria, disabledFilter);
       assertThat(criteriaForDisabledJobs.getPredicate().test(a)).isFalse();
       assertThat(criteriaForDisabledJobs.getPredicate().test(b)).isTrue();
       assertThat(criteriaForDisabledJobs.getPredicate().test(c)).isFalse();
 
-      Filter notDeployedFilter = new Filter("deployments.status", "not_deployed", Filter.Direction.ASC);
+      Filter notDeployedFilter = new Filter("deployments.status", "not_deployed", Sort.Direction.ASC);
 
       Criteria<V2DataJob> criteriaForNotDeployedJobs = strategyByDeploymentStatus.computeFilterCriteria(baseCriteria, notDeployedFilter);
       assertThat(criteriaForNotDeployedJobs.getPredicate().test(a)).isFalse();
diff --git a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDescriptionTest.java b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDescriptionTest.java
index d7aad64a58..2d9eabad90 100644
--- a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDescriptionTest.java
+++ b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByDescriptionTest.java
@@ -8,8 +8,9 @@
 import com.vmware.taurus.service.graphql.model.Criteria;
 import com.vmware.taurus.service.graphql.model.V2DataJob;
 import com.vmware.taurus.service.graphql.model.V2DataJobConfig;
-import com.vmware.taurus.service.model.Filter;
+import com.vmware.taurus.service.graphql.model.Filter;
 import org.junit.jupiter.api.Test;
+import org.springframework.data.domain.Sort;
 
 import java.util.Comparator;
 import java.util.Objects;
@@ -38,7 +39,7 @@ void testJobDescriptionStrategy_whenAlteringFieldData_shouldNotModifyState() {
    @Test
    void testJobDescriptionStrategy_whenComputingValidCriteriaWithoutFilter_shouldReturnValidCriteria() {
       Criteria<V2DataJob> baseCriteria = new Criteria<>(Objects::nonNull, Comparator.comparing(V2DataJob::getJobName));
-      Filter baseFilter = new Filter("random", null, Filter.Direction.DESC);
+      Filter baseFilter = new Filter("random", null, Sort.Direction.DESC);
 
       V2DataJob a = createDummyJob("A");
       V2DataJob b = createDummyJob("B");
@@ -51,7 +52,7 @@ void testJobDescriptionStrategy_whenComputingValidCriteriaWithoutFilter_shouldRe
    @Test
    void testJobDescriptionStrategy_whenComputingValidCriteriaWithFilter_shouldReturnValidCriteria() {
       Criteria<V2DataJob> baseCriteria = new Criteria<>(Objects::nonNull, Comparator.comparing(V2DataJob::getJobName));
-      Filter baseFilter = new Filter("description", "A", Filter.Direction.ASC);
+      Filter baseFilter = new Filter("description", "A", Sort.Direction.ASC);
 
       V2DataJob a = createDummyJob("a");
       V2DataJob b = createDummyJob("b");
diff --git a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByNameTest.java b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByNameTest.java
index 2e57b13525..856b6253d5 100644
--- a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByNameTest.java
+++ b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByNameTest.java
@@ -7,8 +7,9 @@
 import com.vmware.taurus.service.graphql.model.Criteria;
 import com.vmware.taurus.service.graphql.model.V2DataJob;
-import com.vmware.taurus.service.model.Filter;
+import com.vmware.taurus.service.graphql.model.Filter;
 import org.junit.jupiter.api.Test;
+import org.springframework.data.domain.Sort;
 
 import java.util.Comparator;
 import java.util.Objects;
@@ -37,7 +38,7 @@ void testJobNameStrategy_whenAlteringFieldData_shouldNotModifyState() {
    @Test
    void testJobNameStrategy_whenComputingValidCriteriaWithoutFilter_shouldReturnValidCriteria() {
       Criteria<V2DataJob> baseCriteria = new Criteria<>(Objects::nonNull, Comparator.comparing(dataJob -> dataJob.getConfig().getDescription()));
-      Filter baseFilter = new Filter("random", null, Filter.Direction.DESC);
+      Filter baseFilter = new Filter("random", null, Sort.Direction.DESC);
 
       V2DataJob a = createDummyJob("A");
       V2DataJob b = createDummyJob("B");
@@ -50,7 +51,7 @@ void testJobNameStrategy_whenComputingValidCriteriaWithoutFilter_shouldReturnVal
    @Test
    void testJobNameStrategy_whenComputingValidCriteriaWithFilter_shouldReturnValidCriteria() {
       Criteria<V2DataJob> baseCriteria = new Criteria<>(Objects::nonNull, Comparator.comparing(dataJob -> dataJob.getConfig().getDescription()));
-      Filter baseFilter = new Filter("jobName", "A", Filter.Direction.ASC);
+      Filter baseFilter = new Filter("jobName", "A", Sort.Direction.ASC);
 
       V2DataJob a = createDummyJob("a");
       V2DataJob b = createDummyJob("b");
diff --git a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByNextRunTest.java b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByNextRunTest.java
index 9d2c6b5782..0c93617ed4 100644
--- a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByNextRunTest.java
+++ b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByNextRunTest.java
@@ -13,8 +13,9 @@
 import com.vmware.taurus.service.graphql.model.V2DataJob;
 import com.vmware.taurus.service.graphql.model.V2DataJobConfig;
 import com.vmware.taurus.service.graphql.model.V2DataJobSchedule;
-import com.vmware.taurus.service.model.Filter;
+import com.vmware.taurus.service.graphql.model.Filter;
 import org.junit.jupiter.api.Test;
+import org.springframework.data.domain.Sort;
 
 import java.time.ZoneId;
 import java.time.ZonedDateTime;
@@ -105,7 +106,7 @@ void testJobNextRunStrategy_whenAlteringFieldDataWithInvalidSchedule_shouldRetur
    @Test
    void testJobNextRunStrategy_whenComputingValidCriteriaWithoutFilter_shouldReturnValidCriteria() {
       Criteria<V2DataJob> baseCriteria = new Criteria<>(Objects::nonNull, Comparator.comparing(V2DataJob::getJobName));
-      Filter baseFilter = new Filter("random", null, Filter.Direction.DESC);
+      Filter baseFilter = new Filter("random", null, Sort.Direction.DESC);
 
       V2DataJob a = createDummyJob("5 4 * * *");
       V2DataJob b = createDummyJob("5 6 * * *"); // later than previous
@@ -128,7 +129,7 @@ void testJobNextRunStrategy_whenComputingValidCriteriaWithoutFilter_shouldReturn
    void testJobNextRunStrategy_whenComputingValidCriteriaWithFilter_shouldReturnValidCriteria() {
       Criteria<V2DataJob> baseCriteria = new Criteria<>(Objects::nonNull, Comparator.comparing(V2DataJob::getJobName));
       Filter baseFilter = new Filter("config.schedule.nextRunEpochSeconds",
-            String.format("%d-%d",getNextWeek(), getSecondMonthOfNextYear()), Filter.Direction.ASC);
+            String.format("%d-%d",getNextWeek(), getSecondMonthOfNextYear()), Sort.Direction.ASC);
 
       V2DataJob a = createDummyJob("5 4 1 1 1");
       V2DataJob b = createDummyJob("5 6 * * *"); // later than previous
diff --git a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByTeamTest.java b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByTeamTest.java
index 2a1851db7d..8f2833b8ea 100644
--- a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByTeamTest.java
+++ b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/graphql/strategy/datajob/JobFieldStrategyByTeamTest.java
@@ -8,8 +8,9 @@
 import com.vmware.taurus.service.graphql.model.Criteria;
 import com.vmware.taurus.service.graphql.model.V2DataJob;
 import com.vmware.taurus.service.graphql.model.V2DataJobConfig;
-import com.vmware.taurus.service.model.Filter;
+import com.vmware.taurus.service.graphql.model.Filter;
 import org.junit.jupiter.api.Test;
+import org.springframework.data.domain.Sort;
 
 import java.util.Comparator;
 import java.util.Objects;
@@ -38,7 +39,7 @@ void testJobTeamStrategy_whenAlteringFieldData_shouldNotModifyState() {
    @Test
    void testJobTeamStrategy_whenComputingValidCriteriaWithoutFilter_shouldReturnValidCriteria() {
       Criteria<V2DataJob> baseCriteria = new Criteria<>(Objects::nonNull, Comparator.comparing(V2DataJob::getJobName));
-      Filter baseFilter = new Filter("random", null, Filter.Direction.DESC);
+      Filter baseFilter = new Filter("random", null, Sort.Direction.DESC);
 
       V2DataJob a = createDummyJob("starshot");
       V2DataJob b = createDummyJob("taurus");
@@ -51,7 +52,7 @@ void testJobTeamStrategy_whenComputingValidCriteriaWithFilter_shouldReturnValid
    @Test
    void testJobTeamStrategy_whenComputingValidCriteriaWithFilter_shouldReturnValidCriteria() {
       Criteria<V2DataJob> baseCriteria = new Criteria<>(Objects::nonNull, Comparator.comparing(V2DataJob::getJobName));
-      Filter baseFilter = new Filter("config.team", "starshot", Filter.Direction.ASC);
+      Filter baseFilter = new Filter("config.team", "starshot", Sort.Direction.ASC);
 
       V2DataJob a = createDummyJob("starshot");
       V2DataJob b = createDummyJob("taurus");