Skip to content

Commit

Permalink
control-service: Add Data Job Executions to the graphql
Browse files Browse the repository at this point in the history
Testing:
Local run of tests and manual testing; also added some new test cases in unit tests and some code changes in integration tests

Signed-off-by: Plamen Kostov <[email protected]>
  • Loading branch information
Plamen Kostov committed Sep 26, 2021
1 parent e8f1772 commit fe23af0
Show file tree
Hide file tree
Showing 25 changed files with 547 additions and 117 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -199,13 +199,18 @@ public void testGraphQLFields() throws Exception {
// Test requesting of fields that are computed
String contentAsString = mockMvc.perform(get(String.format("/data-jobs/for-team/%s/jobs", TEST_TEAM_NAME))
.with(user("user"))
.param("query", "query($filter: [Predicate], $search: String, $pageNumber: Int, $pageSize: Int) {" +
.param("query",
"query($filter: [Predicate], $executionFilter: [Predicate], $search: String, $pageNumber: Int, $pageSize: Int) {" +
" jobs(pageNumber: $pageNumber, pageSize: $pageSize, filter: $filter, search: $search) {" +
" content {" +
" jobName" +
" deployments {" +
" id" +
" enabled" +
" executions(pageNumber: 1, pageSize: 5, filter: $executionFilter) {" +
" id" +
" status" +
" }" +
" }" +
" config {" +
" team" +
Expand All @@ -223,7 +228,13 @@ public void testGraphQLFields() throws Exception {
.param("variables", "{" +
"\"search\": \"" + TEST_JOB_1 + "\"," +
"\"pageNumber\": 1," +
"\"pageSize\": 10" +
"\"pageSize\": 10," +
"\"executionFilter\": [" +
" {" +
" \"sort\": \"DESC\"," +
" \"property\": \"deployments.executions.status\"" +
" }" +
" ]" +
"}")
.contentType(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import com.vmware.taurus.service.model.DataJobExecution;
import com.vmware.taurus.service.model.DataJobExecutionIdAndEndTime;
import com.vmware.taurus.service.model.ExecutionStatus;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;

import java.time.OffsetDateTime;
Expand All @@ -30,6 +31,8 @@ public interface JobExecutionRepository extends JpaRepository<DataJobExecution,

// Derived query: all executions of the given data job (unpaged).
List<DataJobExecution> findDataJobExecutionsByDataJobName(String jobName);

// Paged variant of the query above: the Pageable carries page number/size and an optional Sort.
List<DataJobExecution> findDataJobExecutionsByDataJobName(String jobName, Pageable pageable);

// Executions of the given data job whose status is one of the provided statuses.
List<DataJobExecution> findDataJobExecutionsByDataJobNameAndStatusIn(String jobName, List<ExecutionStatus> statuses);

// Projection (id + endTime) of executions NOT in the given statuses, ordered by end time ascending.
List<DataJobExecutionIdAndEndTime> findByDataJobNameAndStatusNotInOrderByEndTime(String jobName, List<ExecutionStatus> statuses);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
/*
* Copyright (c) 2021 VMware, Inc.
* SPDX-License-Identifier: Apache-2.0
*/

package com.vmware.taurus.service.graphql;

import com.vmware.taurus.datajobs.ToApiModelConverter;
import com.vmware.taurus.service.JobExecutionRepository;
import com.vmware.taurus.service.graphql.model.V2DataJob;
import com.vmware.taurus.service.graphql.strategy.datajob.JobFieldStrategyBy;
import com.vmware.taurus.service.model.DataJobExecution;
import com.vmware.taurus.service.graphql.model.Filter;
import com.vmware.taurus.service.graphql.model.ExecutionQueryVariables;
import graphql.GraphQLException;
import graphql.schema.DataFetchingEnvironment;
import graphql.schema.SelectedField;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;

/**
 * Data fetcher class for Data Job Executions.
 *
 * Data fetchers are classes that provide the GraphQL API with the needed data while modifying the
 * source data: given a list of data jobs, this class alters each job and attaches its executions,
 * reading the requested information from the GraphQL query (how many executions, whether they are
 * sorted by a specific field, etc.).
 *
 * Currently, the execution data fetcher does not provide filtering by specifying fields due
 * to its post-pagination loading - the execution data is attached after slicing of the requested page.
 */
@Component
public class ExecutionDataFetcher {

   private final JobExecutionRepository jobsExecutionRepository;

   public ExecutionDataFetcher(JobExecutionRepository jobsExecutionRepository) {
      this.jobsExecutionRepository = jobsExecutionRepository;
   }

   /**
    * Attaches a page of executions to each data job in the list.
    *
    * Paging and sorting are taken from the arguments of the "executions" field in the GraphQL
    * query (see {@link #fetchQueryVariables}).
    *
    * @param allDataJob jobs to enrich; mutated in place and returned for chaining
    * @param dataFetchingEnvironment GraphQL environment holding the query selection set
    * @return the same list, with executions attached
    */
   List<V2DataJob> populateExecutions(List<V2DataJob> allDataJob, DataFetchingEnvironment dataFetchingEnvironment) {
      final ExecutionQueryVariables queryVariables = fetchQueryVariables(dataFetchingEnvironment);
      final Pageable pageable = constructPageable(queryVariables);
      allDataJob.forEach(dataJob -> {
         if (dataJob.getDeployments() != null) {
            List<DataJobExecution> executionsPerJob =
                  jobsExecutionRepository.findDataJobExecutionsByDataJobName(dataJob.getJobName(), pageable);
            // Executions are attached only to the first deployment of each job.
            dataJob.getDeployments()
                  .stream()
                  .findFirst()
                  .ifPresent(deployment -> deployment.setExecutions(
                        executionsPerJob
                              .stream()
                              .map(ToApiModelConverter::jobExecutionToConvert)
                              .collect(Collectors.toList())));
         }
      });
      return allDataJob;
   }

   /**
    * Reads pageNumber, pageSize and the execution filter from the arguments of the
    * "deployments.executions" field and validates them.
    *
    * @throws GraphQLException when pageNumber/pageSize are missing or invalid,
    *         or when an unsupported filter (pattern) is supplied
    */
   @SuppressWarnings("unchecked")
   private ExecutionQueryVariables fetchQueryVariables(DataFetchingEnvironment dataFetchingEnvironment) {
      ExecutionQueryVariables queryVariables = new ExecutionQueryVariables();
      // NOTE(review): callers are expected to invoke this only when the executions field is
      // selected; getField would otherwise yield no arguments — confirm against GraphQLDataFetchers.
      SelectedField executionFields = dataFetchingEnvironment
            .getSelectionSet().getField(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath());

      Map<String, Object> execArgs = executionFields.getArguments();
      if (execArgs.get("pageNumber") == null || execArgs.get("pageSize") == null) {
         throw new GraphQLException("Executions field must contain pageSize and pageNumber");
      }
      queryVariables.setPageNumber((int) execArgs.get("pageNumber"));
      queryVariables.setPageSize((int) execArgs.get("pageSize"));
      GraphQLUtils.validatePageInput(queryVariables.getPageSize(), queryVariables.getPageNumber());
      // Cast to the List interface rather than a concrete ArrayList: the GraphQL library only
      // guarantees a List for parsed JSON arrays, and convertFilters accepts the interface.
      queryVariables.setFilters(GraphQLUtils.convertFilters((List<LinkedHashMap<String, String>>) execArgs.get("filter")));
      validateFilterInputForExecutions(queryVariables.getFilters());

      return queryVariables;
   }

   /**
    * As we receive filters as a custom GraphQL object, this method translates it to a Spring Data
    * Pageable element. By default, if no fields are specified, we return only the paginating
    * details. If sorting is not provided we use the default (ASC); by design at most one sort
    * direction is taken.
    *
    * @param queryVar query variables which hold multiple Filter objects
    * @return Pageable element containing page and sort
    */
   private Pageable constructPageable(ExecutionQueryVariables queryVar) {
      // At most one direction is honored: the first non-null sort wins, ASC otherwise.
      Sort.Direction direction = queryVar.getFilters().stream()
            .map(Filter::getSort)
            .filter(Objects::nonNull)
            .findFirst()
            .orElse(Sort.Direction.ASC);

      // Strip the "executions." prefix so properties address the DataJobExecution entity directly.
      List<Sort.Order> order = queryVar.getFilters().stream()
            .map(Filter::getProperty)
            .filter(Objects::nonNull)
            .map(s -> s.replace(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getField() + ".", ""))
            .map(s -> new Sort.Order(direction, s))
            .collect(Collectors.toList());

      // Page numbers are 1-based in the API, 0-based in Spring Data.
      PageRequest pageRequest = PageRequest.of(queryVar.getPageNumber() - 1, queryVar.getPageSize());
      return order.isEmpty() ? pageRequest : pageRequest.withSort(Sort.by(order));
   }

   /**
    * Rejects filters that use patterns: pattern filtering of executions is not supported because
    * executions are loaded post-pagination.
    *
    * @throws GraphQLException when any filter carries a pattern
    */
   void validateFilterInputForExecutions(List<Filter> executionsFilter) {
      final Optional<Filter> filterNotSupported = executionsFilter.stream()
            .filter(e -> e.getPattern() != null)
            .findAny();
      if (filterNotSupported.isPresent()) {
         throw new GraphQLException("Using patterns for execution filtering is currently not supported");
      }
   }

}
Original file line number Diff line number Diff line change
Expand Up @@ -9,21 +9,28 @@
import com.vmware.taurus.service.JobsRepository;
import com.vmware.taurus.service.deploy.DeploymentService;
import com.vmware.taurus.service.graphql.model.Criteria;
import com.vmware.taurus.service.graphql.model.DataJobQueryVariables;
import com.vmware.taurus.service.graphql.model.Filter;
import com.vmware.taurus.service.graphql.model.V2DataJob;
import com.vmware.taurus.service.graphql.strategy.FieldStrategy;
import com.vmware.taurus.service.graphql.strategy.JobFieldStrategyFactory;
import com.vmware.taurus.service.graphql.strategy.datajob.JobFieldStrategyBy;
import com.vmware.taurus.service.model.DataJobPage;
import com.vmware.taurus.service.model.Filter;
import com.vmware.taurus.service.model.JobDeploymentStatus;
import graphql.GraphQLException;
import graphql.GraphqlErrorException;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import graphql.schema.DataFetchingFieldSelectionSet;
import lombok.AllArgsConstructor;
import org.springframework.stereotype.Component;

import java.util.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Predicate;
import java.util.stream.Collectors;
Expand All @@ -39,46 +46,63 @@ public class GraphQLDataFetchers {
private final JobFieldStrategyFactory strategyFactory;
private final JobsRepository jobsRepository;
private final DeploymentService deploymentService;
private final ExecutionDataFetcher executionDataFetcher;

public DataFetcher<Object> findAllAndBuildDataJobPage() {
return dataFetchingEnvironment -> {
int pageNumber = dataFetchingEnvironment.getArgument("pageNumber");
int pageSize = dataFetchingEnvironment.getArgument("pageSize");
String search = dataFetchingEnvironment.getArgument("search");
List<Filter> filters = convertFilters(dataFetchingEnvironment.getArgument("filter"));
validateInput(pageSize, pageNumber);

DataJobQueryVariables queryVar = fetchDataJobQueryVariables(dataFetchingEnvironment);
List<V2DataJob> allDataJob = StreamSupport.stream(jobsRepository.findAll().spliterator(), false)
.map(ToApiModelConverter::toV2DataJob)
.collect(Collectors.toList());

DataFetchingFieldSelectionSet requestedFields = dataFetchingEnvironment.getSelectionSet();
final Criteria<V2DataJob> filterCriteria = populateCriteria(filters);

List<V2DataJob> dataJobsFiltered = populateDataJobsByRequestedFields(requestedFields, allDataJob).stream()
final Criteria<V2DataJob> filterCriteria = populateCriteria(queryVar.getFilters());
List<V2DataJob> dataJobsFiltered = populateDataJobsByRequestedFields(dataFetchingEnvironment, allDataJob).stream()
.filter(filterCriteria.getPredicate())
.filter(computeSearch(requestedFields, search))
.filter(computeSearch(dataFetchingEnvironment.getSelectionSet(), queryVar.getSearch()))
.sorted(filterCriteria.getComparator())
.collect(Collectors.toList());

int count = dataJobsFiltered.size();

List<Object> resultList = dataJobsFiltered.stream()
.skip((long) (pageNumber - 1) * pageSize)
.limit(pageSize)
List<V2DataJob> dataJobList = dataJobsFiltered.stream()
.skip((long) (queryVar.getPageNumber() - 1) * queryVar.getPageSize())
.limit(queryVar.getPageSize())
.collect(Collectors.toList());

return buildDataJobPage(pageSize, count, resultList);
List<V2DataJob> resultList = populateDataJobsPostPagination(dataJobList, dataFetchingEnvironment);

return buildDataJobPage(queryVar.getPageSize(), count, new ArrayList<>(resultList));
};
}

/**
 * Enriches the already-sliced page of data jobs with data that is loaded after pagination.
 * Executions are attached only when the query explicitly selects the executions field.
 */
private List<V2DataJob> populateDataJobsPostPagination(List<V2DataJob> allDataJob, DataFetchingEnvironment dataFetchingEnvironment) {
   final boolean executionsRequested = dataFetchingEnvironment
         .getSelectionSet()
         .contains(JobFieldStrategyBy.DEPLOYMENT_EXECUTIONS.getPath());

   if (executionsRequested) {
      executionDataFetcher.populateExecutions(allDataJob, dataFetchingEnvironment);
   }
   return allDataJob;
}

/**
 * Reads the top-level query arguments (pageNumber, pageSize, search, filter) into a
 * DataJobQueryVariables holder, validating the paging input in the process.
 */
private DataJobQueryVariables fetchDataJobQueryVariables(DataFetchingEnvironment dataFetchingEnvironment) {
   final DataJobQueryVariables variables = new DataJobQueryVariables();

   variables.setPageNumber(dataFetchingEnvironment.getArgument("pageNumber"));
   variables.setPageSize(dataFetchingEnvironment.getArgument("pageSize"));
   GraphQLUtils.validatePageInput(variables.getPageSize(), variables.getPageNumber());

   variables.setSearch(dataFetchingEnvironment.getArgument("search"));
   variables.setFilters(GraphQLUtils.convertFilters(dataFetchingEnvironment.getArgument("filter")));

   return variables;
}

/**
* Alter each data job in order to populate fields that are requested from the GraphQL body
* @param requestedFields Requested fields from GraphQL query, parsed from the env
* @param dataFetchingEnvironment Environment holder of the graphql requests
* @param allDataJob List of the data jobs which will be altered
* @return Altered data job list
*/
private List<V2DataJob> populateDataJobsByRequestedFields(DataFetchingFieldSelectionSet requestedFields, List<V2DataJob> allDataJob) {
private List<V2DataJob> populateDataJobsByRequestedFields(DataFetchingEnvironment dataFetchingEnvironment, List<V2DataJob> allDataJob) {
DataFetchingFieldSelectionSet requestedFields = dataFetchingEnvironment.getSelectionSet();
if (requestedFields.contains(JobFieldStrategyBy.DEPLOYMENT.getPath())) {
populateDeployments(allDataJob);
}
Expand All @@ -90,19 +114,6 @@ private List<V2DataJob> populateDataJobsByRequestedFields(DataFetchingFieldSelec
return allDataJob;
}

private List<Filter> convertFilters(ArrayList<LinkedHashMap<String, String>> rawFilters) {
List<Filter> filters = new ArrayList<>();
if (rawFilters != null && !rawFilters.isEmpty()) {
rawFilters.forEach(map -> {
if (map != null && !map.isEmpty()) {
Filter.Direction direction = map.get("sort") == null ? null : Filter.Direction.valueOf(map.get("sort"));
filters.add(new Filter(map.get("property"), map.get("pattern"), direction));
}
});
}
return filters;
}

private Criteria<V2DataJob> populateCriteria(List<Filter> filterList) {
// concurrent result, calculation might be using Fork-Join API to speed-up
final AtomicReference<Criteria<V2DataJob>> criteriaResult = new AtomicReference<>(JOB_CRITERIA_DEFAULT);
Expand Down Expand Up @@ -145,23 +156,6 @@ private Predicate<V2DataJob> computeSearch(DataFetchingFieldSelectionSet request
return predicate == null ? Objects::nonNull : predicate;
}

private static void validateInput(int pageSize, int pageNumber) {
if (pageSize < 1) {
throw new GraphQLException("Page size cannot be less than 1");
}
if (pageNumber < 1) {
throw new GraphQLException("Page cannot be less than 1");
}
}

private static DataJobPage buildDataJobPage(int pageSize, int count, List<Object> pageList) {
var dataJobPage = new DataJobPage();
dataJobPage.setContent(pageList);
dataJobPage.setTotalPages(((count - 1) / pageSize + 1));
dataJobPage.setTotalItems(count);
return dataJobPage;
}

private List<V2DataJob> populateDeployments(List<V2DataJob> allDataJob) {
Map<String, JobDeploymentStatus> deploymentStatuses = deploymentService.readDeployments()
.stream().collect(Collectors.toMap(JobDeploymentStatus::getDataJobName, cronJob -> cronJob));
Expand All @@ -176,4 +170,12 @@ private List<V2DataJob> populateDeployments(List<V2DataJob> allDataJob) {
});
return allDataJob;
}

/**
 * Wraps one page of results into a DataJobPage DTO.
 *
 * @param pageSize requested page size; must be >= 1 (validated upstream)
 * @param count    total number of items across all pages
 * @param pageList items of the current page
 * @return DTO carrying the page content, total page count and total item count
 */
// NOTE(review): for count == 0 the formula yields totalPages == 1 ((0-1)/pageSize truncates to
// 0, then +1) — confirm whether an empty result should report 0 pages instead.
private static DataJobPage buildDataJobPage(int pageSize, int count, List<Object> pageList) {
var dataJobPage = new DataJobPage();
dataJobPage.setContent(pageList);
dataJobPage.setTotalPages(((count - 1) / pageSize + 1));
dataJobPage.setTotalItems(count);
return dataJobPage;
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
package com.vmware.taurus.service.graphql;

import com.vmware.taurus.service.graphql.model.Filter;
import graphql.GraphQLException;
import lombok.experimental.UtilityClass;
import org.springframework.data.domain.Sort;

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

@UtilityClass
public class GraphQLUtils {

   /**
    * GraphQL's library parses JSON query variables into a List of LinkedHashMaps. To make later
    * filtering and sorting by a specific field easy, we convert them to a list of Filters.
    * @see Filter
    * @see GraphQLDataFetchers
    *
    * @param rawFilters filters object as fetched from the GraphQL environment
    * @return list of converted filters (empty when input is null or empty)
    */
   public static List<Filter> convertFilters(List<LinkedHashMap<String, String>> rawFilters) {
      final List<Filter> converted = new ArrayList<>();
      if (rawFilters == null) {
         return converted;
      }
      for (LinkedHashMap<String, String> rawFilter : rawFilters) {
         if (rawFilter == null || rawFilter.isEmpty()) {
            continue;
         }
         final String rawSort = rawFilter.get("sort");
         final Sort.Direction direction = rawSort == null ? null : Sort.Direction.valueOf(rawSort);
         converted.add(new Filter(rawFilter.get("property"), rawFilter.get("pattern"), direction));
      }
      return converted;
   }

   /**
    * Validates 1-based paging input.
    *
    * @throws GraphQLException when pageSize or pageNumber is less than 1
    */
   public static void validatePageInput(int pageSize, int pageNumber) {
      if (pageSize < 1) {
         throw new GraphQLException("Page size cannot be less than 1");
      }
      if (pageNumber < 1) {
         throw new GraphQLException("Page cannot be less than 1");
      }
   }

}
Loading

0 comments on commit fe23af0

Please sign in to comment.