control-service: graphQL filter by jobName, startTimeLte, endTimeLte #551

Merged: 4 commits on Nov 26, 2021
GraphQLExecutionsIT.java
@@ -5,22 +5,6 @@

package com.vmware.taurus.graphql.it;

import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.user;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import java.time.OffsetDateTime;
import java.util.List;

import org.hamcrest.Matchers;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;

import com.vmware.taurus.ControlplaneApplication;
import com.vmware.taurus.datajobs.it.common.BaseIT;
import com.vmware.taurus.datajobs.it.common.JobExecutionUtil;
@@ -30,6 +14,21 @@
import com.vmware.taurus.service.model.DataJobExecution;
import com.vmware.taurus.service.model.ExecutionStatus;
import com.vmware.taurus.service.model.JobConfig;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;

import java.time.OffsetDateTime;
import java.util.List;

import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.user;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, classes = ControlplaneApplication.class)
public class GraphQLExecutionsIT extends BaseIT {
@@ -67,9 +66,9 @@ public void setup() {
this.dataJobExecution1 = JobExecutionUtil.createDataJobExecution(
jobExecutionRepository, "testId1", dataJob1, now, now, ExecutionStatus.FINISHED);
this.dataJobExecution2 = JobExecutionUtil.createDataJobExecution(
jobExecutionRepository,"testId2", dataJob2, now.minusSeconds(1), now.minusSeconds(1), ExecutionStatus.RUNNING);
jobExecutionRepository, "testId2", dataJob2, now.minusSeconds(1), now.minusSeconds(1), ExecutionStatus.RUNNING);
this.dataJobExecution3 = JobExecutionUtil.createDataJobExecution(
jobExecutionRepository,"testId3", dataJob3, now.minusSeconds(10), now.minusSeconds(10), ExecutionStatus.SUBMITTED);
jobExecutionRepository, "testId3", dataJob3, now.minusSeconds(10), now.minusSeconds(10), ExecutionStatus.SUBMITTED);
}

private static String getQuery() {
@@ -116,6 +115,34 @@ public void testExecutions_filterByStartTimeGte() throws Exception {
Matchers.not(Matchers.contains(dataJobExecution3.getId()))));
}

@Test
public void testExecutions_filterByStartTimeLte() throws Exception {
mockMvc.perform(MockMvcRequestBuilders.get(JOBS_URI)
.queryParam("query", getQuery())
.param("variables", "{" +
"\"filter\": {" +
" \"startTimeLte\": \"" + dataJobExecution2.getStartTime() + "\"" +
" }," +
"\"pageNumber\": 1," +
"\"pageSize\": 10" +
"}")
.with(user(TEST_USERNAME)))
.andExpect(status().is(200))
.andExpect(content().contentType("application/json"))
.andExpect(jsonPath(
"$.data.content[*].id",
Matchers.contains(dataJobExecution2.getId(), dataJobExecution3.getId())))
.andExpect(jsonPath(
"$.data.content[*].jobName",
Matchers.contains(dataJob2.getName(), dataJob3.getName())))
.andExpect(jsonPath(
"$.data.content[*].status",
Matchers.contains(dataJobExecution2.getStatus().toString(), dataJobExecution3.getStatus().toString())))
.andExpect(jsonPath(
"$.data.content[*].id",
Matchers.not(Matchers.contains(dataJobExecution1.getId()))));
}

@Test
public void testExecutions_filterByEndTimeGte() throws Exception {
mockMvc.perform(MockMvcRequestBuilders.get(JOBS_URI)
@@ -144,6 +171,34 @@ public void testExecutions_filterByEndTimeGte() throws Exception {
Matchers.not(Matchers.contains(dataJobExecution3.getId()))));
}

@Test
public void testExecutions_filterByEndTimeLte() throws Exception {
mockMvc.perform(MockMvcRequestBuilders.get(JOBS_URI)
.queryParam("query", getQuery())
.param("variables", "{" +
"\"filter\": {" +
" \"endTimeLte\": \"" + dataJobExecution2.getEndTime() + "\"" +
" }," +
"\"pageNumber\": 1," +
"\"pageSize\": 10" +
"}")
.with(user("user")))
.andExpect(status().is(200))
.andExpect(content().contentType("application/json"))
.andExpect(jsonPath(
"$.data.content[*].id",
Matchers.contains(dataJobExecution2.getId(), dataJobExecution3.getId())))
.andExpect(jsonPath(
"$.data.content[*].jobName",
Matchers.contains(dataJob2.getName(), dataJob3.getName())))
.andExpect(jsonPath(
"$.data.content[*].status",
Matchers.contains(dataJobExecution2.getStatus().toString(), dataJobExecution3.getStatus().toString())))
.andExpect(jsonPath(
"$.data.content[*].id",
Matchers.not(Matchers.contains(dataJobExecution1.getId()))));
}

@Test
public void testExecutions_filterByStatusIn() throws Exception {
mockMvc.perform(MockMvcRequestBuilders.get(JOBS_URI)
@@ -169,4 +224,32 @@ public void testExecutions_filterByStatusIn() throws Exception {
Matchers.contains(dataJobExecution1.getStatus().toString(), dataJobExecution2.getStatus().toString())))
.andExpect(jsonPath("$.data.content[*].id", Matchers.not(Matchers.contains(dataJobExecution3.getId()))));
}

@Test
public void testExecutions_filterByJobNameIn() throws Exception {
mockMvc.perform(MockMvcRequestBuilders.get(JOBS_URI)
.queryParam("query", getQuery())
.param("variables", "{" +
"\"filter\": {" +
" \"jobNameIn\": [\"" + dataJobExecution1.getDataJob().getName() + "\"]" +
" }," +
"\"pageNumber\": 1," +
"\"pageSize\": 10" +
"}")
.with(user("user")))
.andExpect(status().is(200))
.andExpect(content().contentType("application/json"))
.andExpect(jsonPath(
"$.data.content[*].id",
Matchers.contains(dataJobExecution1.getId())))
.andExpect(jsonPath(
"$.data.content[*].jobName",
Matchers.contains(dataJob1.getName())))
.andExpect(jsonPath(
"$.data.content[*].status",
Matchers.contains(dataJobExecution1.getStatus().toString())))
.andExpect(jsonPath("$.data.content[*].id", Matchers.not(Matchers.contains(dataJobExecution3.getId()))))
.andExpect(jsonPath("$.data.content[*].id", Matchers.not(Matchers.contains(dataJobExecution2.getId()))));
}

}
JobExecutionFilterSpec.java
@@ -5,21 +5,21 @@

package com.vmware.taurus.service;

import com.vmware.taurus.service.graphql.model.DataJobExecutionFilter;
import com.vmware.taurus.service.model.DataJobExecution;
import com.vmware.taurus.service.model.DataJobExecution_;
import com.vmware.taurus.service.model.DataJob_;
import lombok.AllArgsConstructor;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.data.jpa.domain.Specification;

import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import java.util.ArrayList;
import java.util.List;

import lombok.AllArgsConstructor;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.data.jpa.domain.Specification;

import com.vmware.taurus.service.graphql.model.DataJobExecutionFilter;
import com.vmware.taurus.service.model.DataJobExecution;
import com.vmware.taurus.service.model.DataJobExecution_;

@AllArgsConstructor
public class JobExecutionFilterSpec implements Specification<DataJobExecution> {

@@ -34,13 +34,26 @@ public Predicate toPredicate(Root<DataJobExecution> root, CriteriaQuery<?> query
predicates.add(builder.greaterThanOrEqualTo(root.get(DataJobExecution_.START_TIME), filter.getStartTimeGte()));
}

if (filter.getStartTimeLte() != null) {
predicates.add(builder.lessThanOrEqualTo(root.get(DataJobExecution_.START_TIME), filter.getStartTimeLte()));
}

if (filter.getEndTimeGte() != null) {
predicates.add(builder.greaterThanOrEqualTo(root.get(DataJobExecution_.END_TIME), filter.getEndTimeGte()));
}

if (filter.getEndTimeLte() != null) {
predicates.add(builder.lessThanOrEqualTo(root.get(DataJobExecution_.END_TIME), filter.getEndTimeLte()));
}

if (CollectionUtils.isNotEmpty(filter.getStatusIn())) {
predicates.add(root.get(DataJobExecution_.STATUS).in(filter.getStatusIn()));
}

if (CollectionUtils.isNotEmpty(filter.getJobNameIn())) {
predicates.add(root.get(DataJobExecution_.DATA_JOB).get(DataJob_.NAME).in(filter.getJobNameIn()));
}

}

return builder.and(predicates.toArray(new Predicate[0]));
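For context, a minimal sketch of how a specification like this is typically applied, assuming JobExecutionRepository extends Spring Data's JpaSpecificationExecutor<DataJobExecution> and that the spec's only constructor argument is the filter (both assumptions, not part of this diff):

import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;

import com.vmware.taurus.service.JobExecutionFilterSpec;
import com.vmware.taurus.service.JobExecutionRepository;
import com.vmware.taurus.service.graphql.model.DataJobExecutionFilter;
import com.vmware.taurus.service.model.DataJobExecution;

class JobExecutionFilterSpecSketch {

    // Illustrative only: each non-null filter field contributes one predicate,
    // and toPredicate above AND-s them together.
    Page<DataJobExecution> findMatching(JobExecutionRepository repository, DataJobExecutionFilter filter) {
        // findAll(Specification, Pageable) is available when the repository
        // extends JpaSpecificationExecutor<DataJobExecution> (assumed here).
        return repository.findAll(new JobExecutionFilterSpec(filter), PageRequest.of(0, 10));
    }
}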
GraphQL execution data fetcher
@@ -54,6 +54,7 @@
import static com.vmware.taurus.service.graphql.model.DataJobExecutionOrder.AVAILABLE_PROPERTIES;
import static com.vmware.taurus.service.graphql.model.DataJobExecutionOrder.DIRECTION_FIELD;
import static com.vmware.taurus.service.graphql.model.DataJobExecutionOrder.PROPERTY_FIELD;
import static com.vmware.taurus.service.graphql.model.DataJobExecutionOrder.PUBLIC_NAME_TO_DB_ENTITY_MAP;
import static com.vmware.taurus.service.graphql.model.DataJobExecutionQueryVariables.FILTER_FIELD;
import static com.vmware.taurus.service.graphql.model.DataJobExecutionQueryVariables.ORDER_FIELD;
import static com.vmware.taurus.service.graphql.model.DataJobExecutionQueryVariables.PAGE_NUMBER_FIELD;
@@ -246,7 +247,7 @@ private static Optional<DataJobExecutionFilter> extractDataJobExecutionFilter(Ma

builder.statusIn(
Optional.ofNullable(filterRaw.get(DataJobExecutionFilter.STATUS_IN_FIELD))
.map(statusInRaw -> ((List<String>)statusInRaw))
.map(statusInRaw -> ((List<String>) statusInRaw))
.stream()
.flatMap(v1Jobs -> v1Jobs.stream())
.filter(Objects::nonNull)
@@ -257,8 +258,11 @@ private static Optional<DataJobExecutionFilter> extractDataJobExecutionFilter(Ma

filter = Optional.of(
builder
.startTimeGte((OffsetDateTime)filterRaw.get(DataJobExecutionFilter.START_TIME_GTE_FIELD))
.endTimeGte((OffsetDateTime)filterRaw.get(DataJobExecutionFilter.END_TIME_GTE_FIELD))
.startTimeGte((OffsetDateTime) filterRaw.get(DataJobExecutionFilter.START_TIME_GTE_FIELD))
.endTimeGte((OffsetDateTime) filterRaw.get(DataJobExecutionFilter.END_TIME_GTE_FIELD))
.startTimeLte((OffsetDateTime) filterRaw.get(DataJobExecutionFilter.START_TIME_LTE_FIELD))
.endTimeLte((OffsetDateTime) filterRaw.get(DataJobExecutionFilter.END_TIME_LTE_FIELD))
.jobNameIn((List<String>) filterRaw.get(DataJobExecutionFilter.JOB_NAME_IN_FIELD))
.build()
);
}
@@ -274,8 +278,9 @@ private static Optional<DataJobExecutionOrder> extractDataJobExecutionOrder(Map<

builder.property(
Optional.ofNullable(orderRaw.get(PROPERTY_FIELD))
.map(o -> (String)o)
.map(o -> (String) o)
.filter(p -> AVAILABLE_PROPERTIES.contains(p))
.map(p -> PUBLIC_NAME_TO_DB_ENTITY_MAP.getOrDefault(p, p)) // If no mapping present use user provided property name
.orElseThrow(() -> new GraphQLException(String.format(
"%s.%s must be in [%s]",
ORDER_FIELD,
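As a side note, a small illustrative sketch (not from this change) of what the jobName mapping enables: Spring Data resolves dot-separated sort properties through entity associations, so the value "dataJob.name" from PUBLIC_NAME_TO_DB_ENTITY_MAP can be handed straight to a Sort.

import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;

class ExecutionOrderSketch {

    // Illustrative only: "jobName" is the public GraphQL property; the map above
    // translates it to the entity path "dataJob.name", which Spring Data resolves
    // through the DataJobExecution -> DataJob association when building the ORDER BY.
    Pageable pageSortedByJobName(int pageNumber, int pageSize) {
        return PageRequest.of(pageNumber, pageSize, Sort.by(Sort.Direction.ASC, "dataJob.name"));
    }
}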
DataJobExecutionFilter.java
@@ -5,14 +5,13 @@

package com.vmware.taurus.service.graphql.model;

import java.time.OffsetDateTime;
import java.util.List;

import com.vmware.taurus.service.model.ExecutionStatus;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;

import com.vmware.taurus.service.model.ExecutionStatus;
import java.time.OffsetDateTime;
import java.util.List;

@Data
@AllArgsConstructor
@@ -22,8 +21,15 @@ public class DataJobExecutionFilter {
public static final String START_TIME_GTE_FIELD = "startTimeGte";
public static final String END_TIME_GTE_FIELD = "endTimeGte";
public static final String STATUS_IN_FIELD = "statusIn";
public static final String JOB_NAME_IN_FIELD = "jobNameIn";
public static final String START_TIME_LTE_FIELD = "startTimeLte";
public static final String END_TIME_LTE_FIELD = "endTimeLte";

private OffsetDateTime startTimeGte;
private OffsetDateTime endTimeGte;
private List<ExecutionStatus> statusIn;
private List<String> jobNameIn;
private OffsetDateTime startTimeLte;
private OffsetDateTime endTimeLte;

}
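For illustration, all of the new fields are optional; a sketch of combining them through the Lombok builder (the job name and time window are made up, and the presence of @Builder on the class is assumed from its imports):

import java.time.OffsetDateTime;
import java.util.List;

import com.vmware.taurus.service.graphql.model.DataJobExecutionFilter;

class FilterBuilderSketch {

    // Illustrative only: fields left unset stay null, and JobExecutionFilterSpec
    // simply skips the corresponding predicate.
    DataJobExecutionFilter lastHourOf(String jobName) {
        OffsetDateTime now = OffsetDateTime.now();
        return DataJobExecutionFilter.builder()
              .jobNameIn(List.of(jobName))      // hypothetical job name supplied by the caller
              .startTimeGte(now.minusHours(1))
              .startTimeLte(now)
              .build();
    }
}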
DataJobExecutionOrder.java
@@ -5,18 +5,21 @@

package com.vmware.taurus.service.graphql.model;

import java.util.Set;

import com.vmware.taurus.service.model.DataJobExecution_;
import com.vmware.taurus.service.model.DataJob_;
import lombok.Builder;
import lombok.Data;
import org.springframework.data.domain.Sort;

import com.vmware.taurus.service.model.DataJobExecution_;
import java.util.Map;
import java.util.Set;

@Data
@Builder
public class DataJobExecutionOrder {

public static final String DATA_JOB_NAME = "jobName";

public static final Set<String> AVAILABLE_PROPERTIES = Set.of(
DataJobExecution_.MESSAGE,
DataJobExecution_.TYPE,
@@ -30,7 +33,12 @@ public class DataJobExecutionOrder {
DataJobExecution_.LAST_DEPLOYED_BY,
DataJobExecution_.STARTED_BY,
DataJobExecution_.STATUS,
DataJobExecution_.VDK_VERSION);
DataJobExecution_.VDK_VERSION,
DATA_JOB_NAME);

public static final Map<String, String> PUBLIC_NAME_TO_DB_ENTITY_MAP = Map.of(
DATA_JOB_NAME, DataJobExecution_.DATA_JOB + "." + DataJob_.NAME
);

public static final String PROPERTY_FIELD = "property";
public static final String DIRECTION_FIELD = "direction";
GraphQL schema
@@ -23,7 +23,10 @@ enum Direction {
input DataJobExecutionFilter {
startTimeGte: DateTime,
endTimeGte: DateTime,
statusIn: [DataJobExecutionStatus]
statusIn: [DataJobExecutionStatus],
jobNameIn: [String],
startTimeLte: DateTime,
endTimeLte: DateTime
}

input DataJobExecutionOrder {
RepositoryUtil.java
@@ -7,18 +7,27 @@

import com.vmware.taurus.service.JobExecutionRepository;
import com.vmware.taurus.service.JobsRepository;
import com.vmware.taurus.service.model.*;
import com.vmware.taurus.service.model.DataJob;
import com.vmware.taurus.service.model.DataJobExecution;
import com.vmware.taurus.service.model.DeploymentStatus;
import com.vmware.taurus.service.model.ExecutionStatus;
import com.vmware.taurus.service.model.ExecutionType;
import com.vmware.taurus.service.model.JobConfig;
import org.junit.jupiter.api.Assertions;

import java.time.OffsetDateTime;

public final class RepositoryUtil {

public static DataJob createDataJob(JobsRepository jobsRepository) {
return createDataJob(jobsRepository, "test-job");
}

public static DataJob createDataJob(JobsRepository jobsRepository, String jobName) {
JobConfig config = new JobConfig();
config.setSchedule("schedule");
config.setTeam("test-team");
var expectedJob = new DataJob("test-job", config, DeploymentStatus.NONE);
var expectedJob = new DataJob(jobName, config, DeploymentStatus.NONE);
var actualJob = jobsRepository.save(expectedJob);
Assertions.assertEquals(expectedJob, actualJob);
