Skip to content

Commit

Permalink
Merge branch 'main' into person/mzhivkov/filter-by-job-start-end-time
Browse files Browse the repository at this point in the history
  • Loading branch information
Momchil Z authored Nov 24, 2021
2 parents b26bb11 + 0f5b186 commit 51d130b
Show file tree
Hide file tree
Showing 61 changed files with 810 additions and 159 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ repos:
- id: trailing-whitespace
- id: check-executables-have-shebangs
- repo: https://github.com/psf/black
rev: 21.10b0
rev: 21.11b1
hooks:
- id: black
language_version: python3.7
Expand Down Expand Up @@ -45,7 +45,7 @@ repos:
args: [--py37-plus, '--application-directories=.:src']
# use latest python syntax
- repo: https://github.com/asottile/pyupgrade
rev: v2.29.0
rev: v2.29.1
hooks:
- id: pyupgrade
args: [--py37-plus]
Expand Down
12 changes: 12 additions & 0 deletions projects/control-service/cicd/deploy-testing-pipelines-service.sh
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,18 @@ if [ "$RUN_ENVIRONMENT_SETUP" = 'y' ]; then
--docker-password="$CICD_CONTAINER_REGISTRY_USER_PASSWORD" \
--docker-email="[email protected]" --dry-run=client -o yaml | kubectl apply -f -
kubectl patch serviceaccount default -p '{"imagePullSecrets":[{"name":"'$secret_name'"}]}'

# Optionally register DockerHub read-only pull credentials (e.g. to avoid
# anonymous pull rate limits). Only runs when a username is provided.
# NOTE(review): only the username is checked for non-emptiness — assumes
# DOCKERHUB_READONLY_PASSWORD is always set alongside it. TODO confirm.
if [ -n "$DOCKERHUB_READONLY_USERNAME" ]; then
dockerhub_secretname='secret-dockerhub-docker'
# Create or update the registry secret idempotently: render with
# --dry-run=client and pipe through "kubectl apply".
kubectl create secret docker-registry "$dockerhub_secretname" \
--docker-server="https://index.docker.io/v1/" \
--docker-username="$DOCKERHUB_READONLY_USERNAME" \
--docker-password="$DOCKERHUB_READONLY_PASSWORD" \
--docker-email="[email protected]" --dry-run=client -o yaml | kubectl apply -f -

# Re-patch the default service account with BOTH pull secrets: "kubectl patch"
# replaces the imagePullSecrets array, so the earlier $secret_name entry must
# be repeated here or it would be dropped.
kubectl patch serviceaccount default -p '{"imagePullSecrets":[{"name":"'$secret_name'"},{"name":"'$dockerhub_secretname'"}]}'
fi

fi

# this is the internal hostname of the Control Service.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,8 @@ spec:
value: "{{ .Values.notificationOwnerEmail }}"
- name: NOTIFICATION_OWNER_NAME
value: "{{ .Values.notificationOwnerName }}"
- name: NOTIFICATION_CC_EMAILS
value: "{{ .Values.notificationCcEmails }}"
- name: GIT_URL
value: "{{ .Values.deploymentGitUrl }}"
- name: GIT_BRANCH
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,10 @@ credentials:
## The owner name and email address that will be used to send all Versatile Data Kit related email notifications.
notificationOwnerEmail: "[email protected]"
notificationOwnerName: "Versatile Data Kit"
### Comma-separated list of email addresses which will be cc-ed for Control Service managed notifications
### This does not apply to notifications managed by external services (like AlertManager)
#notificationCcEmails: "[email protected],[email protected]"


### deploymentXXX refer to properties used in order to deploy a Data Job.

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,8 @@
import com.vmware.taurus.ControlplaneApplication;
import com.vmware.taurus.datajobs.it.common.BaseIT;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.ResultActions;

import static com.vmware.taurus.datajobs.it.common.WebHookServerMockExtension.NEW_TEST_TEAM_NAME;
import static com.vmware.taurus.datajobs.it.common.WebHookServerMockExtension.TEST_JOB_1;
Expand All @@ -22,16 +19,16 @@
import static com.vmware.taurus.datajobs.it.common.WebHookServerMockExtension.TEST_JOB_5;
import static com.vmware.taurus.datajobs.it.common.WebHookServerMockExtension.TEST_JOB_6;
import static com.vmware.taurus.datajobs.it.common.WebHookServerMockExtension.TEST_TEAM_NAME;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.user;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, classes = ControlplaneApplication.class)
public class DataJobGraphQLIT extends BaseIT {

private static final Logger LOG = LoggerFactory.getLogger(DataJobGraphQLIT.class);
private static final String DEFAULT_QUERY_WITH_VARS =
"query($filter: [Predicate], $search: String, $pageNumber: Int, $pageSize: Int) {" +
" jobs(pageNumber: $pageNumber, pageSize: $pageSize, filter: $filter, search: $search) {" +
Expand Down Expand Up @@ -199,61 +196,6 @@ public void testGraphQLPagination() throws Exception {
deleteDummyJobs();
}

@Test
public void testGraphQLFields() throws Exception {
String dataJobTestBodyOne = getDataJobRequestBody(TEST_TEAM_NAME, TEST_JOB_1);
createJob(dataJobTestBodyOne, TEST_TEAM_NAME);

// Test requesting of fields that are computed
String contentAsString = mockMvc.perform(get(String.format("/data-jobs/for-team/%s/jobs", TEST_TEAM_NAME))
.with(user("user"))
.param("query",
"query($filter: [Predicate], $executionFilter: [Predicate], $search: String, $pageNumber: Int, $pageSize: Int) {" +
" jobs(pageNumber: $pageNumber, pageSize: $pageSize, filter: $filter, search: $search) {" +
" content {" +
" jobName" +
" deployments {" +
" id" +
" enabled" +
" executions(pageNumber: 1, pageSize: 5, filter: $executionFilter) {" +
" id" +
" status" +
" }" +
" }" +
" config {" +
" team" +
" description" +
" schedule {" +
" scheduleCron" +
" nextRunEpochSeconds" +
" }" +
" }" +
" }" +
" totalPages" +
" totalItems" +
" }" +
"}")
.param("variables", "{" +
"\"search\": \"" + TEST_JOB_1 + "\"," +
"\"pageNumber\": 1," +
"\"pageSize\": 10," +
"\"executionFilter\": [" +
" {" +
" \"sort\": \"DESC\"," +
" \"property\": \"deployments.executions.status\"" +
" }" +
" ]" +
"}")
.contentType(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
.andExpect(jsonPath("$.data.content[0].config.team", is(TEST_TEAM_NAME)))
.andExpect(jsonPath("$.data.content[0].config.schedule.scheduleCron", is(TEST_JOB_SCHEDULE)))
.andExpect(jsonPath("$.data.content[0].config.schedule.nextRunEpochSeconds", greaterThan(1)))
.andReturn().getResponse().getContentAsString();

deleteJob(TEST_JOB_1, TEST_TEAM_NAME);
}

private void createDummyJobs() throws Exception {
// Setup by creating 3 jobs in 2 separate teams (6 jobs total)
String dataJobTestBodyOne = getDataJobRequestBody(TEST_TEAM_NAME, TEST_JOB_1);
Expand Down Expand Up @@ -294,7 +236,7 @@ private void createJob(String body, String teamName) throws Exception {
}

private void deleteJob(String jobName, String teamName) throws Exception {
ResultActions resultActions = mockMvc.perform(delete(String.format("/data-jobs/for-team/%s/jobs/%s", teamName, jobName))
mockMvc.perform(delete(String.format("/data-jobs/for-team/%s/jobs/%s", teamName, jobName))
.with(user("user"))
.contentType(MediaType.APPLICATION_JSON))
.andExpect(status().isOk());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,6 @@

package com.vmware.taurus.datajobs.it.common;

import java.util.UUID;

import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.TestConfiguration;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,17 @@

package com.vmware.taurus.datajobs.it.common;

import static org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity;

import java.time.Instant;
import java.util.function.Predicate;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.vmware.taurus.controlplane.model.data.DataJobConfig;
import com.vmware.taurus.controlplane.model.data.DataJobDeployment;
import com.vmware.taurus.controlplane.model.data.DataJobMode;
import com.vmware.taurus.controlplane.model.data.DataJobResources;
import com.vmware.taurus.controlplane.model.data.DataJobSchedule;
import com.vmware.taurus.service.credentials.KerberosCredentialsRepository;
import com.vmware.taurus.service.kubernetes.ControlKubernetesService;
import com.vmware.taurus.service.kubernetes.DataJobsKubernetesService;
import com.vmware.taurus.service.model.JobConfig;
import io.kubernetes.client.ApiException;
import org.apache.commons.lang3.StringUtils;
import org.hamcrest.BaseMatcher;
Expand All @@ -36,15 +40,11 @@
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;

import com.vmware.taurus.controlplane.model.data.DataJobConfig;
import com.vmware.taurus.controlplane.model.data.DataJobDeployment;
import com.vmware.taurus.controlplane.model.data.DataJobMode;
import com.vmware.taurus.controlplane.model.data.DataJobResources;
import com.vmware.taurus.controlplane.model.data.DataJobSchedule;
import com.vmware.taurus.service.credentials.KerberosCredentialsRepository;
import com.vmware.taurus.service.kubernetes.ControlKubernetesService;
import com.vmware.taurus.service.kubernetes.DataJobsKubernetesService;
import com.vmware.taurus.service.model.JobConfig;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.util.function.Predicate;

import static org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity;

@AutoConfigureMockMvc
@ActiveProfiles({"test"})
Expand Down Expand Up @@ -154,6 +154,20 @@ public void describeTo(Description description) {
};
}

/**
 * Returns a Hamcrest matcher that checks a JSON string field against the
 * expected timestamp by instant (OffsetDateTime.isEqual compares the instant,
 * so "2000-01-01T00:00Z" and "2000-01-01T00:00+00:00" both match).
 */
protected Matcher<String> isDate(OffsetDateTime value) {
    return new BaseMatcher<>() {
        @Override
        public boolean matches(Object actual) {
            // Matcher contract: report a mismatch rather than throw on
            // unexpected input (null, non-String, or an unparseable string).
            if (!(actual instanceof String)) {
                return false;
            }
            try {
                return value.isEqual(OffsetDateTime.parse((String) actual));
            } catch (java.time.format.DateTimeParseException e) {
                return false; // not an ISO-8601 offset date-time => no match
            }
        }

        @Override
        public void describeTo(Description description) {
            // Shown in assertion-failure output: state the expected value.
            description.appendText("a date equal to ").appendValue(value);
        }
    };
}

public static String getDataJobRequestBody(String teamName, String jobName) throws JsonProcessingException {
var job = new com.vmware.taurus.controlplane.model.data.DataJob();
job.setJobName(jobName);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
/*
* Copyright 2021 VMware, Inc.
* SPDX-License-Identifier: Apache-2.0
*/

package com.vmware.taurus.graphql.it;

import com.vmware.taurus.datajobs.it.common.BaseDataJobDeploymentIT;
import com.vmware.taurus.service.JobsRepository;
import com.vmware.taurus.service.model.ExecutionStatus;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;

import java.time.OffsetDateTime;
import java.time.ZoneOffset;

import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.user;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

/**
 * Integration test verifying that the GraphQL jobs API exposes the computed
 * per-deployment fields (lastExecutionStatus, lastExecutionTime,
 * lastExecutionDuration) together with the job configuration fields.
 */
public class GraphQLDataJobsFieldsIT extends BaseDataJobDeploymentIT {

   private static final String DEFAULT_QUERY_WITH_DEPLOYMENTS =
         "query($filter: [Predicate], $executionFilter: [Predicate], $search: String, $pageNumber: Int, $pageSize: Int) {" +
         "  jobs(pageNumber: $pageNumber, pageSize: $pageSize, filter: $filter, search: $search) {" +
         "    content {" +
         "      jobName" +
         "      deployments {" +
         "        id" +
         "        enabled" +
         "        lastExecutionStatus" +
         "        lastExecutionTime" +
         "        lastExecutionDuration" +
         "        executions(pageNumber: 1, pageSize: 5, filter: $executionFilter) {" +
         "          id" +
         "          status" +
         "        }" +
         "      }" +
         "      config {" +
         "        team" +
         "        description" +
         "        schedule {" +
         "          scheduleCron" +
         "          nextRunEpochSeconds" +
         "        }" +
         "      }" +
         "    }" +
         "    totalPages" +
         "    totalItems" +
         "  }" +
         "}";

   @Autowired
   private JobsRepository jobsRepository;

   /**
    * Seeds known last-execution data for the deployed job, then queries the
    * GraphQL endpoint and asserts the computed fields reflect that data.
    * Parameters (jobName, teamName, username) are presumably injected by the
    * BaseDataJobDeploymentIT extension — TODO confirm resolver.
    */
   @Test
   void testFields(String jobName, String teamName, String username) throws Exception {
      // Write last-execution info directly to the database so the GraphQL
      // response has deterministic values to assert against.
      var dataJob = jobsRepository.findById(jobName).get();
      dataJob.setLastExecutionStatus(ExecutionStatus.FINISHED);
      dataJob.setLastExecutionEndTime(OffsetDateTime.of(2000, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC));
      dataJob.setLastExecutionDuration(1000);
      dataJob = jobsRepository.save(dataJob);

      // Test requesting of fields that are computed
      mockMvc.perform(get(JOBS_URI)
            .with(user(username))
            .param("query", DEFAULT_QUERY_WITH_DEPLOYMENTS)
            .param("variables", "{" +
                  "\"search\": \"" + jobName + "\"," +
                  "\"pageNumber\": 1," +
                  "\"pageSize\": 10," +
                  "\"executionFilter\": [" +
                  "    {" +
                  "      \"sort\": \"DESC\"," +
                  "      \"property\": \"deployments.executions.status\"" +
                  "    }" +
                  "  ]" +
                  "}")
            .contentType(MediaType.APPLICATION_JSON))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.data.content[0].config.team", is(teamName)))
            .andExpect(jsonPath("$.data.content[0].config.schedule.scheduleCron", is(dataJob.getJobConfig().getSchedule())))
            .andExpect(jsonPath("$.data.content[0].config.schedule.nextRunEpochSeconds", greaterThan(1)))
            .andExpect(jsonPath("$.data.content[0].deployments[0].lastExecutionStatus", is(dataJob.getLastExecutionStatus().name())))
            .andExpect(jsonPath("$.data.content[0].deployments[0].lastExecutionTime", isDate(dataJob.getLastExecutionEndTime())))
            .andExpect(jsonPath("$.data.content[0].deployments[0].lastExecutionDuration", is(dataJob.getLastExecutionDuration())));
      // NOTE: the response body is not needed; the previous trailing
      // .andReturn().getResponse().getContentAsString() was dead code.
   }
}
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

import java.time.OffsetDateTime;
import java.util.List;
import java.util.Optional;

/**
* Spring Data / JPA Repository for DataJobExecution objects and their members
Expand All @@ -32,6 +33,8 @@ public interface JobExecutionRepository extends JpaRepository<DataJobExecution,

List<DataJobExecution> findDataJobExecutionsByDataJobName(String jobName);

Optional<DataJobExecution> findFirstByDataJobNameOrderByStartTimeDesc(String jobName);

List<DataJobExecution> findDataJobExecutionsByDataJobName(String jobName, Pageable pageable);

List<DataJobExecution> findDataJobExecutionsByDataJobNameAndStatusIn(String jobName, List<ExecutionStatus> statuses);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,8 @@ public Optional<DataJob> getByNameAndTeam(String jobName, String teamName) {

/**
* Updates the last job execution in the database for the specified data job.
* The status is updated only if the execution has completed.
* The status is updated only if the execution has completed and
* is more recent than the currently persisted last execution.
*/
public void updateLastExecution(
final DataJobExecution dataJobExecution) {
Expand All @@ -220,15 +221,25 @@ public void updateLastExecution(
return;
}

// Look up the job; executions for unknown/deleted jobs are ignored.
var dataJobOptional = jobsRepository.findById(dataJobExecution.getDataJob().getName());
if (dataJobOptional.isEmpty()) {
    log.debug("The last execution info for data job {} will NOT be updated. The data job was not found in the database.",
            dataJobExecution.getDataJob().getName());
    return;
}

// Skip stale updates: if the currently recorded last execution ended AFTER
// this execution, this one is older and must not overwrite it.
// FIX(review): the original used isBefore (inverted — it flagged the
// more-recent case) and was missing the early return, so the stale
// execution was logged as skipped but still persisted.
var dataJob = dataJobOptional.get();
if (dataJob.getLastExecutionEndTime() != null &&
        dataJob.getLastExecutionEndTime().isAfter(dataJobExecution.getEndTime())) {
    log.debug("The last execution info for data job {} will NOT be updated. The execution {} was not recent.",
            dataJobExecution.getDataJob().getName(), dataJobExecution.getId());
    return;
}

// Persist the execution's status, end time and duration (whole seconds)
// as the job's last-execution summary.
dataJob.setLastExecutionStatus(dataJobExecution.getStatus());
dataJob.setLastExecutionEndTime(dataJobExecution.getEndTime());
dataJob.setLastExecutionDuration((int) (dataJobExecution.getEndTime().toEpochSecond() - dataJobExecution.getStartTime().toEpochSecond()));
jobsRepository.save(dataJob);
}
}
}
Loading

0 comments on commit 51d130b

Please sign in to comment.