Skip to content

Commit

Permalink
Merge branch 'alien4cloud:3.0.x' into 3.0.x
Browse files Browse the repository at this point in the history
  • Loading branch information
trihoangvo committed Jun 7, 2021
2 parents 9e95460 + 21ee2d2 commit 9e7ded4
Show file tree
Hide file tree
Showing 75 changed files with 1,214 additions and 505 deletions.
19 changes: 16 additions & 3 deletions alien4cloud-common/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
<parent>
<groupId>alien4cloud</groupId>
<artifactId>alien4cloud-parent</artifactId>
<version>3.1.0-SNAPSHOT</version>
<version>3.3.0-SNAPSHOT</version>
</parent>
<artifactId>alien4cloud-common</artifactId>
<name>Alien 4 Cloud Common</name>
Expand Down Expand Up @@ -49,10 +49,10 @@
<groupId>org.alien4cloud</groupId>
<artifactId>elasticsearch-annotations</artifactId>
</dependency>
<dependency>
<!-- <dependency>
<groupId>org.codehaus.groovy</groupId>
<artifactId>groovy-all</artifactId>
</dependency>
</dependency> -->

<!-- YAML -->
<dependency>
Expand Down Expand Up @@ -123,6 +123,19 @@
<artifactId>slf4j-api</artifactId>
</dependency>

<dependency>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</dependency>
<dependency>
<groupId>com.sun.xml.bind</groupId>
<artifactId>jaxb-impl</artifactId>
</dependency>
<dependency>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
</dependency>

<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
Expand Down
16 changes: 16 additions & 0 deletions alien4cloud-common/src/main/java/alien4cloud/utils/AlienUtils.java
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang3.ArrayUtils;
Expand Down Expand Up @@ -43,6 +44,21 @@ public static <T> Collection<T> safe(Collection<T> collection) {
return org.apache.commons.collections4.CollectionUtils.emptyIfNull(collection);
}

/**
* Utility method to iterate over a set that can be null<br>
* for(T element : safe(set)) {}
* <br>
* <br>
* ATTENTION: Use this method for readonly operation only
*
* @param set The list that may be null.
* @param <T> The type of map keys
* @return The map or an empty map.
*/
public static <T> Set<T> safe(Set<T> set) {
return org.apache.commons.collections4.SetUtils.emptyIfNull(set);
}

/**
* Utility method to iterate over a map that can be null<br>
* for(T element : safe(list)) {}
Expand Down
16 changes: 10 additions & 6 deletions alien4cloud-common/src/main/java/alien4cloud/utils/FileUtil.java
Original file line number Diff line number Diff line change
Expand Up @@ -14,14 +14,13 @@
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import javax.xml.bind.DatatypeConverter;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;

import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import com.google.common.io.BaseEncoding;
import com.google.common.io.ByteStreams;
import com.google.common.io.Closeables;

Expand Down Expand Up @@ -108,6 +107,9 @@ public static void zip(Path inputPath, Path outputPath) throws IOException {
Files.walkFileTree(inputPath, new ZipDirWalker(inputPath, zipOutputStream));
}
zipOutputStream.flush();
} catch (IOException e) {
log.error ("zip error", e);
throw e;
} finally {
Closeables.close(zipOutputStream, true);
}
Expand Down Expand Up @@ -158,7 +160,7 @@ public static void tar(Path inputPath, Path outputPath, boolean gZipped, boolean
* @throws IOException In case something fails.
*/
public static void unzip(final Path zipFile, final Path destination) throws IOException {
try (FileSystem zipFS = FileSystems.newFileSystem(zipFile, null)) {
try (FileSystem zipFS = FileSystems.newFileSystem(zipFile, (ClassLoader)null)) {
final Path root = zipFS.getPath("/");
copy(root, destination, StandardCopyOption.REPLACE_EXISTING);
}
Expand Down Expand Up @@ -379,7 +381,8 @@ public static String getSHA1Checksum(Path path) {
}
MessageDigest digest = MessageDigest.getInstance("SHA1");
addFileToDigest(digest, path);
return DatatypeConverter.printHexBinary(digest.digest());
return BaseEncoding.base16().encode(digest.digest());
//return DatatypeConverter.printHexBinary(digest.digest());
}

/**
Expand All @@ -391,7 +394,7 @@ public static String getSHA1Checksum(Path path) {
@SneakyThrows({ IOException.class })
public static String deepSHA1(Path rootPath) {
if (isZipFile(rootPath)) {
try (FileSystem csarFS = FileSystems.newFileSystem(rootPath, null)) {
try (FileSystem csarFS = FileSystems.newFileSystem(rootPath, (ClassLoader)null)) {
Path innerZipPath = csarFS.getPath(FileSystems.getDefault().getSeparator());
return computeDirectoryHash(innerZipPath);
}
Expand All @@ -407,7 +410,8 @@ public static String deepSHA1(Path rootPath) {
private static String computeDirectoryHash(Path rootPath) {
MessageDigest digest = MessageDigest.getInstance("SHA1");
Files.walk(rootPath).filter(FileUtil::isNotHidden).filter(Files::isRegularFile).forEach(path -> addFileToDigest(digest, path));
return DatatypeConverter.printHexBinary(digest.digest());
return BaseEncoding.base16().encode(digest.digest());
//return DatatypeConverter.printHexBinary(digest.digest());

}

Expand Down
6 changes: 4 additions & 2 deletions alien4cloud-core/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
<parent>
<groupId>alien4cloud</groupId>
<artifactId>alien4cloud-parent</artifactId>
<version>3.1.0-SNAPSHOT</version>
<version>3.3.0-SNAPSHOT</version>
</parent>
<artifactId>alien4cloud-core</artifactId>
<name>Alien 4 Cloud Core</name>
Expand Down Expand Up @@ -72,10 +72,12 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
</dependency>
<!--
<dependency>
<groupId>org.codehaus.groovy</groupId>
<artifactId>groovy-all</artifactId>
</dependency>
-->

<!-- RxJava -->
<dependency>
Expand Down Expand Up @@ -108,7 +110,7 @@
</dependency>
<dependency>
<groupId>com.sun.xml.bind</groupId>
<artifactId>jaxb</artifactId>
<artifactId>jaxb-impl</artifactId>
</dependency>
<dependency>
<groupId>javax.el</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,10 @@

import alien4cloud.dao.IGenericSearchDAO;
import alien4cloud.paas.model.PaaSDeploymentLog;
import groovy.util.logging.Log4j;

/**
* Service is used by some premium plugins for archive the deployments logs.
*/
@Log4j
@Service
public class DeploymentLoggingService {
@Resource(name = "alien-monitor-es-dao")
Expand Down Expand Up @@ -64,4 +62,4 @@ public synchronized void save(final PaaSDeploymentLog[] deploymentLogs) {
}
}

}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
package alien4cloud.deployment;

import alien4cloud.deployment.matching.services.location.TopologyLocationUtils;
import alien4cloud.deployment.model.SecretProviderConfigurationAndCredentials;
import alien4cloud.model.deployment.Deployment;
import alien4cloud.model.deployment.DeploymentTopology;
import alien4cloud.model.orchestrators.locations.Location;
import alien4cloud.orchestrators.plugin.IOrchestratorPlugin;
import alien4cloud.paas.IPaaSCallback;
import alien4cloud.paas.OrchestratorPluginService;
import alien4cloud.paas.model.PaaSDeploymentContext;
import lombok.extern.slf4j.Slf4j;
import org.alien4cloud.secret.services.SecretProviderService;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;

import javax.inject.Inject;
import java.util.Map;

/**
 * Manages purging of deployments: marks an active deployment as undeployed on the
 * orchestrator side without going through a full undeploy workflow.
 */
@Service
@Slf4j
public class PurgeService {
    @Inject
    private OrchestratorPluginService orchestratorPluginService;
    @Inject
    private DeploymentService deploymentService;
    @Inject
    private DeploymentRuntimeStateService deploymentRuntimeStateService;
    @Inject
    private DeploymentLockService deploymentLockService;
    @Inject
    private SecretProviderService secretProviderService;
    @Inject
    private DeploymentTopologyService deploymentTopologyService;

    /**
     * Purge the active deployment of an environment, if any.
     *
     * @param secretProviderConfigurationAndCredentials Optional secret provider configuration and credentials
     *            used to generate a token for the orchestrator; may be null/unprovided.
     * @param environmentId Id of the environment whose active deployment should be purged.
     */
    public void purgeEnvironment(SecretProviderConfigurationAndCredentials secretProviderConfigurationAndCredentials, String environmentId) {
        Deployment deployment = deploymentService.getActiveDeployment(environmentId);
        if (deployment != null) {
            purge(secretProviderConfigurationAndCredentials, deployment);
        } else {
            // Nothing to purge: no active deployment is registered for this environment.
            log.warn("No deployment found for environment {}", environmentId);
        }
    }

    /**
     * Purge a deployment under the deployment write lock.
     *
     * @param secretProviderConfigurationAndCredentials Optional secret provider configuration and credentials.
     * @param deployment Deployment to purge.
     */
    private void purge(SecretProviderConfigurationAndCredentials secretProviderConfigurationAndCredentials, final Deployment deployment) {
        deploymentLockService.doWithDeploymentWriteLock(deployment.getOrchestratorDeploymentId(), () -> {
            log.info("Purging deployment [{}] on orchestrator [{}]", deployment.getId(), deployment.getOrchestratorId());
            IOrchestratorPlugin orchestratorPlugin = orchestratorPluginService.getOrFail(deployment.getOrchestratorId());
            DeploymentTopology deployedTopology = deploymentRuntimeStateService.getRuntimeTopology(deployment.getId());

            Map<String, String> locationIds = TopologyLocationUtils.getLocationIds(deployedTopology);
            Map<String, Location> locations = deploymentTopologyService.getLocations(locationIds);

            // Only generate a secret-provider token when credentials were actually supplied by the caller.
            SecretProviderConfigurationAndCredentials authResponse = null;
            if (secretProviderService.isSecretProvided(secretProviderConfigurationAndCredentials)) {
                authResponse = secretProviderService.generateToken(locations,
                        secretProviderConfigurationAndCredentials.getSecretProviderConfiguration().getPluginName(),
                        secretProviderConfigurationAndCredentials.getCredentials());
            }

            PaaSDeploymentContext deploymentContext = new PaaSDeploymentContext(deployment, deployedTopology, authResponse);

            orchestratorPlugin.purge(deploymentContext, new IPaaSCallback<ResponseEntity>() {
                @Override
                public void onSuccess(ResponseEntity data) {
                    // Purge succeeded on the orchestrator: reflect it in alien4cloud's state.
                    deploymentService.markUndeployed(deployment);
                    log.info("Deployment [{}] purged on orchestrator [{}]", deployment.getId(), deployment.getOrchestratorId());
                }

                @Override
                public void onFailure(Throwable throwable) {
                    // Keep the deployment state untouched on failure; log the cause so the error is not swallowed.
                    log.warn("Fail while purging deployment [{}] on orchestrator [{}]", deployment.getId(), deployment.getOrchestratorId(), throwable);
                }
            });

            return null;
        });
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,13 @@ public interface IPaaSProvider {
*/
void undeploy(PaaSDeploymentContext deploymentContext, IPaaSCallback<?> callback,boolean force);

/**
* Purge a deployment
*
* @param deploymentContext the context of the un-deployment
*/
void purge(PaaSDeploymentContext deploymentContext, IPaaSCallback<?> callback);

/**
* Scale up/down a node
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,5 +51,9 @@ public enum DeploymentStatus {
/**
* Paas Provider not reachable, error ...
*/
UNKNOWN
UNKNOWN,
/**
* A purge has failed
*/
PURGE_FAILURE,
}
Original file line number Diff line number Diff line change
Expand Up @@ -206,7 +206,7 @@ private Class<?> getLinkedType(IPluginLinker<?> linker) {
public void uploadInitPlugins(List<Path> uploadedPluginsPath) throws IOException {
for (Path uploadedPluginPath : uploadedPluginsPath) {
// load the plugin descriptor
FileSystem fs = FileSystems.newFileSystem(uploadedPluginPath, null);
FileSystem fs = FileSystems.newFileSystem(uploadedPluginPath, (ClassLoader)null);
PluginDescriptor descriptor;
try {
try {
Expand Down Expand Up @@ -282,7 +282,7 @@ public void uploadInitPlugins(List<Path> uploadedPluginsPath) throws IOException
*/
public Plugin uploadPlugin(Path uploadedPluginPath) throws PluginLoadingException, IOException, MissingPlugingDescriptorFileException {
// load the plugin descriptor
FileSystem fs = FileSystems.newFileSystem(uploadedPluginPath, null);
FileSystem fs = FileSystems.newFileSystem(uploadedPluginPath, (ClassLoader)null);
PluginDescriptor descriptor;
try {
try {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@

@Slf4j
@Component
public class PurgeService {
public class PurgeDataService {

private final ScheduledExecutorService executorService = Executors.newScheduledThreadPool(1, new NamedThreadFactory("a4c-purge-service"));

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -98,15 +98,19 @@ private void processNode(NodeTemplate node,NodeType type,VariableModifierContext
private void processProperty(Map.Entry<String,AbstractPropertyValue> entry, AbstractInheritableToscaType type, String path, VariableModifierContext context) {
PropertyDefinition definition = safe(type.getProperties()).get(entry.getKey());

if (entry.getValue() instanceof ScalarPropertyValue) {
log.debug("Processing {}", path);
entry.setValue((AbstractPropertyValue) processScalar((ScalarPropertyValue) entry.getValue(),definition,path,context));
} else if (entry.getValue() instanceof ComplexPropertyValue) {
log.debug("Processing {}[]", path);
processComplex((ComplexPropertyValue) entry.getValue(),definition,path,context);
} else if (entry.getValue() instanceof ListPropertyValue) {
log.debug("Processing {}[]", path);
processListComplex(((ListPropertyValue) entry.getValue()).getValue(),definition,path,context);
if (definition != null) {
if (entry.getValue() instanceof ScalarPropertyValue) {
log.debug("Processing {}", path);
entry.setValue((AbstractPropertyValue) processScalar((ScalarPropertyValue) entry.getValue(), definition, path, context));
} else if (entry.getValue() instanceof ComplexPropertyValue) {
log.debug("Processing {}[]", path);
processComplex((ComplexPropertyValue) entry.getValue(), definition, path, context);
} else if (entry.getValue() instanceof ListPropertyValue) {
log.debug("Processing {}[]", path);
processListComplex(((ListPropertyValue) entry.getValue()).getValue(), definition, path, context);
}
} else {
log.warn("Cannot find PropertyDefinition %s",entry.getKey());
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ private class ZipArchivePathResolver extends AbstractArchivePathResolver {
private FileSystem fileSystem;

private ZipArchivePathResolver(Path archive) throws IOException {
fileSystem = FileSystems.newFileSystem(archive, null);
fileSystem = FileSystems.newFileSystem(archive, (ClassLoader)null);
}

@Override
Expand Down Expand Up @@ -81,4 +81,4 @@ protected ArchivePathChecker createPathChecker(Path archive) {
public ParsingResult<ArchiveRoot> process(Path archive, ParsingResult<ArchiveRoot> parsedArchive, String workspace) {
return doProcess(archive, parsedArchive, workspace);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ private void importImage(Path archiveFile, List<ParsingError> parsingErrors, Tag
csarFS = FileSystems.getDefault();
iconPath = csarFS.getPath(archiveFile.toString(), iconTag.getValue());
} else {
csarFS = FileSystems.newFileSystem(archiveFile, null);
csarFS = FileSystems.newFileSystem(archiveFile, (ClassLoader)null);
iconPath = csarFS.getPath(iconTag.getValue());
}
if (!Files.isDirectory(iconPath)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,6 @@ public class ConnectStepFromOperation extends AbstractWorkflowOperation {

@Override
public String commitMessage() {
return "Connect steps <" + StringUtils.join(getFromStepIds(), ",") + "> to step <" + getToStepId() + "> in the workflow <" + getWorkflowName() + ">";
return "Add onSuccess links from steps <" + StringUtils.join(getFromStepIds(), ",") + "> to step <" + getToStepId() + "> in the workflow <" + getWorkflowName() + ">";
}
}
Loading

0 comments on commit 9e7ded4

Please sign in to comment.