diff --git a/src/main/java/com/elastic/support/diagnostics/commands/RetrieveSystemDigest.java b/src/main/java/com/elastic/support/diagnostics/commands/RetrieveSystemDigest.java
index bf1dba7f..8fb82d9a 100644
--- a/src/main/java/com/elastic/support/diagnostics/commands/RetrieveSystemDigest.java
+++ b/src/main/java/com/elastic/support/diagnostics/commands/RetrieveSystemDigest.java
@@ -27,52 +27,55 @@ public class RetrieveSystemDigest implements Command {
     private final Logger logger = LogManager.getLogger(RetrieveSystemDigest.class);
 
     public void execute(DiagnosticContext context) {
-
         try {
             SystemInfo si = new SystemInfo();
             HardwareAbstractionLayer hal = si.getHardware();
             OperatingSystem os = si.getOperatingSystem();
 
             File sysFileJson = new File(context.tempDir + SystemProperties.fileSeparator + "system-digest.json");
-            OutputStream outputStreamJson = new FileOutputStream(sysFileJson);
-            BufferedWriter jsonWriter = new BufferedWriter(new OutputStreamWriter(outputStreamJson));
-            String jsonInfo = si.toPrettyJSON();
-            jsonWriter.write(jsonInfo);
-            jsonWriter.close();
+
+            try (
+                OutputStream outputStreamJson = new FileOutputStream(sysFileJson);
+                BufferedWriter jsonWriter = new BufferedWriter(new OutputStreamWriter(outputStreamJson));
+            ) {
+                String jsonInfo = si.toPrettyJSON();
+                jsonWriter.write(jsonInfo);
+            }
 
             File sysFile = new File(context.tempDir + SystemProperties.fileSeparator + "system-digest.txt");
-            OutputStream outputStream = new FileOutputStream(sysFile);
-            BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(outputStream));
-            printComputerSystem(writer, hal.getComputerSystem());
-            writer.newLine();
+            try (
+                OutputStream outputStream = new FileOutputStream(sysFile);
+                BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(outputStream));
+            ) {
+                printComputerSystem(writer, hal.getComputerSystem());
+                writer.newLine();
 
-            printProcessor(writer, hal.getProcessor());
-            writer.newLine();
+                printProcessor(writer, hal.getProcessor());
+                writer.newLine();
 
-            printMemory(writer, hal.getMemory());
-            writer.newLine();
+                printMemory(writer, hal.getMemory());
+                writer.newLine();
 
-            printCpu(writer, hal.getProcessor());
-            writer.newLine();
+                printCpu(writer, hal.getProcessor());
+                writer.newLine();
 
-            printProcesses(writer, os, hal.getMemory());
-            writer.newLine();
+                printProcesses(writer, os, hal.getMemory());
+                writer.newLine();
 
-            printDisks(writer, hal.getDiskStores());
-            writer.newLine();
+                printDisks(writer, hal.getDiskStores());
+                writer.newLine();
 
-            printFileSystem(writer, os.getFileSystem());
-            writer.newLine();
+                printFileSystem(writer, os.getFileSystem());
+                writer.newLine();
 
-            printNetworkInterfaces(writer, hal.getNetworkIFs());
-            writer.newLine();
+                printNetworkInterfaces(writer, hal.getNetworkIFs());
+                writer.newLine();
 
-            printNetworkParameters(writer, os.getNetworkParams());
-            writer.newLine();
+                printNetworkParameters(writer, os.getNetworkParams());
+                writer.newLine();
+            }
 
-            writer.close();
             logger.info("Finished querying SysInfo.");
-
         } catch (final Exception e) {
             logger.info("Failed saving system-digest.txt file.", e);
         }
     }
diff --git a/src/main/java/com/elastic/support/monitoring/MonitoringImportService.java b/src/main/java/com/elastic/support/monitoring/MonitoringImportService.java
index 8f5c5988..e6aec6b5 100644
--- a/src/main/java/com/elastic/support/monitoring/MonitoringImportService.java
+++ b/src/main/java/com/elastic/support/monitoring/MonitoringImportService.java
@@ -24,9 +24,8 @@ public class MonitoringImportService extends ElasticRestClientService {
     private Logger logger = LogManager.getLogger(MonitoringImportService.class);
     private static final String SCROLL_ID = "{ \"scroll_id\" : \"{{scrollId}}\" }";
 
-    void execImport(MonitoringImportInputs inputs){
-
-        Map configMap = JsonYamlUtils.readYamlFromClasspath(Constants.DIAG_CONFIG, true);
+    void execImport(MonitoringImportInputs inputs) throws DiagnosticException {
+        Map configMap = JsonYamlUtils.readYamlFromClasspath(Constants.DIAG_CONFIG, true);
         MonitoringImportConfig config = new MonitoringImportConfig(configMap);
 
         try (RestClient client = getClient(inputs, config)){
diff --git a/src/main/java/com/elastic/support/scrub/ScrubProcessor.java b/src/main/java/com/elastic/support/scrub/ScrubProcessor.java
index ddfe6317..e77e7cf9 100644
--- a/src/main/java/com/elastic/support/scrub/ScrubProcessor.java
+++ b/src/main/java/com/elastic/support/scrub/ScrubProcessor.java
@@ -1,6 +1,7 @@
 package com.elastic.support.scrub;
 
 import com.elastic.support.Constants;
+import com.elastic.support.diagnostics.DiagnosticException;
 import com.elastic.support.util.JsonYamlUtils;
 import com.fasterxml.jackson.databind.JsonNode;
 import org.apache.commons.lang3.ObjectUtils;
@@ -33,8 +34,7 @@ public class ScrubProcessor {
 
 
 
-    public ScrubProcessor(String nodes) {
-
+    public ScrubProcessor(String nodes) throws DiagnosticException {
         this();
 
         if (StringUtils.isNotEmpty(nodes)) {
@@ -42,9 +42,9 @@ public ScrubProcessor(String nodes) {
         }
 
     }
-    public ScrubProcessor() {
-        scrubConfig =
-                JsonYamlUtils.readYamlFromClasspath("scrub.yml", false);
+    public ScrubProcessor() throws DiagnosticException {
+        scrubConfig = JsonYamlUtils.readYamlFromClasspath("scrub.yml", false);
+
         Collection auto = (Collection) scrubConfig.get("auto-scrub");
         if (auto != null) {
             autoScrub.addAll(auto);
diff --git a/src/main/java/com/elastic/support/util/ArchiveUtils.java b/src/main/java/com/elastic/support/util/ArchiveUtils.java
index b3ba0aba..da098b04 100644
--- a/src/main/java/com/elastic/support/util/ArchiveUtils.java
+++ b/src/main/java/com/elastic/support/util/ArchiveUtils.java
@@ -39,39 +39,42 @@ public static File createArchive(String dir, String archiveFileName) throws Diag
         }
     }
 
-    public static File createZipArchive(String dir, String archiveFileName) throws IOException {
+    private static File createZipArchive(String dir, String archiveFileName) throws IOException {
         File srcDir = new File(dir);
         String filename = dir + "-" + archiveFileName + ".zip";
         File file = new File(filename);
 
-        FileOutputStream fout = new FileOutputStream(filename);
-        ZipArchiveOutputStream taos = new ZipArchiveOutputStream(fout);
-        archiveResultsZip(archiveFileName, taos, srcDir, "", true);
-        taos.close();
-        logger.info(Constants.CONSOLE, "Archive: " + filename + " was created");
-
-        return file;
+        try (
+            FileOutputStream fout = new FileOutputStream(filename);
+            ZipArchiveOutputStream taos = new ZipArchiveOutputStream(fout)
+        ) {
+            archiveResultsZip(archiveFileName, taos, srcDir, "", true);
+            logger.info(Constants.CONSOLE, "Archive: " + filename + " was created");
+            return file;
+        }
     }
 
-    public static File createTarArchive(String dir, String archiveFileName) throws IOException {
+    private static File createTarArchive(String dir, String archiveFileName) throws IOException {
         File srcDir = new File(dir);
         String filename = dir + "-" + archiveFileName + ".tar.gz";
         File file = new File(filename);
 
-        FileOutputStream fout = new FileOutputStream(filename);
-        CompressorOutputStream cout = new GzipCompressorOutputStream(fout);
-        TarArchiveOutputStream taos = new TarArchiveOutputStream(cout);
-        taos.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_STAR);
-        taos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
-        archiveResultsTar(archiveFileName, taos, srcDir, "", true);
-        taos.close();
+        try (
+            FileOutputStream fout = new FileOutputStream(filename);
+            CompressorOutputStream cout = new GzipCompressorOutputStream(fout);
+            TarArchiveOutputStream taos = new TarArchiveOutputStream(cout)
+        ) {
+            taos.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_STAR);
+            taos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
+            archiveResultsTar(archiveFileName, taos, srcDir, "", true);
 
-        logger.info(Constants.CONSOLE, "Archive: " + filename + " was created");
+            logger.info(Constants.CONSOLE, "Archive: " + filename + " was created");
 
-        return file;
+            return file;
+        }
     }
 
-    public static void archiveResultsZip(String archiveFilename, ZipArchiveOutputStream taos, File file, String path, boolean append) {
+    private static void archiveResultsZip(String archiveFilename, ZipArchiveOutputStream taos, File file, String path, boolean append) {
         String relPath = "";
 
         try {
@@ -84,11 +87,10 @@ public static void archiveResultsZip(String archiveFilename, ZipArchiveOutputStr
             taos.putArchiveEntry(tae);
 
             if (file.isFile()) {
-                BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file));
-                IOUtils.copy(bis, taos);
-                taos.closeArchiveEntry();
-                bis.close();
-
+                try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file))) {
+                    IOUtils.copy(bis, taos);
+                    taos.closeArchiveEntry();
+                }
             } else if (file.isDirectory()) {
                 taos.closeArchiveEntry();
                 for (File childFile : file.listFiles()) {
@@ -100,7 +102,7 @@ public static void archiveResultsZip(String archiveFilename, ZipArchiveOutputStr
         }
     }
 
-    public static void archiveResultsTar(String archiveFilename, TarArchiveOutputStream taos, File file, String path, boolean append) {
+    private static void archiveResultsTar(String archiveFilename, TarArchiveOutputStream taos, File file, String path, boolean append) {
         String relPath = "";
 
         try {
@@ -113,13 +115,13 @@ public static void archiveResultsTar(String archiveFilename, TarArchiveOutputStr
             taos.putArchiveEntry(tae);
 
             if (file.isFile()) {
-                BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file));
-                IOUtils.copy(bis, taos);
-                taos.closeArchiveEntry();
-                bis.close();
-
+                try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file))) {
+                    IOUtils.copy(bis, taos);
+                    taos.closeArchiveEntry();
+                }
             } else if (file.isDirectory()) {
                 taos.closeArchiveEntry();
+
                 for (File childFile : file.listFiles()) {
                     archiveResultsTar(archiveFilename, taos, childFile, relPath, false);
                 }
@@ -183,7 +185,9 @@ else if (entry.isDirectory()) {
                 logger.error(e);
             }
             finally {
-                ais.close();
+                if (ais != null) {
+                    ais.close();
+                }
             }
         }
 
diff --git a/src/main/java/com/elastic/support/util/JsonYamlUtils.java b/src/main/java/com/elastic/support/util/JsonYamlUtils.java
index 886fab0b..ec0dea7e 100644
--- a/src/main/java/com/elastic/support/util/JsonYamlUtils.java
+++ b/src/main/java/com/elastic/support/util/JsonYamlUtils.java
@@ -1,44 +1,31 @@
 package com.elastic.support.util;
 
+import com.elastic.support.diagnostics.DiagnosticException;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.SerializationFeature;
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.yaml.snakeyaml.DumperOptions;
 import org.yaml.snakeyaml.Yaml;
 
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileWriter;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.*;
 
-
 public class JsonYamlUtils {
 
     private static final Logger logger = LoggerFactory.getLogger(JsonYamlUtils.class);
 
     public static ObjectMapper mapper = new ObjectMapper();
-    public static ObjectMapper formatMapper = new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
-
-    public static JsonNode createJsonNodeFromFileName(String fileName) {
-        File jsonFile = FileUtils.getFile(fileName);
-        return createJsonNodeFromFile(jsonFile);
-    }
 
     public static JsonNode createJsonNodeFromFileName(String dir, String fileName) {
         File jsonFile = FileUtils.getFile(dir, fileName);
-        return createJsonNodeFromFile(jsonFile);
-    }
 
-    public static JsonNode createJsonNodeFromFile(File jsonFile) {
         try {
             String fileString = FileUtils.readFileToString(jsonFile, "UTF8");
+
             return JsonYamlUtils.createJsonNodeFromString(fileString);
         } catch (IOException e) {
             logger.info("Error reading in JSON string from file: {}", jsonFile);
@@ -48,154 +35,39 @@ public static JsonNode createJsonNodeFromFile(File jsonFile) {
 
     public static JsonNode createJsonNodeFromString(String nodeString) {
         try {
-            ObjectMapper mapper = new ObjectMapper();
-            return mapper.readTree(nodeString);
+            return new ObjectMapper().readTree(nodeString);
         } catch (IOException e) {
             logger.info("Error creating JSON node from input string: {}", nodeString);
             throw new RuntimeException(e);
         }
     }
 
-    public static JsonNode createJsonNodeFromClasspath(String path) {
-        try {
-            InputStream is;
-            is = JsonYamlUtils.class.getClassLoader().getResourceAsStream(path);
-            String nodeString = new String(IOUtils.toByteArray(is));
-            ObjectMapper mapper = new ObjectMapper();
-            return mapper.readTree(nodeString);
-        } catch (IOException e) {
-            logger.info("Error creating JSON node {}", path);
-            throw new RuntimeException(e);
-        }
-    }
-
-    public static void writeYaml(String path, Map tree) {
-        try {
-            DumperOptions options = new DumperOptions();
-            options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
-            Yaml yaml = new Yaml(options);
-            FileWriter writer = new FileWriter(path);
-            yaml.dump(tree, writer);
-        } catch (IOException e) {
-            logger.info("Error writing YAML to: {}", path);
-            throw new RuntimeException(e);
-        }
-    }
-
-    public static Map readYamlFromClasspath(String path, boolean isBlock) {
-        try {
+    public static Map readYamlFromClasspath(String path, boolean isBlock) throws DiagnosticException {
+        try (
             InputStream inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream(path);
-            Map doc = JsonYamlUtils.readYaml(inputStream, isBlock);
-            SystemUtils.streamClose(path, inputStream);
-            return doc;
-        } catch (Exception e) {
-            logger.info("Error reading YAML from {}", path);
-            throw new RuntimeException(e);
-        }
-    }
-
-    public static Map readYamlFromPath(String path, boolean isBlock) throws Exception {
-        File fl = FileUtils.getFile(path);
-        InputStream inputStream = new FileInputStream(fl);
-        Map doc = JsonYamlUtils.readYaml(inputStream, isBlock);
-        SystemUtils.streamClose(path, inputStream);
-        return doc;
-    }
-
-    public static Map readYaml(InputStream in, boolean isBlock) throws Exception {
-        Map doc = null;
-
-        try {
-            DumperOptions options = new DumperOptions();
-            if (isBlock) {
-                options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
-            }
-
-            Yaml yaml = new Yaml(options);
-            doc = (Map) yaml.load(in);
-
-        } catch (Exception e) {
-            logger.info("Error encountered retrieving yml file.", e);
+        ) {
+            return JsonYamlUtils.readYaml(inputStream, isBlock);
         }
-        finally {
-            return nullSafeYamlMap(doc);
+        catch (IOException e) {
+            logger.info("Error reading YAML from {}", path);
+            throw new DiagnosticException("Error reading YAML file",e);
         }
     }
 
-    public static Map flattenYaml(Map map) {
-        Map result = new LinkedHashMap<>();
-        buildFlattenedMap(result, map, null);
-        return result;
-    }
-
-    public static Map flattenMap(Map map){
-        return flattenYaml(map);
-    }
+    private static Map readYaml(InputStream in, boolean isBlock) {
+        DumperOptions options = new DumperOptions();
 
-    public static Map flattenNode(JsonNode node) {
-        try {
-            ObjectMapper mapper = new ObjectMapper();
-            Map jsonMap = mapper.convertValue(node, Map.class);
-            //String json = mapper.writeValueAsString(node)
-            //Map jsonMap = mapper.readValue(json, new TypeReference() {});
-            Map flat = flattenYaml(jsonMap);
-            return flat;
-        } catch (Exception e) {
-            throw new RuntimeException(e);
+        if (isBlock) {
+            options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
         }
-    }
 
-    public static void buildFlattenedMap(Map result, Map source, String path) {
-
-        Set> entries = source.entrySet();
-        for (Map.Entry entry : entries) {
-            String key = entry.getKey().toString();
-            if (StringUtils.isNoneEmpty(path)) {
-                if (key.startsWith("[")) {
-                    key = path + key;
-                } else {
-                    key = path + "." + key;
-                }
-            }
-            Object value = entry.getValue();
-            if (value instanceof String) {
-                result.put(key, value);
-            } else if (value instanceof Map) {
-                // Need a compound key
-                Map map = (Map) value;
-                buildFlattenedMap(result, map, key);
-                //} else if (value instanceof List) {
-                //    result.put(key, value);
-            } else if (value instanceof Collection) {
-                // Need a compound key
-                @SuppressWarnings("unchecked")
-                Collection collection = (Collection) value;
-                int count = 0;
-                for (Object object : collection) {
-                    buildFlattenedMap(result,
-                            Collections.singletonMap("[" + (count++) + "]", object), key);
-                }
-            } else {
-                result.put(key, value == null ? "" : value);
-            }
-        }
-    }
+        Yaml yaml = new Yaml(options);
+        Map doc = yaml.load(in);
 
-    private static Map nullSafeYamlMap(Map doc){
         if (doc == null){
-            doc = new HashMap();
+            return new HashMap<>();
         }
-        return doc;
-    }
-
-    private static Map listToMap(List input){
-        int sz = input.size();
-        Map output = new LinkedHashMap<>();
-        for(int i=0; i < sz; i++){
-            output.put("idx_" + i, input.get(i));
-        }
-
-        return output;
+        return doc;
     }
 }
diff --git a/src/main/java/com/elastic/support/util/SystemUtils.java b/src/main/java/com/elastic/support/util/SystemUtils.java
index a47ed15c..0def19d0 100644
--- a/src/main/java/com/elastic/support/util/SystemUtils.java
+++ b/src/main/java/com/elastic/support/util/SystemUtils.java
@@ -42,20 +42,6 @@ public static void writeToFile(String content, String dest) throws DiagnosticExc
         }
     }
 
-    public static void streamClose(String path, InputStream instream) {
-
-        if (instream != null) {
-            try {
-                instream.close();
-            } catch (Throwable t) {
-                logger.error(Constants.CONSOLE, "Error encountered when attempting to close file {}", path);
-            }
-        } else {
-            logger.error(Constants.CONSOLE, "Error encountered when attempting to close file: null InputStream {}", path);
-        }
-
-    }
-
     public static void nukeDirectory(String dir){
         try {
             File tmp = new File(dir);
diff --git a/src/test/java/com/elastic/support/diagnostics/commands/TestKibanaGetDetails.java b/src/test/java/com/elastic/support/diagnostics/commands/TestKibanaGetDetails.java
index 876f91e7..ea839a1b 100644
--- a/src/test/java/com/elastic/support/diagnostics/commands/TestKibanaGetDetails.java
+++ b/src/test/java/com/elastic/support/diagnostics/commands/TestKibanaGetDetails.java
@@ -147,7 +147,7 @@ public void testClusterFindTargetNode() {
     }
 
     @Test
-    public void testFunctionGetStats() {
+    public void testFunctionGetStats() throws DiagnosticException {
 
         mockServer
             .when(
diff --git a/src/test/java/com/elastic/support/diagnostics/commands/TestRunKibanaQueries.java b/src/test/java/com/elastic/support/diagnostics/commands/TestRunKibanaQueries.java
index fd776597..2c6dca48 100644
--- a/src/test/java/com/elastic/support/diagnostics/commands/TestRunKibanaQueries.java
+++ b/src/test/java/com/elastic/support/diagnostics/commands/TestRunKibanaQueries.java
@@ -84,7 +84,7 @@ public void tearDown() {
 
     }
 
-    private DiagnosticContext initializeKibana(String version) {
+    private DiagnosticContext initializeKibana(String version) throws DiagnosticException {
         DiagnosticContext context = new DiagnosticContext();
         RestEntryConfig builder = new RestEntryConfig(version);
diff --git a/src/test/java/com/elastic/support/rest/TestRestConfigFileValidity.java b/src/test/java/com/elastic/support/rest/TestRestConfigFileValidity.java
index 1ae3d582..dde04e52 100644
--- a/src/test/java/com/elastic/support/rest/TestRestConfigFileValidity.java
+++ b/src/test/java/com/elastic/support/rest/TestRestConfigFileValidity.java
@@ -1,5 +1,6 @@
 package com.elastic.support.rest;
 
+import com.elastic.support.diagnostics.DiagnosticException;
 import com.elastic.support.util.JsonYamlUtils;
 import com.vdurmont.semver4j.Semver;
 import org.apache.logging.log4j.LogManager;
@@ -17,7 +18,7 @@ public class TestRestConfigFileValidity {
     protected static Semver sem= new Semver("9.9.999", Semver.SemverType.NPM);
 
     @Test
-    public void validateElasticConfigVersioning(){
+    public void validateElasticConfigVersioning() throws DiagnosticException {
         // validates whether each set of version entries has exactly one valid outcome.
         Map restEntriesConfig = JsonYamlUtils.readYamlFromClasspath("elastic-rest.yml", true);
         validateEntries(restEntriesConfig);
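
Note: the recurring change in this patch is replacing manually closed streams with try-with-resources and surfacing a checked DiagnosticException instead of a wrapped RuntimeException. The standalone sketch below illustrates that pattern outside the patch; the ConfigException class and loadConfig method are illustrative placeholders, not code from this repository, and the only library assumed is SnakeYAML, which the project already uses.

import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;

import org.yaml.snakeyaml.Yaml;

public class TryWithResourcesSketch {

    // Illustrative checked exception, standing in for DiagnosticException.
    public static class ConfigException extends Exception {
        public ConfigException(String message, Throwable cause) {
            super(message, cause);
        }
    }

    // Loads a YAML resource from the classpath. The stream is declared in the
    // try-with-resources header, so it is closed automatically whether the body
    // returns or throws, which is what removing the manual close()/streamClose()
    // calls achieves in the patch above.
    public static Map<String, Object> loadConfig(String path) throws ConfigException {
        try (InputStream in = Thread.currentThread()
                .getContextClassLoader()
                .getResourceAsStream(path)) {
            if (in == null) {
                throw new ConfigException("Resource not found on classpath: " + path, null);
            }
            Object loaded = new Yaml().load(in);
            if (loaded instanceof Map) {
                @SuppressWarnings("unchecked")
                Map<String, Object> doc = (Map<String, Object>) loaded;
                return doc;
            }
            // Mirror the null-safe behavior of returning an empty map for an empty document.
            return new HashMap<>();
        } catch (IOException e) {
            // Propagate a checked exception instead of wrapping in RuntimeException.
            throw new ConfigException("Error reading YAML file: " + path, e);
        }
    }
}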