diff --git a/bitbucket-pipelines.yml b/bitbucket-pipelines.yml
index be2396f..7c2cc8b 100644
--- a/bitbucket-pipelines.yml
+++ b/bitbucket-pipelines.yml
@@ -1,5 +1,4 @@
-image: adoptopenjdk:8-openj9
-#image: openjdk:8
+image: openjdk:8
 
 pipelines:
   branches:
diff --git a/build.gradle b/build.gradle
index a5dc29e..0146626 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,8 +1,6 @@
 plugins {
-    // Apply the groovy plugin to add support for Groovy
+    id 'java'
     id 'groovy'
-
-    // Apply the application plugin to add support for building a CLI application.
     id 'application'
 
     // Code coverage of tests
@@ -18,6 +16,9 @@ repositories {
 }
 
 dependencies {
+    implementation 'info.picocli:picocli:4.5.1'
+    annotationProcessor 'info.picocli:picocli-codegen:4.5.1'
+    implementation 'org.tomlj:tomlj:1.0.0'
     implementation 'org.codehaus.groovy:groovy-all:3.0.5'
     implementation 'com.squareup.okhttp3:okhttp:4.8.0'
     implementation 'org.influxdb:influxdb-java:2.19'
@@ -32,7 +33,7 @@ dependencies {
 }
 
 application {
-    mainClassName = 'biz.nellemann.hmci.App'
+    mainClassName = 'biz.nellemann.hmci.Main'
 }
 
 test {
@@ -68,7 +69,6 @@ ospackage {
 
 buildRpm {
     dependsOn startShadowScripts
-    //requires('java-1.8.0-openjdk-headless')
     os = LINUX
 }
 
@@ -77,7 +77,6 @@ buildDeb {
     requires('default-jre-headless')
 }
 
-
jacoco {
     toolVersion = "0.8.5"
 }
@@ -103,22 +102,16 @@ jacocoTestCoverageVerification {
 }
 
 check.dependsOn jacocoTestCoverageVerification
-
-processResources.dependsOn.add("versionFile")
-versionFile {
-    // Path to the file to be written
-    file = new File(project.buildDir, 'resources/main/version.properties')
-}
-
 jar {
     manifest {
         attributes(
-            'Built-By'       : System.properties['user.name'],
-            'Build-Timestamp': new Date().format("yyyy-MM-dd'T'HH:mm:ss.SSSZ").toString(),
-            'Build-Revision' : versioning.info.commit,
             'Created-By'     : "Gradle ${gradle.gradleVersion}",
+            'Build-OS'       : "${System.properties['os.name']} ${System.properties['os.arch']} ${System.properties['os.version']}",
             'Build-Jdk'      : "${System.properties['java.version']} (${System.properties['java.vendor']} ${System.properties['java.vm.version']})",
-            'Build-OS'       : "${System.properties['os.name']} ${System.properties['os.arch']} ${System.properties['os.version']}"
+            'Build-User'     : System.properties['user.name'],
+            'Build-Version'  : versioning.info.tag ?: (versioning.info.branch + "-" + versioning.info.build),
+            'Build-Revision' : versioning.info.commit,
+            'Build-Timestamp': new Date().format("yyyy-MM-dd'T'HH:mm:ss.SSSZ").toString(),
         )
     }
 }
diff --git a/src/main/groovy/biz/nellemann/hmci/App.groovy b/src/main/groovy/biz/nellemann/hmci/App.groovy
deleted file mode 100644
index 677f7d2..0000000
--- a/src/main/groovy/biz/nellemann/hmci/App.groovy
+++ /dev/null
@@ -1,246 +0,0 @@
-/**
- * Copyright 2020 Mark Nellemann
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package biz.nellemann.hmci
-
-import groovy.cli.picocli.CliBuilder
-import groovy.cli.picocli.OptionAccessor
-import groovy.util.logging.Slf4j
-
-@Slf4j
-class App implements Runnable {
-
-    final ConfigObject configuration
-    final Integer refreshEverySec
-    final Integer rescanHmcEvery
-
-    InfluxClient influxClient
-    Map<String, HmcClient> hmcClients = new HashMap<>()
-    Map<String, ManagedSystem> systems = new HashMap<String, ManagedSystem>()
-    Map<String, LogicalPartition> partitions = new HashMap<String, LogicalPartition>()
-
-
-    App(ConfigObject configuration) {
-        this.configuration = configuration
-        log.debug configuration.toString()
-
-        refreshEverySec = (Integer)configuration.get('hmci.refresh') ?: 60
-        rescanHmcEvery = (Integer)configuration.get('hmci.rescan') ?: 15
-
-        String influxUrl = configuration.get('influx')['url']
-        String influxUsername = configuration.get('influx')['username']
-        String influxPassword = configuration.get('influx')['password']
-        String influxDatabase = configuration.get('influx')['database']
-
-        try {
-            influxClient = new InfluxClient(influxUrl, influxUsername, influxPassword, influxDatabase)
-            influxClient.login()
-        } catch(Exception e) {
-            System.exit(1)
-        }
-
-        // Initial scan
-        discover()
-
-        run()
-    }
-
-
-    void discover() {
-
-        configuration.get('hmc').each { Object key, Object hmc ->
-            if(!hmcClients?.containsKey(key)) {
-                log.info("Adding HMC: " + hmc.toString())
-                String hmcKey = key
-                String hmcUrl = hmc['url']
-                String hmcUsername = hmc['username']
-                String hmcPassword = hmc['password']
-                Boolean hmcUnsafe = hmc['unsafe']
-                HmcClient hmcClient = new HmcClient(hmcKey, hmcUrl, hmcUsername, hmcPassword, hmcUnsafe)
-                hmcClients.put(hmcKey, hmcClient)
-            }
-        }
-
-        hmcClients.each { hmcId, hmcClient ->
-
-            try {
-                hmcClient.login()
-                hmcClient.getManagedSystems().each { systemId, system ->
-
-                    // Add to list of known systems
-                    systems.putIfAbsent(systemId, system)
-
-                    // Get LPAR's for this system
-                    hmcClient.getLogicalPartitionsForManagedSystem(system).each { partitionId, partition ->
-
-                        // Add to list of known partitions
-                        partitions.putIfAbsent(partitionId, partition)
-                    }
-                }
-            } catch(Exception e) {
-                log.error("discover() - " + hmcId + " error: " + e.message)
-                //hmcClients.remove(hmcId)
-            }
-
-        }
-
-    }
-
-
-    void getMetricsForSystems() {
-
-        try {
-
-            systems.each {systemId, system ->
-
-                HmcClient hmcClient = hmcClients.get(system.hmcId)
-
-                // Get and process metrics for this system
-                String tmpJsonString = hmcClient.getPcmDataForManagedSystem(system)
-                if(tmpJsonString && !tmpJsonString.empty) {
-                    system.processMetrics(tmpJsonString)
-                }
-
-            }
-
-        } catch(Exception e) {
-            log.error(e.message)
-        }
-
-    }
-
-
-    void getMetricsForPartitions() {
-
-        try {
-
-            // Get LPAR's for this system
-            partitions.each { partitionId, partition ->
-
-                HmcClient hmcClient = hmcClients.get(partition.system.hmcId)
-
-                // Get and process metrics for this partition
-                String tmpJsonString2 = hmcClient.getPcmDataForLogicalPartition(partition)
-                if(tmpJsonString2 && !tmpJsonString2.empty) {
-                    partition.processMetrics(tmpJsonString2)
-                }
-
-            }
-
-        } catch(Exception e) {
-            log.error(e.message)
-        }
-    }
-
-
-    void writeMetricsForManagedSystems() {
-        systems.each {systemId, system ->
-            influxClient.writeManagedSystem(system)
-        }
-    }
-
-
-    void writeMetricsForLogicalPartitions() {
-        partitions.each {partitionId, partition ->
-            influxClient.writeLogicalPartition(partition)
-        }
-    }
-
-
-    static String getVersion() {
-        URL url = getClass().getResource("/version.properties");
-        if (url == null) {
-            return "No version.txt file found in the classpath."
-        }
-        Properties properties = new Properties();
-        properties.load(url.openStream());
-        return properties.getProperty("VERSION_GRADLE") + "-" + properties.getProperty("VERSION_BUILD")
-    }
-
-
-    static void main(String... args) {
-
-        def cli = new CliBuilder(name: "hmci")
-        cli.h(longOpt: 'help', usageHelp: true, 'display usage information')
-        cli.v(longOpt: 'version', versionHelp: true, 'display version information')
-        cli.c(longOpt: 'config', args: 1, required: true, paramLabel: "FILE", defaultValue: '/etc/hmci.groovy', 'configuration file')
-
-        OptionAccessor options = cli.parse(args)
-        if (options.h) {
-            cli.usage()
-            return
-        }
-
-        if(options.v) {
-            println("Version " + getVersion())
-            return
-        }
-
-        ConfigObject configuration
-        if(options.c) {
-
-            File configurationFile = new File((String)options.config)
-            if(!configurationFile.exists()) {
-                println("Error - No configuration file found at: " + configurationFile.toString())
-                System.exit(1)
-            }
-
-            configuration = new ConfigSlurper("development").parse(configurationFile.toURI().toURL());
-        }
-
-        if(configuration == null || configuration.isEmpty()) {
-            println("Error - Empty or faulty configuration")
-            System.exit(1)
-        }
-
-        new App(configuration)
-    }
-
-
-    @Override
-    void run() {
-
-        log.debug("run()")
-
-        boolean keepRunning = true
-        int executions = 0
-
-        while(keepRunning) {
-
-            try {
-                getMetricsForSystems()
-                getMetricsForPartitions()
-
-                writeMetricsForManagedSystems()
-                writeMetricsForLogicalPartitions()
-                influxClient.writeBatchPoints()
-
-                // Refresh HMC's
-                if(executions > rescanHmcEvery) {
-                    executions = 0
-                    discover()
-                }
-            } catch(Exception e) {
-                log.error(e.message, e)
-            }
-
-            executions++
-            Thread.sleep(refreshEverySec * 1000)
-        }
-
-    }
-
-}
diff --git a/src/main/groovy/biz/nellemann/hmci/HmcClient.groovy b/src/main/groovy/biz/nellemann/hmci/HmcClient.groovy
index 9e19aa4..27a6d3c 100644
--- a/src/main/groovy/biz/nellemann/hmci/HmcClient.groovy
+++ b/src/main/groovy/biz/nellemann/hmci/HmcClient.groovy
@@ -15,6 +15,9 @@
  */
 package biz.nellemann.hmci
 
+import biz.nellemann.hmci.Configuration.HmcObject
+import groovy.transform.CompileDynamic
+import groovy.transform.CompileStatic
 import groovy.util.logging.Slf4j
 import groovy.xml.XmlSlurper
 import okhttp3.MediaType
@@ -34,6 +37,7 @@ import java.security.cert.CertificateException
 import java.security.cert.X509Certificate;
 
 @Slf4j
+@CompileStatic
 class HmcClient {
 
     private final MediaType MEDIA_TYPE_IBM_XML_LOGIN = MediaType.parse("application/vnd.ibm.powervm.web+xml; type=LogonRequest");
@@ -48,18 +52,21 @@ class HmcClient {
     protected String authToken
     private final OkHttpClient client
 
-    HmcClient(String hmcId, String baseUrl, String username, String password, Boolean unsafe = false) {
-        this.hmcId = hmcId
-        this.baseUrl = baseUrl
-        this.username = username
-        this.password = password
-        this.unsafe = unsafe
+
+    HmcClient(HmcObject configHmc) {
+
+        this.hmcId = configHmc.name
+        this.baseUrl = configHmc.url
+        this.username = configHmc.username
+        this.password = configHmc.password
+        this.unsafe = configHmc.unsafe
 
         if(unsafe) {
             this.client = getUnsafeOkHttpClient()
         } else {
             this.client = new OkHttpClient()
         }
+
     }
 
 
@@ -69,12 +76,15 @@ class HmcClient {
      *
      * @throws IOException
      */
+    //@CompileDynamic
     void login(Boolean force = false) throws IOException {
 
         if(authToken && !force) {
             return
         }
 
+        log.info("Connecting to HMC - " + baseUrl);
+
         String payload = """\
@@ -96,7 +106,7 @@ class HmcClient {
 
         if (!response.isSuccessful()) throw new IOException("Unexpected code " + response);
 
         // Get response body and parse
-        String responseBody = response.body.string()
+        String responseBody = response.body().string(); response.body().close()
 
         def xml = new XmlSlurper().parseText(responseBody)
@@ -144,6 +154,7 @@ class HmcClient {
      *
      * @return
      */
+    @CompileDynamic
     Map<String, ManagedSystem> getManagedSystems() {
         URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem", baseUrl))
         Response response = getResponse(url)
@@ -185,6 +196,7 @@ class HmcClient {
      * @param UUID of managed system
      * @return
      */
+    @CompileDynamic
     Map<String, LogicalPartition> getLogicalPartitionsForManagedSystem(ManagedSystem system) {
         URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem/%s/LogicalPartition", baseUrl, system.id))
         Response response = getResponse(url)
@@ -225,6 +237,7 @@ class HmcClient {
      * @param systemId
      * @return
      */
+    @CompileDynamic
     String getPcmDataForManagedSystem(ManagedSystem system) {
         log.debug("getPcmDataForManagedSystem() - " + system.id)
         URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/ProcessedMetrics?NoOfSamples=1", baseUrl, system.id))
@@ -257,6 +270,7 @@ class HmcClient {
      * @param partitionId
      * @return
      */
+    @CompileDynamic
     String getPcmDataForLogicalPartition(LogicalPartition partition) {
 
         log.debug(String.format("getPcmDataForLogicalPartition() - %s @ %s", partition.id, partition.system.id))
@@ -305,6 +319,7 @@ class HmcClient {
      * @param url
      * @return
      */
+    //@CompileDynamic
     private Response getResponse(URL url, Integer retry = 0) {
 
         if(responseErrors > 2) {
@@ -324,18 +339,18 @@ class HmcClient {
 
         if (!response.isSuccessful()) {
             response.body().close()
 
-            if(response.code == 401) {
+            if(response.code() == 401) {
                 login(true)
                 return getResponse(url, retry++)
             }
 
             if(retry < 2) {
-                log.warn("getResponse() - Retrying due to unexpected response: " + response.code)
+                log.warn("getResponse() - Retrying due to unexpected response: " + response.code())
                 return getResponse(url, retry++)
             }
 
-            log.error("getResponse() - Unexpected response: " + response.code)
-            throw new IOException("getResponse() - Unexpected response: " + response.code)
+            log.error("getResponse() - Unexpected response: " + response.code())
+            throw new IOException("getResponse() - Unexpected response: " + response.code())
         };
 
         return response
diff --git a/src/main/groovy/biz/nellemann/hmci/InfluxClient.groovy b/src/main/groovy/biz/nellemann/hmci/InfluxClient.groovy
index e15854c..04a21d2 100644
--- a/src/main/groovy/biz/nellemann/hmci/InfluxClient.groovy
+++ b/src/main/groovy/biz/nellemann/hmci/InfluxClient.groovy
@@ -13,63 +13,74 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package biz.nellemann.hmci
+package biz.nellemann.hmci;
 
-import groovy.util.logging.Slf4j
-import org.influxdb.BatchOptions
-import org.influxdb.InfluxDB
-import org.influxdb.InfluxDBFactory
-import org.influxdb.dto.BatchPoints
-import org.influxdb.dto.Point
-import org.influxdb.dto.Query
+import biz.nellemann.hmci.Configuration.InfluxObject
+import groovy.transform.CompileStatic;
+import org.influxdb.BatchOptions;
+import org.influxdb.InfluxDB;
+import org.influxdb.InfluxDBFactory;
+import org.influxdb.dto.BatchPoints;
+import org.influxdb.dto.Point;
+import org.influxdb.dto.Query;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.time.Instant
-import java.util.concurrent.TimeUnit
+import java.util.concurrent.TimeUnit;
 
-@Slf4j
+@CompileStatic
 class InfluxClient {
 
-    final String url
-    final String username
-    final String password
-    final String database
+    private final static Logger log = LoggerFactory.getLogger(InfluxClient.class);
 
-    InfluxDB influxDB
-    BatchPoints batchPoints
+    final private String url;
+    final private String username;
+    final private String password;
+    final private String database;
+
+    private InfluxDB influxDB;
+    private BatchPoints batchPoints;
 
-    InfluxClient(String url, String username, String password, String database) {
-        this.url = url
-        this.username = username
-        this.password = password
-        this.database = database
+    InfluxClient(InfluxObject config) {
+        this.url = config.url;
+        this.username = config.username;
+        this.password = config.password;
+        this.database = config.database;
     }
 
-    void login() {
-        if(!influxDB) {
-            try {
-                influxDB = InfluxDBFactory.connect(url, username, password);
-                createDatabase()
+    void login() throws Exception {
 
-                // Enable batch writes to get better performance.
-                //BatchOptions options = BatchOptions.DEFAULTS.actions(300).flushDuration(500);
-                influxDB.enableBatch(BatchOptions.DEFAULTS);
-                //influxDB.setLogLevel(InfluxDB.LogLevel.BASIC);
+        if(influxDB != null) {
+            return
+        }
 
-                batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build();
+        try {
+            log.info("Connecting to InfluxDB - " + url);
+            influxDB = InfluxDBFactory.connect(url, username, password);
+            createDatabase();
 
-            } catch(Exception e) {
-                log.error(e.message)
-                throw new Exception(e)
-            }
+            // Enable batch writes to get better performance.
+            //BatchOptions options = BatchOptions.DEFAULTS.actions(300).flushDuration(500);
+            influxDB.enableBatch(BatchOptions.DEFAULTS);
+            //influxDB.setLogLevel(InfluxDB.LogLevel.BASIC);
+
+            batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build();
+
+        } catch(Exception e) {
+            log.error(e.getMessage());
+            throw new Exception(e);
         }
     }
 
     void logoff() {
-        influxDB?.close();
-        influxDB = null
+        if(influxDB != null) {
+            influxDB.close();
+        }
+        influxDB = null;
     }
 
@@ -81,13 +92,13 @@ class InfluxClient {
 
     void writeBatchPoints() {
-        log.debug("writeBatchPoints()")
+        log.debug("writeBatchPoints()");
         try {
             influxDB.write(batchPoints);
         } catch(Exception e) {
-            log.error("writeBatchPoints() error - " + e.message)
-            logoff()
-            login()
+            log.error("writeBatchPoints() error - " + e.getMessage());
+            logoff();
+            login();
         }
     }
 
@@ -101,81 +112,81 @@ class InfluxClient {
 
     void writeManagedSystem(ManagedSystem system) {
 
         if(system.metrics == null) {
-            log.warn("writeManagedSystem() - null metrics, skipping")
-            return
+            log.warn("writeManagedSystem() - null metrics, skipping");
+            return;
         }
 
-        Instant timestamp = system.getTimestamp()
-        if(!timestamp) {
-            log.warn("writeManagedSystem() - no timestamp, skipping")
-            return
+        Instant timestamp = system.getTimestamp();
+        if(timestamp == null) {
+            log.warn("writeManagedSystem() - no timestamp, skipping");
+            return;
         }
 
         //BatchPoints batchPoints = BatchPoints.database(database).build();
 
-        getSystemMemory(system, timestamp).each {
-            batchPoints.point(it)
-        }
+        getSystemMemory(system, timestamp).forEach( it -> {
+            batchPoints.point(it);
+        });
 
-        getSystemProcessor(system, timestamp).each {
-            batchPoints.point(it)
-        }
+        getSystemProcessor(system, timestamp).forEach( it -> {
+            batchPoints.point(it);
+        });
 
-        getSystemSharedProcessorPools(system, timestamp).each {
-            batchPoints.point(it)
-        }
+        getSystemSharedProcessorPools(system, timestamp).forEach( it -> {
+            batchPoints.point(it);
+        });
 
-        getSystemSharedAdapters(system, timestamp).each {
-            batchPoints.point(it)
-        }
+        getSystemSharedAdapters(system, timestamp).forEach( it -> {
+            batchPoints.point(it);
+        });
 
-        getSystemFiberChannelAdapters(system, timestamp).each {
-            batchPoints.point(it)
-        }
+        getSystemFiberChannelAdapters(system, timestamp).forEach( it -> {
+            batchPoints.point(it);
+        });
 
-        getSystemGenericPhysicalAdapters(system, timestamp).each {
-            batchPoints.point(it)
-        }
+        getSystemGenericPhysicalAdapters(system, timestamp).forEach( it -> {
+            batchPoints.point(it);
+        });
 
-        getSystemGenericVirtualAdapters(system, timestamp).each {
-            batchPoints.point(it)
-        }
+        getSystemGenericVirtualAdapters(system, timestamp).forEach( it -> {
+            batchPoints.point(it);
+        });
 
     }
 
     private static List<Point> getSystemMemory(ManagedSystem system, Instant timestamp) {
-        List<Map> metrics = system.getMemoryMetrics()
-        return processMeasurementMap(metrics, timestamp, "SystemMemory")
+        List<Measurement> metrics = system.getMemoryMetrics();
+        return processMeasurementMap(metrics, timestamp, "SystemMemory");
     }
 
     private static List<Point> getSystemProcessor(ManagedSystem system, Instant timestamp) {
-        List<Map> metrics = system.getProcessorMetrics()
-        return processMeasurementMap(metrics, timestamp, "SystemProcessor")
+        List<Measurement> metrics = system.getProcessorMetrics();
+        return processMeasurementMap(metrics, timestamp, "SystemProcessor");
    }
 
     private static List<Point> getSystemSharedProcessorPools(ManagedSystem system, Instant timestamp) {
-        List<Map> metrics = system.getSharedProcessorPools()
-        return processMeasurementMap(metrics, timestamp, "SystemSharedProcessorPool")
+        List<Measurement> metrics = system.getSharedProcessorPools();
+        return processMeasurementMap(metrics, timestamp, "SystemSharedProcessorPool");
     }
 
     private static List<Point> getSystemSharedAdapters(ManagedSystem system, Instant timestamp) {
-        List<Map> metrics = system.getSystemSharedAdapters()
-        return processMeasurementMap(metrics, timestamp, "SystemSharedAdapters")
+        List<Measurement> metrics = system.getSystemSharedAdapters();
+        return processMeasurementMap(metrics, timestamp, "SystemSharedAdapters");
     }
 
     private static List<Point> getSystemFiberChannelAdapters(ManagedSystem system, Instant timestamp) {
-        List<Map> metrics = system.getSystemFiberChannelAdapters()
-        return processMeasurementMap(metrics, timestamp, "SystemFiberChannelAdapters")
+        List<Measurement> metrics = system.getSystemFiberChannelAdapters();
+        return processMeasurementMap(metrics, timestamp, "SystemFiberChannelAdapters");
     }
 
     private static List<Point> getSystemGenericPhysicalAdapters(ManagedSystem system, Instant timestamp) {
-        List<Map> metrics = system.getSystemGenericPhysicalAdapters()
-        return processMeasurementMap(metrics, timestamp, "SystemGenericPhysicalAdapters")
+        List<Measurement> metrics = system.getSystemGenericPhysicalAdapters();
+        return processMeasurementMap(metrics, timestamp, "SystemGenericPhysicalAdapters");
     }
 
     private static List<Point> getSystemGenericVirtualAdapters(ManagedSystem system, Instant timestamp) {
-        List<Map> metrics = system.getSystemGenericVirtualAdapters()
-        return processMeasurementMap(metrics, timestamp, "SystemGenericVirtualAdapters")
+        List<Measurement> metrics = system.getSystemGenericVirtualAdapters();
+        return processMeasurementMap(metrics, timestamp, "SystemGenericVirtualAdapters");
     }
 
@@ -186,64 +197,64 @@ class InfluxClient {
 
     void writeLogicalPartition(LogicalPartition partition) {
 
         if(partition.metrics == null) {
-            log.warn("writeLogicalPartition() - null metrics, skipping")
-            return
+            log.warn("writeLogicalPartition() - null metrics, skipping");
+            return;
         }
 
-        Instant timestamp = partition.getTimestamp()
-        if(!timestamp) {
-            log.warn("writeLogicalPartition() - no timestamp, skipping")
-            return
+        Instant timestamp = partition.getTimestamp();
+        if(timestamp == null) {
+            log.warn("writeLogicalPartition() - no timestamp, skipping");
+            return;
         }
 
         //BatchPoints batchPoints = BatchPoints.database(database).build();
 
-        getPartitionAffinityScore(partition, timestamp).each {
-            batchPoints.point(it)
-        }
+        getPartitionAffinityScore(partition, timestamp).forEach( it -> {
+            batchPoints.point(it);
+        });
 
-        getPartitionMemory(partition, timestamp).each {
-            batchPoints.point(it)
-        }
+        getPartitionMemory(partition, timestamp).forEach( it -> {
+            batchPoints.point(it);
+        });
 
-        getPartitionProcessor(partition, timestamp).each {
-            batchPoints.point(it)
-        }
+        getPartitionProcessor(partition, timestamp).forEach( it -> {
+            batchPoints.point(it);
+        });
 
-        getPartitionVirtualEthernetAdapter(partition, timestamp).each {
-            batchPoints.point(it)
-        }
+        getPartitionVirtualEthernetAdapter(partition, timestamp).forEach( it -> {
+            batchPoints.point(it);
+        });
 
-        getPartitionVirtualFiberChannelAdapter(partition, timestamp).each {
-            batchPoints.point(it)
-        }
+        getPartitionVirtualFiberChannelAdapter(partition, timestamp).forEach( it -> {
+            batchPoints.point(it);
+        });
 
         //influxDB.write(batchPoints);
     }
 
     private static List<Point> getPartitionAffinityScore(LogicalPartition partition, Instant timestamp) {
-        List<Map> metrics = partition.getAffinityScore()
-        return processMeasurementMap(metrics, timestamp, "PartitionAffinityScore")
+        List<Measurement> metrics = partition.getAffinityScore();
+        return processMeasurementMap(metrics, timestamp, "PartitionAffinityScore");
     }
 
     private static List<Point> getPartitionMemory(LogicalPartition partition, Instant timestamp) {
-        List<Map> metrics = partition.getMemoryMetrics()
-        return processMeasurementMap(metrics, timestamp, "PartitionMemory")
+        List<Measurement> metrics = partition.getMemoryMetrics();
+        return processMeasurementMap(metrics, timestamp, "PartitionMemory");
     }
 
     private static List<Point> getPartitionProcessor(LogicalPartition partition, Instant timestamp) {
-        List<Map> metrics = partition.getProcessorMetrics()
-        return processMeasurementMap(metrics, timestamp, "PartitionProcessor")
+        List<Measurement> metrics = partition.getProcessorMetrics();
+        return processMeasurementMap(metrics, timestamp, "PartitionProcessor");
     }
 
     private static List<Point> getPartitionVirtualEthernetAdapter(LogicalPartition partition, Instant timestamp) {
-        List<Map> metrics = partition.getVirtualEthernetAdapterMetrics()
-        return processMeasurementMap(metrics, timestamp, "PartitionVirtualEthernetAdapters")
+        List<Measurement> metrics = partition.getVirtualEthernetAdapterMetrics();
+        return processMeasurementMap(metrics, timestamp, "PartitionVirtualEthernetAdapters");
     }
 
     private static List<Point> getPartitionVirtualFiberChannelAdapter(LogicalPartition partition, Instant timestamp) {
-        List<Map> metrics = partition.getVirtualFiberChannelAdaptersMetrics()
-        return processMeasurementMap(metrics, timestamp, "PartitionVirtualFiberChannelAdapters")
+        List<Measurement> metrics = partition.getVirtualFiberChannelAdaptersMetrics();
+        return processMeasurementMap(metrics, timestamp, "PartitionVirtualFiberChannelAdapters");
     }
 
@@ -252,33 +263,34 @@ class InfluxClient {
         Shared
     */
 
-    private static List<Point> processMeasurementMap(List<Map> listOfMaps, Instant timestamp, String measurement) {
+    private static List<Point> processMeasurementMap(List<Measurement> measurements, Instant timestamp, String measurement) {
 
-        List<Point> list = new ArrayList<>()
-
-        listOfMaps.each { map ->
+        List<Point> listOfPoints = new ArrayList<>();
+        measurements.forEach( m -> {
 
             // Iterate fields
-            map.get("fields").each { String fieldName, BigDecimal fieldValue ->
+            //Map<String, BigDecimal> fieldsMap = m.get("fields");
+            m.fields.forEach((fieldName, fieldValue) -> {
 
                 log.debug("processMeasurementMap() " + measurement + " - fieldName: " + fieldName + ", fieldValue: " + fieldValue)
 
                 Point.Builder builder = Point.measurement(measurement)
                         .time(timestamp.toEpochMilli(), TimeUnit.MILLISECONDS)
                         .tag("name", fieldName)
-                        .addField("value", fieldValue)
+                        .addField("value", fieldValue);
 
                 // For each field, we add all tags
-                map.get("tags").each { String tagName, String tagValue ->
-                    builder.tag(tagName, tagValue)
-                    log.debug("processMeasurementMap() " + measurement + " - tagName: " + tagName + ", tagValue: " + tagValue)
-                }
+                //Map<String, String> tagsMap = m.get("tags");
+                m.tags.forEach((tagName, tagValue) -> {
+                    builder.tag(tagName, tagValue);
+                    log.debug("processMeasurementMap() " + measurement + " - tagName: " + tagName + ", tagValue: " + tagValue);
+                });
 
-                list.add(builder.build())
-            }
+                listOfPoints.add(builder.build());
+            });
 
-        }
+        });
 
-        return list
+        return listOfPoints;
     }
diff --git a/src/main/groovy/biz/nellemann/hmci/Insights.groovy b/src/main/groovy/biz/nellemann/hmci/Insights.groovy
new file mode 100644
index 0000000..9334bed
--- /dev/null
+++ b/src/main/groovy/biz/nellemann/hmci/Insights.groovy
@@ -0,0 +1,176 @@
+/**
+ * Copyright 2020 Mark Nellemann
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package biz.nellemann.hmci
+
+import groovy.transform.CompileStatic
+import groovy.util.logging.Slf4j;
+
+@Slf4j
+@CompileStatic
+class Insights {
+
+    final Configuration configuration;
+
+    InfluxClient influxClient;
+    Map<String, HmcClient> hmcClients = new HashMap<>();
+    Map<String, ManagedSystem> systems = new HashMap<String, ManagedSystem>();
+    Map<String, LogicalPartition> partitions = new HashMap<String, LogicalPartition>();
+
+
+    Insights(Configuration configuration) {
+        this.configuration = configuration;
+
+        try {
+            influxClient = new InfluxClient(configuration.influx);
+            influxClient.login();
+        } catch(Exception e) {
+            System.exit(1);
+        }
+
+        // Initial scan
+        discover();
+    }
+
+
+    void discover() {
+
+        configuration.hmc.forEach( configHmc -> {
+            if(!hmcClients?.containsKey(configHmc.name)) {
+                log.debug("Adding HMC: " + configHmc.toString())
+                HmcClient hmcClient = new HmcClient(configHmc)
+                hmcClients.put(configHmc.name, hmcClient)
+            }
+        });
+
+        hmcClients.forEach(( hmcId, hmcClient) -> {
+
+            try {
+                hmcClient.login()
+                hmcClient.getManagedSystems().each { systemId, system ->
+
+                    // Add to list of known systems
+                    systems.putIfAbsent(systemId, system)
+
+                    // Get LPAR's for this system
+                    hmcClient.getLogicalPartitionsForManagedSystem(system).forEach((partitionId, partition) -> {
+
+                        // Add to list of known partitions
+                        partitions.putIfAbsent(partitionId, partition)
+                    });
+                }
+            } catch(Exception e) {
+                log.error("discover() - " + hmcId + " error: " + e.message)
+                //hmcClients.remove(hmcId)
+            }
+
+        });
+
+    }
+
+
+    void getMetricsForSystems() {
+
+        try {
+
+            systems.forEach((systemId, system) -> {
+
+                HmcClient hmcClient = hmcClients.get(system.hmcId)
+
+                // Get and process metrics for this system
+                String tmpJsonString = hmcClient.getPcmDataForManagedSystem(system)
+                if(tmpJsonString && !tmpJsonString.empty) {
+                    system.processMetrics(tmpJsonString)
+                }
+
+            });
+
+        } catch(Exception e) {
+            log.error(e.message)
+        }
+
+    }
+
+
+    void getMetricsForPartitions() {
+
+        try {
+
+            // Get LPAR's for this system
+            partitions.forEach((partitionId, partition) -> {
+
+                HmcClient hmcClient = hmcClients.get(partition.system.hmcId)
+
+                // Get and process metrics for this partition
+                String tmpJsonString2 = hmcClient.getPcmDataForLogicalPartition(partition)
+                if(tmpJsonString2 && !tmpJsonString2.empty) {
+                    partition.processMetrics(tmpJsonString2)
+                }
+
+            });
+
+        } catch(Exception e) {
+            log.error(e.message)
+        }
+    }
+
+
+    void writeMetricsForManagedSystems() {
+        systems.forEach((systemId, system) -> {
+            influxClient.writeManagedSystem(system)
+        });
+    }
+
+
+    void writeMetricsForLogicalPartitions() {
+        partitions.each {partitionId, partition ->
+            influxClient.writeLogicalPartition(partition)
+        }
+    }
+
+
+    void run() {
+
+        log.debug("run()")
+
+        boolean keepRunning = true
+        int executions = 0
+
+        while(keepRunning) {
+
+            try {
+                getMetricsForSystems()
+                getMetricsForPartitions()
+
+                writeMetricsForManagedSystems()
+                writeMetricsForLogicalPartitions()
+                influxClient.writeBatchPoints()
+
+                // Refresh HMC's
+                if(executions > configuration.rescan) {
+                    executions = 0
+                    discover()
+                }
+            } catch(Exception e) {
+                log.error(e.message, e)
+            }
+
+            executions++
+            Thread.sleep(configuration.refresh * 1000)
+        }
+
+    }
+
+}
diff --git a/src/main/groovy/biz/nellemann/hmci/LogicalPartition.groovy b/src/main/groovy/biz/nellemann/hmci/LogicalPartition.groovy
index 98c4ab5..9dbb073 100644
--- a/src/main/groovy/biz/nellemann/hmci/LogicalPartition.groovy
+++ b/src/main/groovy/biz/nellemann/hmci/LogicalPartition.groovy
@@ -15,9 +15,12 @@
  */
 package biz.nellemann.hmci
 
+import groovy.transform.CompileDynamic
+import groovy.transform.CompileStatic
 import groovy.util.logging.Slf4j
 
 @Slf4j
+@CompileStatic
 class LogicalPartition extends MetaSystem {
 
     public String id
@@ -36,64 +39,68 @@ class LogicalPartition extends MetaSystem {
         return "[${id}] ${name} (${type})"
     }
 
+    @CompileDynamic
+    List<Measurement> getAffinityScore() {
 
-    List<Map> getAffinityScore() {
-
-        List<Map> list = new ArrayList<>()
-        Map map = new HashMap()
+        List<Measurement> list = new ArrayList<>()
+        //Map map = new HashMap()
 
         HashMap<String, String> tagsMap = [
            system: system.name,
            partition: name,
         ]
-        map.put("tags", tagsMap)
+        //map.put("tags", tagsMap)
         log.debug("getAffinityScore() - tags: " + tagsMap.toString())
 
         HashMap<String, BigDecimal> fieldsMap = [
            affinityScore: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.affinityScore,
         ]
-        map.put("fields", fieldsMap)
+        //map.put("fields", fieldsMap)
         log.debug("getAffinityScore() - fields: " + fieldsMap.toString())
 
-        list.add(map)
+        Measurement measurement = new Measurement(tagsMap, fieldsMap);
+        list.add(measurement);
 
         return list
     }
 
-    List<Map> getMemoryMetrics() {
+    @CompileDynamic
+    List<Measurement> getMemoryMetrics() {
 
-        List<Map> list = new ArrayList<>()
-        Map map = new HashMap()
+        List<Measurement> list = new ArrayList<>()
+        //Map map = new HashMap()
 
         HashMap<String, String> tagsMap = [
            system: system.name,
            partition: name,
         ]
-        map.put("tags", tagsMap)
+        //map.put("tags", tagsMap)
         log.debug("getMemoryMetrics() - tags: " + tagsMap.toString())
 
         HashMap<String, BigDecimal> fieldsMap = [
            logicalMem: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.memory?.logicalMem?.first(),
            backedPhysicalMem: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.memory?.backedPhysicalMem?.first(),
         ]
-        map.put("fields", fieldsMap)
+        //map.put("fields", fieldsMap)
         log.debug("getMemoryMetrics() - fields: " + fieldsMap.toString())
 
-        list.add(map)
+        Measurement measurement = new Measurement(tagsMap, fieldsMap);
+        list.add(measurement);
+
         return list
     }
 
+    @CompileDynamic
+    List<Measurement> getProcessorMetrics() {
 
-    List<Map> getProcessorMetrics() {
-
-        List<Map> list = new ArrayList<>()
-        Map map = new HashMap()
+        List<Measurement> list = new ArrayList<>()
+        //Map map = new HashMap()
 
         HashMap<String, String> tagsMap = [
            system: system.name,
            partition: name,
         ]
-        map.put("tags", tagsMap)
+        //map.put("tags", tagsMap)
         log.debug("getProcessorMetrics() - tags: " + tagsMap.toString())
 
         HashMap<String, BigDecimal> fieldsMap = [
@@ -109,19 +116,21 @@ class LogicalPartition extends MetaSystem {
            timePerInstructionExecution: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.processor?.timeSpentWaitingForDispatch?.first(),
            timeSpentWaitingForDispatch: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.processor?.timePerInstructionExecution?.first(),
         ]
-        map.put("fields", fieldsMap)
+        //map.put("fields", fieldsMap)
         log.debug("getProcessorMetrics() - fields: " + fieldsMap.toString())
 
-        list.add(map)
+        Measurement measurement = new Measurement(tagsMap, fieldsMap);
+        list.add(measurement);
+
         return list
     }
 
+    @CompileDynamic
+    List<Measurement> getVirtualEthernetAdapterMetrics() {
 
-    List<Map> getVirtualEthernetAdapterMetrics() {
-
-        List<Map> list = new ArrayList<>()
+        List<Measurement> list = new ArrayList<>()
         metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.network?.virtualEthernetAdapters?.each {
-            Map map = new HashMap()
+            //Map map = new HashMap()
 
             HashMap<String, String> tagsMap = [
                system: system.name,
@@ -131,7 +140,7 @@ class LogicalPartition extends MetaSystem {
                vlanId: it.vlanId as String,
                vswitchId: it.vswitchId as String,
             ]
-            map.put("tags", tagsMap)
+            //map.put("tags", tagsMap)
             log.debug("getVirtualEthernetAdapterMetrics() - tags: " + tagsMap.toString())
 
             HashMap<String, BigDecimal> fieldsMap = [
@@ -140,10 +149,11 @@ class LogicalPartition extends MetaSystem {
                receivedBytes: it.receivedBytes.first(),
                sentBytes: it.sentBytes.first(),
             ]
-            map.put("fields", fieldsMap)
+            //map.put("fields", fieldsMap)
             log.debug("getVirtualEthernetAdapterMetrics() - fields: " + fieldsMap.toString())
 
-            list.add(map)
+            Measurement measurement = new Measurement(tagsMap, fieldsMap);
+            list.add(measurement);
         }
 
         return list
@@ -151,11 +161,12 @@ class LogicalPartition extends MetaSystem {
 
     //PartitionVirtualFiberChannelAdapters
-    List<Map> getVirtualFiberChannelAdaptersMetrics() {
+    @CompileDynamic
+    List<Measurement> getVirtualFiberChannelAdaptersMetrics() {
 
-        List<Map> list = new ArrayList<>()
+        List<Measurement> list = new ArrayList<>()
         metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.storage?.virtualFiberChannelAdapters?.each {
-            Map map = new HashMap()
+            //Map map = new HashMap()
 
             HashMap<String, String> tagsMap = [
                system: system.name,
@@ -163,7 +174,7 @@ class LogicalPartition extends MetaSystem {
                viosId: it.viosId as String,
                wwpn: it.wwpn,
             ]
-            map.put("tags", tagsMap)
+            //map.put("tags", tagsMap)
             log.debug("getVirtualFiberChannelAdaptersMetrics() - tags: " + tagsMap.toString())
 
             HashMap<String, BigDecimal> fieldsMap = [
@@ -171,10 +182,11 @@ class LogicalPartition extends MetaSystem {
                writeBytes: it.writeBytes.first(),
                readBytes: it.readBytes.first(),
             ]
-            map.put("fields", fieldsMap)
+            //map.put("fields", fieldsMap)
             log.debug("getVirtualFiberChannelAdaptersMetrics() - fields: " + fieldsMap.toString())
 
-            list.add(map)
+            Measurement measurement = new Measurement(tagsMap, fieldsMap);
+            list.add(measurement);
         }
 
         return list
diff --git a/src/main/groovy/biz/nellemann/hmci/Main.groovy b/src/main/groovy/biz/nellemann/hmci/Main.groovy
new file mode 100644
index 0000000..0994d74
--- /dev/null
+++ b/src/main/groovy/biz/nellemann/hmci/Main.groovy
@@ -0,0 +1,59 @@
+/*
+    Copyright 2020 mark.nellemann@gmail.com
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+ */
+package biz.nellemann.hmci
+
+import groovy.transform.CompileStatic;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import picocli.CommandLine;
+import picocli.CommandLine.Command;
+import java.util.concurrent.Callable;
+
+@CompileStatic
+@Command(name = "hmci",
+        mixinStandardHelpOptions = true,
+        description = "HMC Insights.",
+        versionProvider = biz.nellemann.hmci.VersionProvider.class)
+public class Main implements Callable<Integer> {
+
+    private final static Logger log = LoggerFactory.getLogger(Main.class);
+
+    @CommandLine.Option(names = ["-c", "--conf"], description = "Configuration file [default: '/etc/hmci.toml'].")
+    private String configurationFile = "/etc/hmci.toml";
+
+    public static void main(String... args) {
+        int exitCode = new CommandLine(new Main()).execute(args);
+        System.exit(exitCode);
+    }
+
+
+    @Override
+    public Integer call() throws IOException {
+
+        File file = new File(configurationFile);
+        if(!file.exists()) {
+            System.err.println("Error - No configuration file found at: " + file.toString());
+            return -1;
+        }
+
+        Configuration configuration = new Configuration(configurationFile);
+        Insights insights = new Insights(configuration);
+        insights.run();
+
+        return 0;
+    }
+
+}
diff --git a/src/main/groovy/biz/nellemann/hmci/ManagedSystem.groovy b/src/main/groovy/biz/nellemann/hmci/ManagedSystem.groovy
index c339519..0d5ce1b 100644
--- a/src/main/groovy/biz/nellemann/hmci/ManagedSystem.groovy
+++ b/src/main/groovy/biz/nellemann/hmci/ManagedSystem.groovy
@@ -15,10 +15,13 @@
  */
 package biz.nellemann.hmci
 
+import groovy.transform.CompileDynamic
+import groovy.transform.CompileStatic
 import groovy.util.logging.Slf4j
 
 @Slf4j
+@CompileStatic
 class ManagedSystem extends MetaSystem {
 
     public final String hmcId
@@ -43,40 +46,46 @@ class ManagedSystem extends MetaSystem {
     }
 
-    List<Map> getMemoryMetrics() {
+    @CompileDynamic
+    List<Measurement> getMemoryMetrics() {
 
-        List<Map> list = new ArrayList<>()
-        Map map = new HashMap()
+        List<Measurement> list = new ArrayList<>()
+        //Map map = new HashMap()
 
         HashMap<String, String> tagsMap = [
            system: name,
         ]
-        map.put("tags", tagsMap)
+        //map.put("tags", tagsMap)
         log.debug("getMemoryMetrics() - tags: " + tagsMap.toString())
 
-        HashMap<String, BigDecimal> fieldsMap = [
-            totalMem: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.totalMem?.first(),
-            availableMem: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.availableMem?.first(),
-            configurableMem: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.configurableMem?.first(),
-            assignedMemToLpars: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.assignedMemToLpars?.first(),
+        Map<String, BigDecimal> fieldsMap = [
+            "totalMem": metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.totalMem?.first(),
+            "availableMem": metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.availableMem?.first(),
+            "configurableMem": metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.configurableMem?.first(),
+            "assignedMemToLpars": metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.assignedMemToLpars?.first(),
         ]
-        map.put("fields", fieldsMap)
+
+        //map.put("fields", fieldsMap)
         log.debug("getMemoryMetrics() - fields: " + fieldsMap.toString())
 
-        list.add(map)
+        Measurement measurement = new Measurement(tagsMap, fieldsMap);
+        list.add(measurement)
+
         return list
     }
 
-    List<Map> getProcessorMetrics() {
+    @CompileDynamic
+    List<Measurement> getProcessorMetrics() {
 
-        List<Map> list = new ArrayList<>()
-        Map map = new HashMap()
+        List<Measurement> list = new ArrayList<>()
+        //Map map = new HashMap<>()
 
         HashMap<String, String> tagsMap = [
            system: name,
         ]
-        map.put("tags", tagsMap)
+        //map.put("tags", tagsMap)
+        //measurement.tags = tagsMap;
         log.debug("getProcessorMetrics() - tags: " + tagsMap.toString())
 
         HashMap<String, BigDecimal> fieldsMap = [
@@ -85,55 +94,62 @@ class ManagedSystem extends MetaSystem {
            availableProcUnits: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.processor?.availableProcUnits?.first(),
            configurableProcUnits: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.processor?.configurableProcUnits?.first(),
         ]
-        map.put("fields", fieldsMap)
+        //map.put("fields", fieldsMap)
+        //measurement.fields = fieldsMap;
         log.debug("getProcessorMetrics() - fields: " + fieldsMap.toString())
 
-        list.add(map)
+        Measurement measurement = new Measurement(tagsMap, fieldsMap);
+        list.add(measurement)
+
         return list
     }
 
-    List<Map> getSharedProcessorPools() {
+    @CompileDynamic
+    List<Measurement> getSharedProcessorPools() {
 
-        List<Map> list = new ArrayList<>()
+        List<Measurement> list = new ArrayList<>()
         metrics.systemUtil?.utilSamples?.first()?.serverUtil?.sharedProcessorPool?.each {
-            Map map = new HashMap()
+            //Map map = new HashMap()
 
             HashMap<String, String> tagsMap = [
                system: name,
                pool: it.name,
             ]
-            map.put("tags", tagsMap)
+            //map.put("tags", tagsMap)
             log.debug("getSharedProcessorPools() - tags: " + tagsMap.toString())
 
             HashMap<String, BigDecimal> fieldsMap = [
                assignedProcUnits: it.assignedProcUnits.first(),
                availableProcUnits: it.availableProcUnits.first(),
             ]
-            map.put("fields", fieldsMap)
+            //map.put("fields", fieldsMap)
             log.debug("getSharedProcessorPools() - fields: " + fieldsMap.toString())
 
-            list.add(map)
-
+            Measurement measurement = new Measurement(tagsMap, fieldsMap);
+            list.add(measurement)
         }
 
         return list
     }
 
-    List<Map> getSystemSharedAdapters() {
+    @CompileDynamic
+    List<Measurement> getSystemSharedAdapters() {
 
-        List<Map> list = new ArrayList<>()
+        List<Measurement> list = new ArrayList<>()
         metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each {vios ->
             vios.network.sharedAdapters.each {
-                Map map = new HashMap()
+                //Map map = new HashMap()
+                Measurement measurement = new Measurement();
 
                 HashMap<String, String> tagsMap = [
                    system: name,
                    type: it.type,
                    vios: vios.name,
                 ]
-                map.put("tags", tagsMap)
+                //map.put("tags", tagsMap)
+                measurement.tags = tagsMap;
                 log.debug("getSystemSharedAdapters() - tags: " + tagsMap.toString())
 
                 HashMap<String, BigDecimal> fieldsMap = [
@@ -141,25 +157,27 @@ class ManagedSystem extends MetaSystem {
                    receivedBytes: it.receivedBytes.first(),
                    transferredBytes: it.transferredBytes.first(),
                 ]
-                map.put("fields", fieldsMap)
+                //map.put("fields", fieldsMap)
+                measurement.fields = fieldsMap;
                 log.debug("getSystemSharedAdapters() - fields: " + fieldsMap.toString())
 
-                list.add(map)
+                list.add(measurement)
             }
-
         }
 
         return list
     }
 
-    List<Map> getSystemFiberChannelAdapters() {
+    @CompileDynamic
+    List<Measurement> getSystemFiberChannelAdapters() {
 
-        List<Map> list = new ArrayList<>()
+        List<Measurement> list = new ArrayList<>()
         metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each { vios ->
             log.debug("getSystemFiberChannelAdapters() - VIOS: " + vios.name)
             vios.storage?.fiberChannelAdapters?.each {
-                Map map = new HashMap()
+                //HashMap map = new HashMap<>()
+                Measurement measurement = new Measurement();
 
                 HashMap<String, String> tagsMap = [
                    id: it.id,
@@ -168,7 +186,8 @@ class ManagedSystem extends MetaSystem {
                    system: name,
                    vios: vios.name,
                    device: it.physicalLocation,
                 ]
-                map.put("tags", tagsMap)
+                //map.put("tags", tagsMap)
+                measurement.tags = tagsMap;
                 log.debug("getSystemFiberChannelAdapters() - tags: " + tagsMap.toString())
 
                 HashMap<String, BigDecimal> fieldsMap = [
@@ -176,23 +195,25 @@ class ManagedSystem extends MetaSystem {
                    readBytes: it.readBytes.first(),
                    transmittedBytes: it.transmittedBytes.first(),
                 ]
-                map.put("fields", fieldsMap)
+                //map.put("fields", fieldsMap)
+                measurement.fields = fieldsMap;
                 log.debug("getSystemFiberChannelAdapters() - fields: " + fieldsMap.toString())
 
-                list.add(map)
+                list.add(measurement)
             }
-
         }
 
         return list
     }
 
-    List<Map> getSystemGenericPhysicalAdapters() {
-        List<Map> list = new ArrayList<>()
+    @CompileDynamic
+    List<Measurement> getSystemGenericPhysicalAdapters() {
+        List<Measurement> list = new ArrayList<>()
         metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each { vios ->
             vios.storage?.genericPhysicalAdapters?.each {
-                Map map = new HashMap()
+                //Map map = new HashMap()
+                Measurement measurement = new Measurement();
 
                 HashMap<String, String> tagsMap = [
                    id: it.id,
@@ -200,7 +221,8 @@ class ManagedSystem extends MetaSystem {
                    system: name,
                    vios: vios.name,
                    device: it.physicalLocation,
                 ]
-                map.put("tags", tagsMap)
+                //map.put("tags", tagsMap)
+                measurement.tags = tagsMap;
                 log.debug("getSystemGenericPhysicalAdapters() - tags: " + tagsMap.toString())
 
                 HashMap<String, BigDecimal> fieldsMap = [
@@ -208,24 +230,25 @@ class ManagedSystem extends MetaSystem {
                    readBytes: it.readBytes.first(),
                    transmittedBytes: it.transmittedBytes.first(),
                 ]
-                map.put("fields", fieldsMap)
+                //map.put("fields", fieldsMap)
+                measurement.fields = fieldsMap;
                 log.debug("getSystemGenericPhysicalAdapters() - fields: " + fieldsMap.toString())
 
-                list.add(map)
-
+                list.add(measurement)
             }
-
         }
 
         return list
     }
 
-    List<Map> getSystemGenericVirtualAdapters() {
-        List<Map> list = new ArrayList<>()
+    @CompileDynamic
+    List<Measurement> getSystemGenericVirtualAdapters() {
+        List<Measurement> list = new ArrayList<>()
         metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each { vios ->
             vios.storage?.genericVirtualAdapters?.each {
-                Map map = new HashMap()
+                //Map map = new HashMap()
+                Measurement measurement = new Measurement();
 
                 HashMap<String, String> tagsMap = [
                    id: it.id,
@@ -233,7 +256,8 @@ class ManagedSystem extends MetaSystem {
                    system: name,
                    vios: vios.name,
                    device: it.physicalLocation,
                 ]
-                map.put("tags", tagsMap)
+                //map.put("tags", tagsMap)
+                measurement.tags = tagsMap;
                 log.debug("getSystemGenericVirtualAdapters() - tags: " + tagsMap.toString())
 
                 HashMap<String, BigDecimal> fieldsMap = [
@@ -241,13 +265,12 @@ class ManagedSystem extends MetaSystem {
                    readBytes: it.readBytes.first(),
                    transmittedBytes: it.transmittedBytes.first(),
                 ]
-                map.put("fields", fieldsMap)
+                //map.put("fields", fieldsMap)
+                measurement.fields = fieldsMap;
                 log.debug("getSystemGenericVirtualAdapters() - fields: " + fieldsMap.toString())
 
-                list.add(map)
-
+                list.add(measurement);
             }
-
         }
 
         return list
diff --git a/src/main/groovy/biz/nellemann/hmci/MetaSystem.groovy b/src/main/groovy/biz/nellemann/hmci/MetaSystem.groovy
index 065a780..c73a24e 100644
--- a/src/main/groovy/biz/nellemann/hmci/MetaSystem.groovy
+++ b/src/main/groovy/biz/nellemann/hmci/MetaSystem.groovy
@@ -17,6 +17,8 @@ package biz.nellemann.hmci
 
 import biz.nellemann.hmci.pcm.PcmData
 import groovy.json.JsonSlurper
+import groovy.transform.CompileDynamic
+import groovy.transform.CompileStatic
 import groovy.util.logging.Slf4j
 
 import java.time.Instant
@@ -24,20 +26,22 @@ import java.time.format.DateTimeFormatter
 import java.time.format.DateTimeParseException
 
 @Slf4j
+@CompileStatic
 abstract class MetaSystem {
 
     protected PcmData metrics
 
+    @CompileDynamic
     void processMetrics(String json) {
-        def pcmMap = new JsonSlurper().parseText(json)
-        metrics = new PcmData(pcmMap as Map)
+        Map pcmMap = new JsonSlurper().parseText(json) as Map
+        metrics = new PcmData(pcmMap)
     }
 
-
+    @CompileDynamic
     Instant getTimestamp() {
 
         String timestamp = metrics.systemUtil.utilSamples.first().sampleInfo.timeStamp
-        Instant instant
+        Instant instant = null
         try {
             log.debug("getTimeStamp() - PMC Timestamp: " + timestamp)
             DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[XXX][X]");
diff --git a/src/main/java/biz/nellemann/hmci/Configuration.java b/src/main/java/biz/nellemann/hmci/Configuration.java
new file mode 100644
index 0000000..215b93a
--- /dev/null
+++ b/src/main/java/biz/nellemann/hmci/Configuration.java
@@ -0,0 +1,161 @@
+package biz.nellemann.hmci;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.tomlj.Toml;
+import org.tomlj.TomlParseResult;
+import org.tomlj.TomlTable;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
+
+public class Configuration {
+
+    private final static Logger log = LoggerFactory.getLogger(Configuration.class);
+
+    final public Long refresh;
+    final public Long rescan;
+    final public InfluxObject influx;
+    final public List<HmcObject> hmc;
+
+    Configuration(String configurationFile) throws IOException {
+
+        Path source = Paths.get(configurationFile);
+        TomlParseResult result = Toml.parse(source);
+        result.errors().forEach(error -> System.err.println(error.toString()));
+        //System.out.println(result.toJson());
+
+        if(result.contains("hmci.refresh")) {
+            refresh = result.getLong("hmci.refresh");
+        } else {
+            refresh = 15L;
+        }
+
+        if(result.contains("hmci.rescan")) {
+            rescan = result.getLong("hmci.rescan");
+        } else {
+            rescan = 60L;
+        }
+
+        hmc = getHmc(result);
+        influx = getInflux(result);
+
+    }
+
+
+    List<HmcObject> getHmc(TomlParseResult result) {
+
+        ArrayList<HmcObject> list = new ArrayList<>();
+
+        if(result.contains("hmc") && result.isTable("hmc")) {
+            TomlTable hmcTable = result.getTable("hmc");
+            for(String key : hmcTable.keySet()) {
+
+                HmcObject c = new HmcObject();
+                c.name = key;
+
+                if(hmcTable.contains(key+".url")) {
+                    c.url = hmcTable.getString(key+".url");
+                }
+
+                if(hmcTable.contains(key+".username")) {
+                    c.username = hmcTable.getString(key+".username");
+                }
+
+                if(hmcTable.contains(key+".password")) {
+                    c.password = hmcTable.getString(key+".password");
+                }
+
+                if(hmcTable.contains(key+".unsafe")) {
+                    c.unsafe = hmcTable.getBoolean(key+".unsafe");
+                } else {
+                    c.unsafe = false;
+                }
+
+                list.add(c);
+            }
+        }
+
+        return list;
+    }
+
+
+    InfluxObject getInflux(TomlParseResult result) {
+
+        InfluxObject c = new InfluxObject();
+
+        if(result.contains("influx")) {
+            TomlTable t = result.getTable("influx");
+
+            if(t != null && t.contains("url")) {
+                c.url = t.getString("url");
+            }
+
+            if(t != null && t.contains("username")) {
+                c.username = t.getString("username");
+            }
+
+            if(t != null && t.contains("password")) {
+                c.password = t.getString("password");
+            }
+
+            if(t != null && t.contains("database")) {
+                c.database = t.getString("database");
+            }
+
+        }
+
+        return c;
+    }
+
+
+    static class InfluxObject {
+
+        String url = "http://localhost:8086";
+        String username = "root";
+        String password = "";
+        String database = "hmci";
+
+        private boolean isValid = false;
+
+        Boolean isValid() {
+            return isValid;
+        }
+
+        // TODO: Fixme
+        void validate() {
+            isValid = true;
+        }
+
+    }
+
+
+    static class HmcObject {
+
+        String name;
+        String url;
+        String username;
+        String password;
+        Boolean unsafe;
+
+        private boolean isValid = false;
+
+        Boolean isValid() {
+            return isValid;
+        }
+
+        // TODO: Fixme
+        void validate() {
+            isValid = true;
+        }
+
+        @Override
+        public String toString() {
+            return name;
+        }
+    }
+
+}
diff --git a/src/main/java/biz/nellemann/hmci/Measurement.java b/src/main/java/biz/nellemann/hmci/Measurement.java
new file mode 100644
index 0000000..ad1bff9
--- /dev/null
+++ b/src/main/java/biz/nellemann/hmci/Measurement.java
@@ -0,0 +1,18 @@
+package biz.nellemann.hmci;
+
+import java.math.BigDecimal;
+import java.util.Map;
+
+public class Measurement {
+
+    Map<String, String> tags;
+    Map<String, BigDecimal> fields;
+
+    Measurement() {
+    }
+
+    Measurement(Map<String, String> tags, Map<String, BigDecimal> fields) {
+        this.tags = tags;
+        this.fields = fields;
+    }
+}
diff --git a/src/main/java/biz/nellemann/hmci/VersionProvider.java b/src/main/java/biz/nellemann/hmci/VersionProvider.java
new file mode 100644
index 0000000..dcd480f
--- /dev/null
+++ b/src/main/java/biz/nellemann/hmci/VersionProvider.java
@@ -0,0 +1,19 @@
+package biz.nellemann.hmci;
+
+import picocli.CommandLine;
+
+import java.io.IOException;
+import java.util.jar.Attributes;
+import java.util.jar.Manifest;
+
+class VersionProvider implements CommandLine.IVersionProvider {
+
+    public String[] getVersion() throws IOException {
+
+        Manifest manifest = new Manifest(getClass().getResourceAsStream("/META-INF/MANIFEST.MF"));
+        Attributes attrs = manifest.getMainAttributes();
+
+        return new String[] { "${COMMAND-FULL-NAME} " + attrs.getValue("Build-Version") };
+    }
+
+}
diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml
index feefcff..c394543 100644
--- a/src/main/resources/logback.xml
+++ b/src/main/resources/logback.xml
@@ -3,7 +3,7 @@
     <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
         <encoder>
-            <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
+            <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{12} - %msg%n</pattern>
         </encoder>
     </appender>
 
diff --git a/src/test/groovy/biz/nellemann/hmci/ConfigurationTest.groovy b/src/test/groovy/biz/nellemann/hmci/ConfigurationTest.groovy
new file mode 100644
index 0000000..b88eb18
--- /dev/null
+++ b/src/test/groovy/biz/nellemann/hmci/ConfigurationTest.groovy
@@ -0,0 +1,30 @@
+package biz.nellemann.hmci
+
+import spock.lang.Specification
+
+
+class ConfigurationTest extends Specification {
+
+    String testConfigurationFile = new File(getClass().getResource('/hmci.toml').toURI()).absolutePath
+
+    void "test parsing"() {
+
+        when:
+        Configuration conf = new Configuration(testConfigurationFile);
+
+        then:
+        conf != null
+
+    }
+
+    void "test lookup influx"() {
+
+        when:
+        Configuration conf = new Configuration(testConfigurationFile);
+
+        then:
+        conf != null
+
+    }
+
+}
diff --git a/src/test/groovy/biz/nellemann/hmci/HmcClientTest.groovy b/src/test/groovy/biz/nellemann/hmci/HmcClientTest.groovy
index a29e1ad..49f265b 100644
--- a/src/test/groovy/biz/nellemann/hmci/HmcClientTest.groovy
+++ b/src/test/groovy/biz/nellemann/hmci/HmcClientTest.groovy
@@ -12,7 +12,12 @@ class HmcClientTest extends Specification {
 
     def setup() {
         mockServer.start();
-        hmc = new HmcClient("site", mockServer.url("/").toString(), "testUser", "testPassword")
+        Configuration.HmcObject configHmc = new Configuration.HmcObject()
+        configHmc.name = "site1"
+        configHmc.url = mockServer.url("/").toString()
+        configHmc.username = "testUser"
+        configHmc.password = "testPassword"
+        hmc = new HmcClient(configHmc)
         hmc.authToken = "blaBla"
     }
 
diff --git a/src/test/groovy/biz/nellemann/hmci/AppTest.groovy b/src/test/groovy/biz/nellemann/hmci/InsightsTest.groovy
similarity index 75%
rename from src/test/groovy/biz/nellemann/hmci/AppTest.groovy
rename to src/test/groovy/biz/nellemann/hmci/InsightsTest.groovy
index ef9c7e7..241827a 100644
--- a/src/test/groovy/biz/nellemann/hmci/AppTest.groovy
+++ b/src/test/groovy/biz/nellemann/hmci/InsightsTest.groovy
@@ -5,6 +5,6 @@ package biz.nellemann.hmci
 
 import spock.lang.Specification
 
-class AppTest extends Specification {
-
+class InsightsTest extends Specification {
+
 }
diff --git a/src/test/groovy/biz/nellemann/hmci/LogicalPartitionTest.groovy b/src/test/groovy/biz/nellemann/hmci/LogicalPartitionTest.groovy
index 0984707..3445c82 100644
--- a/src/test/groovy/biz/nellemann/hmci/LogicalPartitionTest.groovy
+++ b/src/test/groovy/biz/nellemann/hmci/LogicalPartitionTest.groovy
@@ -34,12 +34,12 @@ class LogicalPartitionTest extends Specification {
 
         when:
         lpar.processMetrics(testJson)
-        List<Map> listOfMaps = lpar.getMemoryMetrics()
+        List<Measurement> listOfMeasurements = lpar.getMemoryMetrics()
 
         then:
-        listOfMaps.size() == 1
-        listOfMaps.first().get("fields")['logicalMem'] == 8192.000
-        listOfMaps.first().get("tags")['partition'] == '9Flash01'
+        listOfMeasurements.size() == 1
+        listOfMeasurements.first().fields['logicalMem'] == 8192.000
+        listOfMeasurements.first().tags['partition'] == '9Flash01'
 
     }
 
@@ -53,12 +53,12 @@ class LogicalPartitionTest extends Specification {
 
         when:
         lpar.processMetrics(testJson)
-        List<Map> listOfMaps = lpar.getProcessorMetrics()
+        List<Measurement> listOfMeasurements = lpar.getProcessorMetrics()
 
         then:
-        listOfMaps.size() == 1
-        listOfMaps.first().get("fields")['utilizedProcUnits'] == 0.001
-        listOfMaps.first().get("tags")['partition'] == '9Flash01'
+        listOfMeasurements.size() == 1
+        listOfMeasurements.first().fields['utilizedProcUnits'] == 0.001
+        listOfMeasurements.first().tags['partition'] == '9Flash01'
 
     }
 
@@ -72,12 +72,12 @@ class LogicalPartitionTest extends Specification {
 
         when:
         lpar.processMetrics(testJson)
-        List<Map> listOfMaps = lpar.getVirtualEthernetAdapterMetrics()
+        List<Measurement> listOfMeasurements = lpar.getVirtualEthernetAdapterMetrics()
 
         then:
-        listOfMaps.size() == 1
-        listOfMaps.first().get("fields")['receivedBytes'] == 276.467
-        listOfMaps.first().get("tags")['sea'] == 'ent5'
+        listOfMeasurements.size() == 1
+        listOfMeasurements.first().fields['receivedBytes'] == 276.467
+        listOfMeasurements.first().tags['sea'] == 'ent5'
     }
 
     void "test getVirtualFiberChannelAdaptersMetrics"() {
@@ -90,12 +90,12 @@ class LogicalPartitionTest extends Specification {
 
         when:
         lpar.processMetrics(testJson)
-        List<Map> listOfMaps = lpar.getVirtualFiberChannelAdaptersMetrics()
+        List<Measurement> listOfMeasurements = lpar.getVirtualFiberChannelAdaptersMetrics()
 
         then:
-        listOfMaps.size() == 4
-        listOfMaps.first().get("fields")['writeBytes'] == 6690.133
-        listOfMaps.first().get("tags")['viosId'] == '1'
+        listOfMeasurements.size() == 4
+        listOfMeasurements.first().fields['writeBytes'] == 6690.133
+        listOfMeasurements.first().tags['viosId'] == '1'
 
     }
 
diff --git a/src/test/groovy/biz/nellemann/hmci/ManagedSystemTest.groovy b/src/test/groovy/biz/nellemann/hmci/ManagedSystemTest.groovy
index 115b78f..9f58ffc 100644
--- a/src/test/groovy/biz/nellemann/hmci/ManagedSystemTest.groovy
+++ b/src/test/groovy/biz/nellemann/hmci/ManagedSystemTest.groovy
@@ -33,11 +33,11 @@ class ManagedSystemTest extends Specification {
 
         when:
         system.processMetrics(testJson)
-        List<Map> listOfMaps = system.getMemoryMetrics()
+        List<Measurement> listOfMeasurements = system.getMemoryMetrics()
 
         then:
-        listOfMaps.size() == 1
-        listOfMaps.first().get("fields")['totalMem'] == 1048576.000
+        listOfMeasurements.size() == 1
+        listOfMeasurements.first().fields['totalMem'] == 1048576.000
     }
 
     void "test getProcessorMetrics"() {
@@ -49,11 +49,11 @@ class ManagedSystemTest extends Specification {
 
         when:
         system.processMetrics(testJson)
-        List<Map> listOfMaps = system.getProcessorMetrics()
+        List<Measurement> listOfMeasurements = system.getProcessorMetrics()
 
         then:
-        listOfMaps.size() == 1
-        listOfMaps.first().get("fields")['availableProcUnits'] == 16.000
+        listOfMeasurements.size() == 1
+        listOfMeasurements.first().fields['availableProcUnits'] == 16.000
     }
 
     void "test getSystemSharedProcessorPools"() {
@@ -65,11 +65,11 @@ class ManagedSystemTest extends Specification {
 
         when:
         system.processMetrics(testJson)
-        List<Map> listOfMaps = system.getSharedProcessorPools()
+        List<Measurement> listOfMeasurements = system.getSharedProcessorPools()
 
         then:
-        listOfMaps.size() == 1
-        listOfMaps.first().get("fields")['assignedProcUnits'] == 23.767
+        listOfMeasurements.size() == 1
+        listOfMeasurements.first().fields['assignedProcUnits'] == 23.767
     }
 
     void "test VIOS data"() {
@@ -80,11 +80,11 @@ class ManagedSystemTest extends Specification {
 
         when:
         system.processMetrics(testJson)
-        List<Map> listOfMaps = system.getSharedProcessorPools()
+        List<Measurement> listOfMeasurements = system.getSharedProcessorPools()
 
         then:
-        listOfMaps.size() == 1
-        listOfMaps.first().get("fields")['assignedProcUnits'] == 23.767
+        listOfMeasurements.size() == 1
+        listOfMeasurements.first().fields['assignedProcUnits'] == 23.767
     }
 
 }
diff --git a/src/test/resources/hmci.toml b/src/test/resources/hmci.toml
new file mode 100644
index 0000000..a5c87f5
--- /dev/null
+++ b/src/test/resources/hmci.toml
@@ -0,0 +1,32 @@
+# HMCi Configuration
+
+# How often to query HMC's for data - in seconds
+hmci.refresh = 30
+
+# Rescan HMC's for new systems and partitions - every x refresh
+hmci.rescan = 60
+
+# InfluxDB to save metrics
+[influx]
+url = "http://localhost:8086"
+username = "root"
+password = ""
+database = "hmci"
+
+
+# One or more HMC's to query for data and metrics
+[hmc]
+
+    # HMC on our primary site
+    [hmc.site1]
+    url = "https://10.10.10.10:12443"
+    username = "hmci"
+    password = "hmcihmci"
+    unsafe = true   # Ignore SSL cert. errors
+
+    # Example
+    #[hmc.site2]
+    #url = "https://10.10.20.20:12443"
+    #username = "viewer"
+    #password = "someSecret"
+    #unsafe = false