diff --git a/README.md b/README.md index c411467..822a16b 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ HMCi is a small utility to fetch metrics from one or more HMC's and push those t - Ensure you have correct date/time and use a NTP service to keep it accurate! - Install the HMCi package (*.deb* or *.rpm*) from [downloads](https://bitbucket.org/mnellemann/hmci/downloads/) or compile from source. -- Copy the *doc/hmci.groovy.tpl* configuration template into */etc/hmci.groovy* and edit the configuration to suit your environment. You can use the *-c [conf-file]* switch if you place this file elsewhere. +- Copy the *doc/hmci.tpml* configuration template into */etc/hmci.toml* and edit the configuration to suit your environment. You can use the *-c* option if you place this file elsewhere. - Configure Grafana to communicate with your InfluxDB and import dashboards from *doc/* into Grafana (The dashboards are slightly modified versions of the dashboard provided by the nmon2influxdb tool). - Run the *bin/hmci* program in a shell, as a @reboot cron task or setup a proper service :) diff --git a/bitbucket-pipelines.yml b/bitbucket-pipelines.yml index be2396f..7c2cc8b 100644 --- a/bitbucket-pipelines.yml +++ b/bitbucket-pipelines.yml @@ -1,5 +1,4 @@ -image: adoptopenjdk:8-openj9 -#image: openjdk:8 +image: openjdk:8 pipelines: branches: diff --git a/build.gradle b/build.gradle index a5dc29e..b1ae0e2 100644 --- a/build.gradle +++ b/build.gradle @@ -1,8 +1,6 @@ plugins { - // Apply the groovy plugin to add support for Groovy + id 'java' id 'groovy' - - // Apply the application plugin to add support for building a CLI application. 
id 'application' // Code coverage of tests @@ -18,12 +16,18 @@ repositories { } dependencies { - implementation 'org.codehaus.groovy:groovy-all:3.0.5' + annotationProcessor 'info.picocli:picocli-codegen:4.5.1' + implementation 'info.picocli:picocli:4.5.1' + implementation 'org.jsoup:jsoup:1.13.1' implementation 'com.squareup.okhttp3:okhttp:4.8.0' + implementation 'com.squareup.moshi:moshi:1.11.0' + implementation 'com.serjltt.moshi:moshi-lazy-adapters:2.2' + implementation 'org.tomlj:tomlj:1.0.0' implementation 'org.influxdb:influxdb-java:2.19' implementation 'org.slf4j:slf4j-api:1.7.+' runtimeOnly 'ch.qos.logback:logback-classic:1.+' + testImplementation 'org.codehaus.groovy:groovy-all:3.0.5' testImplementation('org.spockframework:spock-core:2.0-M3-groovy-3.0') testImplementation("org.slf4j:slf4j-simple:1.7.+") testImplementation('com.squareup.okhttp3:mockwebserver:4.8.0') @@ -32,7 +36,7 @@ dependencies { } application { - mainClassName = 'biz.nellemann.hmci.App' + mainClassName = 'biz.nellemann.hmci.Main' } test { @@ -68,8 +72,7 @@ ospackage { buildRpm { dependsOn startShadowScripts - //requires('java-1.8.0-openjdk-headless') - os = LINUX + os = "LINUX" } buildDeb { @@ -77,7 +80,6 @@ buildDeb { requires('default-jre-headless') } - jacoco { toolVersion = "0.8.5" } @@ -103,22 +105,16 @@ jacocoTestCoverageVerification { } check.dependsOn jacocoTestCoverageVerification - -processResources.dependsOn.add("versionFile") -versionFile { - // Path to the file to be written - file = new File(project.buildDir, 'resources/main/version.properties') -} - jar { manifest { attributes( - 'Built-By' : System.properties['user.name'], - 'Build-Timestamp': new Date().format("yyyy-MM-dd'T'HH:mm:ss.SSSZ").toString(), - 'Build-Revision' : versioning.info.commit, 'Created-By' : "Gradle ${gradle.gradleVersion}", + 'Build-OS' : "${System.properties['os.name']} ${System.properties['os.arch']} ${System.properties['os.version']}", 'Build-Jdk' : "${System.properties['java.version']} 
(${System.properties['java.vendor']} ${System.properties['java.vm.version']})", - 'Build-OS' : "${System.properties['os.name']} ${System.properties['os.arch']} ${System.properties['os.version']}" + 'Build-User' : System.properties['user.name'], + 'Build-Version' : versioning.info.tag ?: (versioning.info.branch + "-" + versioning.info.build), + 'Build-Revision' : versioning.info.commit, + 'Build-Timestamp': new Date().format("yyyy-MM-dd'T'HH:mm:ss.SSSZ").toString(), ) } } diff --git a/doc/hmci.groovy.tpl b/doc/hmci.groovy.tpl deleted file mode 100644 index 37bc0eb..0000000 --- a/doc/hmci.groovy.tpl +++ /dev/null @@ -1,39 +0,0 @@ -/* - Copy this file to /etc/hmci.groovy and change it to suit your environment. -*/ - -// How often to query HMC's for data - in seconds -hmci.refresh = 30 - -// Rescan HMC's for new systems and partitions - every x refresh -hmci.rescan = 60 - -// InfluxDB to save metrics -influx { - url = "http://localhost:8086" - username = "root" - password = "" - database = "hmci" -} - -// One or more HMC's to query for data and metrics -hmc { - - // HMC on our primary site - site1 { - url = "https://10.10.10.10:12443" - username = "hmci" - password = "hmcihmci" - unsafe = true // Ignore SSL cert. 
errors - } - - /* - site2 { - url = "https://10.10.20.20:12443" - username = "viewer" - password = "someSecret" - unsafe = false - } - */ - -} diff --git a/doc/hmci.toml b/doc/hmci.toml new file mode 100644 index 0000000..da9e502 --- /dev/null +++ b/doc/hmci.toml @@ -0,0 +1,31 @@ +# HMCi Configuration + +# How often to query HMC's for data - in seconds +hmci.refresh = 30 + +# Rescan HMC's for new systems and partitions - every x refresh +hmci.rescan = 60 + +# InfluxDB to save metrics +[influx] +url = "http://localhost:8086" +username = "root" +password = "" +database = "hmci" + +# One or more HMC's to query for data and metrics +[hmc] + + # HMC on our primary site + [hmc.site1] + url = "https://10.10.10.10:12443" + username = "hmci" + password = "hmcihmci" + unsafe = true # Ignore SSL cert. errors + + # Example + #[hmc.site2] + #url = "https://10.10.20.20:12443" + #username = "viewer" + #password = "someSecret" + #unsafe = false diff --git a/gradle.properties b/gradle.properties index a1e7b9a..5077ec8 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,3 +1,3 @@ id = hmci group = biz.nellemann.hmci -version = 1.0.10 +version = 0.2.1 diff --git a/src/main/groovy/biz/nellemann/hmci/App.groovy b/src/main/groovy/biz/nellemann/hmci/App.groovy deleted file mode 100644 index 677f7d2..0000000 --- a/src/main/groovy/biz/nellemann/hmci/App.groovy +++ /dev/null @@ -1,246 +0,0 @@ -/** - * Copyright 2020 Mark Nellemann - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package biz.nellemann.hmci - -import groovy.cli.picocli.CliBuilder -import groovy.cli.picocli.OptionAccessor -import groovy.util.logging.Slf4j - -@Slf4j -class App implements Runnable { - - final ConfigObject configuration - final Integer refreshEverySec - final Integer rescanHmcEvery - - InfluxClient influxClient - Map hmcClients = new HashMap<>() - Map systems = new HashMap() - Map partitions = new HashMap() - - - App(ConfigObject configuration) { - this.configuration = configuration - log.debug configuration.toString() - - refreshEverySec = (Integer)configuration.get('hmci.refresh') ?: 60 - rescanHmcEvery = (Integer)configuration.get('hmci.rescan') ?: 15 - - String influxUrl = configuration.get('influx')['url'] - String influxUsername = configuration.get('influx')['username'] - String influxPassword = configuration.get('influx')['password'] - String influxDatabase = configuration.get('influx')['database'] - - try { - influxClient = new InfluxClient(influxUrl, influxUsername, influxPassword, influxDatabase) - influxClient.login() - } catch(Exception e) { - System.exit(1) - } - - // Initial scan - discover() - - run() - } - - - void discover() { - - configuration.get('hmc').each { Object key, Object hmc -> - if(!hmcClients?.containsKey(key)) { - log.info("Adding HMC: " + hmc.toString()) - String hmcKey = key - String hmcUrl = hmc['url'] - String hmcUsername = hmc['username'] - String hmcPassword = hmc['password'] - Boolean hmcUnsafe = hmc['unsafe'] - HmcClient hmcClient = new HmcClient(hmcKey, hmcUrl, hmcUsername, hmcPassword, hmcUnsafe) - hmcClients.put(hmcKey, hmcClient) - } - } - - hmcClients.each { hmcId, hmcClient -> - - - try { - hmcClient.login() - hmcClient.getManagedSystems().each { systemId, system -> - - // Add to list of known systems - systems.putIfAbsent(systemId, system) - - // Get LPAR's for this system - 
hmcClient.getLogicalPartitionsForManagedSystem(system).each { partitionId, partition -> - - // Add to list of known partitions - partitions.putIfAbsent(partitionId, partition) - } - } - } catch(Exception e) { - log.error("discover() - " + hmcId + " error: " + e.message) - //hmcClients.remove(hmcId) - } - - } - - } - - - void getMetricsForSystems() { - - try { - - systems.each {systemId, system -> - - HmcClient hmcClient = hmcClients.get(system.hmcId) - - // Get and process metrics for this system - String tmpJsonString = hmcClient.getPcmDataForManagedSystem(system) - if(tmpJsonString && !tmpJsonString.empty) { - system.processMetrics(tmpJsonString) - } - - } - - } catch(Exception e) { - log.error(e.message) - } - - } - - - void getMetricsForPartitions() { - - try { - - // Get LPAR's for this system - partitions.each { partitionId, partition -> - - HmcClient hmcClient = hmcClients.get(partition.system.hmcId) - - // Get and process metrics for this partition - String tmpJsonString2 = hmcClient.getPcmDataForLogicalPartition(partition) - if(tmpJsonString2 && !tmpJsonString2.empty) { - partition.processMetrics(tmpJsonString2) - } - - } - - } catch(Exception e) { - log.error(e.message) - } - } - - - void writeMetricsForManagedSystems() { - systems.each {systemId, system -> - influxClient.writeManagedSystem(system) - } - } - - - void writeMetricsForLogicalPartitions() { - partitions.each {partitionId, partition -> - influxClient.writeLogicalPartition(partition) - } - } - - - static String getVersion() { - URL url = getClass().getResource("/version.properties"); - if (url == null) { - return "No version.txt file found in the classpath." - } - Properties properties = new Properties(); - properties.load(url.openStream()); - return properties.getProperty("VERSION_GRADLE") + "-" + properties.getProperty("VERSION_BUILD") - } - - - static void main(String... 
args) { - - def cli = new CliBuilder(name: "hmci") - cli.h(longOpt: 'help', usageHelp: true, 'display usage information') - cli.v(longOpt: 'version', versionHelp: true, 'display version information') - cli.c(longOpt: 'config', args: 1, required: true, paramLabel: "FILE", defaultValue: '/etc/hmci.groovy', 'configuration file') - - OptionAccessor options = cli.parse(args) - if (options.h) { - cli.usage() - return - } - - if(options.v) { - println("Version " + getVersion()) - return - } - - ConfigObject configuration - if(options.c) { - - File configurationFile = new File((String)options.config) - if(!configurationFile.exists()) { - println("Error - No configuration file found at: " + configurationFile.toString()) - System.exit(1) - } - - configuration = new ConfigSlurper("development").parse(configurationFile.toURI().toURL()); - } - - if(configuration == null || configuration.isEmpty()) { - println("Error - Empty or faulty configuration") - System.exit(1) - } - - new App(configuration) - } - - - @Override - void run() { - - log.debug("run()") - - boolean keepRunning = true - int executions = 0 - - while(keepRunning) { - - try { - getMetricsForSystems() - getMetricsForPartitions() - - writeMetricsForManagedSystems() - writeMetricsForLogicalPartitions() - influxClient.writeBatchPoints() - - // Refresh HMC's - if(executions > rescanHmcEvery) { - executions = 0 - discover() - } - } catch(Exception e) { - log.error(e.message, e) - } - - executions++ - Thread.sleep(refreshEverySec * 1000) - } - - } - -} diff --git a/src/main/groovy/biz/nellemann/hmci/HmcClient.groovy b/src/main/groovy/biz/nellemann/hmci/HmcClient.groovy deleted file mode 100644 index 9e19aa4..0000000 --- a/src/main/groovy/biz/nellemann/hmci/HmcClient.groovy +++ /dev/null @@ -1,394 +0,0 @@ -/** - * Copyright 2020 Mark Nellemann - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package biz.nellemann.hmci - -import groovy.util.logging.Slf4j -import groovy.xml.XmlSlurper -import okhttp3.MediaType -import okhttp3.OkHttpClient -import okhttp3.Request -import okhttp3.RequestBody -import okhttp3.Response - -import javax.net.ssl.HostnameVerifier -import javax.net.ssl.SSLContext -import javax.net.ssl.SSLSession -import javax.net.ssl.SSLSocketFactory -import javax.net.ssl.TrustManager -import javax.net.ssl.X509TrustManager -import java.security.SecureRandom -import java.security.cert.CertificateException -import java.security.cert.X509Certificate; - -@Slf4j -class HmcClient { - - private final MediaType MEDIA_TYPE_IBM_XML_LOGIN = MediaType.parse("application/vnd.ibm.powervm.web+xml; type=LogonRequest"); - - private final String hmcId - private final String baseUrl - private final String username - private final String password - private final Boolean unsafe - - protected Integer responseErrors = 0 - protected String authToken - private final OkHttpClient client - - HmcClient(String hmcId, String baseUrl, String username, String password, Boolean unsafe = false) { - this.hmcId = hmcId - this.baseUrl = baseUrl - this.username = username - this.password = password - this.unsafe = unsafe - - if(unsafe) { - this.client = getUnsafeOkHttpClient() - } else { - this.client = new OkHttpClient() - } - } - - - - /** - * Logon to the HMC and get an authentication token for further requests. 
- * - * @throws IOException - */ - void login(Boolean force = false) throws IOException { - - if(authToken && !force) { - return - } - - String payload = """\ - - - ${username} - ${password} -""" - - URL url = new URL(String.format("%s/rest/api/web/Logon", baseUrl)) - Request request = new Request.Builder() - .url(url) - //.addHeader("Content-Type", "application/vnd.ibm.powervm.web+xml; type=LogonRequest") - .addHeader("Accept", "application/vnd.ibm.powervm.web+xml; type=LogonResponse") - .addHeader("X-Audit-Memento", "hmci") - .put(RequestBody.create(payload, MEDIA_TYPE_IBM_XML_LOGIN)) - .build(); - - try { - Response response = client.newCall(request).execute(); - if (!response.isSuccessful()) throw new IOException("Unexpected code " + response); - - // Get response body and parse - String responseBody = response.body.string() - response.body().close() - - def xml = new XmlSlurper().parseText(responseBody) - authToken = xml.toString() - - log.debug("login() - Auth Token: " + authToken) - } catch(Exception e) { - log.error(e.message) - throw new Exception(e) - } - - } - - - - /** - * Logoff from the HMC and remove any session - * - */ - void logoff() { - - if(!authToken) { - return - } - - URL absUrl = new URL(String.format("%s/rest/api/web/Logon", baseUrl)) - Request request = new Request.Builder() - .url(absUrl) - .addHeader("Content-Type", "application/vnd.ibm.powervm.web+xml; type=LogonRequest") - .addHeader("X-API-Session", authToken) - .delete() - .build(); - - Response response = client.newCall(request).execute(); - if (!response.isSuccessful()) throw new IOException("Unexpected code " + response); - - authToken = null - log.debug("logoff()") - } - - - - /** - * Return Map of ManagedSystems seen by this HMC - * - * @return - */ - Map getManagedSystems() { - URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem", baseUrl)) - Response response = getResponse(url) - String responseBody = response.body.string() - Map managedSystemsMap = new HashMap() - 
- // Do not try to parse empty response - if(responseBody.empty || responseBody.size() < 1) { - responseErrors++ - return managedSystemsMap - } - - def feed = new XmlSlurper().parseText(responseBody) - feed?.entry?.each { entry -> - entry.content.each { content -> - content.ManagedSystem.each { system -> - ManagedSystem managedSystem = new ManagedSystem( - hmcId, - entry.id as String, - system.SystemName as String, - system.MachineTypeModelAndSerialNumber?.MachineType as String, - system.MachineTypeModelAndSerialNumber?.Model as String, - system.MachineTypeModelAndSerialNumber?.SerialNumber as String - ) - managedSystemsMap.put(managedSystem.id, managedSystem) - log.debug("getManagedSystems() - Found system: " + managedSystem.toString()) - } - } - } - - return managedSystemsMap - } - - - - /** - * Return Map of LogicalPartitions seen by a ManagedSystem on this HMC - - * @param UUID of managed system - * @return - */ - Map getLogicalPartitionsForManagedSystem(ManagedSystem system) { - URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem/%s/LogicalPartition", baseUrl, system.id)) - Response response = getResponse(url) - String responseBody = response.body.string() - Map partitionMap = new HashMap() {} - - // Do not try to parse empty response - if(responseBody.empty || responseBody.size() < 1) { - responseErrors++ - return partitionMap - } - - def feed = new XmlSlurper().parseText(responseBody) - feed?.entry?.each { entry -> - //log.debug("Entry") - entry.content.each { content -> - //log.debug("Content") - content.LogicalPartition.each { partition -> - LogicalPartition logicalPartition = new LogicalPartition( - partition.PartitionUUID as String, - partition.PartitionName as String, - partition.PartitionType as String, - system - ) - partitionMap.put(logicalPartition.id, logicalPartition) - log.debug("getLogicalPartitionsForManagedSystem() - Found partition: " + logicalPartition.toString()) - } - } - } - - return partitionMap - } - - - - /** - * Parse XML 
feed to get PCM Data in JSON format - * @param systemId - * @return - */ - String getPcmDataForManagedSystem(ManagedSystem system) { - log.debug("getPcmDataForManagedSystem() - " + system.id) - URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/ProcessedMetrics?NoOfSamples=1", baseUrl, system.id)) - Response response = getResponse(url) - String responseBody = response.body.string() - String jsonBody - - // Do not try to parse empty response - if(responseBody.empty || responseBody.size() < 1) { - responseErrors++ - return jsonBody - } - - // Parse XML and fetch JSON link - def feed = new XmlSlurper().parseText(responseBody) - feed?.entry?.each { entry -> - String link = entry.link["@href"] - if(entry.category["@term"] == "ManagedSystem") { - jsonBody = getResponseBody(new URL(link)) - } - } - - return jsonBody - } - - - /** - * Parse XML feed to get PCM Data in JSON format - * @param systemId - * @param partitionId - * @return - */ - String getPcmDataForLogicalPartition(LogicalPartition partition) { - - log.debug(String.format("getPcmDataForLogicalPartition() - %s @ %s", partition.id, partition.system.id)) - URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/LogicalPartition/%s/ProcessedMetrics?NoOfSamples=1", baseUrl, partition.system.id, partition.id)) - Response response = getResponse(url) - String responseBody = response.body.string() - String jsonBody - - // Do not try to parse empty response - if(responseBody.empty || responseBody.size() < 1) { - responseErrors++ - return jsonBody - } - - // Parse XML and fetch JSON link - def feed = new XmlSlurper().parseText(responseBody) - feed?.entry?.each { entry -> - String link = entry.link["@href"] - if(entry.category["@term"] == "LogicalPartition") { - jsonBody = getResponseBody(new URL(link)) - } - } - - return jsonBody - } - - - /** - * Return body text from a HTTP response from the HMC - * - * @param url - * @return - */ - protected String getResponseBody(URL url) { - Response response 
= getResponse(url) - String body = response.body().string() - response.body().close() - return body - } - - - - /** - * Return a Response from the HMC - * - * @param url - * @return - */ - private Response getResponse(URL url, Integer retry = 0) { - - if(responseErrors > 2) { - responseErrors = 0 - login(true) - return getResponse(url, retry++) - } - - Request request = new Request.Builder() - .url(url) - .addHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8") - .addHeader("X-API-Session", authToken) - .get() - .build(); - - Response response = client.newCall(request).execute(); - if (!response.isSuccessful()) { - response.body().close() - - if(response.code == 401) { - login(true) - return getResponse(url, retry++) - } - - if(retry < 2) { - log.warn("getResponse() - Retrying due to unexpected response: " + response.code) - return getResponse(url, retry++) - } - - log.error("getResponse() - Unexpected response: " + response.code) - throw new IOException("getResponse() - Unexpected response: " + response.code) - }; - - return response - } - - - - /** - * Provide an unsafe (ignoring SSL problems) OkHttpClient - * - * @return - */ - private static OkHttpClient getUnsafeOkHttpClient() { - try { - // Create a trust manager that does not validate certificate chains - final TrustManager[] trustAllCerts = new TrustManager[] { - new X509TrustManager() { - @Override - public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException { - } - - @Override - public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException { - } - - @Override - public X509Certificate[] getAcceptedIssuers() { - return new X509Certificate[]{}; - } - } - }; - - // Install the all-trusting trust manager - final SSLContext sslContext = SSLContext.getInstance("SSL"); - sslContext.init(null, trustAllCerts, new SecureRandom()); - - // Create an ssl socket factory with our all-trusting manager 
- final SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory(); - - OkHttpClient.Builder builder = new OkHttpClient.Builder(); - builder.sslSocketFactory(sslSocketFactory, (X509TrustManager)trustAllCerts[0]); - builder.hostnameVerifier(new HostnameVerifier() { - @Override - public boolean verify(String hostname, SSLSession session) { - return true; - } - }); - - OkHttpClient okHttpClient = builder.build(); - return okHttpClient; - } catch (Exception e) { - throw new RuntimeException(e); - } - } - -} diff --git a/src/main/groovy/biz/nellemann/hmci/InfluxClient.groovy b/src/main/groovy/biz/nellemann/hmci/InfluxClient.groovy deleted file mode 100644 index e15854c..0000000 --- a/src/main/groovy/biz/nellemann/hmci/InfluxClient.groovy +++ /dev/null @@ -1,285 +0,0 @@ -/** - * Copyright 2020 Mark Nellemann - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package biz.nellemann.hmci - -import groovy.util.logging.Slf4j -import org.influxdb.BatchOptions -import org.influxdb.InfluxDB -import org.influxdb.InfluxDBFactory -import org.influxdb.dto.BatchPoints -import org.influxdb.dto.Point -import org.influxdb.dto.Query - -import java.time.Instant -import java.util.concurrent.TimeUnit - -@Slf4j -class InfluxClient { - - final String url - final String username - final String password - final String database - - InfluxDB influxDB - BatchPoints batchPoints - - - InfluxClient(String url, String username, String password, String database) { - this.url = url - this.username = username - this.password = password - this.database = database - } - - - void login() { - if(!influxDB) { - try { - influxDB = InfluxDBFactory.connect(url, username, password); - createDatabase() - - // Enable batch writes to get better performance. - //BatchOptions options = BatchOptions.DEFAULTS.actions(300).flushDuration(500); - influxDB.enableBatch(BatchOptions.DEFAULTS); - //influxDB.setLogLevel(InfluxDB.LogLevel.BASIC); - - batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build(); - - } catch(Exception e) { - log.error(e.message) - throw new Exception(e) - } - } - } - - - void logoff() { - influxDB?.close(); - influxDB = null - } - - - void createDatabase() { - // Create our database... 
with a default retention of 156w == 3 years - influxDB.query(new Query("CREATE DATABASE " + database + " WITH DURATION 156w")); - influxDB.setDatabase(database); - } - - - void writeBatchPoints() { - log.debug("writeBatchPoints()") - try { - influxDB.write(batchPoints); - } catch(Exception e) { - log.error("writeBatchPoints() error - " + e.message) - logoff() - login() - } - } - - - - /* - Managed System - */ - - - void writeManagedSystem(ManagedSystem system) { - - if(system.metrics == null) { - log.warn("writeManagedSystem() - null metrics, skipping") - return - } - - Instant timestamp = system.getTimestamp() - if(!timestamp) { - log.warn("writeManagedSystem() - no timestamp, skipping") - return - } - - //BatchPoints batchPoints = BatchPoints.database(database).build(); - - getSystemMemory(system, timestamp).each { - batchPoints.point(it) - } - - getSystemProcessor(system, timestamp).each { - batchPoints.point(it) - } - - getSystemSharedProcessorPools(system, timestamp).each { - batchPoints.point(it) - } - - getSystemSharedAdapters(system, timestamp).each { - batchPoints.point(it) - } - - getSystemFiberChannelAdapters(system, timestamp).each { - batchPoints.point(it) - } - - getSystemGenericPhysicalAdapters(system, timestamp).each { - batchPoints.point(it) - } - - getSystemGenericVirtualAdapters(system, timestamp).each { - batchPoints.point(it) - } - } - - - private static List getSystemMemory(ManagedSystem system, Instant timestamp) { - List metrics = system.getMemoryMetrics() - return processMeasurementMap(metrics, timestamp, "SystemMemory") - } - - private static List getSystemProcessor(ManagedSystem system, Instant timestamp) { - List metrics = system.getProcessorMetrics() - return processMeasurementMap(metrics, timestamp, "SystemProcessor") - } - - private static List getSystemSharedProcessorPools(ManagedSystem system, Instant timestamp) { - List metrics = system.getSharedProcessorPools() - return processMeasurementMap(metrics, timestamp, 
"SystemSharedProcessorPool") - } - - private static List getSystemSharedAdapters(ManagedSystem system, Instant timestamp) { - List metrics = system.getSystemSharedAdapters() - return processMeasurementMap(metrics, timestamp, "SystemSharedAdapters") - } - - private static List getSystemFiberChannelAdapters(ManagedSystem system, Instant timestamp) { - List metrics = system.getSystemFiberChannelAdapters() - return processMeasurementMap(metrics, timestamp, "SystemFiberChannelAdapters") - } - - private static List getSystemGenericPhysicalAdapters(ManagedSystem system, Instant timestamp) { - List metrics = system.getSystemGenericPhysicalAdapters() - return processMeasurementMap(metrics, timestamp, "SystemGenericPhysicalAdapters") - } - - private static List getSystemGenericVirtualAdapters(ManagedSystem system, Instant timestamp) { - List metrics = system.getSystemGenericVirtualAdapters() - return processMeasurementMap(metrics, timestamp, "SystemGenericVirtualAdapters") - } - - - /* - Logical Partitions - */ - - void writeLogicalPartition(LogicalPartition partition) { - - if(partition.metrics == null) { - log.warn("writeLogicalPartition() - null metrics, skipping") - return - } - - Instant timestamp = partition.getTimestamp() - if(!timestamp) { - log.warn("writeLogicalPartition() - no timestamp, skipping") - return - } - - //BatchPoints batchPoints = BatchPoints.database(database).build(); - - getPartitionAffinityScore(partition, timestamp).each { - batchPoints.point(it) - } - - getPartitionMemory(partition, timestamp).each { - batchPoints.point(it) - } - - getPartitionProcessor(partition, timestamp).each { - batchPoints.point(it) - } - - getPartitionVirtualEthernetAdapter(partition, timestamp).each { - batchPoints.point(it) - } - - getPartitionVirtualFiberChannelAdapter(partition, timestamp).each { - batchPoints.point(it) - } - - //influxDB.write(batchPoints); - } - - private static List getPartitionAffinityScore(LogicalPartition partition, Instant timestamp) { - List 
metrics = partition.getAffinityScore() - return processMeasurementMap(metrics, timestamp, "PartitionAffinityScore") - } - - private static List getPartitionMemory(LogicalPartition partition, Instant timestamp) { - List metrics = partition.getMemoryMetrics() - return processMeasurementMap(metrics, timestamp, "PartitionMemory") - } - - private static List getPartitionProcessor(LogicalPartition partition, Instant timestamp) { - List metrics = partition.getProcessorMetrics() - return processMeasurementMap(metrics, timestamp, "PartitionProcessor") - } - - private static List getPartitionVirtualEthernetAdapter(LogicalPartition partition, Instant timestamp) { - List metrics = partition.getVirtualEthernetAdapterMetrics() - return processMeasurementMap(metrics, timestamp, "PartitionVirtualEthernetAdapters") - } - - private static List getPartitionVirtualFiberChannelAdapter(LogicalPartition partition, Instant timestamp) { - List metrics = partition.getVirtualFiberChannelAdaptersMetrics() - return processMeasurementMap(metrics, timestamp, "PartitionVirtualFiberChannelAdapters") - } - - - - /* - Shared - */ - - private static List processMeasurementMap(List listOfMaps, Instant timestamp, String measurement) { - - List list = new ArrayList<>() - - listOfMaps.each { map -> - - // Iterate fields - map.get("fields").each { String fieldName, BigDecimal fieldValue -> - log.debug("processMeasurementMap() " + measurement + " - fieldName: " + fieldName + ", fieldValue: " + fieldValue) - - Point.Builder builder = Point.measurement(measurement) - .time(timestamp.toEpochMilli(), TimeUnit.MILLISECONDS) - .tag("name", fieldName) - .addField("value", fieldValue) - - // For each field, we add all tags - map.get("tags").each { String tagName, String tagValue -> - builder.tag(tagName, tagValue) - log.debug("processMeasurementMap() " + measurement + " - tagName: " + tagName + ", tagValue: " + tagValue) - } - - list.add(builder.build()) - } - - } - - return list - } - - -} diff --git 
a/src/main/groovy/biz/nellemann/hmci/LogicalPartition.groovy b/src/main/groovy/biz/nellemann/hmci/LogicalPartition.groovy deleted file mode 100644 index 98c4ab5..0000000 --- a/src/main/groovy/biz/nellemann/hmci/LogicalPartition.groovy +++ /dev/null @@ -1,182 +0,0 @@ -/** - * Copyright 2020 Mark Nellemann - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package biz.nellemann.hmci - -import groovy.util.logging.Slf4j - -@Slf4j -class LogicalPartition extends MetaSystem { - - public String id - public String name - public String type - ManagedSystem system - - LogicalPartition(String id, String name, String type, ManagedSystem system) { - this.id = id - this.name = name - this.type = type - this.system = system - } - - String toString() { - return "[${id}] ${name} (${type})" - } - - - List getAffinityScore() { - - List list = new ArrayList<>() - Map map = new HashMap() - - HashMap tagsMap = [ - system: system.name, - partition: name, - ] - map.put("tags", tagsMap) - log.debug("getAffinityScore() - tags: " + tagsMap.toString()) - - HashMap fieldsMap = [ - affinityScore: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.affinityScore, - ] - map.put("fields", fieldsMap) - log.debug("getAffinityScore() - fields: " + fieldsMap.toString()) - - list.add(map) - return list - } - - - List getMemoryMetrics() { - - List list = new ArrayList<>() - Map map = new HashMap() - - HashMap tagsMap = [ - system: system.name, - partition: name, - ] - 
map.put("tags", tagsMap) - log.debug("getMemoryMetrics() - tags: " + tagsMap.toString()) - - HashMap fieldsMap = [ - logicalMem: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.memory?.logicalMem?.first(), - backedPhysicalMem: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.memory?.backedPhysicalMem?.first(), - ] - map.put("fields", fieldsMap) - log.debug("getMemoryMetrics() - fields: " + fieldsMap.toString()) - - list.add(map) - return list - } - - - List getProcessorMetrics() { - - List list = new ArrayList<>() - Map map = new HashMap() - - HashMap tagsMap = [ - system: system.name, - partition: name, - ] - map.put("tags", tagsMap) - log.debug("getProcessorMetrics() - tags: " + tagsMap.toString()) - - HashMap fieldsMap = [ - utilizedProcUnits: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.processor?.utilizedProcUnits?.first(), - maxVirtualProcessors: metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.maxVirtualProcessors.first(), - currentVirtualProcessors: metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.currentVirtualProcessors.first(), - //donatedProcUnits: metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.donatedProcUnits.first(), - entitledProcUnits: metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.entitledProcUnits.first(), - //idleProcUnits: metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.idleProcUnits.first(), - //maxProcUnits: metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.maxProcUnits.first(), - utilizedCappedProcUnits: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.processor?.utilizedCappedProcUnits?.first(), - utilizedUncappedProcUnits: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.processor?.utilizedUncappedProcUnits?.first(), - timePerInstructionExecution: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.processor?.timeSpentWaitingForDispatch?.first(), - 
timeSpentWaitingForDispatch: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.processor?.timePerInstructionExecution?.first(), - ] - map.put("fields", fieldsMap) - log.debug("getProcessorMetrics() - fields: " + fieldsMap.toString()) - - list.add(map) - return list - } - - - List getVirtualEthernetAdapterMetrics() { - - List list = new ArrayList<>() - metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.network?.virtualEthernetAdapters?.each { - Map map = new HashMap() - - HashMap tagsMap = [ - system: system.name, - partition: name, - sea: it.sharedEthernetAdapterId as String, - viosId: it.viosId as String, - vlanId: it.vlanId as String, - vswitchId: it.vswitchId as String, - ] - map.put("tags", tagsMap) - log.debug("getVirtualEthernetAdapterMetrics() - tags: " + tagsMap.toString()) - - HashMap fieldsMap = [ - receivedPhysicalBytes: it.receivedPhysicalBytes.first(), - sentPhysicalBytes: it.sentPhysicalBytes.first(), - receivedBytes: it.receivedBytes.first(), - sentBytes: it.sentBytes.first(), - ] - map.put("fields", fieldsMap) - log.debug("getVirtualEthernetAdapterMetrics() - fields: " + fieldsMap.toString()) - - list.add(map) - } - - return list - } - - - //PartitionVirtualFiberChannelAdapters - List getVirtualFiberChannelAdaptersMetrics() { - - List list = new ArrayList<>() - metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.storage?.virtualFiberChannelAdapters?.each { - Map map = new HashMap() - - HashMap tagsMap = [ - system: system.name, - partition: name, - viosId: it.viosId as String, - wwpn: it.wwpn, - ] - map.put("tags", tagsMap) - log.debug("getVirtualFiberChannelAdaptersMetrics() - tags: " + tagsMap.toString()) - - HashMap fieldsMap = [ - transmittedBytes: it.transmittedBytes.first(), - writeBytes: it.writeBytes.first(), - readBytes: it.readBytes.first(), - ] - map.put("fields", fieldsMap) - log.debug("getVirtualFiberChannelAdaptersMetrics() - fields: " + fieldsMap.toString()) - - list.add(map) - } - - return list - } -} 
diff --git a/src/main/groovy/biz/nellemann/hmci/ManagedSystem.groovy b/src/main/groovy/biz/nellemann/hmci/ManagedSystem.groovy deleted file mode 100644 index c339519..0000000 --- a/src/main/groovy/biz/nellemann/hmci/ManagedSystem.groovy +++ /dev/null @@ -1,256 +0,0 @@ -/** - * Copyright 2020 Mark Nellemann - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package biz.nellemann.hmci - -import groovy.util.logging.Slf4j - - -@Slf4j -class ManagedSystem extends MetaSystem { - - public final String hmcId - public final String id - public final String name - public final String type - public final String model - public final String serialNumber - - - ManagedSystem(String hmcId, String id, String name, String type, String model, String serialNumber) { - this.hmcId = hmcId - this.id = id - this.name = name - this.type = type - this.model = model - this.serialNumber = serialNumber - } - - String toString() { - return "[${id}] ${name} (${type}-${model} ${serialNumber})" - } - - - List getMemoryMetrics() { - - List list = new ArrayList<>() - Map map = new HashMap() - - HashMap tagsMap = [ - system: name, - ] - map.put("tags", tagsMap) - log.debug("getMemoryMetrics() - tags: " + tagsMap.toString()) - - HashMap fieldsMap = [ - totalMem: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.totalMem?.first(), - availableMem: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.availableMem?.first(), - configurableMem: 
metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.configurableMem?.first(), - assignedMemToLpars: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.assignedMemToLpars?.first(), - ] - map.put("fields", fieldsMap) - log.debug("getMemoryMetrics() - fields: " + fieldsMap.toString()) - - list.add(map) - return list - } - - - List getProcessorMetrics() { - - List list = new ArrayList<>() - Map map = new HashMap() - - HashMap tagsMap = [ - system: name, - ] - map.put("tags", tagsMap) - log.debug("getProcessorMetrics() - tags: " + tagsMap.toString()) - - HashMap fieldsMap = [ - availableProcUnits: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.processor?.totalProcUnits?.first(), - utilizedProcUnits: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.processor?.utilizedProcUnits?.first(), - availableProcUnits: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.processor?.availableProcUnits?.first(), - configurableProcUnits: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.processor?.configurableProcUnits?.first(), - ] - map.put("fields", fieldsMap) - log.debug("getProcessorMetrics() - fields: " + fieldsMap.toString()) - - list.add(map) - return list - } - - - List getSharedProcessorPools() { - - List list = new ArrayList<>() - metrics.systemUtil?.utilSamples?.first()?.serverUtil?.sharedProcessorPool?.each { - Map map = new HashMap() - - HashMap tagsMap = [ - system: name, - pool: it.name, - ] - map.put("tags", tagsMap) - log.debug("getSharedProcessorPools() - tags: " + tagsMap.toString()) - - HashMap fieldsMap = [ - assignedProcUnits: it.assignedProcUnits.first(), - availableProcUnits: it.availableProcUnits.first(), - ] - map.put("fields", fieldsMap) - log.debug("getSharedProcessorPools() - fields: " + fieldsMap.toString()) - - list.add(map) - - } - - return list - } - - - List getSystemSharedAdapters() { - - List list = new ArrayList<>() - metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each {vios -> - 
vios.network.sharedAdapters.each { - Map map = new HashMap() - - HashMap tagsMap = [ - system: name, - type: it.type, - vios: vios.name, - ] - map.put("tags", tagsMap) - log.debug("getSystemSharedAdapters() - tags: " + tagsMap.toString()) - - HashMap fieldsMap = [ - sentBytes: it.sentBytes.first(), - receivedBytes: it.receivedBytes.first(), - transferredBytes: it.transferredBytes.first(), - ] - map.put("fields", fieldsMap) - log.debug("getSystemSharedAdapters() - fields: " + fieldsMap.toString()) - - list.add(map) - } - - } - - return list - } - - - List getSystemFiberChannelAdapters() { - - List list = new ArrayList<>() - metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each { vios -> - log.debug("getSystemFiberChannelAdapters() - VIOS: " + vios.name) - vios.storage?.fiberChannelAdapters?.each { - Map map = new HashMap() - - HashMap tagsMap = [ - id: it.id, - system: name, - wwpn: it.wwpn, - vios: vios.name, - device: it.physicalLocation, - ] - map.put("tags", tagsMap) - log.debug("getSystemFiberChannelAdapters() - tags: " + tagsMap.toString()) - - HashMap fieldsMap = [ - writeBytes: it.writeBytes.first(), - readBytes: it.readBytes.first(), - transmittedBytes: it.transmittedBytes.first(), - ] - map.put("fields", fieldsMap) - log.debug("getSystemFiberChannelAdapters() - fields: " + fieldsMap.toString()) - - list.add(map) - } - - } - - return list - } - - - List getSystemGenericPhysicalAdapters() { - List list = new ArrayList<>() - metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each { vios -> - vios.storage?.genericPhysicalAdapters?.each { - Map map = new HashMap() - - HashMap tagsMap = [ - id: it.id, - system: name, - vios: vios.name, - device: it.physicalLocation, - ] - map.put("tags", tagsMap) - log.debug("getSystemGenericPhysicalAdapters() - tags: " + tagsMap.toString()) - - HashMap fieldsMap = [ - writeBytes: it.writeBytes.first(), - readBytes: it.readBytes.first(), - transmittedBytes: it.transmittedBytes.first(), - ] - map.put("fields", fieldsMap) - 
log.debug("getSystemGenericPhysicalAdapters() - fields: " + fieldsMap.toString()) - - list.add(map) - - } - - } - - return list - } - - - List getSystemGenericVirtualAdapters() { - List list = new ArrayList<>() - metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each { vios -> - vios.storage?.genericVirtualAdapters?.each { - Map map = new HashMap() - - HashMap tagsMap = [ - id: it.id, - system: name, - vios: vios.name, - device: it.physicalLocation, - ] - map.put("tags", tagsMap) - log.debug("getSystemGenericVirtualAdapters() - tags: " + tagsMap.toString()) - - HashMap fieldsMap = [ - writeBytes: it.writeBytes.first(), - readBytes: it.readBytes.first(), - transmittedBytes: it.transmittedBytes.first(), - ] - map.put("fields", fieldsMap) - log.debug("getSystemGenericVirtualAdapters() - fields: " + fieldsMap.toString()) - - list.add(map) - - } - - } - - return list - } - -} diff --git a/src/main/groovy/biz/nellemann/hmci/MetaSystem.groovy b/src/main/groovy/biz/nellemann/hmci/MetaSystem.groovy deleted file mode 100644 index 065a780..0000000 --- a/src/main/groovy/biz/nellemann/hmci/MetaSystem.groovy +++ /dev/null @@ -1,53 +0,0 @@ -/** - * Copyright 2020 Mark Nellemann - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package biz.nellemann.hmci - -import biz.nellemann.hmci.pcm.PcmData -import groovy.json.JsonSlurper -import groovy.util.logging.Slf4j - -import java.time.Instant -import java.time.format.DateTimeFormatter -import java.time.format.DateTimeParseException - -@Slf4j -abstract class MetaSystem { - - protected PcmData metrics - - void processMetrics(String json) { - def pcmMap = new JsonSlurper().parseText(json) - metrics = new PcmData(pcmMap as Map) - } - - - Instant getTimestamp() { - - String timestamp = metrics.systemUtil.utilSamples.first().sampleInfo.timeStamp - Instant instant - try { - log.debug("getTimeStamp() - PMC Timestamp: " + timestamp) - DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[XXX][X]"); - instant = Instant.from(dateTimeFormatter.parse(timestamp)) - log.debug("getTimestamp() - Instant: " + instant.toString()) - } catch(DateTimeParseException e) { - log.warn("getTimestamp() - parse error: " + timestamp) - } - - return instant ?: Instant.now() - } - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/FiberChannelAdapter.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/FiberChannelAdapter.groovy deleted file mode 100644 index f8612f8..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/FiberChannelAdapter.groovy +++ /dev/null @@ -1,19 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class FiberChannelAdapter { - - String id - String wwpn - String physicalLocation - Integer numOfPorts - List numOfReads - List numOfWrites - List readBytes - List writeBytes - List runningSpeed - List transmittedBytes - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/GenericAdapter.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/GenericAdapter.groovy deleted file mode 100644 index d8a34d1..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/GenericAdapter.groovy +++ /dev/null @@ -1,18 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString 
-class GenericAdapter { - - String id - String type - String physicalLocation - List receivedPackets - List sentPackets - List droppedPackets - List sentBytes - List receivedBytes - List transferredBytes - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/GenericPhysicalAdapters.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/GenericPhysicalAdapters.groovy deleted file mode 100644 index 9fc2539..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/GenericPhysicalAdapters.groovy +++ /dev/null @@ -1,17 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class GenericPhysicalAdapters { - - String id - String type - String physicalLocation - List numOfReads - List numOfWrites - List readBytes - List writeBytes - List transmittedBytes - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/GenericVirtualAdapter.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/GenericVirtualAdapter.groovy deleted file mode 100644 index 1a2334e..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/GenericVirtualAdapter.groovy +++ /dev/null @@ -1,18 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class GenericVirtualAdapter { - - String id - String type - Integer viosId - String physicalLocation - List numOfReads - List numOfWrites - List readBytes - List writeBytes - List transmittedBytes - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/LparMemory.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/LparMemory.groovy deleted file mode 100644 index 4865447..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/LparMemory.groovy +++ /dev/null @@ -1,11 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class LparMemory { - - List logicalMem - List backedPhysicalMem - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/LparProcessor.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/LparProcessor.groovy deleted file mode 100644 index 50f773b..0000000 --- 
a/src/main/groovy/biz/nellemann/hmci/pcm/LparProcessor.groovy +++ /dev/null @@ -1,23 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class LparProcessor { - - Integer poolId - Integer weight - String mode - List maxVirtualProcessors - List currentVirtualProcessors - List maxProcUnits - List entitledProcUnits - List utilizedProcUnits - List utilizedCappedProcUnits - List utilizedUncappedProcUnits - List idleProcUnits - List donatedProcUnits - List timeSpentWaitingForDispatch - List timePerInstructionExecution - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/LparUtil.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/LparUtil.groovy deleted file mode 100644 index 73ed269..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/LparUtil.groovy +++ /dev/null @@ -1,21 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class LparUtil { - - Integer id - String uuid - String name - String state - String type - String osType - Integer affinityScore - - LparMemory memory - LparProcessor processor - Network network - Storage storage - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/Network.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/Network.groovy deleted file mode 100644 index 3913083..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/Network.groovy +++ /dev/null @@ -1,10 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class Network { - List genericAdapters - List sharedAdapters - List virtualEthernetAdapters -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/PcmData.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/PcmData.groovy deleted file mode 100644 index 986c54c..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/PcmData.groovy +++ /dev/null @@ -1,10 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class PcmData { - - SystemUtil systemUtil - -} diff --git 
a/src/main/groovy/biz/nellemann/hmci/pcm/PhysicalProcessorPool.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/PhysicalProcessorPool.groovy deleted file mode 100644 index 85d6f48..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/PhysicalProcessorPool.groovy +++ /dev/null @@ -1,14 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class PhysicalProcessorPool { - - List assignedProcUnits - List utilizedProcUnits - List availableProcUnits - List configuredProcUnits - List borrowedProcUnits - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/SampleInfo.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/SampleInfo.groovy deleted file mode 100644 index 10bb780..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/SampleInfo.groovy +++ /dev/null @@ -1,11 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class SampleInfo { - - String timeStamp - Integer status - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/ServerMemory.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/ServerMemory.groovy deleted file mode 100644 index a293af4..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/ServerMemory.groovy +++ /dev/null @@ -1,13 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class ServerMemory { - - List totalMem - List availableMem - List configurableMem - List assignedMemToLpars - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/ServerProcessor.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/ServerProcessor.groovy deleted file mode 100644 index b03a367..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/ServerProcessor.groovy +++ /dev/null @@ -1,13 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class ServerProcessor { - - List totalProcUnits - List utilizedProcUnits - List availableProcUnits - List configurableProcUnits - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/ServerUtil.groovy 
b/src/main/groovy/biz/nellemann/hmci/pcm/ServerUtil.groovy deleted file mode 100644 index 9f184a1..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/ServerUtil.groovy +++ /dev/null @@ -1,13 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class ServerUtil { - - ServerProcessor processor - ServerMemory memory - PhysicalProcessorPool physicalProcessorPool - List sharedProcessorPool - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/SharedAdapter.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/SharedAdapter.groovy deleted file mode 100644 index 0b9ab69..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/SharedAdapter.groovy +++ /dev/null @@ -1,19 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class SharedAdapter { - - String id - String type - String physicalLocation - List receivedPackets - List sentPackets - List droppedPackets - List sentBytes - List receivedBytes - List transferredBytes - List bridgedAdapters - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/SharedProcessorPool.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/SharedProcessorPool.groovy deleted file mode 100644 index 2a0d5e9..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/SharedProcessorPool.groovy +++ /dev/null @@ -1,16 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class SharedProcessorPool { - - String id - String name - List assignedProcUnits - List utilizedProcUnits - List availableProcUnits - List configuredProcUnits - List borrowedProcUnits - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/Storage.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/Storage.groovy deleted file mode 100644 index 150152a..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/Storage.groovy +++ /dev/null @@ -1,14 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class Storage { - - List clientLpars - List 
genericPhysicalAdapters - List genericVirtualAdapters - List fiberChannelAdapters - List virtualFiberChannelAdapters - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/SystemUtil.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/SystemUtil.groovy deleted file mode 100644 index 707b706..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/SystemUtil.groovy +++ /dev/null @@ -1,11 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class SystemUtil { - - UtilInfo utilInfo - List utilSamples - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/UtilInfo.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/UtilInfo.groovy deleted file mode 100644 index c30d1cf..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/UtilInfo.groovy +++ /dev/null @@ -1,18 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class UtilInfo { - - String version - String metricType - Integer frequency - String startTimeStamp - String endTimeStamp - String mtms - String name - String uuid - List metricArrayOrder - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/UtilSample.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/UtilSample.groovy deleted file mode 100644 index b595aad..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/UtilSample.groovy +++ /dev/null @@ -1,14 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class UtilSample { - - String sampleType - SampleInfo sampleInfo - ServerUtil serverUtil - List viosUtil - List lparsUtil - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/ViosUtil.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/ViosUtil.groovy deleted file mode 100644 index 2873d6e..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/ViosUtil.groovy +++ /dev/null @@ -1,24 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class ViosUtil { - - String id - String uuid - String name - String state - Integer 
affinityScore - - Memory memory - LparProcessor processor - Network network - Storage storage - - class Memory { - List assignedMem - List utilizedMem - } - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/VirtualEthernetAdapter.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/VirtualEthernetAdapter.groovy deleted file mode 100644 index 4930a73..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/VirtualEthernetAdapter.groovy +++ /dev/null @@ -1,27 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class VirtualEthernetAdapter { - - String physicalLocation - Integer vlanId - Integer vswitchId - Boolean isPortVlanId - Integer viosId - String sharedEthernetAdapterId - List receivedPackets - List sentPackets - List droppedPackets - List sentBytes - List receivedBytes - List receivedPhysicalPackets - List sentPhysicalPackets - List droppedPhysicalPackets - List sentPhysicalBytes - List receivedPhysicalBytes - List transferredBytes - List transferredPhysicalBytes - -} diff --git a/src/main/groovy/biz/nellemann/hmci/pcm/VirtualFiberChannelAdapter.groovy b/src/main/groovy/biz/nellemann/hmci/pcm/VirtualFiberChannelAdapter.groovy deleted file mode 100644 index 380c251..0000000 --- a/src/main/groovy/biz/nellemann/hmci/pcm/VirtualFiberChannelAdapter.groovy +++ /dev/null @@ -1,20 +0,0 @@ -package biz.nellemann.hmci.pcm - -import groovy.transform.ToString - -@ToString -class VirtualFiberChannelAdapter { - - String wwpn - String wwpn2 - String physicalLocation - String physicalPortWWPN - Integer viosId - List numOfReads - List numOfWrites - List readBytes - List writeBytes - List runningSpeed - List transmittedBytes - -} diff --git a/src/main/java/biz/nellemann/hmci/Configuration.java b/src/main/java/biz/nellemann/hmci/Configuration.java new file mode 100644 index 0000000..967c026 --- /dev/null +++ b/src/main/java/biz/nellemann/hmci/Configuration.java @@ -0,0 +1,186 @@ +package biz.nellemann.hmci; + +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; +import org.tomlj.Toml; +import org.tomlj.TomlParseResult; +import org.tomlj.TomlTable; + +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.List; + +public class Configuration { + + private final static Logger log = LoggerFactory.getLogger(Configuration.class); + + final public Long refresh; + final public Long rescan; + final public InfluxObject influx; + final public List hmc; + + Configuration(String configurationFile) throws IOException { + + Path source = Paths.get(configurationFile); + TomlParseResult result = Toml.parse(source); + result.errors().forEach(error -> System.err.println(error.toString())); + + if(result.contains("refresh")) { + refresh = result.getLong("refresh"); + } else { + refresh = 15L; + } + + if(result.contains("rescan")) { + rescan = result.getLong("rescan"); + } else { + rescan = 60L; + } + + hmc = getHmc(result); + influx = getInflux(result); + + } + + + List getHmc(TomlParseResult result) { + + ArrayList list = new ArrayList<>(); + + if(result.contains("hmc") && result.isTable("hmc")) { + TomlTable hmcTable = result.getTable("hmc"); + if(hmcTable == null) { + return list; + } + for(String key : hmcTable.keySet()) { + + HmcObject c = new HmcObject(); + c.name = key; + + if(hmcTable.contains(key+".url")) { + c.url = hmcTable.getString(key+".url"); + } + + if(hmcTable.contains(key+".username")) { + c.username = hmcTable.getString(key+".username"); + } + + if(hmcTable.contains(key+".password")) { + c.password = hmcTable.getString(key+".password"); + } + + if(hmcTable.contains(key+".unsafe")) { + c.unsafe = hmcTable.getBoolean(key+".unsafe"); + } else { + c.unsafe = false; + } + + list.add(c); + } + } + + return list; + } + + + InfluxObject getInflux(TomlParseResult result) { + + InfluxObject c = new InfluxObject(); + + if(result.contains("influx")) { + TomlTable t = result.getTable("influx"); + + if(t != null && t.contains("url")) { 
+ c.url = t.getString("url"); + } + + if(t != null && t.contains("username")) { + c.username = t.getString("username"); + } + + if(t != null && t.contains("password")) { + c.password = t.getString("password"); + } + + if(t != null && t.contains("database")) { + c.database = t.getString("database"); + } + + } + + return c; + } + + + static class InfluxObject { + + String url = "http://localhost:8086"; + String username = "root"; + String password = ""; + String database = "hmci"; + + private boolean validated = false; + + InfluxObject() { } + + InfluxObject(String url, String username, String password, String database) { + this.url = url; + this.username = username; + this.password = password; + this.database = database; + } + + Boolean isValid() { + return validated; + } + + // TODO: Fixme + void validate() { + validated = true; + } + + @Override + public String toString() { + return url; + } + } + + + static class HmcObject { + + String name; + String url; + String username; + String password; + Boolean unsafe = false; + + private boolean validated = false; + + HmcObject() { } + + HmcObject(String url, String username, String password, Boolean unsafe) { + this.url = url; + this.username = username; + this.password = password; + this.unsafe = unsafe; + } + + + Boolean isValid() { + return validated; + } + + // TODO: Fixme + void validate() { + validated = true; + } + + @Override + public String toString() { + return name; + } + } + +} diff --git a/src/main/java/biz/nellemann/hmci/HmcClient.java b/src/main/java/biz/nellemann/hmci/HmcClient.java new file mode 100644 index 0000000..453509f --- /dev/null +++ b/src/main/java/biz/nellemann/hmci/HmcClient.java @@ -0,0 +1,431 @@ +/* + * Copyright 2020 Mark Nellemann + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
package biz.nellemann.hmci;

import biz.nellemann.hmci.Configuration.HmcObject;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.SecureRandom;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
 * REST client for a single HMC (Hardware Management Console).
 *
 * Responsibilities: session login/logoff, discovery of ManagedSystems and
 * LogicalPartitions, and retrieval of PCM (Performance and Capacity
 * Monitoring) data as JSON.
 *
 * Not thread-safe: {@code authToken} and {@code responseErrors} are mutated
 * without synchronization; use one instance per polling thread.
 */
class HmcClient {

    private final static Logger log = LoggerFactory.getLogger(HmcClient.class);

    private final MediaType MEDIA_TYPE_IBM_XML_LOGIN =
        MediaType.parse("application/vnd.ibm.powervm.web+xml; type=LogonRequest");

    private final String hmcId;
    private final String baseUrl;
    private final String username;
    private final String password;

    // Count of empty/garbled responses; once > 2 a forced re-login is done.
    protected Integer responseErrors = 0;
    // Session token returned by the HMC Logon endpoint; null when logged out.
    protected String authToken;
    private final OkHttpClient client;


    /**
     * @param configHmc connection settings (url, credentials, unsafe-SSL flag)
     *                  for one [hmc] section of the configuration file.
     */
    HmcClient(HmcObject configHmc) {
        this.hmcId = configHmc.name;
        this.baseUrl = configHmc.url;
        this.username = configHmc.username;
        this.password = configHmc.password;

        // "unsafe" disables certificate and hostname verification, needed for
        // HMCs with self-signed certificates.
        if(Boolean.TRUE.equals(configHmc.unsafe)) {
            this.client = getUnsafeOkHttpClient();
        } else {
            this.client = new OkHttpClient();
        }
    }


    /**
     * Logon to the HMC and get an authentication token for further requests.
     * No-op when a token is already present.
     */
    void login() throws Exception {
        this.login(false);
    }


    /**
     * Logon to the HMC and get an authentication token for further requests.
     *
     * @param force re-authenticate even when a token is already cached
     * @throws Exception on malformed URL or any transport/login failure
     */
    void login(Boolean force) throws Exception {

        if(authToken != null && !force) {
            return;
        }

        log.info("Connecting to HMC - " + baseUrl);

        // LogonRequest payload for the HMC web API.
        // NOTE(review): the XML tags were lost in transit in the original
        // source (empty append("") calls); reconstructed from the IBM HMC
        // REST API LogonRequest schema — confirm namespace/schemaVersion.
        StringBuilder payload = new StringBuilder();
        payload.append("<?xml version='1.0' encoding='UTF-8' standalone='yes'?>");
        payload.append("<LogonRequest xmlns='http://www.ibm.com/xmlns/systems/power/firmware/web/mc/2012_10/' schemaVersion='V1_0'>");
        payload.append("<UserID>").append(username).append("</UserID>");
        payload.append("<Password>").append(password).append("</Password>");
        payload.append("</LogonRequest>");

        try {
            URL url = new URL(String.format("%s/rest/api/web/Logon", baseUrl));
            Request request = new Request.Builder()
                .url(url)
                .addHeader("Accept", "application/vnd.ibm.powervm.web+xml; type=LogonResponse")
                .addHeader("X-Audit-Memento", "hmci")
                .put(RequestBody.create(payload.toString(), MEDIA_TYPE_IBM_XML_LOGIN))
                .build();

            Response response = client.newCall(request).execute();
            if (!response.isSuccessful()) throw new IOException("Unexpected code " + response);

            // string() reads AND closes the underlying body.
            String responseBody = Objects.requireNonNull(response.body()).string();

            // The session token is inside the <X-API-Session> element.
            Document doc = Jsoup.parse(responseBody);
            authToken = doc.select("X-API-Session").text();

            log.debug("login() - Auth Token: " + authToken);
        } catch (MalformedURLException e) {
            log.error("login() - url error", e);
            throw new Exception("Login URL Error: " + e.getMessage(), e);
        } catch(Exception e) {
            log.error("login() - general error", e);
            throw new Exception("Login General Error: " + e.getMessage(), e);
        }
    }


    /**
     * Logoff from the HMC and invalidate the cached session token.
     * No-op when not logged in.
     */
    void logoff() throws IOException {

        if(authToken == null) {
            return;
        }

        URL absUrl = new URL(String.format("%s/rest/api/web/Logon", baseUrl));
        Request request = new Request.Builder()
            .url(absUrl)
            .addHeader("Content-Type", "application/vnd.ibm.powervm.web+xml; type=LogonRequest")
            .addHeader("X-API-Session", authToken)
            .delete()
            .build();

        Response response = client.newCall(request).execute();
        if (!response.isSuccessful()) throw new IOException("Unexpected code " + response);

        authToken = null;
        log.debug("logoff()");
    }


    /**
     * Return Map of ManagedSystems seen by this HMC.
     *
     * @return map of system-id to ManagedSystem (empty on empty response)
     */
    Map<String, ManagedSystem> getManagedSystems() throws Exception {

        URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem", baseUrl));
        Response response = getResponse(url);
        String responseBody = Objects.requireNonNull(response.body()).string();
        Map<String, ManagedSystem> managedSystemsMap = new HashMap<>();

        // Do not try to parse empty response
        if(responseBody.isEmpty() || responseBody.length() <= 1) {
            responseErrors++;
            return managedSystemsMap;
        }

        try {
            Document doc = Jsoup.parse(responseBody);
            Elements managedSystems = doc.select("ManagedSystem|ManagedSystem");
            for(Element el : managedSystems) {
                ManagedSystem system = new ManagedSystem(
                    hmcId,
                    el.select("Metadata > Atom > AtomID").text(),
                    el.select("SystemName").text(),
                    el.select("MachineTypeModelAndSerialNumber > MachineType").text(),
                    el.select("MachineTypeModelAndSerialNumber > Model").text(),
                    el.select("MachineTypeModelAndSerialNumber > SerialNumber").text()
                );
                managedSystemsMap.put(system.id, system);
                log.debug("getManagedSystems() - Found system: " + system.toString());
            }
        } catch(Exception e) {
            // Parse failures are non-fatal; caller gets what was parsed so far.
            log.warn("getManagedSystems() - xml parse error", e);
        }

        return managedSystemsMap;
    }


    /**
     * Return Map of LogicalPartitions seen by a ManagedSystem on this HMC.
     *
     * @param system a valid ManagedSystem
     * @return map of partition-id to LogicalPartition (empty on empty response)
     */
    Map<String, LogicalPartition> getLogicalPartitionsForManagedSystem(ManagedSystem system) throws Exception {

        URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem/%s/LogicalPartition", baseUrl, system.id));
        Response response = getResponse(url);
        String responseBody = Objects.requireNonNull(response.body()).string();
        Map<String, LogicalPartition> partitionMap = new HashMap<>();

        // Do not try to parse empty response
        if(responseBody.isEmpty() || responseBody.length() <= 1) {
            responseErrors++;
            return partitionMap;
        }

        try {
            Document doc = Jsoup.parse(responseBody);
            Elements logicalPartitions = doc.select("LogicalPartition|LogicalPartition");
            for(Element el : logicalPartitions) {
                LogicalPartition logicalPartition = new LogicalPartition(
                    el.select("PartitionUUID").text(),
                    el.select("PartitionName").text(),
                    el.select("PartitionType").text(),
                    system
                );
                partitionMap.put(logicalPartition.id, logicalPartition);
                log.debug("getLogicalPartitionsForManagedSystem() - Found partition: " + logicalPartition.toString());
            }
        } catch(Exception e) {
            log.warn("getLogicalPartitionsForManagedSystem() - xml parse error", e);
        }

        return partitionMap;
    }


    /**
     * Parse XML feed to get PCM Data in JSON format.
     *
     * @param system a valid ManagedSystem
     * @return JSON string with PCM data, or null when unavailable
     */
    String getPcmDataForManagedSystem(ManagedSystem system) throws Exception {

        log.debug("getPcmDataForManagedSystem() - " + system.id);
        URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/ProcessedMetrics?NoOfSamples=1", baseUrl, system.id));
        Response response = getResponse(url);
        String responseBody = Objects.requireNonNull(response.body()).string();
        String jsonBody = null;

        // Do not try to parse empty response
        if(responseBody.isEmpty() || responseBody.length() <= 1) {
            responseErrors++;
            log.warn("getPcmDataForManagedSystem() - empty response");
            return null;
        }

        try {
            // The feed entry links to the actual JSON document; follow it.
            Document doc = Jsoup.parse(responseBody);
            Element entry = doc.select("feed > entry").first();
            Element link = entry.select("link[href]").first();

            if(link.attr("type").equals("application/json")) {
                String href = link.attr("href");
                log.debug("getPcmDataForManagedSystem() - json url: " + href);
                jsonBody = getResponseBody(new URL(href));
            }
        } catch(Exception e) {
            log.warn("getPcmDataForManagedSystem() - xml parse error", e);
        }

        return jsonBody;
    }


    /**
     * Parse XML feed to get PCM Data in JSON format.
     *
     * @param partition a valid LogicalPartition
     * @return JSON string with PCM data, or null when unavailable
     */
    String getPcmDataForLogicalPartition(LogicalPartition partition) throws Exception {

        log.debug(String.format("getPcmDataForLogicalPartition() - %s @ %s", partition.id, partition.system.id));
        URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/LogicalPartition/%s/ProcessedMetrics?NoOfSamples=1", baseUrl, partition.system.id, partition.id));
        Response response = getResponse(url);
        String responseBody = Objects.requireNonNull(response.body()).string();
        String jsonBody = null;

        // Do not try to parse empty response
        if(responseBody.isEmpty() || responseBody.length() <= 1) {
            responseErrors++;
            log.warn("getPcmDataForLogicalPartition() - empty response");
            return null;
        }

        try {
            Document doc = Jsoup.parse(responseBody);
            Element entry = doc.select("feed > entry").first();
            Element link = entry.select("link[href]").first();

            if(link.attr("type").equals("application/json")) {
                String href = link.attr("href");
                log.debug("getPcmDataForLogicalPartition() - json url: " + href);
                jsonBody = getResponseBody(new URL(href));
            }
        } catch(Exception e) {
            log.warn("getPcmDataForLogicalPartition() - xml parse error", e);
        }

        return jsonBody;
    }


    /**
     * Return body text from a HTTP response from the HMC.
     *
     * @param url URL to get response body as String
     * @return String with http response body
     */
    protected String getResponseBody(URL url) throws Exception {
        Response response = getResponse(url);
        // string() reads and closes the body.
        return Objects.requireNonNull(response.body()).string();
    }


    /**
     * Return a Response from the HMC.
     *
     * @param url to get Response from
     * @return Response object
     */
    private Response getResponse(URL url) throws Exception {
        return getResponse(url, 0);
    }


    /**
     * Return a Response from the HMC, retrying (and re-authenticating)
     * a bounded number of times.
     *
     * BUG FIX: the original passed {@code retry++} (post-increment), i.e. the
     * UNCHANGED retry count, to every recursive call — so {@code retry < 2}
     * never advanced and persistent errors recursed without bound. All
     * recursive calls now pass {@code retry + 1}.
     *
     * @param url   to get Response from
     * @param retry number of retries already performed for this call
     * @return Response object
     * @throws IOException when retries are exhausted on an error response
     */
    private Response getResponse(URL url, Integer retry) throws Exception {

        log.debug("getResponse() - " + url.toString());

        // Too many empty responses seen: assume a stale session and re-login.
        if(responseErrors > 2) {
            responseErrors = 0;
            login(true);
            return getResponse(url, retry + 1);
        }

        Request request = new Request.Builder()
            .url(url)
            .addHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
            .addHeader("X-API-Session", authToken)
            .get()
            .build();

        Response response = client.newCall(request).execute();
        if (!response.isSuccessful()) {
            // Release the connection before retrying.
            Objects.requireNonNull(response.body()).close();

            if(response.code() == 401) {
                // Session expired: force a new login and try again.
                login(true);
                return getResponse(url, retry + 1);
            }

            if(retry < 2) {
                log.warn("getResponse() - Retrying due to unexpected response: " + response.code());
                return getResponse(url, retry + 1);
            }

            log.error("getResponse() - Unexpected response: " + response.code());
            throw new IOException("getResponse() - Unexpected response: " + response.code());
        }

        return response;
    }


    /**
     * Provide an unsafe (ignoring SSL problems) OkHttpClient.
     * Accepts ANY certificate and ANY hostname — only for HMCs with
     * self-signed certificates on trusted networks.
     *
     * @return OkHttpClient with certificate/hostname verification disabled
     */
    private static OkHttpClient getUnsafeOkHttpClient() {
        try {
            // Trust manager that does not validate certificate chains.
            final TrustManager[] trustAllCerts = new TrustManager[] {
                new X509TrustManager() {
                    @Override
                    public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
                    }

                    @Override
                    public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
                    }

                    @Override
                    public X509Certificate[] getAcceptedIssuers() {
                        return new X509Certificate[]{};
                    }
                }
            };

            // Install the all-trusting trust manager
            final SSLContext sslContext = SSLContext.getInstance("SSL");
            sslContext.init(null, trustAllCerts, new SecureRandom());

            // Create an ssl socket factory with our all-trusting manager
            final SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();

            OkHttpClient.Builder builder = new OkHttpClient.Builder();
            builder.sslSocketFactory(sslSocketFactory, (X509TrustManager) trustAllCerts[0]);
            builder.hostnameVerifier((hostname, session) -> true);

            return builder.build();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

}
package biz.nellemann.hmci;

import biz.nellemann.hmci.Configuration.InfluxObject;
import org.influxdb.BatchOptions;
import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.influxdb.dto.Query;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * Writes ManagedSystem and LogicalPartition measurements to InfluxDB.
 *
 * Points are accumulated into a shared BatchPoints buffer and flushed by
 * {@link #writeBatchPoints()}; batching is also enabled on the driver itself.
 * Not thread-safe: the batch buffer is mutated without synchronization.
 */
class InfluxClient {

    private final static Logger log = LoggerFactory.getLogger(InfluxClient.class);

    private final String url;
    private final String username;
    private final String password;
    private final String database;

    private InfluxDB influxDB;            // null until login() succeeds
    private BatchPoints batchPoints;      // shared accumulation buffer


    InfluxClient(InfluxObject config) {
        this.url = config.url;
        this.username = config.username;
        this.password = config.password;
        this.database = config.database;
    }


    /**
     * Connect to InfluxDB, create the database if missing and enable
     * client-side batching. No-op when already connected.
     *
     * @throws Exception when the connection cannot be established
     */
    void login() throws Exception {

        if(influxDB != null) {
            return;
        }

        try {
            log.info("Connecting to InfluxDB - " + url);
            influxDB = InfluxDBFactory.connect(url, username, password);
            createDatabase();

            // Enable batch writes to get better performance.
            influxDB.enableBatch(BatchOptions.DEFAULTS);

            batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build();

        } catch(Exception e) {
            log.error(e.getMessage());
            throw new Exception(e);
        }
    }


    /** Close the InfluxDB connection; safe to call when not connected. */
    void logoff() {
        if(influxDB != null) {
            influxDB.close();
        }
        influxDB = null;
    }


    /**
     * Create our database with a default retention of 156w == 3 years
     * and select it for subsequent writes.
     */
    void createDatabase() {
        influxDB.query(new Query("CREATE DATABASE " + database + " WITH DURATION 156w"));
        influxDB.setDatabase(database);
    }


    /**
     * Flush the accumulated batch to InfluxDB. On failure the connection is
     * recycled (logoff + login); the failed batch is not re-sent.
     */
    void writeBatchPoints() throws Exception {
        log.debug("writeBatchPoints()");
        try {
            influxDB.write(batchPoints);
        } catch(Exception e) {
            log.error("writeBatchPoints() error - " + e.getMessage());
            logoff();
            login();
        }
    }



    /*
        Managed System
     */


    /**
     * Queue all metric points for one ManagedSystem.
     * Skips silently when the system has no metrics or no timestamp yet.
     */
    void writeManagedSystem(ManagedSystem system) {

        if(system.metrics == null) {
            log.warn("writeManagedSystem() - null metrics, skipping");
            return;
        }

        Instant timestamp = system.getTimestamp();
        if(timestamp == null) {
            log.warn("writeManagedSystem() - no timestamp, skipping");
            return;
        }

        getSystemMemory(system, timestamp).forEach(batchPoints::point);
        getSystemProcessor(system, timestamp).forEach(batchPoints::point);
        getSystemSharedProcessorPools(system, timestamp).forEach(batchPoints::point);
        getSystemSharedAdapters(system, timestamp).forEach(batchPoints::point);
        getSystemFiberChannelAdapters(system, timestamp).forEach(batchPoints::point);
        getSystemGenericPhysicalAdapters(system, timestamp).forEach(batchPoints::point);
        getSystemGenericVirtualAdapters(system, timestamp).forEach(batchPoints::point);
    }


    private static List<Point> getSystemMemory(ManagedSystem system, Instant timestamp) {
        return processMeasurementMap(system.getMemoryMetrics(), timestamp, "SystemMemory");
    }

    private static List<Point> getSystemProcessor(ManagedSystem system, Instant timestamp) {
        return processMeasurementMap(system.getProcessorMetrics(), timestamp, "SystemProcessor");
    }

    private static List<Point> getSystemSharedProcessorPools(ManagedSystem system, Instant timestamp) {
        return processMeasurementMap(system.getSharedProcessorPools(), timestamp, "SystemSharedProcessorPool");
    }

    private static List<Point> getSystemSharedAdapters(ManagedSystem system, Instant timestamp) {
        return processMeasurementMap(system.getSystemSharedAdapters(), timestamp, "SystemSharedAdapters");
    }

    private static List<Point> getSystemFiberChannelAdapters(ManagedSystem system, Instant timestamp) {
        return processMeasurementMap(system.getSystemFiberChannelAdapters(), timestamp, "SystemFiberChannelAdapters");
    }

    private static List<Point> getSystemGenericPhysicalAdapters(ManagedSystem system, Instant timestamp) {
        return processMeasurementMap(system.getSystemGenericPhysicalAdapters(), timestamp, "SystemGenericPhysicalAdapters");
    }

    private static List<Point> getSystemGenericVirtualAdapters(ManagedSystem system, Instant timestamp) {
        return processMeasurementMap(system.getSystemGenericVirtualAdapters(), timestamp, "SystemGenericVirtualAdapters");
    }


    /*
        Logical Partitions
     */

    /**
     * Queue all metric points for one LogicalPartition.
     * Skips silently when the partition has no metrics or no timestamp yet.
     */
    void writeLogicalPartition(LogicalPartition partition) {

        if(partition.metrics == null) {
            log.warn("writeLogicalPartition() - null metrics, skipping");
            return;
        }

        Instant timestamp = partition.getTimestamp();
        if(timestamp == null) {
            log.warn("writeLogicalPartition() - no timestamp, skipping");
            return;
        }

        getPartitionAffinityScore(partition, timestamp).forEach(batchPoints::point);
        getPartitionMemory(partition, timestamp).forEach(batchPoints::point);
        getPartitionProcessor(partition, timestamp).forEach(batchPoints::point);
        getPartitionVirtualEthernetAdapter(partition, timestamp).forEach(batchPoints::point);
        getPartitionVirtualFiberChannelAdapter(partition, timestamp).forEach(batchPoints::point);
    }

    private static List<Point> getPartitionAffinityScore(LogicalPartition partition, Instant timestamp) {
        return processMeasurementMap(partition.getAffinityScore(), timestamp, "PartitionAffinityScore");
    }

    private static List<Point> getPartitionMemory(LogicalPartition partition, Instant timestamp) {
        return processMeasurementMap(partition.getMemoryMetrics(), timestamp, "PartitionMemory");
    }

    private static List<Point> getPartitionProcessor(LogicalPartition partition, Instant timestamp) {
        return processMeasurementMap(partition.getProcessorMetrics(), timestamp, "PartitionProcessor");
    }

    private static List<Point> getPartitionVirtualEthernetAdapter(LogicalPartition partition, Instant timestamp) {
        return processMeasurementMap(partition.getVirtualEthernetAdapterMetrics(), timestamp, "PartitionVirtualEthernetAdapters");
    }

    private static List<Point> getPartitionVirtualFiberChannelAdapter(LogicalPartition partition, Instant timestamp) {
        return processMeasurementMap(partition.getVirtualFiberChannelAdaptersMetrics(), timestamp, "PartitionVirtualFiberChannelAdapters");
    }



    /*
        Shared
     */

    /**
     * Convert a list of Measurements into InfluxDB points: one point per
     * field, tagged with the field name plus all of the measurement's tags.
     *
     * @param measurements tag/field maps produced by Managed­System / LogicalPartition
     * @param timestamp    sample time applied to every point
     * @param measurement  InfluxDB measurement (series) name
     */
    private static List<Point> processMeasurementMap(List<Measurement> measurements, Instant timestamp, String measurement) {

        List<Point> listOfPoints = new ArrayList<>();
        measurements.forEach( m -> {

            // One point per field; the field name becomes the "name" tag.
            m.fields.forEach((fieldName, fieldValue) -> {
                log.debug("processMeasurementMap() " + measurement + " - fieldName: " + fieldName + ", fieldValue: " + fieldValue);

                Point.Builder builder = Point.measurement(measurement)
                    .time(timestamp.toEpochMilli(), TimeUnit.MILLISECONDS)
                    .tag("name", fieldName)
                    .addField("value", fieldValue);

                // For each field, we add all tags
                m.tags.forEach((tagName, tagValue) -> {
                    builder.tag(tagName, tagValue);
                    log.debug("processMeasurementMap() " + measurement + " - tagName: " + tagName + ", tagValue: " + tagValue);
                });

                listOfPoints.add(builder.build());
            });

        });

        return listOfPoints;
    }

}
package biz.nellemann.hmci;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Main collection loop: discovers HMCs, systems and partitions, then
 * periodically pulls PCM metrics and pushes them to InfluxDB.
 */
class Insights {

    private final static Logger log = LoggerFactory.getLogger(Insights.class);

    final Configuration configuration;

    InfluxClient influxClient;
    // Keyed by HMC name / system id / partition id respectively.
    final Map<String, HmcClient> hmcClients = new HashMap<>();
    final Map<String, ManagedSystem> systems = new HashMap<>();
    final Map<String, LogicalPartition> partitions = new HashMap<>();


    /**
     * Connects to InfluxDB and performs an initial HMC discovery.
     * Exits the JVM when InfluxDB is unreachable — nothing useful can be
     * done without a metrics sink.
     */
    Insights(Configuration configuration) {
        this.configuration = configuration;

        try {
            influxClient = new InfluxClient(configuration.influx);
            influxClient.login();
        } catch(Exception e) {
            // Log before exiting; the original exited silently.
            log.error("Insights() - cannot connect to InfluxDB: " + e.getMessage());
            System.exit(1);
        }

        // Initial scan
        discover();
    }


    /**
     * (Re)discover HMCs from the configuration, then the ManagedSystems and
     * LogicalPartitions behind each. Already-known entries are kept.
     */
    void discover() {

        configuration.hmc.forEach( configHmc -> {
            // hmcClients is a final field and never null; just check membership.
            if(!hmcClients.containsKey(configHmc.name)) {
                hmcClients.put(configHmc.name, new HmcClient(configHmc));
            }
        });

        hmcClients.forEach(( hmcId, hmcClient) -> {

            try {
                hmcClient.login();
                hmcClient.getManagedSystems().forEach((systemId, system) -> {

                    // Add to list of known systems
                    systems.putIfAbsent(systemId, system);

                    // Get LPAR's for this system
                    try {
                        hmcClient.getLogicalPartitionsForManagedSystem(system).forEach((partitionId, partition) -> {
                            // Add to list of known partitions
                            partitions.putIfAbsent(partitionId, partition);
                        });
                    } catch (Exception e) {
                        log.error("discover()", e);
                    }

                });

            } catch(Exception e) {
                // One unreachable HMC must not abort discovery of the others.
                log.error("discover() - " + hmcId + " error: " + e.getMessage());
            }

        });
    }


    /** Fetch and process PCM metrics for every known ManagedSystem. */
    void getMetricsForSystems() {

        systems.forEach((systemId, system) -> {

            HmcClient hmcClient = hmcClients.get(system.hmcId);

            // Get and process metrics for this system
            String tmpJsonString = null;
            try {
                tmpJsonString = hmcClient.getPcmDataForManagedSystem(system);
            } catch (Exception e) {
                log.error("getMetricsForSystems()", e);
            }

            if(tmpJsonString != null && !tmpJsonString.isEmpty()) {
                system.processMetrics(tmpJsonString);
            }

        });
    }


    /** Fetch and process PCM metrics for every known LogicalPartition. */
    void getMetricsForPartitions() {

        try {
            partitions.forEach((partitionId, partition) -> {

                HmcClient hmcClient = hmcClients.get(partition.system.hmcId);

                // Get and process metrics for this partition
                String tmpJsonString2 = null;
                try {
                    tmpJsonString2 = hmcClient.getPcmDataForLogicalPartition(partition);
                } catch (Exception e) {
                    log.error("getMetricsForPartitions() - getPcmDataForLogicalPartition", e);
                }
                if(tmpJsonString2 != null && !tmpJsonString2.isEmpty()) {
                    partition.processMetrics(tmpJsonString2);
                }

            });

        } catch(Exception e) {
            log.error("getMetricsForPartitions()", e);
        }
    }


    /** Queue the latest system measurements into the Influx batch. */
    void writeMetricsForManagedSystems() {
        systems.forEach((systemId, system) -> influxClient.writeManagedSystem(system));
    }


    /** Queue the latest partition measurements into the Influx batch. */
    void writeMetricsForLogicalPartitions() {
        partitions.forEach((partitionId, partition) -> influxClient.writeLogicalPartition(partition));
    }


    /**
     * Poll loop: collect + write every {@code configuration.refresh} seconds,
     * re-running discovery after {@code configuration.rescan} iterations.
     * Stops when the JVM begins shutting down (shutdown hook flips the flag).
     *
     * @throws InterruptedException when the sleep between polls is interrupted
     */
    void run() throws InterruptedException {

        log.debug("run()");
        int executions = 0;
        AtomicBoolean keepRunning = new AtomicBoolean(true);

        Thread shutdownHook = new Thread(() -> keepRunning.set(false));
        Runtime.getRuntime().addShutdownHook(shutdownHook);

        do {

            try {
                getMetricsForSystems();
                getMetricsForPartitions();

                writeMetricsForManagedSystems();
                writeMetricsForLogicalPartitions();
                influxClient.writeBatchPoints();

                // Refresh HMC's
                if (executions > configuration.rescan) {
                    executions = 0;
                    discover();
                }
            } catch (Exception e) {
                // Keep polling even when one iteration fails completely.
                log.error("run()", e);
            }

            executions++;
            Thread.sleep(configuration.refresh * 1000);

        } while (keepRunning.get());
    }

}
package biz.nellemann.hmci;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * One LPAR on a ManagedSystem. Converts the parsed PCM metrics (held by the
 * MetaSystem superclass) into tag/field Measurement lists for InfluxDB.
 */
class LogicalPartition extends MetaSystem {

    private final static Logger log = LoggerFactory.getLogger(LogicalPartition.class);

    public final String id;
    public final String name;
    public final String type;
    public final ManagedSystem system;   // owning managed system


    LogicalPartition(String id, String name, String type, ManagedSystem system) {
        this.id = id;
        this.name = name;
        this.type = type;
        this.system = system;
    }


    public String toString() {
        return String.format("[%s] %s (%s)", id, name, type);
    }


    /** Tags common to every measurement of this partition. */
    private Map<String, String> commonTags() {
        Map<String, String> tagsMap = new HashMap<>();
        tagsMap.put("system", system.name);
        tagsMap.put("partition", name);
        return tagsMap;
    }


    /** @return single-element list with the partition's affinity score. */
    List<Measurement> getAffinityScore() {

        List<Measurement> list = new ArrayList<>();

        Map<String, String> tagsMap = commonTags();
        log.debug("getAffinityScore() - tags: " + tagsMap.toString());

        // NOTE(review): field values assumed numeric — confirm against Measurement.
        Map<String, Number> fieldsMap = new HashMap<>();
        fieldsMap.put("affinityScore", metrics.systemUtil.sample.lparsUtil.affinityScore);
        log.debug("getAffinityScore() - fields: " + fieldsMap.toString());

        list.add(new Measurement(tagsMap, fieldsMap));
        return list;
    }


    /** @return single-element list with the partition's memory metrics. */
    List<Measurement> getMemoryMetrics() {

        List<Measurement> list = new ArrayList<>();

        Map<String, String> tagsMap = commonTags();
        log.debug("getMemoryMetrics() - tags: " + tagsMap.toString());

        Map<String, Number> fieldsMap = new HashMap<>();
        fieldsMap.put("logicalMem", metrics.systemUtil.sample.lparsUtil.memory.logicalMem);
        fieldsMap.put("backedPhysicalMem", metrics.systemUtil.sample.lparsUtil.memory.backedPhysicalMem);
        log.debug("getMemoryMetrics() - fields: " + fieldsMap.toString());

        list.add(new Measurement(tagsMap, fieldsMap));
        return list;
    }


    /**
     * @return single-element list with the partition's processor metrics.
     *
     * BUG FIX: the original stored timeSpentWaitingForDispatch under the key
     * "timePerInstructionExecution" and vice versa (values were swapped);
     * keys and values now match.
     */
    List<Measurement> getProcessorMetrics() {

        List<Measurement> list = new ArrayList<>();

        Map<String, String> tagsMap = commonTags();
        log.debug("getProcessorMetrics() - tags: " + tagsMap.toString());

        Map<String, Number> fieldsMap = new HashMap<>();
        fieldsMap.put("utilizedProcUnits", metrics.systemUtil.sample.lparsUtil.processor.utilizedProcUnits);
        fieldsMap.put("maxVirtualProcessors", metrics.systemUtil.sample.lparsUtil.processor.maxVirtualProcessors);
        fieldsMap.put("currentVirtualProcessors", metrics.systemUtil.sample.lparsUtil.processor.currentVirtualProcessors);
        fieldsMap.put("entitledProcUnits", metrics.systemUtil.sample.lparsUtil.processor.entitledProcUnits);
        fieldsMap.put("utilizedCappedProcUnits", metrics.systemUtil.sample.lparsUtil.processor.utilizedCappedProcUnits);
        fieldsMap.put("utilizedUncappedProcUnits", metrics.systemUtil.sample.lparsUtil.processor.utilizedUncappedProcUnits);
        fieldsMap.put("timePerInstructionExecution", metrics.systemUtil.sample.lparsUtil.processor.timePerInstructionExecution);
        fieldsMap.put("timeSpentWaitingForDispatch", metrics.systemUtil.sample.lparsUtil.processor.timeSpentWaitingForDispatch);
        log.debug("getProcessorMetrics() - fields: " + fieldsMap.toString());

        list.add(new Measurement(tagsMap, fieldsMap));
        return list;
    }


    /** @return one Measurement per virtual ethernet adapter of this partition. */
    List<Measurement> getVirtualEthernetAdapterMetrics() {

        List<Measurement> list = new ArrayList<>();
        metrics.systemUtil.sample.lparsUtil.network.virtualEthernetAdapters.forEach( adapter -> {

            Map<String, String> tagsMap = commonTags();
            tagsMap.put("sea", adapter.sharedEthernetAdapterId);
            tagsMap.put("viosId", adapter.viosId.toString());
            tagsMap.put("vlanId", adapter.vlanId.toString());
            tagsMap.put("vswitchId", adapter.vswitchId.toString());
            log.debug("getVirtualEthernetAdapterMetrics() - tags: " + tagsMap.toString());

            Map<String, Number> fieldsMap = new HashMap<>();
            fieldsMap.put("receivedPhysicalBytes", adapter.receivedPhysicalBytes);
            fieldsMap.put("sentPhysicalBytes", adapter.sentPhysicalBytes);
            fieldsMap.put("receivedBytes", adapter.receivedBytes);
            fieldsMap.put("sentBytes", adapter.sentBytes);
            log.debug("getVirtualEthernetAdapterMetrics() - fields: " + fieldsMap.toString());

            list.add(new Measurement(tagsMap, fieldsMap));
        });

        return list;
    }


    /** @return one Measurement per virtual fiber-channel adapter of this partition. */
    List<Measurement> getVirtualFiberChannelAdaptersMetrics() {

        List<Measurement> list = new ArrayList<>();
        metrics.systemUtil.sample.lparsUtil.storage.virtualFiberChannelAdapters.forEach( adapter -> {

            Map<String, String> tagsMap = commonTags();
            tagsMap.put("viosId", adapter.viosId.toString());
            tagsMap.put("wwpn", adapter.wwpn);
            log.debug("getVirtualFiberChannelAdaptersMetrics() - tags: " + tagsMap.toString());

            // These counters arrive as single-element lists; take the first.
            Map<String, Number> fieldsMap = new HashMap<>();
            fieldsMap.put("transmittedBytes", adapter.transmittedBytes.get(0));
            fieldsMap.put("writeBytes", adapter.writeBytes.get(0));
            fieldsMap.put("readBytes", adapter.readBytes.get(0));
            log.debug("getVirtualFiberChannelAdaptersMetrics() - fields: " + fieldsMap.toString());

            list.add(new Measurement(tagsMap, fieldsMap));
        });

        return list;
    }

}
package biz.nellemann.hmci;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine;
import picocli.CommandLine.Command;

import java.io.File;
import java.io.IOException;
import java.util.concurrent.Callable;

/**
 * Command-line entry point: parses options with picocli, loads the TOML
 * configuration and runs the Insights collection loop until interrupted.
 */
@Command(name = "hmci",
        mixinStandardHelpOptions = true,
        description = "HMC Insights.",
        versionProvider = biz.nellemann.hmci.VersionProvider.class)
public class Main implements Callable<Integer> {

    private final static Logger log = LoggerFactory.getLogger(Main.class);

    @SuppressWarnings("FieldMayBeFinal")
    @CommandLine.Option(names = { "-c", "--conf" }, description = "Configuration file [default: '/etc/hmci.toml'].")
    private String configurationFile = "/etc/hmci.toml";

    public static void main(String... args) {
        int exitCode = new CommandLine(new Main()).execute(args);
        System.exit(exitCode);
    }


    /**
     * Picocli entry point.
     *
     * @return 0 on clean shutdown, -1 when the configuration file is missing
     * @throws IOException when the configuration file cannot be parsed
     */
    @Override
    public Integer call() throws IOException {

        File file = new File(configurationFile);
        if(!file.exists()) {
            System.err.println("Error - No configuration file found at: " + file.toString());
            return -1;
        }

        Configuration configuration = new Configuration(configurationFile);
        Insights insights = new Insights(configuration);
        try {
            insights.run();   // blocks until the shutdown hook stops the loop
        } catch (InterruptedException e) {
            log.error(e.getMessage());
        }

        return 0;
    }

}
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package biz.nellemann.hmci; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +class ManagedSystem extends MetaSystem { + + private final static Logger log = LoggerFactory.getLogger(ManagedSystem.class); + + public final String hmcId; + public final String id; + public final String name; + public final String type; + public final String model; + public final String serialNumber; + + + ManagedSystem(String hmcId, String id, String name, String type, String model, String serialNumber) { + this.hmcId = hmcId; + this.id = id; + this.name = name; + this.type = type; + this.model = model; + this.serialNumber = serialNumber; + } + + public String toString() { + return String.format("[%s] %s (%s-%s %s)", id, name, type, model, serialNumber); + } + + + List getMemoryMetrics() { + + List list = new ArrayList<>(); + //Map map = new HashMap() + + HashMap tagsMap = new HashMap() { + { + put("system", name); + } + }; + + //map.put("tags", tagsMap) + log.debug("getMemoryMetrics() - tags: " + tagsMap.toString()); + + Map fieldsMap = new HashMap() { + { + put("totalMem", metrics.systemUtil.sample.serverUtil.memory.totalMem); + put("availableMem", metrics.systemUtil.sample.serverUtil.memory.availableMem); + put("configurableMem", metrics.systemUtil.sample.serverUtil.memory.configurableMem); + put("assignedMemToLpars", metrics.systemUtil.sample.serverUtil.memory.assignedMemToLpars); + } + }; + + //map.put("fields", fieldsMap) + 
log.debug("getMemoryMetrics() - fields: " + fieldsMap.toString()); + + Measurement measurement = new Measurement(tagsMap, fieldsMap); + list.add(measurement); + + return list; + } + + + List getProcessorMetrics() { + + List list = new ArrayList<>(); + //Map map = new HashMap<>() + + HashMap tagsMap = new HashMap() { + { + put("system", name); + } + }; + + //map.put("tags", tagsMap) + //measurement.tags = tagsMap; + log.debug("getProcessorMetrics() - tags: " + tagsMap.toString()); + + HashMap fieldsMap = new HashMap() { + { + put("totalProcUnits", metrics.systemUtil.sample.serverUtil.processor.totalProcUnits); + put("utilizedProcUnits", metrics.systemUtil.sample.serverUtil.processor.utilizedProcUnits); + put("availableProcUnits", metrics.systemUtil.sample.serverUtil.processor.availableProcUnits); + put("configurableProcUnits", metrics.systemUtil.sample.serverUtil.processor.configurableProcUnits); + } + }; + //map.put("fields", fieldsMap) + //measurement.fields = fieldsMap; + log.debug("getProcessorMetrics() - fields: " + fieldsMap.toString()); + + Measurement measurement = new Measurement(tagsMap, fieldsMap); + list.add(measurement); + + return list; + } + + + List getSharedProcessorPools() { + + List list = new ArrayList<>(); + metrics.systemUtil.sample.serverUtil.sharedProcessorPool.forEach(adapter -> { + //Map map = new HashMap() + + HashMap tagsMap = new HashMap() { + { + put("system", name); + put("pool", adapter.name); + } + }; + + //map.put("tags", tagsMap) + log.debug("getSharedProcessorPools() - tags: " + tagsMap.toString()); + + HashMap fieldsMap = new HashMap() { + { + put("assignedProcUnits", adapter.assignedProcUnits); + put("availableProcUnits", adapter.availableProcUnits); + } + }; + + //map.put("fields", fieldsMap) + log.debug("getSharedProcessorPools() - fields: " + fieldsMap.toString()); + + Measurement measurement = new Measurement(tagsMap, fieldsMap); + list.add(measurement); + }); + + return list; + } + + + List getSystemSharedAdapters() { + + 
List list = new ArrayList<>(); + metrics.systemUtil.sample.viosUtil.forEach(vios -> { + + vios.network.sharedAdapters.forEach(adapter -> { + //Map map = new HashMap() + Measurement measurement = new Measurement(); + + HashMap tagsMap = new HashMap() { + { + put("system", name); + put("type", adapter.type); + put("vios", vios.name); + } + }; + + //map.put("tags", tagsMap) + measurement.tags = tagsMap; + log.debug("getSystemSharedAdapters() - tags: " + tagsMap.toString()); + + HashMap fieldsMap = new HashMap() { + { + put("sentBytes", adapter.sentBytes); + put("receivedBytes", adapter.receivedBytes); + put("transferredBytes", adapter.transferredBytes); + } + }; + //map.put("fields", fieldsMap) + measurement.fields = fieldsMap; + log.debug("getSystemSharedAdapters() - fields: " + fieldsMap.toString()); + + list.add(measurement); + }); + + }); + + return list; + } + + + List getSystemFiberChannelAdapters() { + + List list = new ArrayList<>(); + metrics.systemUtil.sample.viosUtil.forEach( vios -> { + log.debug("getSystemFiberChannelAdapters() - VIOS: " + vios.name); + + vios.storage.fiberChannelAdapters.forEach( adapter -> { + //HashMap map = new HashMap<>() + Measurement measurement = new Measurement(); + + HashMap tagsMap = new HashMap() { + { + put("id", adapter.id); + put("system", name); + put("wwpn", adapter.wwpn); + put("vios", vios.name); + put("device", adapter.physicalLocation); + } + }; + + //map.put("tags", tagsMap) + measurement.tags = tagsMap; + log.debug("getSystemFiberChannelAdapters() - tags: " + tagsMap.toString()); + + HashMap fieldsMap = new HashMap() { + { + put("writeBytes", adapter.writeBytes); + put("readBytes", adapter.readBytes); + put("transmittedBytes", adapter.transmittedBytes); + } + }; + //map.put("fields", fieldsMap) + measurement.fields = fieldsMap; + log.debug("getSystemFiberChannelAdapters() - fields: " + fieldsMap.toString()); + + list.add(measurement); + }); + + }); + + return list; + } + + + List getSystemGenericPhysicalAdapters() { 
+ + List list = new ArrayList<>(); + + metrics.systemUtil.sample.viosUtil.forEach( vios -> { + + vios.storage.genericPhysicalAdapters.forEach( adapter -> { + //Map map = new HashMap() + Measurement measurement = new Measurement(); + + HashMap tagsMap = new HashMap() { + { + put("id", adapter.id); + put("system", name); + put("vios", vios.name); + put("device", adapter.physicalLocation); + } + }; + + //map.put("tags", tagsMap) + measurement.tags = tagsMap; + log.debug("getSystemGenericPhysicalAdapters() - tags: " + tagsMap.toString()); + + HashMap fieldsMap = new HashMap() { + { + put("writeBytes", adapter.writeBytes); + put("readBytes", adapter.readBytes); + put("transmittedBytes", adapter.transmittedBytes); + } + }; + + //map.put("fields", fieldsMap) + measurement.fields = fieldsMap; + log.debug("getSystemGenericPhysicalAdapters() - fields: " + fieldsMap.toString()); + + list.add(measurement); + }); + + }); + + return list; + } + + + List getSystemGenericVirtualAdapters() { + + List list = new ArrayList<>(); + + metrics.systemUtil.sample.viosUtil.forEach( vios -> { + + vios.storage.genericVirtualAdapters.forEach( adapter -> { + + //Map map = new HashMap() + Measurement measurement = new Measurement(); + + HashMap tagsMap = new HashMap() { + { + put("id", adapter.id); + put("system", name); + put("vios", vios.name); + put("device", adapter.physicalLocation); + } + }; + + //map.put("tags", tagsMap) + measurement.tags = tagsMap; + log.debug("getSystemGenericVirtualAdapters() - tags: " + tagsMap.toString()); + + HashMap fieldsMap = new HashMap() { + { + put("writeBytes", adapter.writeBytes); + put("readBytes", adapter.readBytes); + put("transmittedBytes", adapter.transmittedBytes); + } + }; + + //map.put("fields", fieldsMap) + measurement.fields = fieldsMap; + log.debug("getSystemGenericVirtualAdapters() - fields: " + fieldsMap.toString()); + + list.add(measurement); + }); + + }); + + return list; + } + +} diff --git a/src/main/java/biz/nellemann/hmci/Measurement.java 
// ---- src/main/java/biz/nellemann/hmci/Measurement.java ----
package biz.nellemann.hmci;

import java.util.Map;

/**
 * A single InfluxDB-style data point: indexed tag key/values plus metric
 * name/value fields. The timestamp is supplied by the writer.
 */
public class Measurement {

    // Indexed tags, e.g. system, vios, device.
    Map<String, String> tags;

    // Metric values keyed by metric name; values are numbers from the PCM
    // sample (kept as Object so callers may also store other scalar types).
    Map<String, Object> fields;

    Measurement() {
    }

    Measurement(Map<String, String> tags, Map<String, Object> fields) {
        this.tags = tags;
        this.fields = fields;
    }
}
+ */ +package biz.nellemann.hmci; + +import biz.nellemann.hmci.pcm.PcmData; +import com.serjltt.moshi.adapters.FirstElement; +import com.squareup.moshi.FromJson; +import com.squareup.moshi.JsonAdapter; +import com.squareup.moshi.Moshi; +import com.squareup.moshi.ToJson; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.math.BigDecimal; +import java.time.Instant; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeParseException; + +abstract class MetaSystem { + + private final static Logger log = LoggerFactory.getLogger(MetaSystem.class); + + private final JsonAdapter jsonAdapter; + + protected PcmData metrics; + + MetaSystem() { + try { + Moshi moshi = new Moshi.Builder().add(new NumberAdapter()).add(new BigDecimalAdapter()).add(FirstElement.ADAPTER_FACTORY).build(); + jsonAdapter = moshi.adapter(PcmData.class); + } catch(Exception e) { + log.warn("MetaSystem() error", e); + throw new ExceptionInInitializerError(e); + } + } + + //@CompileDynamic + void processMetrics(String json) { + + try { + metrics = jsonAdapter.fromJson(json); + } catch(Exception e) { + log.warn("processMetrics() error", e); + } + + //Map pcmMap = new JsonSlurper().parseText(json) as Map + //metrics = new PcmData(pcmMap) + } + + //@CompileDynamic + Instant getTimestamp() { + + String timestamp = metrics.systemUtil.sample.sampleInfo.timeStamp; + Instant instant = Instant.now(); + try { + log.debug("getTimeStamp() - PMC Timestamp: " + timestamp); + DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[XXX][X]"); + instant = Instant.from(dateTimeFormatter.parse(timestamp)); + log.debug("getTimestamp() - Instant: " + instant.toString()); + } catch(DateTimeParseException e) { + log.warn("getTimestamp() - parse error: " + timestamp); + } + + return instant; + } + + + static class BigDecimalAdapter { + + @FromJson + BigDecimal fromJson(String string) { + return new BigDecimal(string); + } + + @ToJson + String 
toJson(BigDecimal value) { + return value.toString(); + } + } + + static class NumberAdapter { + + @FromJson + Number fromJson(String string) { + return new Double(string); + } + + @ToJson + String toJson(Number value) { + return value.toString(); + } + } + +} + diff --git a/src/main/java/biz/nellemann/hmci/VersionProvider.java b/src/main/java/biz/nellemann/hmci/VersionProvider.java new file mode 100644 index 0000000..dcd480f --- /dev/null +++ b/src/main/java/biz/nellemann/hmci/VersionProvider.java @@ -0,0 +1,19 @@ +package biz.nellemann.hmci; + +import picocli.CommandLine; + +import java.io.IOException; +import java.util.jar.Attributes; +import java.util.jar.Manifest; + +class VersionProvider implements CommandLine.IVersionProvider { + + public String[] getVersion() throws IOException { + + Manifest manifest = new Manifest(getClass().getResourceAsStream("/META-INF/MANIFEST.MF")); + Attributes attrs = manifest.getMainAttributes(); + + return new String[] { "${COMMAND-FULL-NAME} " + attrs.getValue("Build-Version") }; + } + +} diff --git a/src/main/java/biz/nellemann/hmci/pcm/FiberChannelAdapter.java b/src/main/java/biz/nellemann/hmci/pcm/FiberChannelAdapter.java new file mode 100644 index 0000000..77902b2 --- /dev/null +++ b/src/main/java/biz/nellemann/hmci/pcm/FiberChannelAdapter.java @@ -0,0 +1,30 @@ +package biz.nellemann.hmci.pcm; + +import com.serjltt.moshi.adapters.FirstElement; + +public class FiberChannelAdapter { + + public String id; + public String wwpn; + public String physicalLocation; + public Integer numOfPorts; + + @FirstElement + public Number numOfReads; + + @FirstElement + public Number numOfWrites; + + @FirstElement + public Number readBytes; + + @FirstElement + public Number writeBytes; + + @FirstElement + public Number runningSpeed; + + @FirstElement + public Number transmittedBytes; + +} diff --git a/src/main/java/biz/nellemann/hmci/pcm/GenericAdapter.java b/src/main/java/biz/nellemann/hmci/pcm/GenericAdapter.java new file mode 100644 index 
// ---- src/main/java/biz/nellemann/hmci/pcm/GenericAdapter.java ----
package biz.nellemann.hmci.pcm;

import java.util.List;

/**
 * Generic network adapter statistics from a PCM sample; values are kept as
 * the raw single-element arrays delivered by the HMC.
 */
public class GenericAdapter {

    public String id;
    public String type;
    public String physicalLocation;
    public List<Number> receivedPackets;
    public List<Number> sentPackets;
    public List<Number> droppedPackets;
    public List<Number> sentBytes;
    public List<Number> receivedBytes;
    public List<Number> transferredBytes;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/GenericPhysicalAdapters.java ----
package biz.nellemann.hmci.pcm;

import com.serjltt.moshi.adapters.FirstElement;

/**
 * Generic physical storage adapter statistics; single-element arrays are
 * unwrapped by the {@code @FirstElement} moshi adapter.
 */
public class GenericPhysicalAdapters {

    public String id;
    public String type;
    public String physicalLocation;

    @FirstElement
    public Number numOfReads;

    @FirstElement
    public Number numOfWrites;

    @FirstElement
    public Number readBytes;

    @FirstElement
    public Number writeBytes;

    @FirstElement
    public Number transmittedBytes;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/GenericVirtualAdapter.java ----
package biz.nellemann.hmci.pcm;

import com.serjltt.moshi.adapters.FirstElement;

/**
 * Generic virtual storage adapter statistics; single-element arrays are
 * unwrapped by the {@code @FirstElement} moshi adapter.
 */
public class GenericVirtualAdapter {

    public String id;
    public String type;
    public Integer viosId;
    public String physicalLocation;

    @FirstElement
    public Number numOfReads;

    @FirstElement
    public Number numOfWrites;

    @FirstElement
    public Number readBytes;

    @FirstElement
    public Number writeBytes;

    @FirstElement
    public Number transmittedBytes;

}
// ---- src/main/java/biz/nellemann/hmci/pcm/LparMemory.java ----
package biz.nellemann.hmci.pcm;

import com.serjltt.moshi.adapters.FirstElement;

/** Logical partition memory figures from a PCM sample. */
public class LparMemory {

    @FirstElement
    public Number logicalMem;

    @FirstElement
    public Number backedPhysicalMem;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/LparProcessor.java ----
package biz.nellemann.hmci.pcm;

import com.serjltt.moshi.adapters.FirstElement;

/** Logical partition (or VIOS) processor figures from a PCM sample. */
public class LparProcessor {

    public Integer poolId;
    public Integer weight;
    public String mode;

    @FirstElement
    public Number maxVirtualProcessors;

    @FirstElement
    public Number currentVirtualProcessors;

    @FirstElement
    public Number maxProcUnits;

    @FirstElement
    public Number entitledProcUnits;

    @FirstElement
    public Number utilizedProcUnits;

    @FirstElement
    public Number utilizedCappedProcUnits;

    @FirstElement
    public Number utilizedUncappedProcUnits;

    @FirstElement
    public Number idleProcUnits;

    @FirstElement
    public Number donatedProcUnits;

    @FirstElement
    public Number timeSpentWaitingForDispatch;

    @FirstElement
    public Number timePerInstructionExecution;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/LparUtil.java ----
package biz.nellemann.hmci.pcm;

/** Per-LPAR utilization section of a PCM sample. */
public class LparUtil {

    public Integer id;
    public String uuid;
    public String name;
    public String state;
    public String type;
    public String osType;
    public Number affinityScore;

    public LparMemory memory;
    public LparProcessor processor;
    // Initialized so consumers can iterate safely when the section is absent.
    public Network network = new Network();
    public Storage storage = new Storage();

}


// ---- src/main/java/biz/nellemann/hmci/pcm/Network.java ----
package biz.nellemann.hmci.pcm;

import java.util.ArrayList;
import java.util.List;

/** Network section of a PCM sample; lists default to empty. */
public class Network {

    public List<GenericAdapter> genericAdapters = new ArrayList<>();
    public List<SharedAdapter> sharedAdapters = new ArrayList<>();
    public List<VirtualEthernetAdapter> virtualEthernetAdapters = new ArrayList<>();

}


// ---- src/main/java/biz/nellemann/hmci/pcm/PcmData.java ----
package biz.nellemann.hmci.pcm;

/** Root element of a PCM JSON document. */
public class PcmData {

    public SystemUtil systemUtil;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/PhysicalProcessorPool.java ----
package biz.nellemann.hmci.pcm;

import java.util.List;

/** Physical processor pool figures; values are raw single-element arrays. */
public class PhysicalProcessorPool {

    public List<Number> assignedProcUnits;
    public List<Number> utilizedProcUnits;
    public List<Number> availableProcUnits;
    public List<Number> configuredProcUnits;
    public List<Number> borrowedProcUnits;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/SampleInfo.java ----
package biz.nellemann.hmci.pcm;

/** Metadata about one PCM sample. */
public class SampleInfo {

    public String timeStamp;
    public Integer status;

}
// ---- src/main/java/biz/nellemann/hmci/pcm/ServerMemory.java ----
package biz.nellemann.hmci.pcm;

import com.serjltt.moshi.adapters.FirstElement;

/** System-wide memory figures from a PCM sample. */
public class ServerMemory {

    @FirstElement
    public Number totalMem;

    @FirstElement
    public Number availableMem;

    @FirstElement
    public Number configurableMem;

    @FirstElement
    public Number assignedMemToLpars;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/ServerProcessor.java ----
package biz.nellemann.hmci.pcm;

import com.serjltt.moshi.adapters.FirstElement;

/** System-wide processor figures from a PCM sample. */
public class ServerProcessor {

    @FirstElement
    public Number totalProcUnits;

    @FirstElement
    public Number utilizedProcUnits;

    @FirstElement
    public Number availableProcUnits;

    @FirstElement
    public Number configurableProcUnits;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/ServerUtil.java ----
package biz.nellemann.hmci.pcm;

import java.util.List;

/** Server-level utilization section of a PCM sample. */
public class ServerUtil {

    public ServerProcessor processor;
    public ServerMemory memory;
    public PhysicalProcessorPool physicalProcessorPool;
    public List<SharedProcessorPool> sharedProcessorPool;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/SharedAdapter.java ----
package biz.nellemann.hmci.pcm;

import com.serjltt.moshi.adapters.FirstElement;

/**
 * Shared (SEA) network adapter statistics; single-element arrays are
 * unwrapped by the {@code @FirstElement} moshi adapter.
 */
public class SharedAdapter {

    public String id;
    public String type;
    public String physicalLocation;

    @FirstElement
    public Number receivedPackets;

    @FirstElement
    public Number sentPackets;

    @FirstElement
    public Number droppedPackets;

    @FirstElement
    public Number sentBytes;

    @FirstElement
    public Number receivedBytes;

    @FirstElement
    public Number transferredBytes;

    @FirstElement
    public String bridgedAdapters;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/SharedProcessorPool.java ----
package biz.nellemann.hmci.pcm;

import com.serjltt.moshi.adapters.FirstElement;

/** Shared processor pool figures from a PCM sample. */
public class SharedProcessorPool {

    public String id;
    public String name;

    @FirstElement
    public Number assignedProcUnits;

    @FirstElement
    public Number utilizedProcUnits;

    @FirstElement
    public Number availableProcUnits;

    @FirstElement
    public Number configuredProcUnits;

    @FirstElement
    public Number borrowedProcUnits;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/Storage.java ----
package biz.nellemann.hmci.pcm;

import java.util.ArrayList;
import java.util.List;

/** Storage section of a PCM sample; lists default to empty. */
public class Storage {

    // NOTE(review): element type of clientLpars is not visible from this
    // source — left raw; confirm against the PCM schema before tightening.
    public List clientLpars = new ArrayList<>();
    public List<GenericPhysicalAdapters> genericPhysicalAdapters = new ArrayList<>();
    public List<GenericVirtualAdapter> genericVirtualAdapters = new ArrayList<>();
    public List<FiberChannelAdapter> fiberChannelAdapters = new ArrayList<>();
    public List<VirtualFiberChannelAdapter> virtualFiberChannelAdapters = new ArrayList<>();

}
// ---- src/main/java/biz/nellemann/hmci/pcm/SystemUtil.java ----
package biz.nellemann.hmci.pcm;

import com.serjltt.moshi.adapters.FirstElement;
import com.squareup.moshi.Json;

/** Top-level utilization element of a PCM document. */
public class SystemUtil {

    public UtilInfo utilInfo;

    // The HMC sends an array named "utilSamples"; only the first sample is used.
    @FirstElement
    @Json(name = "utilSamples")
    public UtilSample sample;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/UtilInfo.java ----
package biz.nellemann.hmci.pcm;

import java.util.List;

/** Metadata about the PCM collection (frequency, time range, identity). */
public class UtilInfo {

    public String version;
    public String metricType;
    public Integer frequency;
    public String startTimeStamp;
    public String endTimeStamp;
    public String mtms;
    public String name;
    public String uuid;
    public List<String> metricArrayOrder;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/UtilSample.java ----
package biz.nellemann.hmci.pcm;

import com.serjltt.moshi.adapters.FirstElement;

import java.util.ArrayList;
import java.util.List;

/** One utilization sample: server, VIOS and LPAR sections. */
public class UtilSample {

    public String sampleType;
    public SampleInfo sampleInfo;
    public ServerUtil serverUtil;
    public List<ViosUtil> viosUtil = new ArrayList<>();

    // The HMC sends an array of LPAR sections; only the first is used.
    @FirstElement
    public LparUtil lparsUtil;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/ViosMemory.java ----
package biz.nellemann.hmci.pcm;

import com.serjltt.moshi.adapters.FirstElement;

/** VIOS memory figures from a PCM sample. */
public class ViosMemory {

    @FirstElement
    public Number assignedMem;

    @FirstElement
    public Number utilizedMem;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/ViosUtil.java ----
package biz.nellemann.hmci.pcm;

/** Per-VIOS utilization section of a PCM sample. */
public class ViosUtil {

    public String id;
    public String uuid;
    public String name;
    public String state;
    public Integer affinityScore;

    public ViosMemory memory;
    public LparProcessor processor;
    public Network network;
    public Storage storage;

}


// ---- src/main/java/biz/nellemann/hmci/pcm/VirtualEthernetAdapter.java ----
package biz.nellemann.hmci.pcm;

import com.serjltt.moshi.adapters.FirstElement;

/**
 * Virtual ethernet adapter statistics; single-element arrays are unwrapped
 * by the {@code @FirstElement} moshi adapter.
 */
public class VirtualEthernetAdapter {

    public String physicalLocation;
    public Integer vlanId;
    public Integer vswitchId;
    public Boolean isPortVlanId;
    public Integer viosId;
    public String sharedEthernetAdapterId;

    @FirstElement
    public Number receivedPackets;

    @FirstElement
    public Number sentPackets;

    @FirstElement
    public Number droppedPackets;

    @FirstElement
    public Number sentBytes;

    @FirstElement
    public Number receivedBytes;

    @FirstElement
    public Number receivedPhysicalPackets;

    @FirstElement
    public Number sentPhysicalPackets;

    @FirstElement
    public Number droppedPhysicalPackets;

    @FirstElement
    public Number sentPhysicalBytes;

    @FirstElement
    public Number receivedPhysicalBytes;

    @FirstElement
    public Number transferredBytes;

    @FirstElement
    public Number transferredPhysicalBytes;

}
transferredPhysicalBytes; + +} diff --git a/src/main/java/biz/nellemann/hmci/pcm/VirtualFiberChannelAdapter.java b/src/main/java/biz/nellemann/hmci/pcm/VirtualFiberChannelAdapter.java new file mode 100644 index 0000000..3a3066c --- /dev/null +++ b/src/main/java/biz/nellemann/hmci/pcm/VirtualFiberChannelAdapter.java @@ -0,0 +1,19 @@ +package biz.nellemann.hmci.pcm; + +import java.util.List; + +public class VirtualFiberChannelAdapter { + + public String wwpn; + public String wwpn2; + public String physicalLocation; + public String physicalPortWWPN; + public Integer viosId; + public List numOfReads; + public List numOfWrites; + public List readBytes; + public List writeBytes; + public List runningSpeed; + public List transmittedBytes; + +} diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml index feefcff..cee78fd 100644 --- a/src/main/resources/logback.xml +++ b/src/main/resources/logback.xml @@ -3,7 +3,7 @@ - %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{16} - %msg%n diff --git a/src/test/groovy/biz/nellemann/hmci/ConfigurationTest.groovy b/src/test/groovy/biz/nellemann/hmci/ConfigurationTest.groovy new file mode 100644 index 0000000..5725185 --- /dev/null +++ b/src/test/groovy/biz/nellemann/hmci/ConfigurationTest.groovy @@ -0,0 +1,30 @@ +package biz.nellemann.hmci + +import spock.lang.Specification + + +class ConfigurationTest extends Specification { + + String testConfigurationFile = new File(getClass().getResource('/hmci.toml').toURI()).absolutePath + + void "test parsing"() { + + when: + Configuration conf = new Configuration(testConfigurationFile) + + then: + conf != null + + } + + void "test lookup influx"() { + + when: + Configuration conf = new Configuration(testConfigurationFile) + + then: + conf != null + + } + +} diff --git a/src/test/groovy/biz/nellemann/hmci/HmcClientTest.groovy b/src/test/groovy/biz/nellemann/hmci/HmcClientTest.groovy index a29e1ad..8a3eb21 100644 --- 
a/src/test/groovy/biz/nellemann/hmci/HmcClientTest.groovy +++ b/src/test/groovy/biz/nellemann/hmci/HmcClientTest.groovy @@ -7,12 +7,17 @@ import spock.lang.Specification class HmcClientTest extends Specification { HmcClient hmc - MockWebServer mockServer = new MockWebServer(); + MockWebServer mockServer = new MockWebServer() def setup() { - mockServer.start(); - hmc = new HmcClient("site", mockServer.url("/").toString(), "testUser", "testPassword") + mockServer.start() + Configuration.HmcObject configHmc = new Configuration.HmcObject() + configHmc.name = "site1" + configHmc.url = mockServer.url("/").toString() + configHmc.username = "testUser" + configHmc.password = "testPassword" + hmc = new HmcClient(configHmc) hmc.authToken = "blaBla" } @@ -24,7 +29,7 @@ class HmcClientTest extends Specification { void "test against empty xml"() { setup: def testXml = "" - mockServer.enqueue(new MockResponse().setBody(testXml)); + mockServer.enqueue(new MockResponse().setBody(testXml)) when: Map systems = hmc.getManagedSystems() @@ -38,7 +43,7 @@ class HmcClientTest extends Specification { setup: def testFile = new File(getClass().getResource('/managed-systems.xml').toURI()) def testXml = testFile.getText('UTF-8') - mockServer.enqueue(new MockResponse().setBody(testXml)); + mockServer.enqueue(new MockResponse().setBody(testXml)) when: Map systems = hmc.getManagedSystems() @@ -53,7 +58,7 @@ class HmcClientTest extends Specification { setup: def testFile = new File(getClass().getResource('/logical-partitions.xml').toURI()) def testXml = testFile.getText('UTF-8') - mockServer.enqueue(new MockResponse().setBody(testXml)); + mockServer.enqueue(new MockResponse().setBody(testXml)) when: ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N") @@ -70,7 +75,7 @@ class HmcClientTest extends Specification { setup: def testFile = new File(getClass().getResource('/pcm-data-managed-system.json').toURI()) def 
testJson = testFile.getText('UTF-8') - mockServer.enqueue(new MockResponse().setBody(testJson)); + mockServer.enqueue(new MockResponse().setBody(testJson)) when: String jsonString = hmc.getResponseBody(new URL(mockServer.url("/rest/api/pcm/ProcessedMetrics/ManagedSystem_e09834d1-c930-3883-bdad-405d8e26e166_20200807T122600+0200_20200807T122600+0200_30.json") as String)) @@ -84,7 +89,7 @@ class HmcClientTest extends Specification { setup: def testFile = new File(getClass().getResource('/pcm-data-logical-partition.json').toURI()) def testJson = testFile.getText('UTF-8') - mockServer.enqueue(new MockResponse().setBody(testJson)); + mockServer.enqueue(new MockResponse().setBody(testJson)) when: String jsonString = hmc.getResponseBody(new URL(mockServer.url("/rest/api/pcm/ProcessedMetrics/LogicalPartition_2DE05DB6-8AD5-448F-8327-0F488D287E82_20200807T123730+0200_20200807T123730+0200_30.json") as String)) diff --git a/src/test/groovy/biz/nellemann/hmci/InfluxClientTest.groovy b/src/test/groovy/biz/nellemann/hmci/InfluxClientTest.groovy index c350134..c55bcc0 100644 --- a/src/test/groovy/biz/nellemann/hmci/InfluxClientTest.groovy +++ b/src/test/groovy/biz/nellemann/hmci/InfluxClientTest.groovy @@ -9,7 +9,7 @@ class InfluxClientTest extends Specification { InfluxClient influxClient def setup() { - influxClient = new InfluxClient("http://localhost:8086", "root", "", "hmci") + influxClient = new InfluxClient(new Configuration.InfluxObject("http://localhost:8086", "root", "", "hmci")) influxClient.login() } diff --git a/src/test/groovy/biz/nellemann/hmci/AppTest.groovy b/src/test/groovy/biz/nellemann/hmci/InsightsTest.groovy similarity index 75% rename from src/test/groovy/biz/nellemann/hmci/AppTest.groovy rename to src/test/groovy/biz/nellemann/hmci/InsightsTest.groovy index ef9c7e7..241827a 100644 --- a/src/test/groovy/biz/nellemann/hmci/AppTest.groovy +++ b/src/test/groovy/biz/nellemann/hmci/InsightsTest.groovy @@ -5,6 +5,6 @@ package biz.nellemann.hmci import 
spock.lang.Specification -class AppTest extends Specification { - +class InsightsTest extends Specification { + } diff --git a/src/test/groovy/biz/nellemann/hmci/LogicalPartitionTest.groovy b/src/test/groovy/biz/nellemann/hmci/LogicalPartitionTest.groovy index 0984707..9089ee7 100644 --- a/src/test/groovy/biz/nellemann/hmci/LogicalPartitionTest.groovy +++ b/src/test/groovy/biz/nellemann/hmci/LogicalPartitionTest.groovy @@ -17,9 +17,9 @@ class LogicalPartitionTest extends Specification { lpar.processMetrics(testJson) then: - lpar.metrics.systemUtil.utilSamples.first().lparsUtil.first().memory.logicalMem.first() == 8192.000 - lpar.metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.utilizedProcUnits.first() == 0.001 - lpar.metrics.systemUtil.utilSamples.first().lparsUtil.first().network.virtualEthernetAdapters.first().receivedBytes.first() == 276.467 + lpar.metrics.systemUtil.sample.lparsUtil.memory.logicalMem == 8192.000 + lpar.metrics.systemUtil.sample.lparsUtil.processor.utilizedProcUnits == 0.001 + lpar.metrics.systemUtil.sample.lparsUtil.network.virtualEthernetAdapters.first().receivedBytes == 276.467 } @@ -34,12 +34,12 @@ class LogicalPartitionTest extends Specification { when: lpar.processMetrics(testJson) - List listOfMaps = lpar.getMemoryMetrics() + List listOfMeasurements = lpar.getMemoryMetrics() then: - listOfMaps.size() == 1 - listOfMaps.first().get("fields")['logicalMem'] == 8192.000 - listOfMaps.first().get("tags")['partition'] == '9Flash01' + listOfMeasurements.size() == 1 + listOfMeasurements.first().fields['logicalMem'] == 8192.000 + listOfMeasurements.first().tags['partition'] == '9Flash01' } @@ -53,12 +53,12 @@ class LogicalPartitionTest extends Specification { when: lpar.processMetrics(testJson) - List listOfMaps = lpar.getProcessorMetrics() + List listOfMeasurements = lpar.getProcessorMetrics() then: - listOfMaps.size() == 1 - listOfMaps.first().get("fields")['utilizedProcUnits'] == 0.001 - 
listOfMaps.first().get("tags")['partition'] == '9Flash01' + listOfMeasurements.size() == 1 + listOfMeasurements.first().fields['utilizedProcUnits'] == 0.001 + listOfMeasurements.first().tags['partition'] == '9Flash01' } @@ -72,12 +72,12 @@ class LogicalPartitionTest extends Specification { when: lpar.processMetrics(testJson) - List listOfMaps = lpar.getVirtualEthernetAdapterMetrics() + List listOfMeasurements = lpar.getVirtualEthernetAdapterMetrics() then: - listOfMaps.size() == 1 - listOfMaps.first().get("fields")['receivedBytes'] == 276.467 - listOfMaps.first().get("tags")['sea'] == 'ent5' + listOfMeasurements.size() == 1 + listOfMeasurements.first().fields['receivedBytes'] == 276.467 + listOfMeasurements.first().tags['sea'] == 'ent5' } void "test getVirtualFiberChannelAdaptersMetrics"() { @@ -90,12 +90,12 @@ class LogicalPartitionTest extends Specification { when: lpar.processMetrics(testJson) - List listOfMaps = lpar.getVirtualFiberChannelAdaptersMetrics() + List listOfMeasurements = lpar.getVirtualFiberChannelAdaptersMetrics() then: - listOfMaps.size() == 4 - listOfMaps.first().get("fields")['writeBytes'] == 6690.133 - listOfMaps.first().get("tags")['viosId'] == '1' + listOfMeasurements.size() == 4 + listOfMeasurements.first().fields['writeBytes'] == 6690.133 + listOfMeasurements.first().tags['viosId'] == '1' } diff --git a/src/test/groovy/biz/nellemann/hmci/ManagedSystemTest.groovy b/src/test/groovy/biz/nellemann/hmci/ManagedSystemTest.groovy index 115b78f..6643322 100644 --- a/src/test/groovy/biz/nellemann/hmci/ManagedSystemTest.groovy +++ b/src/test/groovy/biz/nellemann/hmci/ManagedSystemTest.groovy @@ -15,12 +15,12 @@ class ManagedSystemTest extends Specification { system.processMetrics(testJson) then: - system.metrics.systemUtil.utilSamples.first().serverUtil.memory.assignedMemToLpars.first() == 40960.000 - system.metrics.systemUtil.utilSamples.first().serverUtil.processor.totalProcUnits.first() == 24.000 - 
system.metrics.systemUtil.utilSamples.first().viosUtil.first().name == "VIOS1" - system.metrics.systemUtil.utilSamples.first().viosUtil.first().memory.assignedMem.first() == 8192.000 - system.metrics.systemUtil.utilSamples.first().viosUtil.first().storage.genericPhysicalAdapters.first().transmittedBytes.first() == 9966.933 - system.metrics.systemUtil.utilSamples.first().viosUtil.first().storage.fiberChannelAdapters.first().numOfPorts == 3 + system.metrics.systemUtil.sample.serverUtil.memory.assignedMemToLpars == 40960.000 + system.metrics.systemUtil.sample.serverUtil.processor.totalProcUnits == 24.000 + system.metrics.systemUtil.sample.viosUtil.first().name == "VIOS1" + system.metrics.systemUtil.sample.viosUtil.first().memory.assignedMem == 8192.0 + system.metrics.systemUtil.sample.viosUtil.first().storage.genericPhysicalAdapters.first().transmittedBytes == 9966.933 + system.metrics.systemUtil.sample.viosUtil.first().storage.fiberChannelAdapters.first().numOfPorts == 3 } @@ -33,11 +33,11 @@ class ManagedSystemTest extends Specification { when: system.processMetrics(testJson) - List listOfMaps = system.getMemoryMetrics() + List listOfMeasurements = system.getMemoryMetrics() then: - listOfMaps.size() == 1 - listOfMaps.first().get("fields")['totalMem'] == 1048576.000 + listOfMeasurements.size() == 1 + listOfMeasurements.first().fields['totalMem'] == 1048576.000 } void "test getProcessorMetrics"() { @@ -49,11 +49,11 @@ class ManagedSystemTest extends Specification { when: system.processMetrics(testJson) - List listOfMaps = system.getProcessorMetrics() + List listOfMeasurements = system.getProcessorMetrics() then: - listOfMaps.size() == 1 - listOfMaps.first().get("fields")['availableProcUnits'] == 16.000 + listOfMeasurements.size() == 1 + listOfMeasurements.first().fields['availableProcUnits'] == 16.000 } void "test getSystemSharedProcessorPools"() { @@ -65,11 +65,11 @@ class ManagedSystemTest extends Specification { when: system.processMetrics(testJson) - List 
listOfMaps = system.getSharedProcessorPools() + List listOfMeasurements = system.getSharedProcessorPools() then: - listOfMaps.size() == 1 - listOfMaps.first().get("fields")['assignedProcUnits'] == 23.767 + listOfMeasurements.size() == 1 + listOfMeasurements.first().fields['assignedProcUnits'] == 23.767 } void "test VIOS data"() { @@ -80,11 +80,11 @@ class ManagedSystemTest extends Specification { when: system.processMetrics(testJson) - List listOfMaps = system.getSharedProcessorPools() + List listOfMeasurements = system.getSharedProcessorPools() then: - listOfMaps.size() == 1 - listOfMaps.first().get("fields")['assignedProcUnits'] == 23.767 + listOfMeasurements.size() == 1 + listOfMeasurements.first().fields['assignedProcUnits'] == 23.767 } } diff --git a/src/test/resources/hmci.toml b/src/test/resources/hmci.toml new file mode 100644 index 0000000..a5c87f5 --- /dev/null +++ b/src/test/resources/hmci.toml @@ -0,0 +1,32 @@ +# HMCi Configuration + +# How often to query HMC's for data - in seconds +hmci.refresh = 30 + +# Rescan HMC's for new systems and partitions - every x refresh +hmci.rescan = 60 + +# InfluxDB to save metrics +[influx] +url = "http://localhost:8086" +username = "root" +password = "" +database = "hmci" + + +# One or more HMC's to query for data and metrics +[hmc] + + # HMC on our primary site + [hmc.site1] + url = "https://10.10.10.10:12443" + username = "hmci" + password = "hmcihmci" + unsafe = true # Ignore SSL cert. errors + + # Example + #[hmc.site2] + #url = "https://10.10.20.20:12443" + #username = "viewer" + #password = "someSecret" + #unsafe = false