commit
806e6e9631
|
@ -10,7 +10,7 @@ HMCi is a small utility to fetch metrics from one or more HMC's and push those t
|
||||||
|
|
||||||
- Ensure you have correct date/time and use a NTP service to keep it accurate!
|
- Ensure you have correct date/time and use a NTP service to keep it accurate!
|
||||||
- Install the HMCi package (*.deb* or *.rpm*) from [downloads](https://bitbucket.org/mnellemann/hmci/downloads/) or compile from source.
|
- Install the HMCi package (*.deb* or *.rpm*) from [downloads](https://bitbucket.org/mnellemann/hmci/downloads/) or compile from source.
|
||||||
- Copy the *doc/hmci.groovy.tpl* configuration template into */etc/hmci.groovy* and edit the configuration to suit your environment. You can use the *-c [conf-file]* switch if you place this file elsewhere.
|
- Copy the *doc/hmci.tpml* configuration template into */etc/hmci.toml* and edit the configuration to suit your environment. You can use the *-c* option if you place this file elsewhere.
|
||||||
- Configure Grafana to communicate with your InfluxDB and import dashboards from *doc/* into Grafana (The dashboards are slightly modified versions of the dashboard provided by the nmon2influxdb tool).
|
- Configure Grafana to communicate with your InfluxDB and import dashboards from *doc/* into Grafana (The dashboards are slightly modified versions of the dashboard provided by the nmon2influxdb tool).
|
||||||
- Run the *bin/hmci* program in a shell, as a @reboot cron task or setup a proper service :)
|
- Run the *bin/hmci* program in a shell, as a @reboot cron task or setup a proper service :)
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
image: adoptopenjdk:8-openj9
|
image: openjdk:8
|
||||||
#image: openjdk:8
|
|
||||||
|
|
||||||
pipelines:
|
pipelines:
|
||||||
branches:
|
branches:
|
||||||
|
|
34
build.gradle
34
build.gradle
|
@ -1,8 +1,6 @@
|
||||||
plugins {
|
plugins {
|
||||||
// Apply the groovy plugin to add support for Groovy
|
id 'java'
|
||||||
id 'groovy'
|
id 'groovy'
|
||||||
|
|
||||||
// Apply the application plugin to add support for building a CLI application.
|
|
||||||
id 'application'
|
id 'application'
|
||||||
|
|
||||||
// Code coverage of tests
|
// Code coverage of tests
|
||||||
|
@ -18,12 +16,18 @@ repositories {
|
||||||
}
|
}
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
implementation 'org.codehaus.groovy:groovy-all:3.0.5'
|
annotationProcessor 'info.picocli:picocli-codegen:4.5.1'
|
||||||
|
implementation 'info.picocli:picocli:4.5.1'
|
||||||
|
implementation 'org.jsoup:jsoup:1.13.1'
|
||||||
implementation 'com.squareup.okhttp3:okhttp:4.8.0'
|
implementation 'com.squareup.okhttp3:okhttp:4.8.0'
|
||||||
|
implementation 'com.squareup.moshi:moshi:1.11.0'
|
||||||
|
implementation 'com.serjltt.moshi:moshi-lazy-adapters:2.2'
|
||||||
|
implementation 'org.tomlj:tomlj:1.0.0'
|
||||||
implementation 'org.influxdb:influxdb-java:2.19'
|
implementation 'org.influxdb:influxdb-java:2.19'
|
||||||
implementation 'org.slf4j:slf4j-api:1.7.+'
|
implementation 'org.slf4j:slf4j-api:1.7.+'
|
||||||
runtimeOnly 'ch.qos.logback:logback-classic:1.+'
|
runtimeOnly 'ch.qos.logback:logback-classic:1.+'
|
||||||
|
|
||||||
|
testImplementation 'org.codehaus.groovy:groovy-all:3.0.5'
|
||||||
testImplementation('org.spockframework:spock-core:2.0-M3-groovy-3.0')
|
testImplementation('org.spockframework:spock-core:2.0-M3-groovy-3.0')
|
||||||
testImplementation("org.slf4j:slf4j-simple:1.7.+")
|
testImplementation("org.slf4j:slf4j-simple:1.7.+")
|
||||||
testImplementation('com.squareup.okhttp3:mockwebserver:4.8.0')
|
testImplementation('com.squareup.okhttp3:mockwebserver:4.8.0')
|
||||||
|
@ -32,7 +36,7 @@ dependencies {
|
||||||
}
|
}
|
||||||
|
|
||||||
application {
|
application {
|
||||||
mainClassName = 'biz.nellemann.hmci.App'
|
mainClassName = 'biz.nellemann.hmci.Main'
|
||||||
}
|
}
|
||||||
|
|
||||||
test {
|
test {
|
||||||
|
@ -68,8 +72,7 @@ ospackage {
|
||||||
|
|
||||||
buildRpm {
|
buildRpm {
|
||||||
dependsOn startShadowScripts
|
dependsOn startShadowScripts
|
||||||
//requires('java-1.8.0-openjdk-headless')
|
os = "LINUX"
|
||||||
os = LINUX
|
|
||||||
}
|
}
|
||||||
|
|
||||||
buildDeb {
|
buildDeb {
|
||||||
|
@ -77,7 +80,6 @@ buildDeb {
|
||||||
requires('default-jre-headless')
|
requires('default-jre-headless')
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
jacoco {
|
jacoco {
|
||||||
toolVersion = "0.8.5"
|
toolVersion = "0.8.5"
|
||||||
}
|
}
|
||||||
|
@ -103,22 +105,16 @@ jacocoTestCoverageVerification {
|
||||||
}
|
}
|
||||||
check.dependsOn jacocoTestCoverageVerification
|
check.dependsOn jacocoTestCoverageVerification
|
||||||
|
|
||||||
|
|
||||||
processResources.dependsOn.add("versionFile")
|
|
||||||
versionFile {
|
|
||||||
// Path to the file to be written
|
|
||||||
file = new File(project.buildDir, 'resources/main/version.properties')
|
|
||||||
}
|
|
||||||
|
|
||||||
jar {
|
jar {
|
||||||
manifest {
|
manifest {
|
||||||
attributes(
|
attributes(
|
||||||
'Built-By' : System.properties['user.name'],
|
|
||||||
'Build-Timestamp': new Date().format("yyyy-MM-dd'T'HH:mm:ss.SSSZ").toString(),
|
|
||||||
'Build-Revision' : versioning.info.commit,
|
|
||||||
'Created-By' : "Gradle ${gradle.gradleVersion}",
|
'Created-By' : "Gradle ${gradle.gradleVersion}",
|
||||||
|
'Build-OS' : "${System.properties['os.name']} ${System.properties['os.arch']} ${System.properties['os.version']}",
|
||||||
'Build-Jdk' : "${System.properties['java.version']} (${System.properties['java.vendor']} ${System.properties['java.vm.version']})",
|
'Build-Jdk' : "${System.properties['java.version']} (${System.properties['java.vendor']} ${System.properties['java.vm.version']})",
|
||||||
'Build-OS' : "${System.properties['os.name']} ${System.properties['os.arch']} ${System.properties['os.version']}"
|
'Build-User' : System.properties['user.name'],
|
||||||
|
'Build-Version' : versioning.info.tag ?: (versioning.info.branch + "-" + versioning.info.build),
|
||||||
|
'Build-Revision' : versioning.info.commit,
|
||||||
|
'Build-Timestamp': new Date().format("yyyy-MM-dd'T'HH:mm:ss.SSSZ").toString(),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,39 +0,0 @@
|
||||||
/*
|
|
||||||
Copy this file to /etc/hmci.groovy and change it to suit your environment.
|
|
||||||
*/
|
|
||||||
|
|
||||||
// How often to query HMC's for data - in seconds
|
|
||||||
hmci.refresh = 30
|
|
||||||
|
|
||||||
// Rescan HMC's for new systems and partitions - every x refresh
|
|
||||||
hmci.rescan = 60
|
|
||||||
|
|
||||||
// InfluxDB to save metrics
|
|
||||||
influx {
|
|
||||||
url = "http://localhost:8086"
|
|
||||||
username = "root"
|
|
||||||
password = ""
|
|
||||||
database = "hmci"
|
|
||||||
}
|
|
||||||
|
|
||||||
// One or more HMC's to query for data and metrics
|
|
||||||
hmc {
|
|
||||||
|
|
||||||
// HMC on our primary site
|
|
||||||
site1 {
|
|
||||||
url = "https://10.10.10.10:12443"
|
|
||||||
username = "hmci"
|
|
||||||
password = "hmcihmci"
|
|
||||||
unsafe = true // Ignore SSL cert. errors
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
site2 {
|
|
||||||
url = "https://10.10.20.20:12443"
|
|
||||||
username = "viewer"
|
|
||||||
password = "someSecret"
|
|
||||||
unsafe = false
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
}
|
|
31
doc/hmci.toml
Normal file
31
doc/hmci.toml
Normal file
|
@ -0,0 +1,31 @@
|
||||||
|
# HMCi Configuration
|
||||||
|
|
||||||
|
# How often to query HMC's for data - in seconds
|
||||||
|
hmci.refresh = 30
|
||||||
|
|
||||||
|
# Rescan HMC's for new systems and partitions - every x refresh
|
||||||
|
hmci.rescan = 60
|
||||||
|
|
||||||
|
# InfluxDB to save metrics
|
||||||
|
[influx]
|
||||||
|
url = "http://localhost:8086"
|
||||||
|
username = "root"
|
||||||
|
password = ""
|
||||||
|
database = "hmci"
|
||||||
|
|
||||||
|
# One or more HMC's to query for data and metrics
|
||||||
|
[hmc]
|
||||||
|
|
||||||
|
# HMC on our primary site
|
||||||
|
[hmc.site1]
|
||||||
|
url = "https://10.10.10.10:12443"
|
||||||
|
username = "hmci"
|
||||||
|
password = "hmcihmci"
|
||||||
|
unsafe = true # Ignore SSL cert. errors
|
||||||
|
|
||||||
|
# Example
|
||||||
|
#[hmc.site2]
|
||||||
|
#url = "https://10.10.20.20:12443"
|
||||||
|
#username = "viewer"
|
||||||
|
#password = "someSecret"
|
||||||
|
#unsafe = false
|
|
@ -1,3 +1,3 @@
|
||||||
id = hmci
|
id = hmci
|
||||||
group = biz.nellemann.hmci
|
group = biz.nellemann.hmci
|
||||||
version = 1.0.10
|
version = 0.2.1
|
||||||
|
|
|
@ -1,246 +0,0 @@
|
||||||
/**
|
|
||||||
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package biz.nellemann.hmci
|
|
||||||
|
|
||||||
import groovy.cli.picocli.CliBuilder
|
|
||||||
import groovy.cli.picocli.OptionAccessor
|
|
||||||
import groovy.util.logging.Slf4j
|
|
||||||
|
|
||||||
@Slf4j
|
|
||||||
class App implements Runnable {
|
|
||||||
|
|
||||||
final ConfigObject configuration
|
|
||||||
final Integer refreshEverySec
|
|
||||||
final Integer rescanHmcEvery
|
|
||||||
|
|
||||||
InfluxClient influxClient
|
|
||||||
Map<String, HmcClient> hmcClients = new HashMap<>()
|
|
||||||
Map<String,ManagedSystem> systems = new HashMap<String, ManagedSystem>()
|
|
||||||
Map<String, LogicalPartition> partitions = new HashMap<String, LogicalPartition>()
|
|
||||||
|
|
||||||
|
|
||||||
App(ConfigObject configuration) {
|
|
||||||
this.configuration = configuration
|
|
||||||
log.debug configuration.toString()
|
|
||||||
|
|
||||||
refreshEverySec = (Integer)configuration.get('hmci.refresh') ?: 60
|
|
||||||
rescanHmcEvery = (Integer)configuration.get('hmci.rescan') ?: 15
|
|
||||||
|
|
||||||
String influxUrl = configuration.get('influx')['url']
|
|
||||||
String influxUsername = configuration.get('influx')['username']
|
|
||||||
String influxPassword = configuration.get('influx')['password']
|
|
||||||
String influxDatabase = configuration.get('influx')['database']
|
|
||||||
|
|
||||||
try {
|
|
||||||
influxClient = new InfluxClient(influxUrl, influxUsername, influxPassword, influxDatabase)
|
|
||||||
influxClient.login()
|
|
||||||
} catch(Exception e) {
|
|
||||||
System.exit(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Initial scan
|
|
||||||
discover()
|
|
||||||
|
|
||||||
run()
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void discover() {
|
|
||||||
|
|
||||||
configuration.get('hmc').each { Object key, Object hmc ->
|
|
||||||
if(!hmcClients?.containsKey(key)) {
|
|
||||||
log.info("Adding HMC: " + hmc.toString())
|
|
||||||
String hmcKey = key
|
|
||||||
String hmcUrl = hmc['url']
|
|
||||||
String hmcUsername = hmc['username']
|
|
||||||
String hmcPassword = hmc['password']
|
|
||||||
Boolean hmcUnsafe = hmc['unsafe']
|
|
||||||
HmcClient hmcClient = new HmcClient(hmcKey, hmcUrl, hmcUsername, hmcPassword, hmcUnsafe)
|
|
||||||
hmcClients.put(hmcKey, hmcClient)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
hmcClients.each { hmcId, hmcClient ->
|
|
||||||
|
|
||||||
|
|
||||||
try {
|
|
||||||
hmcClient.login()
|
|
||||||
hmcClient.getManagedSystems().each { systemId, system ->
|
|
||||||
|
|
||||||
// Add to list of known systems
|
|
||||||
systems.putIfAbsent(systemId, system)
|
|
||||||
|
|
||||||
// Get LPAR's for this system
|
|
||||||
hmcClient.getLogicalPartitionsForManagedSystem(system).each { partitionId, partition ->
|
|
||||||
|
|
||||||
// Add to list of known partitions
|
|
||||||
partitions.putIfAbsent(partitionId, partition)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch(Exception e) {
|
|
||||||
log.error("discover() - " + hmcId + " error: " + e.message)
|
|
||||||
//hmcClients.remove(hmcId)
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void getMetricsForSystems() {
|
|
||||||
|
|
||||||
try {
|
|
||||||
|
|
||||||
systems.each {systemId, system ->
|
|
||||||
|
|
||||||
HmcClient hmcClient = hmcClients.get(system.hmcId)
|
|
||||||
|
|
||||||
// Get and process metrics for this system
|
|
||||||
String tmpJsonString = hmcClient.getPcmDataForManagedSystem(system)
|
|
||||||
if(tmpJsonString && !tmpJsonString.empty) {
|
|
||||||
system.processMetrics(tmpJsonString)
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
} catch(Exception e) {
|
|
||||||
log.error(e.message)
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void getMetricsForPartitions() {
|
|
||||||
|
|
||||||
try {
|
|
||||||
|
|
||||||
// Get LPAR's for this system
|
|
||||||
partitions.each { partitionId, partition ->
|
|
||||||
|
|
||||||
HmcClient hmcClient = hmcClients.get(partition.system.hmcId)
|
|
||||||
|
|
||||||
// Get and process metrics for this partition
|
|
||||||
String tmpJsonString2 = hmcClient.getPcmDataForLogicalPartition(partition)
|
|
||||||
if(tmpJsonString2 && !tmpJsonString2.empty) {
|
|
||||||
partition.processMetrics(tmpJsonString2)
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
} catch(Exception e) {
|
|
||||||
log.error(e.message)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void writeMetricsForManagedSystems() {
|
|
||||||
systems.each {systemId, system ->
|
|
||||||
influxClient.writeManagedSystem(system)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void writeMetricsForLogicalPartitions() {
|
|
||||||
partitions.each {partitionId, partition ->
|
|
||||||
influxClient.writeLogicalPartition(partition)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
static String getVersion() {
|
|
||||||
URL url = getClass().getResource("/version.properties");
|
|
||||||
if (url == null) {
|
|
||||||
return "No version.txt file found in the classpath."
|
|
||||||
}
|
|
||||||
Properties properties = new Properties();
|
|
||||||
properties.load(url.openStream());
|
|
||||||
return properties.getProperty("VERSION_GRADLE") + "-" + properties.getProperty("VERSION_BUILD")
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
static void main(String... args) {
|
|
||||||
|
|
||||||
def cli = new CliBuilder(name: "hmci")
|
|
||||||
cli.h(longOpt: 'help', usageHelp: true, 'display usage information')
|
|
||||||
cli.v(longOpt: 'version', versionHelp: true, 'display version information')
|
|
||||||
cli.c(longOpt: 'config', args: 1, required: true, paramLabel: "FILE", defaultValue: '/etc/hmci.groovy', 'configuration file')
|
|
||||||
|
|
||||||
OptionAccessor options = cli.parse(args)
|
|
||||||
if (options.h) {
|
|
||||||
cli.usage()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if(options.v) {
|
|
||||||
println("Version " + getVersion())
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
ConfigObject configuration
|
|
||||||
if(options.c) {
|
|
||||||
|
|
||||||
File configurationFile = new File((String)options.config)
|
|
||||||
if(!configurationFile.exists()) {
|
|
||||||
println("Error - No configuration file found at: " + configurationFile.toString())
|
|
||||||
System.exit(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
configuration = new ConfigSlurper("development").parse(configurationFile.toURI().toURL());
|
|
||||||
}
|
|
||||||
|
|
||||||
if(configuration == null || configuration.isEmpty()) {
|
|
||||||
println("Error - Empty or faulty configuration")
|
|
||||||
System.exit(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
new App(configuration)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@Override
|
|
||||||
void run() {
|
|
||||||
|
|
||||||
log.debug("run()")
|
|
||||||
|
|
||||||
boolean keepRunning = true
|
|
||||||
int executions = 0
|
|
||||||
|
|
||||||
while(keepRunning) {
|
|
||||||
|
|
||||||
try {
|
|
||||||
getMetricsForSystems()
|
|
||||||
getMetricsForPartitions()
|
|
||||||
|
|
||||||
writeMetricsForManagedSystems()
|
|
||||||
writeMetricsForLogicalPartitions()
|
|
||||||
influxClient.writeBatchPoints()
|
|
||||||
|
|
||||||
// Refresh HMC's
|
|
||||||
if(executions > rescanHmcEvery) {
|
|
||||||
executions = 0
|
|
||||||
discover()
|
|
||||||
}
|
|
||||||
} catch(Exception e) {
|
|
||||||
log.error(e.message, e)
|
|
||||||
}
|
|
||||||
|
|
||||||
executions++
|
|
||||||
Thread.sleep(refreshEverySec * 1000)
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,394 +0,0 @@
|
||||||
/**
|
|
||||||
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package biz.nellemann.hmci
|
|
||||||
|
|
||||||
import groovy.util.logging.Slf4j
|
|
||||||
import groovy.xml.XmlSlurper
|
|
||||||
import okhttp3.MediaType
|
|
||||||
import okhttp3.OkHttpClient
|
|
||||||
import okhttp3.Request
|
|
||||||
import okhttp3.RequestBody
|
|
||||||
import okhttp3.Response
|
|
||||||
|
|
||||||
import javax.net.ssl.HostnameVerifier
|
|
||||||
import javax.net.ssl.SSLContext
|
|
||||||
import javax.net.ssl.SSLSession
|
|
||||||
import javax.net.ssl.SSLSocketFactory
|
|
||||||
import javax.net.ssl.TrustManager
|
|
||||||
import javax.net.ssl.X509TrustManager
|
|
||||||
import java.security.SecureRandom
|
|
||||||
import java.security.cert.CertificateException
|
|
||||||
import java.security.cert.X509Certificate;
|
|
||||||
|
|
||||||
@Slf4j
|
|
||||||
class HmcClient {
|
|
||||||
|
|
||||||
private final MediaType MEDIA_TYPE_IBM_XML_LOGIN = MediaType.parse("application/vnd.ibm.powervm.web+xml; type=LogonRequest");
|
|
||||||
|
|
||||||
private final String hmcId
|
|
||||||
private final String baseUrl
|
|
||||||
private final String username
|
|
||||||
private final String password
|
|
||||||
private final Boolean unsafe
|
|
||||||
|
|
||||||
protected Integer responseErrors = 0
|
|
||||||
protected String authToken
|
|
||||||
private final OkHttpClient client
|
|
||||||
|
|
||||||
HmcClient(String hmcId, String baseUrl, String username, String password, Boolean unsafe = false) {
|
|
||||||
this.hmcId = hmcId
|
|
||||||
this.baseUrl = baseUrl
|
|
||||||
this.username = username
|
|
||||||
this.password = password
|
|
||||||
this.unsafe = unsafe
|
|
||||||
|
|
||||||
if(unsafe) {
|
|
||||||
this.client = getUnsafeOkHttpClient()
|
|
||||||
} else {
|
|
||||||
this.client = new OkHttpClient()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Logon to the HMC and get an authentication token for further requests.
|
|
||||||
*
|
|
||||||
* @throws IOException
|
|
||||||
*/
|
|
||||||
void login(Boolean force = false) throws IOException {
|
|
||||||
|
|
||||||
if(authToken && !force) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
String payload = """\
|
|
||||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
|
||||||
<LogonRequest xmlns="http://www.ibm.com/xmlns/systems/power/firmware/web/mc/2012_10/" schemaVersion="V1_0">
|
|
||||||
<UserID>${username}</UserID>
|
|
||||||
<Password>${password}</Password>
|
|
||||||
</LogonRequest>"""
|
|
||||||
|
|
||||||
URL url = new URL(String.format("%s/rest/api/web/Logon", baseUrl))
|
|
||||||
Request request = new Request.Builder()
|
|
||||||
.url(url)
|
|
||||||
//.addHeader("Content-Type", "application/vnd.ibm.powervm.web+xml; type=LogonRequest")
|
|
||||||
.addHeader("Accept", "application/vnd.ibm.powervm.web+xml; type=LogonResponse")
|
|
||||||
.addHeader("X-Audit-Memento", "hmci")
|
|
||||||
.put(RequestBody.create(payload, MEDIA_TYPE_IBM_XML_LOGIN))
|
|
||||||
.build();
|
|
||||||
|
|
||||||
try {
|
|
||||||
Response response = client.newCall(request).execute();
|
|
||||||
if (!response.isSuccessful()) throw new IOException("Unexpected code " + response);
|
|
||||||
|
|
||||||
// Get response body and parse
|
|
||||||
String responseBody = response.body.string()
|
|
||||||
response.body().close()
|
|
||||||
|
|
||||||
def xml = new XmlSlurper().parseText(responseBody)
|
|
||||||
authToken = xml.toString()
|
|
||||||
|
|
||||||
log.debug("login() - Auth Token: " + authToken)
|
|
||||||
} catch(Exception e) {
|
|
||||||
log.error(e.message)
|
|
||||||
throw new Exception(e)
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Logoff from the HMC and remove any session
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
void logoff() {
|
|
||||||
|
|
||||||
if(!authToken) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
URL absUrl = new URL(String.format("%s/rest/api/web/Logon", baseUrl))
|
|
||||||
Request request = new Request.Builder()
|
|
||||||
.url(absUrl)
|
|
||||||
.addHeader("Content-Type", "application/vnd.ibm.powervm.web+xml; type=LogonRequest")
|
|
||||||
.addHeader("X-API-Session", authToken)
|
|
||||||
.delete()
|
|
||||||
.build();
|
|
||||||
|
|
||||||
Response response = client.newCall(request).execute();
|
|
||||||
if (!response.isSuccessful()) throw new IOException("Unexpected code " + response);
|
|
||||||
|
|
||||||
authToken = null
|
|
||||||
log.debug("logoff()")
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Return Map of ManagedSystems seen by this HMC
|
|
||||||
*
|
|
||||||
* @return
|
|
||||||
*/
|
|
||||||
Map<String, ManagedSystem> getManagedSystems() {
|
|
||||||
URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem", baseUrl))
|
|
||||||
Response response = getResponse(url)
|
|
||||||
String responseBody = response.body.string()
|
|
||||||
Map<String,ManagedSystem> managedSystemsMap = new HashMap<String, ManagedSystem>()
|
|
||||||
|
|
||||||
// Do not try to parse empty response
|
|
||||||
if(responseBody.empty || responseBody.size() < 1) {
|
|
||||||
responseErrors++
|
|
||||||
return managedSystemsMap
|
|
||||||
}
|
|
||||||
|
|
||||||
def feed = new XmlSlurper().parseText(responseBody)
|
|
||||||
feed?.entry?.each { entry ->
|
|
||||||
entry.content.each { content ->
|
|
||||||
content.ManagedSystem.each { system ->
|
|
||||||
ManagedSystem managedSystem = new ManagedSystem(
|
|
||||||
hmcId,
|
|
||||||
entry.id as String,
|
|
||||||
system.SystemName as String,
|
|
||||||
system.MachineTypeModelAndSerialNumber?.MachineType as String,
|
|
||||||
system.MachineTypeModelAndSerialNumber?.Model as String,
|
|
||||||
system.MachineTypeModelAndSerialNumber?.SerialNumber as String
|
|
||||||
)
|
|
||||||
managedSystemsMap.put(managedSystem.id, managedSystem)
|
|
||||||
log.debug("getManagedSystems() - Found system: " + managedSystem.toString())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return managedSystemsMap
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Return Map of LogicalPartitions seen by a ManagedSystem on this HMC
|
|
||||||
|
|
||||||
* @param UUID of managed system
|
|
||||||
* @return
|
|
||||||
*/
|
|
||||||
Map<String, LogicalPartition> getLogicalPartitionsForManagedSystem(ManagedSystem system) {
|
|
||||||
URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem/%s/LogicalPartition", baseUrl, system.id))
|
|
||||||
Response response = getResponse(url)
|
|
||||||
String responseBody = response.body.string()
|
|
||||||
Map<String, LogicalPartition> partitionMap = new HashMap<String, LogicalPartition>() {}
|
|
||||||
|
|
||||||
// Do not try to parse empty response
|
|
||||||
if(responseBody.empty || responseBody.size() < 1) {
|
|
||||||
responseErrors++
|
|
||||||
return partitionMap
|
|
||||||
}
|
|
||||||
|
|
||||||
def feed = new XmlSlurper().parseText(responseBody)
|
|
||||||
feed?.entry?.each { entry ->
|
|
||||||
//log.debug("Entry")
|
|
||||||
entry.content.each { content ->
|
|
||||||
//log.debug("Content")
|
|
||||||
content.LogicalPartition.each { partition ->
|
|
||||||
LogicalPartition logicalPartition = new LogicalPartition(
|
|
||||||
partition.PartitionUUID as String,
|
|
||||||
partition.PartitionName as String,
|
|
||||||
partition.PartitionType as String,
|
|
||||||
system
|
|
||||||
)
|
|
||||||
partitionMap.put(logicalPartition.id, logicalPartition)
|
|
||||||
log.debug("getLogicalPartitionsForManagedSystem() - Found partition: " + logicalPartition.toString())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return partitionMap
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Parse XML feed to get PCM Data in JSON format
|
|
||||||
* @param systemId
|
|
||||||
* @return
|
|
||||||
*/
|
|
||||||
String getPcmDataForManagedSystem(ManagedSystem system) {
|
|
||||||
log.debug("getPcmDataForManagedSystem() - " + system.id)
|
|
||||||
URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/ProcessedMetrics?NoOfSamples=1", baseUrl, system.id))
|
|
||||||
Response response = getResponse(url)
|
|
||||||
String responseBody = response.body.string()
|
|
||||||
String jsonBody
|
|
||||||
|
|
||||||
// Do not try to parse empty response
|
|
||||||
if(responseBody.empty || responseBody.size() < 1) {
|
|
||||||
responseErrors++
|
|
||||||
return jsonBody
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse XML and fetch JSON link
|
|
||||||
def feed = new XmlSlurper().parseText(responseBody)
|
|
||||||
feed?.entry?.each { entry ->
|
|
||||||
String link = entry.link["@href"]
|
|
||||||
if(entry.category["@term"] == "ManagedSystem") {
|
|
||||||
jsonBody = getResponseBody(new URL(link))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return jsonBody
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Parse XML feed to get PCM Data in JSON format
|
|
||||||
* @param systemId
|
|
||||||
* @param partitionId
|
|
||||||
* @return
|
|
||||||
*/
|
|
||||||
String getPcmDataForLogicalPartition(LogicalPartition partition) {
|
|
||||||
|
|
||||||
log.debug(String.format("getPcmDataForLogicalPartition() - %s @ %s", partition.id, partition.system.id))
|
|
||||||
URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/LogicalPartition/%s/ProcessedMetrics?NoOfSamples=1", baseUrl, partition.system.id, partition.id))
|
|
||||||
Response response = getResponse(url)
|
|
||||||
String responseBody = response.body.string()
|
|
||||||
String jsonBody
|
|
||||||
|
|
||||||
// Do not try to parse empty response
|
|
||||||
if(responseBody.empty || responseBody.size() < 1) {
|
|
||||||
responseErrors++
|
|
||||||
return jsonBody
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse XML and fetch JSON link
|
|
||||||
def feed = new XmlSlurper().parseText(responseBody)
|
|
||||||
feed?.entry?.each { entry ->
|
|
||||||
String link = entry.link["@href"]
|
|
||||||
if(entry.category["@term"] == "LogicalPartition") {
|
|
||||||
jsonBody = getResponseBody(new URL(link))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return jsonBody
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Return body text from a HTTP response from the HMC
|
|
||||||
*
|
|
||||||
* @param url
|
|
||||||
* @return
|
|
||||||
*/
|
|
||||||
protected String getResponseBody(URL url) {
|
|
||||||
Response response = getResponse(url)
|
|
||||||
String body = response.body().string()
|
|
||||||
response.body().close()
|
|
||||||
return body
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Return a Response from the HMC
|
|
||||||
*
|
|
||||||
* @param url
|
|
||||||
* @return
|
|
||||||
*/
|
|
||||||
private Response getResponse(URL url, Integer retry = 0) {
|
|
||||||
|
|
||||||
if(responseErrors > 2) {
|
|
||||||
responseErrors = 0
|
|
||||||
login(true)
|
|
||||||
return getResponse(url, retry++)
|
|
||||||
}
|
|
||||||
|
|
||||||
Request request = new Request.Builder()
|
|
||||||
.url(url)
|
|
||||||
.addHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
|
|
||||||
.addHeader("X-API-Session", authToken)
|
|
||||||
.get()
|
|
||||||
.build();
|
|
||||||
|
|
||||||
Response response = client.newCall(request).execute();
|
|
||||||
if (!response.isSuccessful()) {
|
|
||||||
response.body().close()
|
|
||||||
|
|
||||||
if(response.code == 401) {
|
|
||||||
login(true)
|
|
||||||
return getResponse(url, retry++)
|
|
||||||
}
|
|
||||||
|
|
||||||
if(retry < 2) {
|
|
||||||
log.warn("getResponse() - Retrying due to unexpected response: " + response.code)
|
|
||||||
return getResponse(url, retry++)
|
|
||||||
}
|
|
||||||
|
|
||||||
log.error("getResponse() - Unexpected response: " + response.code)
|
|
||||||
throw new IOException("getResponse() - Unexpected response: " + response.code)
|
|
||||||
};
|
|
||||||
|
|
||||||
return response
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Provide an unsafe (ignoring SSL problems) OkHttpClient
|
|
||||||
*
|
|
||||||
* @return
|
|
||||||
*/
|
|
||||||
private static OkHttpClient getUnsafeOkHttpClient() {
|
|
||||||
try {
|
|
||||||
// Create a trust manager that does not validate certificate chains
|
|
||||||
final TrustManager[] trustAllCerts = new TrustManager[] {
|
|
||||||
new X509TrustManager() {
|
|
||||||
@Override
|
|
||||||
public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public X509Certificate[] getAcceptedIssuers() {
|
|
||||||
return new X509Certificate[]{};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Install the all-trusting trust manager
|
|
||||||
final SSLContext sslContext = SSLContext.getInstance("SSL");
|
|
||||||
sslContext.init(null, trustAllCerts, new SecureRandom());
|
|
||||||
|
|
||||||
// Create an ssl socket factory with our all-trusting manager
|
|
||||||
final SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();
|
|
||||||
|
|
||||||
OkHttpClient.Builder builder = new OkHttpClient.Builder();
|
|
||||||
builder.sslSocketFactory(sslSocketFactory, (X509TrustManager)trustAllCerts[0]);
|
|
||||||
builder.hostnameVerifier(new HostnameVerifier() {
|
|
||||||
@Override
|
|
||||||
public boolean verify(String hostname, SSLSession session) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
OkHttpClient okHttpClient = builder.build();
|
|
||||||
return okHttpClient;
|
|
||||||
} catch (Exception e) {
|
|
||||||
throw new RuntimeException(e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,285 +0,0 @@
|
||||||
/**
|
|
||||||
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package biz.nellemann.hmci
|
|
||||||
|
|
||||||
import groovy.util.logging.Slf4j
|
|
||||||
import org.influxdb.BatchOptions
|
|
||||||
import org.influxdb.InfluxDB
|
|
||||||
import org.influxdb.InfluxDBFactory
|
|
||||||
import org.influxdb.dto.BatchPoints
|
|
||||||
import org.influxdb.dto.Point
|
|
||||||
import org.influxdb.dto.Query
|
|
||||||
|
|
||||||
import java.time.Instant
|
|
||||||
import java.util.concurrent.TimeUnit
|
|
||||||
|
|
||||||
@Slf4j
|
|
||||||
class InfluxClient {
|
|
||||||
|
|
||||||
final String url
|
|
||||||
final String username
|
|
||||||
final String password
|
|
||||||
final String database
|
|
||||||
|
|
||||||
InfluxDB influxDB
|
|
||||||
BatchPoints batchPoints
|
|
||||||
|
|
||||||
|
|
||||||
InfluxClient(String url, String username, String password, String database) {
|
|
||||||
this.url = url
|
|
||||||
this.username = username
|
|
||||||
this.password = password
|
|
||||||
this.database = database
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void login() {
|
|
||||||
if(!influxDB) {
|
|
||||||
try {
|
|
||||||
influxDB = InfluxDBFactory.connect(url, username, password);
|
|
||||||
createDatabase()
|
|
||||||
|
|
||||||
// Enable batch writes to get better performance.
|
|
||||||
//BatchOptions options = BatchOptions.DEFAULTS.actions(300).flushDuration(500);
|
|
||||||
influxDB.enableBatch(BatchOptions.DEFAULTS);
|
|
||||||
//influxDB.setLogLevel(InfluxDB.LogLevel.BASIC);
|
|
||||||
|
|
||||||
batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build();
|
|
||||||
|
|
||||||
} catch(Exception e) {
|
|
||||||
log.error(e.message)
|
|
||||||
throw new Exception(e)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void logoff() {
|
|
||||||
influxDB?.close();
|
|
||||||
influxDB = null
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void createDatabase() {
|
|
||||||
// Create our database... with a default retention of 156w == 3 years
|
|
||||||
influxDB.query(new Query("CREATE DATABASE " + database + " WITH DURATION 156w"));
|
|
||||||
influxDB.setDatabase(database);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void writeBatchPoints() {
|
|
||||||
log.debug("writeBatchPoints()")
|
|
||||||
try {
|
|
||||||
influxDB.write(batchPoints);
|
|
||||||
} catch(Exception e) {
|
|
||||||
log.error("writeBatchPoints() error - " + e.message)
|
|
||||||
logoff()
|
|
||||||
login()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/*
|
|
||||||
Managed System
|
|
||||||
*/
|
|
||||||
|
|
||||||
|
|
||||||
void writeManagedSystem(ManagedSystem system) {
|
|
||||||
|
|
||||||
if(system.metrics == null) {
|
|
||||||
log.warn("writeManagedSystem() - null metrics, skipping")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
Instant timestamp = system.getTimestamp()
|
|
||||||
if(!timestamp) {
|
|
||||||
log.warn("writeManagedSystem() - no timestamp, skipping")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
//BatchPoints batchPoints = BatchPoints.database(database).build();
|
|
||||||
|
|
||||||
getSystemMemory(system, timestamp).each {
|
|
||||||
batchPoints.point(it)
|
|
||||||
}
|
|
||||||
|
|
||||||
getSystemProcessor(system, timestamp).each {
|
|
||||||
batchPoints.point(it)
|
|
||||||
}
|
|
||||||
|
|
||||||
getSystemSharedProcessorPools(system, timestamp).each {
|
|
||||||
batchPoints.point(it)
|
|
||||||
}
|
|
||||||
|
|
||||||
getSystemSharedAdapters(system, timestamp).each {
|
|
||||||
batchPoints.point(it)
|
|
||||||
}
|
|
||||||
|
|
||||||
getSystemFiberChannelAdapters(system, timestamp).each {
|
|
||||||
batchPoints.point(it)
|
|
||||||
}
|
|
||||||
|
|
||||||
getSystemGenericPhysicalAdapters(system, timestamp).each {
|
|
||||||
batchPoints.point(it)
|
|
||||||
}
|
|
||||||
|
|
||||||
getSystemGenericVirtualAdapters(system, timestamp).each {
|
|
||||||
batchPoints.point(it)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
private static List<Point> getSystemMemory(ManagedSystem system, Instant timestamp) {
|
|
||||||
List<Map> metrics = system.getMemoryMetrics()
|
|
||||||
return processMeasurementMap(metrics, timestamp, "SystemMemory")
|
|
||||||
}
|
|
||||||
|
|
||||||
private static List<Point> getSystemProcessor(ManagedSystem system, Instant timestamp) {
|
|
||||||
List<Map> metrics = system.getProcessorMetrics()
|
|
||||||
return processMeasurementMap(metrics, timestamp, "SystemProcessor")
|
|
||||||
}
|
|
||||||
|
|
||||||
private static List<Point> getSystemSharedProcessorPools(ManagedSystem system, Instant timestamp) {
|
|
||||||
List<Map> metrics = system.getSharedProcessorPools()
|
|
||||||
return processMeasurementMap(metrics, timestamp, "SystemSharedProcessorPool")
|
|
||||||
}
|
|
||||||
|
|
||||||
private static List<Point> getSystemSharedAdapters(ManagedSystem system, Instant timestamp) {
|
|
||||||
List<Map> metrics = system.getSystemSharedAdapters()
|
|
||||||
return processMeasurementMap(metrics, timestamp, "SystemSharedAdapters")
|
|
||||||
}
|
|
||||||
|
|
||||||
private static List<Point> getSystemFiberChannelAdapters(ManagedSystem system, Instant timestamp) {
|
|
||||||
List<Map> metrics = system.getSystemFiberChannelAdapters()
|
|
||||||
return processMeasurementMap(metrics, timestamp, "SystemFiberChannelAdapters")
|
|
||||||
}
|
|
||||||
|
|
||||||
private static List<Point> getSystemGenericPhysicalAdapters(ManagedSystem system, Instant timestamp) {
|
|
||||||
List<Map> metrics = system.getSystemGenericPhysicalAdapters()
|
|
||||||
return processMeasurementMap(metrics, timestamp, "SystemGenericPhysicalAdapters")
|
|
||||||
}
|
|
||||||
|
|
||||||
private static List<Point> getSystemGenericVirtualAdapters(ManagedSystem system, Instant timestamp) {
|
|
||||||
List<Map> metrics = system.getSystemGenericVirtualAdapters()
|
|
||||||
return processMeasurementMap(metrics, timestamp, "SystemGenericVirtualAdapters")
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/*
|
|
||||||
Logical Partitions
|
|
||||||
*/
|
|
||||||
|
|
||||||
void writeLogicalPartition(LogicalPartition partition) {
|
|
||||||
|
|
||||||
if(partition.metrics == null) {
|
|
||||||
log.warn("writeLogicalPartition() - null metrics, skipping")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
Instant timestamp = partition.getTimestamp()
|
|
||||||
if(!timestamp) {
|
|
||||||
log.warn("writeLogicalPartition() - no timestamp, skipping")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
//BatchPoints batchPoints = BatchPoints.database(database).build();
|
|
||||||
|
|
||||||
getPartitionAffinityScore(partition, timestamp).each {
|
|
||||||
batchPoints.point(it)
|
|
||||||
}
|
|
||||||
|
|
||||||
getPartitionMemory(partition, timestamp).each {
|
|
||||||
batchPoints.point(it)
|
|
||||||
}
|
|
||||||
|
|
||||||
getPartitionProcessor(partition, timestamp).each {
|
|
||||||
batchPoints.point(it)
|
|
||||||
}
|
|
||||||
|
|
||||||
getPartitionVirtualEthernetAdapter(partition, timestamp).each {
|
|
||||||
batchPoints.point(it)
|
|
||||||
}
|
|
||||||
|
|
||||||
getPartitionVirtualFiberChannelAdapter(partition, timestamp).each {
|
|
||||||
batchPoints.point(it)
|
|
||||||
}
|
|
||||||
|
|
||||||
//influxDB.write(batchPoints);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static List<Point> getPartitionAffinityScore(LogicalPartition partition, Instant timestamp) {
|
|
||||||
List<Map> metrics = partition.getAffinityScore()
|
|
||||||
return processMeasurementMap(metrics, timestamp, "PartitionAffinityScore")
|
|
||||||
}
|
|
||||||
|
|
||||||
private static List<Point> getPartitionMemory(LogicalPartition partition, Instant timestamp) {
|
|
||||||
List<Map> metrics = partition.getMemoryMetrics()
|
|
||||||
return processMeasurementMap(metrics, timestamp, "PartitionMemory")
|
|
||||||
}
|
|
||||||
|
|
||||||
private static List<Point> getPartitionProcessor(LogicalPartition partition, Instant timestamp) {
|
|
||||||
List<Map> metrics = partition.getProcessorMetrics()
|
|
||||||
return processMeasurementMap(metrics, timestamp, "PartitionProcessor")
|
|
||||||
}
|
|
||||||
|
|
||||||
private static List<Point> getPartitionVirtualEthernetAdapter(LogicalPartition partition, Instant timestamp) {
|
|
||||||
List<Map> metrics = partition.getVirtualEthernetAdapterMetrics()
|
|
||||||
return processMeasurementMap(metrics, timestamp, "PartitionVirtualEthernetAdapters")
|
|
||||||
}
|
|
||||||
|
|
||||||
private static List<Point> getPartitionVirtualFiberChannelAdapter(LogicalPartition partition, Instant timestamp) {
|
|
||||||
List<Map> metrics = partition.getVirtualFiberChannelAdaptersMetrics()
|
|
||||||
return processMeasurementMap(metrics, timestamp, "PartitionVirtualFiberChannelAdapters")
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/*
|
|
||||||
Shared
|
|
||||||
*/
|
|
||||||
|
|
||||||
private static List<Point> processMeasurementMap(List<Map> listOfMaps, Instant timestamp, String measurement) {
|
|
||||||
|
|
||||||
List<Point> list = new ArrayList<>()
|
|
||||||
|
|
||||||
listOfMaps.each { map ->
|
|
||||||
|
|
||||||
// Iterate fields
|
|
||||||
map.get("fields").each { String fieldName, BigDecimal fieldValue ->
|
|
||||||
log.debug("processMeasurementMap() " + measurement + " - fieldName: " + fieldName + ", fieldValue: " + fieldValue)
|
|
||||||
|
|
||||||
Point.Builder builder = Point.measurement(measurement)
|
|
||||||
.time(timestamp.toEpochMilli(), TimeUnit.MILLISECONDS)
|
|
||||||
.tag("name", fieldName)
|
|
||||||
.addField("value", fieldValue)
|
|
||||||
|
|
||||||
// For each field, we add all tags
|
|
||||||
map.get("tags").each { String tagName, String tagValue ->
|
|
||||||
builder.tag(tagName, tagValue)
|
|
||||||
log.debug("processMeasurementMap() " + measurement + " - tagName: " + tagName + ", tagValue: " + tagValue)
|
|
||||||
}
|
|
||||||
|
|
||||||
list.add(builder.build())
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
return list
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,182 +0,0 @@
|
||||||
/**
|
|
||||||
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package biz.nellemann.hmci
|
|
||||||
|
|
||||||
import groovy.util.logging.Slf4j
|
|
||||||
|
|
||||||
@Slf4j
|
|
||||||
class LogicalPartition extends MetaSystem {
|
|
||||||
|
|
||||||
public String id
|
|
||||||
public String name
|
|
||||||
public String type
|
|
||||||
ManagedSystem system
|
|
||||||
|
|
||||||
LogicalPartition(String id, String name, String type, ManagedSystem system) {
|
|
||||||
this.id = id
|
|
||||||
this.name = name
|
|
||||||
this.type = type
|
|
||||||
this.system = system
|
|
||||||
}
|
|
||||||
|
|
||||||
String toString() {
|
|
||||||
return "[${id}] ${name} (${type})"
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
List<Map> getAffinityScore() {
|
|
||||||
|
|
||||||
List<Map> list = new ArrayList<>()
|
|
||||||
Map<String, Map> map = new HashMap<String, Map>()
|
|
||||||
|
|
||||||
HashMap<String, String> tagsMap = [
|
|
||||||
system: system.name,
|
|
||||||
partition: name,
|
|
||||||
]
|
|
||||||
map.put("tags", tagsMap)
|
|
||||||
log.debug("getAffinityScore() - tags: " + tagsMap.toString())
|
|
||||||
|
|
||||||
HashMap<String, BigDecimal> fieldsMap = [
|
|
||||||
affinityScore: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.affinityScore,
|
|
||||||
]
|
|
||||||
map.put("fields", fieldsMap)
|
|
||||||
log.debug("getAffinityScore() - fields: " + fieldsMap.toString())
|
|
||||||
|
|
||||||
list.add(map)
|
|
||||||
return list
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
List<Map> getMemoryMetrics() {
|
|
||||||
|
|
||||||
List<Map> list = new ArrayList<>()
|
|
||||||
Map<String, Map> map = new HashMap<String, Map>()
|
|
||||||
|
|
||||||
HashMap<String, String> tagsMap = [
|
|
||||||
system: system.name,
|
|
||||||
partition: name,
|
|
||||||
]
|
|
||||||
map.put("tags", tagsMap)
|
|
||||||
log.debug("getMemoryMetrics() - tags: " + tagsMap.toString())
|
|
||||||
|
|
||||||
HashMap<String, BigDecimal> fieldsMap = [
|
|
||||||
logicalMem: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.memory?.logicalMem?.first(),
|
|
||||||
backedPhysicalMem: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.memory?.backedPhysicalMem?.first(),
|
|
||||||
]
|
|
||||||
map.put("fields", fieldsMap)
|
|
||||||
log.debug("getMemoryMetrics() - fields: " + fieldsMap.toString())
|
|
||||||
|
|
||||||
list.add(map)
|
|
||||||
return list
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
List<Map> getProcessorMetrics() {
|
|
||||||
|
|
||||||
List<Map> list = new ArrayList<>()
|
|
||||||
Map<String, Map> map = new HashMap<String, Map>()
|
|
||||||
|
|
||||||
HashMap<String, String> tagsMap = [
|
|
||||||
system: system.name,
|
|
||||||
partition: name,
|
|
||||||
]
|
|
||||||
map.put("tags", tagsMap)
|
|
||||||
log.debug("getProcessorMetrics() - tags: " + tagsMap.toString())
|
|
||||||
|
|
||||||
HashMap<String, BigDecimal> fieldsMap = [
|
|
||||||
utilizedProcUnits: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.processor?.utilizedProcUnits?.first(),
|
|
||||||
maxVirtualProcessors: metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.maxVirtualProcessors.first(),
|
|
||||||
currentVirtualProcessors: metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.currentVirtualProcessors.first(),
|
|
||||||
//donatedProcUnits: metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.donatedProcUnits.first(),
|
|
||||||
entitledProcUnits: metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.entitledProcUnits.first(),
|
|
||||||
//idleProcUnits: metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.idleProcUnits.first(),
|
|
||||||
//maxProcUnits: metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.maxProcUnits.first(),
|
|
||||||
utilizedCappedProcUnits: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.processor?.utilizedCappedProcUnits?.first(),
|
|
||||||
utilizedUncappedProcUnits: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.processor?.utilizedUncappedProcUnits?.first(),
|
|
||||||
timePerInstructionExecution: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.processor?.timeSpentWaitingForDispatch?.first(),
|
|
||||||
timeSpentWaitingForDispatch: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.processor?.timePerInstructionExecution?.first(),
|
|
||||||
]
|
|
||||||
map.put("fields", fieldsMap)
|
|
||||||
log.debug("getProcessorMetrics() - fields: " + fieldsMap.toString())
|
|
||||||
|
|
||||||
list.add(map)
|
|
||||||
return list
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
List<Map> getVirtualEthernetAdapterMetrics() {
|
|
||||||
|
|
||||||
List<Map> list = new ArrayList<>()
|
|
||||||
metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.network?.virtualEthernetAdapters?.each {
|
|
||||||
Map<String, Map> map = new HashMap<String, Map>()
|
|
||||||
|
|
||||||
HashMap<String, String> tagsMap = [
|
|
||||||
system: system.name,
|
|
||||||
partition: name,
|
|
||||||
sea: it.sharedEthernetAdapterId as String,
|
|
||||||
viosId: it.viosId as String,
|
|
||||||
vlanId: it.vlanId as String,
|
|
||||||
vswitchId: it.vswitchId as String,
|
|
||||||
]
|
|
||||||
map.put("tags", tagsMap)
|
|
||||||
log.debug("getVirtualEthernetAdapterMetrics() - tags: " + tagsMap.toString())
|
|
||||||
|
|
||||||
HashMap<String, BigDecimal> fieldsMap = [
|
|
||||||
receivedPhysicalBytes: it.receivedPhysicalBytes.first(),
|
|
||||||
sentPhysicalBytes: it.sentPhysicalBytes.first(),
|
|
||||||
receivedBytes: it.receivedBytes.first(),
|
|
||||||
sentBytes: it.sentBytes.first(),
|
|
||||||
]
|
|
||||||
map.put("fields", fieldsMap)
|
|
||||||
log.debug("getVirtualEthernetAdapterMetrics() - fields: " + fieldsMap.toString())
|
|
||||||
|
|
||||||
list.add(map)
|
|
||||||
}
|
|
||||||
|
|
||||||
return list
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
//PartitionVirtualFiberChannelAdapters
|
|
||||||
List<Map> getVirtualFiberChannelAdaptersMetrics() {
|
|
||||||
|
|
||||||
List<Map> list = new ArrayList<>()
|
|
||||||
metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.storage?.virtualFiberChannelAdapters?.each {
|
|
||||||
Map<String, Map> map = new HashMap<String, Map>()
|
|
||||||
|
|
||||||
HashMap<String, String> tagsMap = [
|
|
||||||
system: system.name,
|
|
||||||
partition: name,
|
|
||||||
viosId: it.viosId as String,
|
|
||||||
wwpn: it.wwpn,
|
|
||||||
]
|
|
||||||
map.put("tags", tagsMap)
|
|
||||||
log.debug("getVirtualFiberChannelAdaptersMetrics() - tags: " + tagsMap.toString())
|
|
||||||
|
|
||||||
HashMap<String, BigDecimal> fieldsMap = [
|
|
||||||
transmittedBytes: it.transmittedBytes.first(),
|
|
||||||
writeBytes: it.writeBytes.first(),
|
|
||||||
readBytes: it.readBytes.first(),
|
|
||||||
]
|
|
||||||
map.put("fields", fieldsMap)
|
|
||||||
log.debug("getVirtualFiberChannelAdaptersMetrics() - fields: " + fieldsMap.toString())
|
|
||||||
|
|
||||||
list.add(map)
|
|
||||||
}
|
|
||||||
|
|
||||||
return list
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,256 +0,0 @@
|
||||||
/**
|
|
||||||
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package biz.nellemann.hmci
|
|
||||||
|
|
||||||
import groovy.util.logging.Slf4j
|
|
||||||
|
|
||||||
|
|
||||||
@Slf4j
|
|
||||||
class ManagedSystem extends MetaSystem {
|
|
||||||
|
|
||||||
public final String hmcId
|
|
||||||
public final String id
|
|
||||||
public final String name
|
|
||||||
public final String type
|
|
||||||
public final String model
|
|
||||||
public final String serialNumber
|
|
||||||
|
|
||||||
|
|
||||||
ManagedSystem(String hmcId, String id, String name, String type, String model, String serialNumber) {
|
|
||||||
this.hmcId = hmcId
|
|
||||||
this.id = id
|
|
||||||
this.name = name
|
|
||||||
this.type = type
|
|
||||||
this.model = model
|
|
||||||
this.serialNumber = serialNumber
|
|
||||||
}
|
|
||||||
|
|
||||||
String toString() {
|
|
||||||
return "[${id}] ${name} (${type}-${model} ${serialNumber})"
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
List<Map> getMemoryMetrics() {
|
|
||||||
|
|
||||||
List<Map> list = new ArrayList<>()
|
|
||||||
Map<String, Map> map = new HashMap<String, Map>()
|
|
||||||
|
|
||||||
HashMap<String, String> tagsMap = [
|
|
||||||
system: name,
|
|
||||||
]
|
|
||||||
map.put("tags", tagsMap)
|
|
||||||
log.debug("getMemoryMetrics() - tags: " + tagsMap.toString())
|
|
||||||
|
|
||||||
HashMap<String, BigDecimal> fieldsMap = [
|
|
||||||
totalMem: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.totalMem?.first(),
|
|
||||||
availableMem: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.availableMem?.first(),
|
|
||||||
configurableMem: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.configurableMem?.first(),
|
|
||||||
assignedMemToLpars: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.assignedMemToLpars?.first(),
|
|
||||||
]
|
|
||||||
map.put("fields", fieldsMap)
|
|
||||||
log.debug("getMemoryMetrics() - fields: " + fieldsMap.toString())
|
|
||||||
|
|
||||||
list.add(map)
|
|
||||||
return list
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
List<Map> getProcessorMetrics() {
|
|
||||||
|
|
||||||
List<Map> list = new ArrayList<>()
|
|
||||||
Map<String, Map> map = new HashMap<String, Map>()
|
|
||||||
|
|
||||||
HashMap<String, String> tagsMap = [
|
|
||||||
system: name,
|
|
||||||
]
|
|
||||||
map.put("tags", tagsMap)
|
|
||||||
log.debug("getProcessorMetrics() - tags: " + tagsMap.toString())
|
|
||||||
|
|
||||||
HashMap<String, BigDecimal> fieldsMap = [
|
|
||||||
availableProcUnits: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.processor?.totalProcUnits?.first(),
|
|
||||||
utilizedProcUnits: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.processor?.utilizedProcUnits?.first(),
|
|
||||||
availableProcUnits: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.processor?.availableProcUnits?.first(),
|
|
||||||
configurableProcUnits: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.processor?.configurableProcUnits?.first(),
|
|
||||||
]
|
|
||||||
map.put("fields", fieldsMap)
|
|
||||||
log.debug("getProcessorMetrics() - fields: " + fieldsMap.toString())
|
|
||||||
|
|
||||||
list.add(map)
|
|
||||||
return list
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
List<Map> getSharedProcessorPools() {
|
|
||||||
|
|
||||||
List<Map> list = new ArrayList<>()
|
|
||||||
metrics.systemUtil?.utilSamples?.first()?.serverUtil?.sharedProcessorPool?.each {
|
|
||||||
Map<String, Map> map = new HashMap<String, Map>()
|
|
||||||
|
|
||||||
HashMap<String, String> tagsMap = [
|
|
||||||
system: name,
|
|
||||||
pool: it.name,
|
|
||||||
]
|
|
||||||
map.put("tags", tagsMap)
|
|
||||||
log.debug("getSharedProcessorPools() - tags: " + tagsMap.toString())
|
|
||||||
|
|
||||||
HashMap<String, BigDecimal> fieldsMap = [
|
|
||||||
assignedProcUnits: it.assignedProcUnits.first(),
|
|
||||||
availableProcUnits: it.availableProcUnits.first(),
|
|
||||||
]
|
|
||||||
map.put("fields", fieldsMap)
|
|
||||||
log.debug("getSharedProcessorPools() - fields: " + fieldsMap.toString())
|
|
||||||
|
|
||||||
list.add(map)
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
return list
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
List<Map> getSystemSharedAdapters() {
|
|
||||||
|
|
||||||
List<Map> list = new ArrayList<>()
|
|
||||||
metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each {vios ->
|
|
||||||
vios.network.sharedAdapters.each {
|
|
||||||
Map<String, Map> map = new HashMap<String, Map>()
|
|
||||||
|
|
||||||
HashMap<String, String> tagsMap = [
|
|
||||||
system: name,
|
|
||||||
type: it.type,
|
|
||||||
vios: vios.name,
|
|
||||||
]
|
|
||||||
map.put("tags", tagsMap)
|
|
||||||
log.debug("getSystemSharedAdapters() - tags: " + tagsMap.toString())
|
|
||||||
|
|
||||||
HashMap<String, BigDecimal> fieldsMap = [
|
|
||||||
sentBytes: it.sentBytes.first(),
|
|
||||||
receivedBytes: it.receivedBytes.first(),
|
|
||||||
transferredBytes: it.transferredBytes.first(),
|
|
||||||
]
|
|
||||||
map.put("fields", fieldsMap)
|
|
||||||
log.debug("getSystemSharedAdapters() - fields: " + fieldsMap.toString())
|
|
||||||
|
|
||||||
list.add(map)
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
return list
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
List<Map> getSystemFiberChannelAdapters() {
|
|
||||||
|
|
||||||
List<Map> list = new ArrayList<>()
|
|
||||||
metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each { vios ->
|
|
||||||
log.debug("getSystemFiberChannelAdapters() - VIOS: " + vios.name)
|
|
||||||
vios.storage?.fiberChannelAdapters?.each {
|
|
||||||
Map<String, Map> map = new HashMap<String, Map>()
|
|
||||||
|
|
||||||
HashMap<String, String> tagsMap = [
|
|
||||||
id: it.id,
|
|
||||||
system: name,
|
|
||||||
wwpn: it.wwpn,
|
|
||||||
vios: vios.name,
|
|
||||||
device: it.physicalLocation,
|
|
||||||
]
|
|
||||||
map.put("tags", tagsMap)
|
|
||||||
log.debug("getSystemFiberChannelAdapters() - tags: " + tagsMap.toString())
|
|
||||||
|
|
||||||
HashMap<String, BigDecimal> fieldsMap = [
|
|
||||||
writeBytes: it.writeBytes.first(),
|
|
||||||
readBytes: it.readBytes.first(),
|
|
||||||
transmittedBytes: it.transmittedBytes.first(),
|
|
||||||
]
|
|
||||||
map.put("fields", fieldsMap)
|
|
||||||
log.debug("getSystemFiberChannelAdapters() - fields: " + fieldsMap.toString())
|
|
||||||
|
|
||||||
list.add(map)
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
return list
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
List<Map> getSystemGenericPhysicalAdapters() {
|
|
||||||
List<Map> list = new ArrayList<>()
|
|
||||||
metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each { vios ->
|
|
||||||
vios.storage?.genericPhysicalAdapters?.each {
|
|
||||||
Map<String, Map> map = new HashMap<String, Map>()
|
|
||||||
|
|
||||||
HashMap<String, String> tagsMap = [
|
|
||||||
id: it.id,
|
|
||||||
system: name,
|
|
||||||
vios: vios.name,
|
|
||||||
device: it.physicalLocation,
|
|
||||||
]
|
|
||||||
map.put("tags", tagsMap)
|
|
||||||
log.debug("getSystemGenericPhysicalAdapters() - tags: " + tagsMap.toString())
|
|
||||||
|
|
||||||
HashMap<String, BigDecimal> fieldsMap = [
|
|
||||||
writeBytes: it.writeBytes.first(),
|
|
||||||
readBytes: it.readBytes.first(),
|
|
||||||
transmittedBytes: it.transmittedBytes.first(),
|
|
||||||
]
|
|
||||||
map.put("fields", fieldsMap)
|
|
||||||
log.debug("getSystemGenericPhysicalAdapters() - fields: " + fieldsMap.toString())
|
|
||||||
|
|
||||||
list.add(map)
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
return list
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
List<Map> getSystemGenericVirtualAdapters() {
|
|
||||||
List<Map> list = new ArrayList<>()
|
|
||||||
metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each { vios ->
|
|
||||||
vios.storage?.genericVirtualAdapters?.each {
|
|
||||||
Map<String, Map> map = new HashMap<String, Map>()
|
|
||||||
|
|
||||||
HashMap<String, String> tagsMap = [
|
|
||||||
id: it.id,
|
|
||||||
system: name,
|
|
||||||
vios: vios.name,
|
|
||||||
device: it.physicalLocation,
|
|
||||||
]
|
|
||||||
map.put("tags", tagsMap)
|
|
||||||
log.debug("getSystemGenericVirtualAdapters() - tags: " + tagsMap.toString())
|
|
||||||
|
|
||||||
HashMap<String, BigDecimal> fieldsMap = [
|
|
||||||
writeBytes: it.writeBytes.first(),
|
|
||||||
readBytes: it.readBytes.first(),
|
|
||||||
transmittedBytes: it.transmittedBytes.first(),
|
|
||||||
]
|
|
||||||
map.put("fields", fieldsMap)
|
|
||||||
log.debug("getSystemGenericVirtualAdapters() - fields: " + fieldsMap.toString())
|
|
||||||
|
|
||||||
list.add(map)
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
return list
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,53 +0,0 @@
|
||||||
/**
|
|
||||||
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package biz.nellemann.hmci
|
|
||||||
|
|
||||||
import biz.nellemann.hmci.pcm.PcmData
|
|
||||||
import groovy.json.JsonSlurper
|
|
||||||
import groovy.util.logging.Slf4j
|
|
||||||
|
|
||||||
import java.time.Instant
|
|
||||||
import java.time.format.DateTimeFormatter
|
|
||||||
import java.time.format.DateTimeParseException
|
|
||||||
|
|
||||||
@Slf4j
|
|
||||||
abstract class MetaSystem {
|
|
||||||
|
|
||||||
protected PcmData metrics
|
|
||||||
|
|
||||||
void processMetrics(String json) {
|
|
||||||
def pcmMap = new JsonSlurper().parseText(json)
|
|
||||||
metrics = new PcmData(pcmMap as Map)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
Instant getTimestamp() {
|
|
||||||
|
|
||||||
String timestamp = metrics.systemUtil.utilSamples.first().sampleInfo.timeStamp
|
|
||||||
Instant instant
|
|
||||||
try {
|
|
||||||
log.debug("getTimeStamp() - PMC Timestamp: " + timestamp)
|
|
||||||
DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[XXX][X]");
|
|
||||||
instant = Instant.from(dateTimeFormatter.parse(timestamp))
|
|
||||||
log.debug("getTimestamp() - Instant: " + instant.toString())
|
|
||||||
} catch(DateTimeParseException e) {
|
|
||||||
log.warn("getTimestamp() - parse error: " + timestamp)
|
|
||||||
}
|
|
||||||
|
|
||||||
return instant ?: Instant.now()
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,19 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class FiberChannelAdapter {
|
|
||||||
|
|
||||||
String id
|
|
||||||
String wwpn
|
|
||||||
String physicalLocation
|
|
||||||
Integer numOfPorts
|
|
||||||
List<BigDecimal> numOfReads
|
|
||||||
List<BigDecimal> numOfWrites
|
|
||||||
List<BigDecimal> readBytes
|
|
||||||
List<BigDecimal> writeBytes
|
|
||||||
List<BigDecimal> runningSpeed
|
|
||||||
List<BigDecimal> transmittedBytes
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,18 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class GenericAdapter {
|
|
||||||
|
|
||||||
String id
|
|
||||||
String type
|
|
||||||
String physicalLocation
|
|
||||||
List<BigDecimal> receivedPackets
|
|
||||||
List<BigDecimal> sentPackets
|
|
||||||
List<BigDecimal> droppedPackets
|
|
||||||
List<BigDecimal> sentBytes
|
|
||||||
List<BigDecimal> receivedBytes
|
|
||||||
List<BigDecimal> transferredBytes
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,17 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class GenericPhysicalAdapters {
|
|
||||||
|
|
||||||
String id
|
|
||||||
String type
|
|
||||||
String physicalLocation
|
|
||||||
List<BigDecimal> numOfReads
|
|
||||||
List<BigDecimal> numOfWrites
|
|
||||||
List<BigDecimal> readBytes
|
|
||||||
List<BigDecimal> writeBytes
|
|
||||||
List<BigDecimal> transmittedBytes
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,18 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class GenericVirtualAdapter {
|
|
||||||
|
|
||||||
String id
|
|
||||||
String type
|
|
||||||
Integer viosId
|
|
||||||
String physicalLocation
|
|
||||||
List<BigDecimal> numOfReads
|
|
||||||
List<BigDecimal> numOfWrites
|
|
||||||
List<BigDecimal> readBytes
|
|
||||||
List<BigDecimal> writeBytes
|
|
||||||
List<BigDecimal> transmittedBytes
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,11 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class LparMemory {
|
|
||||||
|
|
||||||
List<BigDecimal> logicalMem
|
|
||||||
List<BigDecimal> backedPhysicalMem
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,23 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class LparProcessor {
|
|
||||||
|
|
||||||
Integer poolId
|
|
||||||
Integer weight
|
|
||||||
String mode
|
|
||||||
List<BigDecimal> maxVirtualProcessors
|
|
||||||
List<BigDecimal> currentVirtualProcessors
|
|
||||||
List<BigDecimal> maxProcUnits
|
|
||||||
List<BigDecimal> entitledProcUnits
|
|
||||||
List<BigDecimal> utilizedProcUnits
|
|
||||||
List<BigDecimal> utilizedCappedProcUnits
|
|
||||||
List<BigDecimal> utilizedUncappedProcUnits
|
|
||||||
List<BigDecimal> idleProcUnits
|
|
||||||
List<BigDecimal> donatedProcUnits
|
|
||||||
List<BigDecimal> timeSpentWaitingForDispatch
|
|
||||||
List<BigDecimal> timePerInstructionExecution
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,21 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class LparUtil {
|
|
||||||
|
|
||||||
Integer id
|
|
||||||
String uuid
|
|
||||||
String name
|
|
||||||
String state
|
|
||||||
String type
|
|
||||||
String osType
|
|
||||||
Integer affinityScore
|
|
||||||
|
|
||||||
LparMemory memory
|
|
||||||
LparProcessor processor
|
|
||||||
Network network
|
|
||||||
Storage storage
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,10 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class Network {
|
|
||||||
List<GenericAdapter> genericAdapters
|
|
||||||
List<SharedAdapter> sharedAdapters
|
|
||||||
List<VirtualEthernetAdapter> virtualEthernetAdapters
|
|
||||||
}
|
|
|
@ -1,10 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class PcmData {
|
|
||||||
|
|
||||||
SystemUtil systemUtil
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,14 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class PhysicalProcessorPool {
|
|
||||||
|
|
||||||
List<BigDecimal> assignedProcUnits
|
|
||||||
List<BigDecimal> utilizedProcUnits
|
|
||||||
List<BigDecimal> availableProcUnits
|
|
||||||
List<BigDecimal> configuredProcUnits
|
|
||||||
List<BigDecimal> borrowedProcUnits
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,11 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class SampleInfo {
|
|
||||||
|
|
||||||
String timeStamp
|
|
||||||
Integer status
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,13 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class ServerMemory {
|
|
||||||
|
|
||||||
List<BigDecimal> totalMem
|
|
||||||
List<BigDecimal> availableMem
|
|
||||||
List<BigDecimal> configurableMem
|
|
||||||
List<BigDecimal> assignedMemToLpars
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,13 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class ServerProcessor {
|
|
||||||
|
|
||||||
List<BigDecimal> totalProcUnits
|
|
||||||
List<BigDecimal> utilizedProcUnits
|
|
||||||
List<BigDecimal> availableProcUnits
|
|
||||||
List<BigDecimal> configurableProcUnits
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,13 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class ServerUtil {
|
|
||||||
|
|
||||||
ServerProcessor processor
|
|
||||||
ServerMemory memory
|
|
||||||
PhysicalProcessorPool physicalProcessorPool
|
|
||||||
List<SharedProcessorPool> sharedProcessorPool
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,19 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class SharedAdapter {
|
|
||||||
|
|
||||||
String id
|
|
||||||
String type
|
|
||||||
String physicalLocation
|
|
||||||
List<BigDecimal> receivedPackets
|
|
||||||
List<BigDecimal> sentPackets
|
|
||||||
List<BigDecimal> droppedPackets
|
|
||||||
List<BigDecimal> sentBytes
|
|
||||||
List<BigDecimal> receivedBytes
|
|
||||||
List<BigDecimal> transferredBytes
|
|
||||||
List<String> bridgedAdapters
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,16 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class SharedProcessorPool {
|
|
||||||
|
|
||||||
String id
|
|
||||||
String name
|
|
||||||
List<BigDecimal> assignedProcUnits
|
|
||||||
List<BigDecimal> utilizedProcUnits
|
|
||||||
List<BigDecimal> availableProcUnits
|
|
||||||
List<BigDecimal> configuredProcUnits
|
|
||||||
List<BigDecimal> borrowedProcUnits
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,14 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class Storage {
|
|
||||||
|
|
||||||
List<String> clientLpars
|
|
||||||
List<GenericPhysicalAdapters> genericPhysicalAdapters
|
|
||||||
List<GenericVirtualAdapter> genericVirtualAdapters
|
|
||||||
List<FiberChannelAdapter> fiberChannelAdapters
|
|
||||||
List<VirtualFiberChannelAdapter> virtualFiberChannelAdapters
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,11 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class SystemUtil {
|
|
||||||
|
|
||||||
UtilInfo utilInfo
|
|
||||||
List<UtilSample> utilSamples
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,18 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class UtilInfo {
|
|
||||||
|
|
||||||
String version
|
|
||||||
String metricType
|
|
||||||
Integer frequency
|
|
||||||
String startTimeStamp
|
|
||||||
String endTimeStamp
|
|
||||||
String mtms
|
|
||||||
String name
|
|
||||||
String uuid
|
|
||||||
List<String> metricArrayOrder
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,14 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class UtilSample {
|
|
||||||
|
|
||||||
String sampleType
|
|
||||||
SampleInfo sampleInfo
|
|
||||||
ServerUtil serverUtil
|
|
||||||
List<ViosUtil> viosUtil
|
|
||||||
List<LparUtil> lparsUtil
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,24 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class ViosUtil {
|
|
||||||
|
|
||||||
String id
|
|
||||||
String uuid
|
|
||||||
String name
|
|
||||||
String state
|
|
||||||
Integer affinityScore
|
|
||||||
|
|
||||||
Memory memory
|
|
||||||
LparProcessor processor
|
|
||||||
Network network
|
|
||||||
Storage storage
|
|
||||||
|
|
||||||
class Memory {
|
|
||||||
List<BigDecimal> assignedMem
|
|
||||||
List<BigDecimal> utilizedMem
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,27 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class VirtualEthernetAdapter {
|
|
||||||
|
|
||||||
String physicalLocation
|
|
||||||
Integer vlanId
|
|
||||||
Integer vswitchId
|
|
||||||
Boolean isPortVlanId
|
|
||||||
Integer viosId
|
|
||||||
String sharedEthernetAdapterId
|
|
||||||
List<BigDecimal> receivedPackets
|
|
||||||
List<BigDecimal> sentPackets
|
|
||||||
List<BigDecimal> droppedPackets
|
|
||||||
List<BigDecimal> sentBytes
|
|
||||||
List<BigDecimal> receivedBytes
|
|
||||||
List<BigDecimal> receivedPhysicalPackets
|
|
||||||
List<BigDecimal> sentPhysicalPackets
|
|
||||||
List<BigDecimal> droppedPhysicalPackets
|
|
||||||
List<BigDecimal> sentPhysicalBytes
|
|
||||||
List<BigDecimal> receivedPhysicalBytes
|
|
||||||
List<BigDecimal> transferredBytes
|
|
||||||
List<BigDecimal> transferredPhysicalBytes
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,20 +0,0 @@
|
||||||
package biz.nellemann.hmci.pcm
|
|
||||||
|
|
||||||
import groovy.transform.ToString
|
|
||||||
|
|
||||||
@ToString
|
|
||||||
class VirtualFiberChannelAdapter {
|
|
||||||
|
|
||||||
String wwpn
|
|
||||||
String wwpn2
|
|
||||||
String physicalLocation
|
|
||||||
String physicalPortWWPN
|
|
||||||
Integer viosId
|
|
||||||
List<BigDecimal> numOfReads
|
|
||||||
List<BigDecimal> numOfWrites
|
|
||||||
List<BigDecimal> readBytes
|
|
||||||
List<BigDecimal> writeBytes
|
|
||||||
List<BigDecimal> runningSpeed
|
|
||||||
List<BigDecimal> transmittedBytes
|
|
||||||
|
|
||||||
}
|
|
186
src/main/java/biz/nellemann/hmci/Configuration.java
Normal file
186
src/main/java/biz/nellemann/hmci/Configuration.java
Normal file
|
@ -0,0 +1,186 @@
|
||||||
|
package biz.nellemann.hmci;
|
||||||
|
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
import org.tomlj.Toml;
|
||||||
|
import org.tomlj.TomlParseResult;
|
||||||
|
import org.tomlj.TomlTable;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.nio.file.Path;
|
||||||
|
import java.nio.file.Paths;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
public class Configuration {
|
||||||
|
|
||||||
|
private final static Logger log = LoggerFactory.getLogger(Configuration.class);
|
||||||
|
|
||||||
|
final public Long refresh;
|
||||||
|
final public Long rescan;
|
||||||
|
final public InfluxObject influx;
|
||||||
|
final public List<HmcObject> hmc;
|
||||||
|
|
||||||
|
Configuration(String configurationFile) throws IOException {
|
||||||
|
|
||||||
|
Path source = Paths.get(configurationFile);
|
||||||
|
TomlParseResult result = Toml.parse(source);
|
||||||
|
result.errors().forEach(error -> System.err.println(error.toString()));
|
||||||
|
|
||||||
|
if(result.contains("refresh")) {
|
||||||
|
refresh = result.getLong("refresh");
|
||||||
|
} else {
|
||||||
|
refresh = 15L;
|
||||||
|
}
|
||||||
|
|
||||||
|
if(result.contains("rescan")) {
|
||||||
|
rescan = result.getLong("rescan");
|
||||||
|
} else {
|
||||||
|
rescan = 60L;
|
||||||
|
}
|
||||||
|
|
||||||
|
hmc = getHmc(result);
|
||||||
|
influx = getInflux(result);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
List<HmcObject> getHmc(TomlParseResult result) {
|
||||||
|
|
||||||
|
ArrayList<HmcObject> list = new ArrayList<>();
|
||||||
|
|
||||||
|
if(result.contains("hmc") && result.isTable("hmc")) {
|
||||||
|
TomlTable hmcTable = result.getTable("hmc");
|
||||||
|
if(hmcTable == null) {
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
for(String key : hmcTable.keySet()) {
|
||||||
|
|
||||||
|
HmcObject c = new HmcObject();
|
||||||
|
c.name = key;
|
||||||
|
|
||||||
|
if(hmcTable.contains(key+".url")) {
|
||||||
|
c.url = hmcTable.getString(key+".url");
|
||||||
|
}
|
||||||
|
|
||||||
|
if(hmcTable.contains(key+".username")) {
|
||||||
|
c.username = hmcTable.getString(key+".username");
|
||||||
|
}
|
||||||
|
|
||||||
|
if(hmcTable.contains(key+".password")) {
|
||||||
|
c.password = hmcTable.getString(key+".password");
|
||||||
|
}
|
||||||
|
|
||||||
|
if(hmcTable.contains(key+".unsafe")) {
|
||||||
|
c.unsafe = hmcTable.getBoolean(key+".unsafe");
|
||||||
|
} else {
|
||||||
|
c.unsafe = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
list.add(c);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
InfluxObject getInflux(TomlParseResult result) {
|
||||||
|
|
||||||
|
InfluxObject c = new InfluxObject();
|
||||||
|
|
||||||
|
if(result.contains("influx")) {
|
||||||
|
TomlTable t = result.getTable("influx");
|
||||||
|
|
||||||
|
if(t != null && t.contains("url")) {
|
||||||
|
c.url = t.getString("url");
|
||||||
|
}
|
||||||
|
|
||||||
|
if(t != null && t.contains("username")) {
|
||||||
|
c.username = t.getString("username");
|
||||||
|
}
|
||||||
|
|
||||||
|
if(t != null && t.contains("password")) {
|
||||||
|
c.password = t.getString("password");
|
||||||
|
}
|
||||||
|
|
||||||
|
if(t != null && t.contains("database")) {
|
||||||
|
c.database = t.getString("database");
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
return c;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
static class InfluxObject {
|
||||||
|
|
||||||
|
String url = "http://localhost:8086";
|
||||||
|
String username = "root";
|
||||||
|
String password = "";
|
||||||
|
String database = "hmci";
|
||||||
|
|
||||||
|
private boolean validated = false;
|
||||||
|
|
||||||
|
InfluxObject() { }
|
||||||
|
|
||||||
|
InfluxObject(String url, String username, String password, String database) {
|
||||||
|
this.url = url;
|
||||||
|
this.username = username;
|
||||||
|
this.password = password;
|
||||||
|
this.database = database;
|
||||||
|
}
|
||||||
|
|
||||||
|
Boolean isValid() {
|
||||||
|
return validated;
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Fixme
|
||||||
|
void validate() {
|
||||||
|
validated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String toString() {
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
static class HmcObject {
|
||||||
|
|
||||||
|
String name;
|
||||||
|
String url;
|
||||||
|
String username;
|
||||||
|
String password;
|
||||||
|
Boolean unsafe = false;
|
||||||
|
|
||||||
|
private boolean validated = false;
|
||||||
|
|
||||||
|
HmcObject() { }
|
||||||
|
|
||||||
|
HmcObject(String url, String username, String password, Boolean unsafe) {
|
||||||
|
this.url = url;
|
||||||
|
this.username = username;
|
||||||
|
this.password = password;
|
||||||
|
this.unsafe = unsafe;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
Boolean isValid() {
|
||||||
|
return validated;
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Fixme
|
||||||
|
void validate() {
|
||||||
|
validated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String toString() {
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
431
src/main/java/biz/nellemann/hmci/HmcClient.java
Normal file
431
src/main/java/biz/nellemann/hmci/HmcClient.java
Normal file
|
@ -0,0 +1,431 @@
|
||||||
|
/*
|
||||||
|
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package biz.nellemann.hmci;
|
||||||
|
|
||||||
|
import biz.nellemann.hmci.Configuration.HmcObject;
|
||||||
|
import okhttp3.*;
|
||||||
|
import org.jsoup.Jsoup;
|
||||||
|
import org.jsoup.nodes.Document;
|
||||||
|
import org.jsoup.nodes.Element;
|
||||||
|
import org.jsoup.select.Elements;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
import javax.net.ssl.*;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.net.MalformedURLException;
|
||||||
|
import java.net.URL;
|
||||||
|
import java.security.SecureRandom;
|
||||||
|
import java.security.cert.CertificateException;
|
||||||
|
import java.security.cert.X509Certificate;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Objects;
|
||||||
|
|
||||||
|
class HmcClient {
|
||||||
|
|
||||||
|
private final static Logger log = LoggerFactory.getLogger(HmcClient.class);
|
||||||
|
|
||||||
|
private final MediaType MEDIA_TYPE_IBM_XML_LOGIN = MediaType.parse("application/vnd.ibm.powervm.web+xml; type=LogonRequest");
|
||||||
|
|
||||||
|
private final String hmcId;
|
||||||
|
private final String baseUrl;
|
||||||
|
private final String username;
|
||||||
|
private final String password;
|
||||||
|
|
||||||
|
protected Integer responseErrors = 0;
|
||||||
|
protected String authToken;
|
||||||
|
private final OkHttpClient client;
|
||||||
|
|
||||||
|
|
||||||
|
HmcClient(HmcObject configHmc) {
|
||||||
|
|
||||||
|
this.hmcId = configHmc.name;
|
||||||
|
this.baseUrl = configHmc.url;
|
||||||
|
this.username = configHmc.username;
|
||||||
|
this.password = configHmc.password;
|
||||||
|
Boolean unsafe = configHmc.unsafe;
|
||||||
|
|
||||||
|
if(unsafe) {
|
||||||
|
this.client = getUnsafeOkHttpClient();
|
||||||
|
} else {
|
||||||
|
this.client = new OkHttpClient();
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Logon to the HMC and get an authentication token for further requests.
|
||||||
|
*/
|
||||||
|
void login() throws Exception {
|
||||||
|
this.login(false);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Logon to the HMC and get an authentication token for further requests.
|
||||||
|
* @param force
|
||||||
|
*/
|
||||||
|
void login(Boolean force) throws Exception {
|
||||||
|
|
||||||
|
if(authToken != null && !force) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("Connecting to HMC - " + baseUrl);
|
||||||
|
|
||||||
|
StringBuilder payload = new StringBuilder();
|
||||||
|
payload.append("<?xml version='1.0' encoding='UTF-8' standalone='yes'?>");
|
||||||
|
payload.append("<LogonRequest xmlns='http://www.ibm.com/xmlns/systems/power/firmware/web/mc/2012_10/' schemaVersion='V1_0'>");
|
||||||
|
payload.append("<UserID>").append(username).append("</UserID>");
|
||||||
|
payload.append("<Password>").append(password).append("</Password>");
|
||||||
|
payload.append("</LogonRequest>");
|
||||||
|
|
||||||
|
try {
|
||||||
|
URL url = new URL(String.format("%s/rest/api/web/Logon", baseUrl));
|
||||||
|
Request request = new Request.Builder()
|
||||||
|
.url(url)
|
||||||
|
//.addHeader("Content-Type", "application/vnd.ibm.powervm.web+xml; type=LogonRequest")
|
||||||
|
.addHeader("Accept", "application/vnd.ibm.powervm.web+xml; type=LogonResponse")
|
||||||
|
.addHeader("X-Audit-Memento", "hmci")
|
||||||
|
.put(RequestBody.create(payload.toString(), MEDIA_TYPE_IBM_XML_LOGIN))
|
||||||
|
.build();
|
||||||
|
|
||||||
|
Response response = client.newCall(request).execute();
|
||||||
|
if (!response.isSuccessful()) throw new IOException("Unexpected code " + response);
|
||||||
|
|
||||||
|
// Get response body and parse
|
||||||
|
String responseBody = Objects.requireNonNull(response.body()).string();
|
||||||
|
Objects.requireNonNull(response.body()).close();
|
||||||
|
|
||||||
|
Document doc = Jsoup.parse(responseBody);
|
||||||
|
authToken = doc.select("X-API-Session").text();
|
||||||
|
|
||||||
|
log.debug("login() - Auth Token: " + authToken);
|
||||||
|
} catch (MalformedURLException e) {
|
||||||
|
log.error("login() - url error", e);
|
||||||
|
throw new Exception(new Throwable("Login URL Error: " + e.getMessage()));
|
||||||
|
} catch(Exception e) {
|
||||||
|
log.error("login() - general error", e);
|
||||||
|
throw new Exception(new Throwable("Login General Error: " + e.getMessage()));
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Logoff from the HMC and remove any session
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
void logoff() throws IOException {
|
||||||
|
|
||||||
|
if(authToken == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
URL absUrl = new URL(String.format("%s/rest/api/web/Logon", baseUrl));
|
||||||
|
Request request = new Request.Builder()
|
||||||
|
.url(absUrl)
|
||||||
|
.addHeader("Content-Type", "application/vnd.ibm.powervm.web+xml; type=LogonRequest")
|
||||||
|
.addHeader("X-API-Session", authToken)
|
||||||
|
.delete()
|
||||||
|
.build();
|
||||||
|
|
||||||
|
Response response = client.newCall(request).execute();
|
||||||
|
if (!response.isSuccessful()) throw new IOException("Unexpected code " + response);
|
||||||
|
|
||||||
|
authToken = null;
|
||||||
|
log.debug("logoff()");
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return Map of ManagedSystems seen by this HMC
|
||||||
|
*
|
||||||
|
* @return Map of system-id and ManagedSystem
|
||||||
|
*/
|
||||||
|
Map<String, ManagedSystem> getManagedSystems() throws Exception {
|
||||||
|
|
||||||
|
URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem", baseUrl));
|
||||||
|
Response response = getResponse(url);
|
||||||
|
String responseBody = Objects.requireNonNull(response.body()).string();
|
||||||
|
Map<String,ManagedSystem> managedSystemsMap = new HashMap<>();
|
||||||
|
|
||||||
|
// Do not try to parse empty response
|
||||||
|
if(responseBody.isEmpty() || responseBody.length() <= 1) {
|
||||||
|
responseErrors++;
|
||||||
|
return managedSystemsMap;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
Document doc = Jsoup.parse(responseBody);
|
||||||
|
Elements managedSystems = doc.select("ManagedSystem|ManagedSystem"); // doc.select("img[src$=.png]");
|
||||||
|
for(Element el : managedSystems) {
|
||||||
|
ManagedSystem system = new ManagedSystem(
|
||||||
|
hmcId,
|
||||||
|
el.select("Metadata > Atom > AtomID").text(),
|
||||||
|
el.select("SystemName").text(),
|
||||||
|
el.select("MachineTypeModelAndSerialNumber > MachineType").text(),
|
||||||
|
el.select("MachineTypeModelAndSerialNumber > Model").text(),
|
||||||
|
el.select("MachineTypeModelAndSerialNumber > SerialNumber").text()
|
||||||
|
);
|
||||||
|
managedSystemsMap.put(system.id, system);
|
||||||
|
log.debug("getManagedSystems() - Found system: " + system.toString());
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch(Exception e) {
|
||||||
|
log.warn("getManagedSystems() - xml parse error", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
return managedSystemsMap;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return Map of LogicalPartitions seen by a ManagedSystem on this HMC
|
||||||
|
* @param system a valid ManagedSystem
|
||||||
|
* @return Map of partition-id and LogicalPartition
|
||||||
|
*/
|
||||||
|
Map<String, LogicalPartition> getLogicalPartitionsForManagedSystem(ManagedSystem system) throws Exception {
|
||||||
|
URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem/%s/LogicalPartition", baseUrl, system.id));
|
||||||
|
Response response = getResponse(url);
|
||||||
|
String responseBody = Objects.requireNonNull(response.body()).string();
|
||||||
|
Map<String, LogicalPartition> partitionMap = new HashMap<String, LogicalPartition>() {};
|
||||||
|
|
||||||
|
// Do not try to parse empty response
|
||||||
|
if(responseBody.isEmpty() || responseBody.length() <= 1) {
|
||||||
|
responseErrors++;
|
||||||
|
return partitionMap;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
Document doc = Jsoup.parse(responseBody);
|
||||||
|
Elements logicalPartitions = doc.select("LogicalPartition|LogicalPartition"); // doc.select("img[src$=.png]");
|
||||||
|
for(Element el : logicalPartitions) {
|
||||||
|
LogicalPartition logicalPartition = new LogicalPartition(
|
||||||
|
el.select("PartitionUUID").text(),
|
||||||
|
el.select("PartitionName").text(),
|
||||||
|
el.select("PartitionType").text(),
|
||||||
|
system
|
||||||
|
);
|
||||||
|
partitionMap.put(logicalPartition.id, logicalPartition);
|
||||||
|
log.debug("getLogicalPartitionsForManagedSystem() - Found partition: " + logicalPartition.toString());
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch(Exception e) {
|
||||||
|
log.warn("getLogicalPartitionsForManagedSystem() - xml parse error", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
return partitionMap;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse XML feed to get PCM Data in JSON format
|
||||||
|
* @param system a valid ManagedSystem
|
||||||
|
* @return JSON string with PCM data for this ManagedSystem
|
||||||
|
*/
|
||||||
|
String getPcmDataForManagedSystem(ManagedSystem system) throws Exception {
|
||||||
|
|
||||||
|
log.debug("getPcmDataForManagedSystem() - " + system.id);
|
||||||
|
URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/ProcessedMetrics?NoOfSamples=1", baseUrl, system.id));
|
||||||
|
Response response = getResponse(url);
|
||||||
|
String responseBody = Objects.requireNonNull(response.body()).string();
|
||||||
|
String jsonBody = null;
|
||||||
|
|
||||||
|
// Do not try to parse empty response
|
||||||
|
if(responseBody.isEmpty() || responseBody.length() <= 1) {
|
||||||
|
responseErrors++;
|
||||||
|
log.warn("getPcmDataForManagedSystem() - empty response");
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
Document doc = Jsoup.parse(responseBody);
|
||||||
|
Element entry = doc.select("feed > entry").first();
|
||||||
|
Element link = entry.select("link[href]").first();
|
||||||
|
|
||||||
|
if(link.attr("type").equals("application/json")) {
|
||||||
|
String href = link.attr("href");
|
||||||
|
log.debug("getPcmDataForManagedSystem() - json url: " + href);
|
||||||
|
jsonBody = getResponseBody(new URL(href));
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch(Exception e) {
|
||||||
|
log.warn("getPcmDataForManagedSystem() - xml parse error", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
return jsonBody;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse XML feed to get PCM Data in JSON format
|
||||||
|
* @param partition a valid LogicalPartition
|
||||||
|
* @return JSON string with PCM data for this LogicalPartition
|
||||||
|
*/
|
||||||
|
String getPcmDataForLogicalPartition(LogicalPartition partition) throws Exception {
|
||||||
|
|
||||||
|
log.debug(String.format("getPcmDataForLogicalPartition() - %s @ %s", partition.id, partition.system.id));
|
||||||
|
URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/LogicalPartition/%s/ProcessedMetrics?NoOfSamples=1", baseUrl, partition.system.id, partition.id));
|
||||||
|
Response response = getResponse(url);
|
||||||
|
String responseBody = Objects.requireNonNull(response.body()).string();
|
||||||
|
String jsonBody = null;
|
||||||
|
|
||||||
|
// Do not try to parse empty response
|
||||||
|
if(responseBody.isEmpty() || responseBody.length() <= 1) {
|
||||||
|
responseErrors++;
|
||||||
|
log.warn("getPcmDataForLogicalPartition() - empty response");
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
Document doc = Jsoup.parse(responseBody);
|
||||||
|
Element entry = doc.select("feed > entry").first();
|
||||||
|
Element link = entry.select("link[href]").first();
|
||||||
|
|
||||||
|
if(link.attr("type").equals("application/json")) {
|
||||||
|
String href = link.attr("href");
|
||||||
|
log.debug("getPcmDataForLogicalPartition() - json url: " + href);
|
||||||
|
jsonBody = getResponseBody(new URL(href));
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch(Exception e) {
|
||||||
|
log.warn("getPcmDataForLogicalPartition() - xml parse error", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
return jsonBody;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return body text from a HTTP response from the HMC
|
||||||
|
*
|
||||||
|
* @param url URL to get response body as String
|
||||||
|
* @return String with http reponse body
|
||||||
|
*/
|
||||||
|
protected String getResponseBody(URL url) throws Exception {
|
||||||
|
Response response = getResponse(url);
|
||||||
|
String body = Objects.requireNonNull(response.body()).string();
|
||||||
|
Objects.requireNonNull(response.body()).close();
|
||||||
|
return body;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return a Response from the HMC
|
||||||
|
* @param url to get Response from
|
||||||
|
* @return Response object
|
||||||
|
*/
|
||||||
|
private Response getResponse(URL url) throws Exception {
|
||||||
|
return getResponse(url, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return a Response from the HMC
|
||||||
|
* @param url to get Response from
|
||||||
|
* @param retry number of retries for this call
|
||||||
|
* @return Response object
|
||||||
|
*/
|
||||||
|
private Response getResponse(URL url, Integer retry) throws Exception {
|
||||||
|
|
||||||
|
log.debug("getResponse() - " + url.toString());
|
||||||
|
|
||||||
|
if(responseErrors > 2) {
|
||||||
|
responseErrors = 0;
|
||||||
|
login(true);
|
||||||
|
return getResponse(url, retry++);
|
||||||
|
}
|
||||||
|
|
||||||
|
Request request = new Request.Builder()
|
||||||
|
.url(url)
|
||||||
|
.addHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
|
||||||
|
.addHeader("X-API-Session", authToken)
|
||||||
|
.get()
|
||||||
|
.build();
|
||||||
|
|
||||||
|
Response response = client.newCall(request).execute();
|
||||||
|
if (!response.isSuccessful()) {
|
||||||
|
Objects.requireNonNull(response.body()).close();
|
||||||
|
|
||||||
|
if(response.code() == 401) {
|
||||||
|
login(true);
|
||||||
|
return getResponse(url, retry++);
|
||||||
|
}
|
||||||
|
|
||||||
|
if(retry < 2) {
|
||||||
|
log.warn("getResponse() - Retrying due to unexpected response: " + response.code());
|
||||||
|
return getResponse(url, retry++);
|
||||||
|
}
|
||||||
|
|
||||||
|
log.error("getResponse() - Unexpected response: " + response.code());
|
||||||
|
throw new IOException("getResponse() - Unexpected response: " + response.code());
|
||||||
|
}
|
||||||
|
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Provide an unsafe (ignoring SSL problems) OkHttpClient
|
||||||
|
*
|
||||||
|
* @return
|
||||||
|
*/
|
||||||
|
private static OkHttpClient getUnsafeOkHttpClient() {
|
||||||
|
try {
|
||||||
|
// Create a trust manager that does not validate certificate chains
|
||||||
|
final TrustManager[] trustAllCerts = new TrustManager[] {
|
||||||
|
new X509TrustManager() {
|
||||||
|
@Override
|
||||||
|
public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public X509Certificate[] getAcceptedIssuers() {
|
||||||
|
return new X509Certificate[]{};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Install the all-trusting trust manager
|
||||||
|
final SSLContext sslContext = SSLContext.getInstance("SSL");
|
||||||
|
sslContext.init(null, trustAllCerts, new SecureRandom());
|
||||||
|
|
||||||
|
// Create an ssl socket factory with our all-trusting manager
|
||||||
|
final SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();
|
||||||
|
|
||||||
|
OkHttpClient.Builder builder = new OkHttpClient.Builder();
|
||||||
|
builder.sslSocketFactory(sslSocketFactory, (X509TrustManager)trustAllCerts[0]);
|
||||||
|
builder.hostnameVerifier((hostname, session) -> true);
|
||||||
|
|
||||||
|
return builder.build();
|
||||||
|
} catch (Exception e) {
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
297
src/main/java/biz/nellemann/hmci/InfluxClient.java
Normal file
297
src/main/java/biz/nellemann/hmci/InfluxClient.java
Normal file
|
@ -0,0 +1,297 @@
|
||||||
|
/*
|
||||||
|
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package biz.nellemann.hmci;
|
||||||
|
|
||||||
|
import biz.nellemann.hmci.Configuration.InfluxObject;
|
||||||
|
import org.influxdb.BatchOptions;
|
||||||
|
import org.influxdb.InfluxDB;
|
||||||
|
import org.influxdb.InfluxDBFactory;
|
||||||
|
import org.influxdb.dto.BatchPoints;
|
||||||
|
import org.influxdb.dto.Point;
|
||||||
|
import org.influxdb.dto.Query;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
import java.time.Instant;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.concurrent.TimeUnit;
|
||||||
|
|
||||||
|
class InfluxClient {
|
||||||
|
|
||||||
|
private final static Logger log = LoggerFactory.getLogger(InfluxClient.class);
|
||||||
|
|
||||||
|
final private String url;
|
||||||
|
final private String username;
|
||||||
|
final private String password;
|
||||||
|
final private String database;
|
||||||
|
|
||||||
|
private InfluxDB influxDB;
|
||||||
|
private BatchPoints batchPoints;
|
||||||
|
|
||||||
|
|
||||||
|
InfluxClient(InfluxObject config) {
|
||||||
|
this.url = config.url;
|
||||||
|
this.username = config.username;
|
||||||
|
this.password = config.password;
|
||||||
|
this.database = config.database;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void login() throws Exception {
|
||||||
|
|
||||||
|
if(influxDB != null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
log.info("Connecting to InfluxDB - " + url);
|
||||||
|
influxDB = InfluxDBFactory.connect(url, username, password);
|
||||||
|
createDatabase();
|
||||||
|
|
||||||
|
// Enable batch writes to get better performance.
|
||||||
|
//BatchOptions options = BatchOptions.DEFAULTS.actions(300).flushDuration(500);
|
||||||
|
influxDB.enableBatch(BatchOptions.DEFAULTS);
|
||||||
|
//influxDB.setLogLevel(InfluxDB.LogLevel.BASIC);
|
||||||
|
|
||||||
|
batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build();
|
||||||
|
|
||||||
|
} catch(Exception e) {
|
||||||
|
log.error(e.getMessage());
|
||||||
|
throw new Exception(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void logoff() {
|
||||||
|
if(influxDB != null) {
|
||||||
|
influxDB.close();
|
||||||
|
}
|
||||||
|
influxDB = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void createDatabase() {
|
||||||
|
// Create our database... with a default retention of 156w == 3 years
|
||||||
|
influxDB.query(new Query("CREATE DATABASE " + database + " WITH DURATION 156w"));
|
||||||
|
influxDB.setDatabase(database);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void writeBatchPoints() throws Exception {
|
||||||
|
log.debug("writeBatchPoints()");
|
||||||
|
try {
|
||||||
|
influxDB.write(batchPoints);
|
||||||
|
} catch(Exception e) {
|
||||||
|
log.error("writeBatchPoints() error - " + e.getMessage());
|
||||||
|
logoff();
|
||||||
|
login();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/*
|
||||||
|
Managed System
|
||||||
|
*/
|
||||||
|
|
||||||
|
|
||||||
|
void writeManagedSystem(ManagedSystem system) {
|
||||||
|
|
||||||
|
if(system.metrics == null) {
|
||||||
|
log.warn("writeManagedSystem() - null metrics, skipping");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
Instant timestamp = system.getTimestamp();
|
||||||
|
if(timestamp == null) {
|
||||||
|
log.warn("writeManagedSystem() - no timestamp, skipping");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
//BatchPoints batchPoints = BatchPoints.database(database).build();
|
||||||
|
|
||||||
|
getSystemMemory(system, timestamp).forEach( it -> {
|
||||||
|
batchPoints.point(it);
|
||||||
|
});
|
||||||
|
|
||||||
|
getSystemProcessor(system, timestamp).forEach( it -> {
|
||||||
|
batchPoints.point(it);
|
||||||
|
});
|
||||||
|
|
||||||
|
getSystemSharedProcessorPools(system, timestamp).forEach( it -> {
|
||||||
|
batchPoints.point(it);
|
||||||
|
});
|
||||||
|
|
||||||
|
getSystemSharedAdapters(system, timestamp).forEach( it -> {
|
||||||
|
batchPoints.point(it);
|
||||||
|
});
|
||||||
|
|
||||||
|
getSystemFiberChannelAdapters(system, timestamp).forEach( it -> {
|
||||||
|
batchPoints.point(it);
|
||||||
|
});
|
||||||
|
|
||||||
|
getSystemGenericPhysicalAdapters(system, timestamp).forEach( it -> {
|
||||||
|
batchPoints.point(it);
|
||||||
|
});
|
||||||
|
|
||||||
|
getSystemGenericVirtualAdapters(system, timestamp).forEach( it -> {
|
||||||
|
batchPoints.point(it);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
private static List<Point> getSystemMemory(ManagedSystem system, Instant timestamp) {
|
||||||
|
List<Measurement> metrics = system.getMemoryMetrics();
|
||||||
|
return processMeasurementMap(metrics, timestamp, "SystemMemory");
|
||||||
|
}
|
||||||
|
|
||||||
|
private static List<Point> getSystemProcessor(ManagedSystem system, Instant timestamp) {
|
||||||
|
List<Measurement> metrics = system.getProcessorMetrics();
|
||||||
|
return processMeasurementMap(metrics, timestamp, "SystemProcessor");
|
||||||
|
}
|
||||||
|
|
||||||
|
private static List<Point> getSystemSharedProcessorPools(ManagedSystem system, Instant timestamp) {
|
||||||
|
List<Measurement> metrics = system.getSharedProcessorPools();
|
||||||
|
return processMeasurementMap(metrics, timestamp, "SystemSharedProcessorPool");
|
||||||
|
}
|
||||||
|
|
||||||
|
private static List<Point> getSystemSharedAdapters(ManagedSystem system, Instant timestamp) {
|
||||||
|
List<Measurement> metrics = system.getSystemSharedAdapters();
|
||||||
|
return processMeasurementMap(metrics, timestamp, "SystemSharedAdapters");
|
||||||
|
}
|
||||||
|
|
||||||
|
private static List<Point> getSystemFiberChannelAdapters(ManagedSystem system, Instant timestamp) {
|
||||||
|
List<Measurement> metrics = system.getSystemFiberChannelAdapters();
|
||||||
|
return processMeasurementMap(metrics, timestamp, "SystemFiberChannelAdapters");
|
||||||
|
}
|
||||||
|
|
||||||
|
private static List<Point> getSystemGenericPhysicalAdapters(ManagedSystem system, Instant timestamp) {
|
||||||
|
List<Measurement> metrics = system.getSystemGenericPhysicalAdapters();
|
||||||
|
return processMeasurementMap(metrics, timestamp, "SystemGenericPhysicalAdapters");
|
||||||
|
}
|
||||||
|
|
||||||
|
private static List<Point> getSystemGenericVirtualAdapters(ManagedSystem system, Instant timestamp) {
|
||||||
|
List<Measurement> metrics = system.getSystemGenericVirtualAdapters();
|
||||||
|
return processMeasurementMap(metrics, timestamp, "SystemGenericVirtualAdapters");
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/*
|
||||||
|
Logical Partitions
|
||||||
|
*/
|
||||||
|
|
||||||
|
void writeLogicalPartition(LogicalPartition partition) {
|
||||||
|
|
||||||
|
if(partition.metrics == null) {
|
||||||
|
log.warn("writeLogicalPartition() - null metrics, skipping");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
Instant timestamp = partition.getTimestamp();
|
||||||
|
if(timestamp == null) {
|
||||||
|
log.warn("writeLogicalPartition() - no timestamp, skipping");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
//BatchPoints batchPoints = BatchPoints.database(database).build();
|
||||||
|
|
||||||
|
getPartitionAffinityScore(partition, timestamp).forEach( it -> {
|
||||||
|
batchPoints.point(it);
|
||||||
|
});
|
||||||
|
|
||||||
|
getPartitionMemory(partition, timestamp).forEach( it -> {
|
||||||
|
batchPoints.point(it);
|
||||||
|
});
|
||||||
|
|
||||||
|
getPartitionProcessor(partition, timestamp).forEach( it -> {
|
||||||
|
batchPoints.point(it);
|
||||||
|
});
|
||||||
|
|
||||||
|
getPartitionVirtualEthernetAdapter(partition, timestamp).forEach( it -> {
|
||||||
|
batchPoints.point(it);
|
||||||
|
});
|
||||||
|
|
||||||
|
getPartitionVirtualFiberChannelAdapter(partition, timestamp).forEach( it -> {
|
||||||
|
batchPoints.point(it);
|
||||||
|
});
|
||||||
|
|
||||||
|
//influxDB.write(batchPoints);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static List<Point> getPartitionAffinityScore(LogicalPartition partition, Instant timestamp) {
|
||||||
|
List<Measurement> metrics = partition.getAffinityScore();
|
||||||
|
return processMeasurementMap(metrics, timestamp, "PartitionAffinityScore");
|
||||||
|
}
|
||||||
|
|
||||||
|
private static List<Point> getPartitionMemory(LogicalPartition partition, Instant timestamp) {
|
||||||
|
List<Measurement> metrics = partition.getMemoryMetrics();
|
||||||
|
return processMeasurementMap(metrics, timestamp, "PartitionMemory");
|
||||||
|
}
|
||||||
|
|
||||||
|
private static List<Point> getPartitionProcessor(LogicalPartition partition, Instant timestamp) {
|
||||||
|
List<Measurement> metrics = partition.getProcessorMetrics();
|
||||||
|
return processMeasurementMap(metrics, timestamp, "PartitionProcessor");
|
||||||
|
}
|
||||||
|
|
||||||
|
private static List<Point> getPartitionVirtualEthernetAdapter(LogicalPartition partition, Instant timestamp) {
|
||||||
|
List<Measurement> metrics = partition.getVirtualEthernetAdapterMetrics();
|
||||||
|
return processMeasurementMap(metrics, timestamp, "PartitionVirtualEthernetAdapters");
|
||||||
|
}
|
||||||
|
|
||||||
|
private static List<Point> getPartitionVirtualFiberChannelAdapter(LogicalPartition partition, Instant timestamp) {
|
||||||
|
List<Measurement> metrics = partition.getVirtualFiberChannelAdaptersMetrics();
|
||||||
|
return processMeasurementMap(metrics, timestamp, "PartitionVirtualFiberChannelAdapters");
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/*
|
||||||
|
Shared
|
||||||
|
*/
|
||||||
|
|
||||||
|
private static List<Point> processMeasurementMap(List<Measurement> measurements, Instant timestamp, String measurement) {
|
||||||
|
|
||||||
|
List<Point> listOfPoints = new ArrayList<>();
|
||||||
|
measurements.forEach( m -> {
|
||||||
|
|
||||||
|
// Iterate fields
|
||||||
|
//Map<String, BigDecimal> fieldsMap = m.get("fields");
|
||||||
|
m.fields.forEach((fieldName, fieldValue) -> {
|
||||||
|
log.debug("processMeasurementMap() " + measurement + " - fieldName: " + fieldName + ", fieldValue: " + fieldValue);
|
||||||
|
|
||||||
|
Point.Builder builder = Point.measurement(measurement)
|
||||||
|
.time(timestamp.toEpochMilli(), TimeUnit.MILLISECONDS)
|
||||||
|
.tag("name", fieldName)
|
||||||
|
.addField("value", fieldValue);
|
||||||
|
|
||||||
|
// For each field, we add all tags
|
||||||
|
//Map<String, String> tagsMap = m.get("tags");
|
||||||
|
m.tags.forEach((tagName, tagValue) -> {
|
||||||
|
builder.tag(tagName, tagValue);
|
||||||
|
log.debug("processMeasurementMap() " + measurement + " - tagName: " + tagName + ", tagValue: " + tagValue);
|
||||||
|
});
|
||||||
|
|
||||||
|
listOfPoints.add(builder.build());
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
return listOfPoints;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
194
src/main/java/biz/nellemann/hmci/Insights.java
Normal file
194
src/main/java/biz/nellemann/hmci/Insights.java
Normal file
|
@ -0,0 +1,194 @@
|
||||||
|
/*
|
||||||
|
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package biz.nellemann.hmci;
|
||||||
|
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.concurrent.atomic.AtomicBoolean;
|
||||||
|
|
||||||
|
import static java.lang.Thread.*;
|
||||||
|
|
||||||
|
class Insights {
|
||||||
|
|
||||||
|
private final static Logger log = LoggerFactory.getLogger(Insights.class);
|
||||||
|
|
||||||
|
final Configuration configuration;
|
||||||
|
|
||||||
|
InfluxClient influxClient;
|
||||||
|
final Map<String, HmcClient> hmcClients = new HashMap<>();
|
||||||
|
final Map<String,ManagedSystem> systems = new HashMap<>();
|
||||||
|
final Map<String, LogicalPartition> partitions = new HashMap<>();
|
||||||
|
|
||||||
|
|
||||||
|
Insights(Configuration configuration) {
|
||||||
|
this.configuration = configuration;
|
||||||
|
|
||||||
|
try {
|
||||||
|
influxClient = new InfluxClient(configuration.influx);
|
||||||
|
influxClient.login();
|
||||||
|
} catch(Exception e) {
|
||||||
|
System.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initial scan
|
||||||
|
discover();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void discover() {
|
||||||
|
|
||||||
|
configuration.hmc.forEach( configHmc -> {
|
||||||
|
if(hmcClients != null && !hmcClients.containsKey(configHmc.name)) {
|
||||||
|
HmcClient hmcClient = new HmcClient(configHmc);
|
||||||
|
hmcClients.put(configHmc.name, hmcClient);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
hmcClients.forEach(( hmcId, hmcClient) -> {
|
||||||
|
|
||||||
|
try {
|
||||||
|
hmcClient.login();
|
||||||
|
hmcClient.getManagedSystems().forEach((systemId, system) -> {
|
||||||
|
|
||||||
|
// Add to list of known systems
|
||||||
|
systems.putIfAbsent(systemId, system);
|
||||||
|
|
||||||
|
// Get LPAR's for this system
|
||||||
|
try {
|
||||||
|
hmcClient.getLogicalPartitionsForManagedSystem(system).forEach((partitionId, partition) -> {
|
||||||
|
|
||||||
|
// Add to list of known partitions
|
||||||
|
partitions.putIfAbsent(partitionId, partition);
|
||||||
|
});
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("discover()", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
} catch(Exception e) {
|
||||||
|
log.error("discover() - " + hmcId + " error: " + e.getMessage());
|
||||||
|
}
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void getMetricsForSystems() {
|
||||||
|
|
||||||
|
systems.forEach((systemId, system) -> {
|
||||||
|
|
||||||
|
HmcClient hmcClient = hmcClients.get(system.hmcId);
|
||||||
|
|
||||||
|
// Get and process metrics for this system
|
||||||
|
String tmpJsonString = null;
|
||||||
|
try {
|
||||||
|
tmpJsonString = hmcClient.getPcmDataForManagedSystem(system);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("getMetricsForSystems()", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
if(tmpJsonString != null && !tmpJsonString.isEmpty()) {
|
||||||
|
system.processMetrics(tmpJsonString);
|
||||||
|
}
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void getMetricsForPartitions() {
|
||||||
|
|
||||||
|
try {
|
||||||
|
|
||||||
|
// Get LPAR's for this system
|
||||||
|
partitions.forEach((partitionId, partition) -> {
|
||||||
|
|
||||||
|
HmcClient hmcClient = hmcClients.get(partition.system.hmcId);
|
||||||
|
|
||||||
|
// Get and process metrics for this partition
|
||||||
|
String tmpJsonString2 = null;
|
||||||
|
try {
|
||||||
|
tmpJsonString2 = hmcClient.getPcmDataForLogicalPartition(partition);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("getMetricsForPartitions() - getPcmDataForLogicalPartition", e);
|
||||||
|
}
|
||||||
|
if(tmpJsonString2 != null && !tmpJsonString2.isEmpty()) {
|
||||||
|
partition.processMetrics(tmpJsonString2);
|
||||||
|
}
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
} catch(Exception e) {
|
||||||
|
log.error("getMetricsForPartitions()", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void writeMetricsForManagedSystems() {
|
||||||
|
systems.forEach((systemId, system) -> {
|
||||||
|
influxClient.writeManagedSystem(system);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void writeMetricsForLogicalPartitions() {
|
||||||
|
partitions.forEach((partitionId, partition) -> {
|
||||||
|
influxClient.writeLogicalPartition(partition);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void run() throws InterruptedException {
|
||||||
|
|
||||||
|
log.debug("run()");
|
||||||
|
int executions = 0;
|
||||||
|
AtomicBoolean keepRunning = new AtomicBoolean(true);
|
||||||
|
|
||||||
|
Thread shutdownHook = new Thread(() -> keepRunning.set(false));
|
||||||
|
Runtime.getRuntime().addShutdownHook(shutdownHook);
|
||||||
|
|
||||||
|
do {
|
||||||
|
|
||||||
|
try {
|
||||||
|
getMetricsForSystems();
|
||||||
|
getMetricsForPartitions();
|
||||||
|
|
||||||
|
writeMetricsForManagedSystems();
|
||||||
|
writeMetricsForLogicalPartitions();
|
||||||
|
influxClient.writeBatchPoints();
|
||||||
|
|
||||||
|
// Refresh HMC's
|
||||||
|
if (executions > configuration.rescan) {
|
||||||
|
executions = 0;
|
||||||
|
discover();
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("run()", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
executions++;
|
||||||
|
sleep(configuration.refresh * 1000);
|
||||||
|
|
||||||
|
} while (keepRunning.get());
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
225
src/main/java/biz/nellemann/hmci/LogicalPartition.java
Normal file
225
src/main/java/biz/nellemann/hmci/LogicalPartition.java
Normal file
|
@ -0,0 +1,225 @@
|
||||||
|
/*
|
||||||
|
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package biz.nellemann.hmci;
|
||||||
|
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
class LogicalPartition extends MetaSystem {
|
||||||
|
|
||||||
|
private final static Logger log = LoggerFactory.getLogger(LogicalPartition.class);
|
||||||
|
|
||||||
|
public final String id;
|
||||||
|
public final String name;
|
||||||
|
public final String type;
|
||||||
|
public final ManagedSystem system;
|
||||||
|
|
||||||
|
|
||||||
|
LogicalPartition(String id, String name, String type, ManagedSystem system) {
|
||||||
|
this.id = id;
|
||||||
|
this.name = name;
|
||||||
|
this.type = type;
|
||||||
|
this.system = system;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
public String toString() {
|
||||||
|
return String.format("[%s] %s (%s)", id, name, type);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
List<Measurement> getAffinityScore() {
|
||||||
|
|
||||||
|
List<Measurement> list = new ArrayList<>();
|
||||||
|
//Map<String, Map> map = new HashMap<String, Map>()
|
||||||
|
|
||||||
|
Map<String, String> tagsMap = new HashMap<String, String>() {
|
||||||
|
{
|
||||||
|
put("system", system.name);
|
||||||
|
put("partition", name);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("tags", tagsMap)
|
||||||
|
log.debug("getAffinityScore() - tags: " + tagsMap.toString());
|
||||||
|
Map<String, Number> fieldsMap = new HashMap<String, Number>() {
|
||||||
|
{
|
||||||
|
put("affinityScore", metrics.systemUtil.sample.lparsUtil.affinityScore);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("fields", fieldsMap)
|
||||||
|
log.debug("getAffinityScore() - fields: " + fieldsMap.toString());
|
||||||
|
|
||||||
|
Measurement measurement = new Measurement(tagsMap, fieldsMap);
|
||||||
|
list.add(measurement);
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
List<Measurement> getMemoryMetrics() {
|
||||||
|
|
||||||
|
List<Measurement> list = new ArrayList<>();
|
||||||
|
//Map<String, Map> map = new HashMap<String, Map>()
|
||||||
|
|
||||||
|
Map<String, String> tagsMap = new HashMap<String, String>() {
|
||||||
|
{
|
||||||
|
put("system", system.name);
|
||||||
|
put("partition", name);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("tags", tagsMap)
|
||||||
|
log.debug("getMemoryMetrics() - tags: " + tagsMap.toString());
|
||||||
|
|
||||||
|
Map<String, Number> fieldsMap = new HashMap<String, Number>() {
|
||||||
|
{
|
||||||
|
put("logicalMem", metrics.systemUtil.sample.lparsUtil.memory.logicalMem);
|
||||||
|
put("backedPhysicalMem", metrics.systemUtil.sample.lparsUtil.memory.backedPhysicalMem);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
//map.put("fields", fieldsMap)
|
||||||
|
log.debug("getMemoryMetrics() - fields: " + fieldsMap.toString());
|
||||||
|
|
||||||
|
Measurement measurement = new Measurement(tagsMap, fieldsMap);
|
||||||
|
list.add(measurement);
|
||||||
|
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
//@CompileDynamic
|
||||||
|
List<Measurement> getProcessorMetrics() {
|
||||||
|
|
||||||
|
List<Measurement> list = new ArrayList<>();
|
||||||
|
//Map<String, Map> map = new HashMap<String, Map>()
|
||||||
|
|
||||||
|
HashMap<String, String> tagsMap = new HashMap<String, String>() {
|
||||||
|
{
|
||||||
|
put("system", system.name);
|
||||||
|
put("partition", name);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("tags", tagsMap)
|
||||||
|
log.debug("getProcessorMetrics() - tags: " + tagsMap.toString());
|
||||||
|
|
||||||
|
HashMap<String, Number> fieldsMap = new HashMap<String, Number>() {
|
||||||
|
{
|
||||||
|
put("utilizedProcUnits", metrics.systemUtil.sample.lparsUtil.processor.utilizedProcUnits);
|
||||||
|
put("maxVirtualProcessors", metrics.systemUtil.sample.lparsUtil.processor.maxVirtualProcessors);
|
||||||
|
put("currentVirtualProcessors", metrics.systemUtil.sample.lparsUtil.processor.currentVirtualProcessors);
|
||||||
|
//donatedProcUnits: metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.donatedProcUnits.first(),
|
||||||
|
put("entitledProcUnits", metrics.systemUtil.sample.lparsUtil.processor.entitledProcUnits);
|
||||||
|
//idleProcUnits: metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.idleProcUnits.first(),
|
||||||
|
//maxProcUnits: metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.maxProcUnits.first(),
|
||||||
|
put("utilizedCappedProcUnits", metrics.systemUtil.sample.lparsUtil.processor.utilizedCappedProcUnits);
|
||||||
|
put("utilizedUncappedProcUnits", metrics.systemUtil.sample.lparsUtil.processor.utilizedUncappedProcUnits);
|
||||||
|
put("timePerInstructionExecution", metrics.systemUtil.sample.lparsUtil.processor.timeSpentWaitingForDispatch);
|
||||||
|
put("timeSpentWaitingForDispatch", metrics.systemUtil.sample.lparsUtil.processor.timePerInstructionExecution);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("fields", fieldsMap)
|
||||||
|
log.debug("getProcessorMetrics() - fields: " + fieldsMap.toString());
|
||||||
|
|
||||||
|
Measurement measurement = new Measurement(tagsMap, fieldsMap);
|
||||||
|
list.add(measurement);
|
||||||
|
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
//@CompileDynamic
|
||||||
|
List<Measurement> getVirtualEthernetAdapterMetrics() {
|
||||||
|
|
||||||
|
List<Measurement> list = new ArrayList<>();
|
||||||
|
metrics.systemUtil.sample.lparsUtil.network.virtualEthernetAdapters.forEach( adapter -> {
|
||||||
|
|
||||||
|
HashMap<String, String> tagsMap = new HashMap<String, String>() {
|
||||||
|
{
|
||||||
|
put("system", system.name);
|
||||||
|
put("partition", name);
|
||||||
|
put("sea", adapter.sharedEthernetAdapterId);
|
||||||
|
put("viosId", adapter.viosId.toString());
|
||||||
|
put("vlanId", adapter.vlanId.toString());
|
||||||
|
put("vswitchId", adapter.vswitchId.toString());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
log.debug("getVirtualEthernetAdapterMetrics() - tags: " + tagsMap.toString());
|
||||||
|
|
||||||
|
HashMap<String, Number> fieldsMap = new HashMap<String, Number>() {
|
||||||
|
{
|
||||||
|
put("receivedPhysicalBytes", adapter.receivedPhysicalBytes);
|
||||||
|
put("sentPhysicalBytes", adapter.sentPhysicalBytes);
|
||||||
|
put("receivedBytes", adapter.receivedBytes);
|
||||||
|
put("sentBytes", adapter.sentBytes);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
log.debug("getVirtualEthernetAdapterMetrics() - fields: " + fieldsMap.toString());
|
||||||
|
|
||||||
|
Measurement measurement = new Measurement(tagsMap, fieldsMap);
|
||||||
|
list.add(measurement);
|
||||||
|
});
|
||||||
|
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
//PartitionVirtualFiberChannelAdapters
|
||||||
|
//@CompileDynamic
|
||||||
|
List<Measurement> getVirtualFiberChannelAdaptersMetrics() {
|
||||||
|
|
||||||
|
List<Measurement> list = new ArrayList<>();
|
||||||
|
metrics.systemUtil.sample.lparsUtil.storage.virtualFiberChannelAdapters.forEach( adapter -> {
|
||||||
|
//Map<String, Map> map = new HashMap<String, Map>()
|
||||||
|
|
||||||
|
HashMap<String, String> tagsMap = new HashMap<String, String>() {
|
||||||
|
{
|
||||||
|
put("system", system.name);
|
||||||
|
put("partition", name);
|
||||||
|
put("viosId", adapter.viosId.toString());
|
||||||
|
put("wwpn", adapter.wwpn);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("tags", tagsMap)
|
||||||
|
log.debug("getVirtualFiberChannelAdaptersMetrics() - tags: " + tagsMap.toString());
|
||||||
|
|
||||||
|
HashMap<String, Number> fieldsMap = new HashMap<String, Number>() {
|
||||||
|
{
|
||||||
|
put("transmittedBytes", adapter.transmittedBytes.get(0));
|
||||||
|
put("writeBytes", adapter.writeBytes.get(0));
|
||||||
|
put("readBytes", adapter.readBytes.get(0));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("fields", fieldsMap)
|
||||||
|
log.debug("getVirtualFiberChannelAdaptersMetrics() - fields: " + fieldsMap.toString());
|
||||||
|
|
||||||
|
Measurement measurement = new Measurement(tagsMap, fieldsMap);
|
||||||
|
list.add(measurement);
|
||||||
|
});
|
||||||
|
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
65
src/main/java/biz/nellemann/hmci/Main.java
Normal file
65
src/main/java/biz/nellemann/hmci/Main.java
Normal file
|
@ -0,0 +1,65 @@
|
||||||
|
/*
|
||||||
|
Copyright 2020 mark.nellemann@gmail.com
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
*/
|
||||||
|
package biz.nellemann.hmci;
|
||||||
|
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
import picocli.CommandLine;
|
||||||
|
import picocli.CommandLine.Command;
|
||||||
|
|
||||||
|
import java.io.File;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.util.concurrent.Callable;
|
||||||
|
|
||||||
|
@Command(name = "hmci",
|
||||||
|
mixinStandardHelpOptions = true,
|
||||||
|
description = "HMC Insights.",
|
||||||
|
versionProvider = biz.nellemann.hmci.VersionProvider.class)
|
||||||
|
public class Main implements Callable<Integer> {
|
||||||
|
|
||||||
|
private final static Logger log = LoggerFactory.getLogger(Main.class);
|
||||||
|
|
||||||
|
@SuppressWarnings("FieldMayBeFinal")
|
||||||
|
@CommandLine.Option(names = { "-c", "--conf" }, description = "Configuration file [default: '/etc/hmci.toml'].")
|
||||||
|
private String configurationFile = "/etc/hmci.toml";
|
||||||
|
|
||||||
|
public static void main(String... args) {
|
||||||
|
int exitCode = new CommandLine(new Main()).execute(args);
|
||||||
|
System.exit(exitCode);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Integer call() throws IOException {
|
||||||
|
|
||||||
|
File file = new File(configurationFile);
|
||||||
|
if(!file.exists()) {
|
||||||
|
System.err.println("Error - No configuration file found at: " + file.toString());
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
Configuration configuration = new Configuration(configurationFile);
|
||||||
|
Insights insights = new Insights(configuration);
|
||||||
|
try {
|
||||||
|
insights.run();
|
||||||
|
} catch (InterruptedException e) {
|
||||||
|
log.error(e.getMessage());
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
326
src/main/java/biz/nellemann/hmci/ManagedSystem.java
Normal file
326
src/main/java/biz/nellemann/hmci/ManagedSystem.java
Normal file
|
@ -0,0 +1,326 @@
|
||||||
|
/*
|
||||||
|
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package biz.nellemann.hmci;
|
||||||
|
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
class ManagedSystem extends MetaSystem {
|
||||||
|
|
||||||
|
private final static Logger log = LoggerFactory.getLogger(ManagedSystem.class);
|
||||||
|
|
||||||
|
public final String hmcId;
|
||||||
|
public final String id;
|
||||||
|
public final String name;
|
||||||
|
public final String type;
|
||||||
|
public final String model;
|
||||||
|
public final String serialNumber;
|
||||||
|
|
||||||
|
|
||||||
|
ManagedSystem(String hmcId, String id, String name, String type, String model, String serialNumber) {
|
||||||
|
this.hmcId = hmcId;
|
||||||
|
this.id = id;
|
||||||
|
this.name = name;
|
||||||
|
this.type = type;
|
||||||
|
this.model = model;
|
||||||
|
this.serialNumber = serialNumber;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String toString() {
|
||||||
|
return String.format("[%s] %s (%s-%s %s)", id, name, type, model, serialNumber);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
List<Measurement> getMemoryMetrics() {
|
||||||
|
|
||||||
|
List<Measurement> list = new ArrayList<>();
|
||||||
|
//Map<String, Map> map = new HashMap<String, Map>()
|
||||||
|
|
||||||
|
HashMap<String, String> tagsMap = new HashMap<String, String>() {
|
||||||
|
{
|
||||||
|
put("system", name);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("tags", tagsMap)
|
||||||
|
log.debug("getMemoryMetrics() - tags: " + tagsMap.toString());
|
||||||
|
|
||||||
|
Map<String, Number> fieldsMap = new HashMap<String, Number>() {
|
||||||
|
{
|
||||||
|
put("totalMem", metrics.systemUtil.sample.serverUtil.memory.totalMem);
|
||||||
|
put("availableMem", metrics.systemUtil.sample.serverUtil.memory.availableMem);
|
||||||
|
put("configurableMem", metrics.systemUtil.sample.serverUtil.memory.configurableMem);
|
||||||
|
put("assignedMemToLpars", metrics.systemUtil.sample.serverUtil.memory.assignedMemToLpars);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("fields", fieldsMap)
|
||||||
|
log.debug("getMemoryMetrics() - fields: " + fieldsMap.toString());
|
||||||
|
|
||||||
|
Measurement measurement = new Measurement(tagsMap, fieldsMap);
|
||||||
|
list.add(measurement);
|
||||||
|
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
List<Measurement> getProcessorMetrics() {
|
||||||
|
|
||||||
|
List<Measurement> list = new ArrayList<>();
|
||||||
|
//Map<String, Map> map = new HashMap<>()
|
||||||
|
|
||||||
|
HashMap<String, String> tagsMap = new HashMap<String, String>() {
|
||||||
|
{
|
||||||
|
put("system", name);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("tags", tagsMap)
|
||||||
|
//measurement.tags = tagsMap;
|
||||||
|
log.debug("getProcessorMetrics() - tags: " + tagsMap.toString());
|
||||||
|
|
||||||
|
HashMap<String, Number> fieldsMap = new HashMap<String, Number>() {
|
||||||
|
{
|
||||||
|
put("totalProcUnits", metrics.systemUtil.sample.serverUtil.processor.totalProcUnits);
|
||||||
|
put("utilizedProcUnits", metrics.systemUtil.sample.serverUtil.processor.utilizedProcUnits);
|
||||||
|
put("availableProcUnits", metrics.systemUtil.sample.serverUtil.processor.availableProcUnits);
|
||||||
|
put("configurableProcUnits", metrics.systemUtil.sample.serverUtil.processor.configurableProcUnits);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
//map.put("fields", fieldsMap)
|
||||||
|
//measurement.fields = fieldsMap;
|
||||||
|
log.debug("getProcessorMetrics() - fields: " + fieldsMap.toString());
|
||||||
|
|
||||||
|
Measurement measurement = new Measurement(tagsMap, fieldsMap);
|
||||||
|
list.add(measurement);
|
||||||
|
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
List<Measurement> getSharedProcessorPools() {
|
||||||
|
|
||||||
|
List<Measurement> list = new ArrayList<>();
|
||||||
|
metrics.systemUtil.sample.serverUtil.sharedProcessorPool.forEach(adapter -> {
|
||||||
|
//Map<String, Map> map = new HashMap<String, Map>()
|
||||||
|
|
||||||
|
HashMap<String, String> tagsMap = new HashMap<String, String>() {
|
||||||
|
{
|
||||||
|
put("system", name);
|
||||||
|
put("pool", adapter.name);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("tags", tagsMap)
|
||||||
|
log.debug("getSharedProcessorPools() - tags: " + tagsMap.toString());
|
||||||
|
|
||||||
|
HashMap<String, Number> fieldsMap = new HashMap<String, Number>() {
|
||||||
|
{
|
||||||
|
put("assignedProcUnits", adapter.assignedProcUnits);
|
||||||
|
put("availableProcUnits", adapter.availableProcUnits);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("fields", fieldsMap)
|
||||||
|
log.debug("getSharedProcessorPools() - fields: " + fieldsMap.toString());
|
||||||
|
|
||||||
|
Measurement measurement = new Measurement(tagsMap, fieldsMap);
|
||||||
|
list.add(measurement);
|
||||||
|
});
|
||||||
|
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
List<Measurement> getSystemSharedAdapters() {
|
||||||
|
|
||||||
|
List<Measurement> list = new ArrayList<>();
|
||||||
|
metrics.systemUtil.sample.viosUtil.forEach(vios -> {
|
||||||
|
|
||||||
|
vios.network.sharedAdapters.forEach(adapter -> {
|
||||||
|
//Map<String, Map> map = new HashMap<String, Map>()
|
||||||
|
Measurement measurement = new Measurement();
|
||||||
|
|
||||||
|
HashMap<String, String> tagsMap = new HashMap<String, String>() {
|
||||||
|
{
|
||||||
|
put("system", name);
|
||||||
|
put("type", adapter.type);
|
||||||
|
put("vios", vios.name);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("tags", tagsMap)
|
||||||
|
measurement.tags = tagsMap;
|
||||||
|
log.debug("getSystemSharedAdapters() - tags: " + tagsMap.toString());
|
||||||
|
|
||||||
|
HashMap<String, Number> fieldsMap = new HashMap<String, Number>() {
|
||||||
|
{
|
||||||
|
put("sentBytes", adapter.sentBytes);
|
||||||
|
put("receivedBytes", adapter.receivedBytes);
|
||||||
|
put("transferredBytes", adapter.transferredBytes);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
//map.put("fields", fieldsMap)
|
||||||
|
measurement.fields = fieldsMap;
|
||||||
|
log.debug("getSystemSharedAdapters() - fields: " + fieldsMap.toString());
|
||||||
|
|
||||||
|
list.add(measurement);
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
List<Measurement> getSystemFiberChannelAdapters() {
|
||||||
|
|
||||||
|
List<Measurement> list = new ArrayList<>();
|
||||||
|
metrics.systemUtil.sample.viosUtil.forEach( vios -> {
|
||||||
|
log.debug("getSystemFiberChannelAdapters() - VIOS: " + vios.name);
|
||||||
|
|
||||||
|
vios.storage.fiberChannelAdapters.forEach( adapter -> {
|
||||||
|
//HashMap<String, Map> map = new HashMap<>()
|
||||||
|
Measurement measurement = new Measurement();
|
||||||
|
|
||||||
|
HashMap<String, String> tagsMap = new HashMap<String, String>() {
|
||||||
|
{
|
||||||
|
put("id", adapter.id);
|
||||||
|
put("system", name);
|
||||||
|
put("wwpn", adapter.wwpn);
|
||||||
|
put("vios", vios.name);
|
||||||
|
put("device", adapter.physicalLocation);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("tags", tagsMap)
|
||||||
|
measurement.tags = tagsMap;
|
||||||
|
log.debug("getSystemFiberChannelAdapters() - tags: " + tagsMap.toString());
|
||||||
|
|
||||||
|
HashMap<String, Number> fieldsMap = new HashMap<String, Number>() {
|
||||||
|
{
|
||||||
|
put("writeBytes", adapter.writeBytes);
|
||||||
|
put("readBytes", adapter.readBytes);
|
||||||
|
put("transmittedBytes", adapter.transmittedBytes);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
//map.put("fields", fieldsMap)
|
||||||
|
measurement.fields = fieldsMap;
|
||||||
|
log.debug("getSystemFiberChannelAdapters() - fields: " + fieldsMap.toString());
|
||||||
|
|
||||||
|
list.add(measurement);
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
List<Measurement> getSystemGenericPhysicalAdapters() {
|
||||||
|
|
||||||
|
List<Measurement> list = new ArrayList<>();
|
||||||
|
|
||||||
|
metrics.systemUtil.sample.viosUtil.forEach( vios -> {
|
||||||
|
|
||||||
|
vios.storage.genericPhysicalAdapters.forEach( adapter -> {
|
||||||
|
//Map<String, Map> map = new HashMap<String, Map>()
|
||||||
|
Measurement measurement = new Measurement();
|
||||||
|
|
||||||
|
HashMap<String, String> tagsMap = new HashMap<String, String>() {
|
||||||
|
{
|
||||||
|
put("id", adapter.id);
|
||||||
|
put("system", name);
|
||||||
|
put("vios", vios.name);
|
||||||
|
put("device", adapter.physicalLocation);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("tags", tagsMap)
|
||||||
|
measurement.tags = tagsMap;
|
||||||
|
log.debug("getSystemGenericPhysicalAdapters() - tags: " + tagsMap.toString());
|
||||||
|
|
||||||
|
HashMap<String, Number> fieldsMap = new HashMap<String, Number>() {
|
||||||
|
{
|
||||||
|
put("writeBytes", adapter.writeBytes);
|
||||||
|
put("readBytes", adapter.readBytes);
|
||||||
|
put("transmittedBytes", adapter.transmittedBytes);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("fields", fieldsMap)
|
||||||
|
measurement.fields = fieldsMap;
|
||||||
|
log.debug("getSystemGenericPhysicalAdapters() - fields: " + fieldsMap.toString());
|
||||||
|
|
||||||
|
list.add(measurement);
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
List<Measurement> getSystemGenericVirtualAdapters() {
|
||||||
|
|
||||||
|
List<Measurement> list = new ArrayList<>();
|
||||||
|
|
||||||
|
metrics.systemUtil.sample.viosUtil.forEach( vios -> {
|
||||||
|
|
||||||
|
vios.storage.genericVirtualAdapters.forEach( adapter -> {
|
||||||
|
|
||||||
|
//Map<String, Map> map = new HashMap<String, Map>()
|
||||||
|
Measurement measurement = new Measurement();
|
||||||
|
|
||||||
|
HashMap<String, String> tagsMap = new HashMap<String, String>() {
|
||||||
|
{
|
||||||
|
put("id", adapter.id);
|
||||||
|
put("system", name);
|
||||||
|
put("vios", vios.name);
|
||||||
|
put("device", adapter.physicalLocation);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("tags", tagsMap)
|
||||||
|
measurement.tags = tagsMap;
|
||||||
|
log.debug("getSystemGenericVirtualAdapters() - tags: " + tagsMap.toString());
|
||||||
|
|
||||||
|
HashMap<String, Number> fieldsMap = new HashMap<String, Number>() {
|
||||||
|
{
|
||||||
|
put("writeBytes", adapter.writeBytes);
|
||||||
|
put("readBytes", adapter.readBytes);
|
||||||
|
put("transmittedBytes", adapter.transmittedBytes);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
//map.put("fields", fieldsMap)
|
||||||
|
measurement.fields = fieldsMap;
|
||||||
|
log.debug("getSystemGenericVirtualAdapters() - fields: " + fieldsMap.toString());
|
||||||
|
|
||||||
|
list.add(measurement);
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
17
src/main/java/biz/nellemann/hmci/Measurement.java
Normal file
17
src/main/java/biz/nellemann/hmci/Measurement.java
Normal file
|
@ -0,0 +1,17 @@
|
||||||
|
package biz.nellemann.hmci;
|
||||||
|
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
public class Measurement {
|
||||||
|
|
||||||
|
Map<String, String> tags;
|
||||||
|
Map<String, Number> fields;
|
||||||
|
|
||||||
|
Measurement() {
|
||||||
|
}
|
||||||
|
|
||||||
|
Measurement(Map<String, String> tags, Map<String, Number> fields) {
|
||||||
|
this.tags = tags;
|
||||||
|
this.fields = fields;
|
||||||
|
}
|
||||||
|
}
|
108
src/main/java/biz/nellemann/hmci/MetaSystem.java
Normal file
108
src/main/java/biz/nellemann/hmci/MetaSystem.java
Normal file
|
@ -0,0 +1,108 @@
|
||||||
|
/*
|
||||||
|
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package biz.nellemann.hmci;
|
||||||
|
|
||||||
|
import biz.nellemann.hmci.pcm.PcmData;
|
||||||
|
import com.serjltt.moshi.adapters.FirstElement;
|
||||||
|
import com.squareup.moshi.FromJson;
|
||||||
|
import com.squareup.moshi.JsonAdapter;
|
||||||
|
import com.squareup.moshi.Moshi;
|
||||||
|
import com.squareup.moshi.ToJson;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import java.time.Instant;
|
||||||
|
import java.time.format.DateTimeFormatter;
|
||||||
|
import java.time.format.DateTimeParseException;
|
||||||
|
|
||||||
|
abstract class MetaSystem {
|
||||||
|
|
||||||
|
private final static Logger log = LoggerFactory.getLogger(MetaSystem.class);
|
||||||
|
|
||||||
|
private final JsonAdapter<PcmData> jsonAdapter;
|
||||||
|
|
||||||
|
protected PcmData metrics;
|
||||||
|
|
||||||
|
MetaSystem() {
|
||||||
|
try {
|
||||||
|
Moshi moshi = new Moshi.Builder().add(new NumberAdapter()).add(new BigDecimalAdapter()).add(FirstElement.ADAPTER_FACTORY).build();
|
||||||
|
jsonAdapter = moshi.adapter(PcmData.class);
|
||||||
|
} catch(Exception e) {
|
||||||
|
log.warn("MetaSystem() error", e);
|
||||||
|
throw new ExceptionInInitializerError(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//@CompileDynamic
|
||||||
|
void processMetrics(String json) {
|
||||||
|
|
||||||
|
try {
|
||||||
|
metrics = jsonAdapter.fromJson(json);
|
||||||
|
} catch(Exception e) {
|
||||||
|
log.warn("processMetrics() error", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Map pcmMap = new JsonSlurper().parseText(json) as Map
|
||||||
|
//metrics = new PcmData(pcmMap)
|
||||||
|
}
|
||||||
|
|
||||||
|
//@CompileDynamic
|
||||||
|
Instant getTimestamp() {
|
||||||
|
|
||||||
|
String timestamp = metrics.systemUtil.sample.sampleInfo.timeStamp;
|
||||||
|
Instant instant = Instant.now();
|
||||||
|
try {
|
||||||
|
log.debug("getTimeStamp() - PMC Timestamp: " + timestamp);
|
||||||
|
DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[XXX][X]");
|
||||||
|
instant = Instant.from(dateTimeFormatter.parse(timestamp));
|
||||||
|
log.debug("getTimestamp() - Instant: " + instant.toString());
|
||||||
|
} catch(DateTimeParseException e) {
|
||||||
|
log.warn("getTimestamp() - parse error: " + timestamp);
|
||||||
|
}
|
||||||
|
|
||||||
|
return instant;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
static class BigDecimalAdapter {
|
||||||
|
|
||||||
|
@FromJson
|
||||||
|
BigDecimal fromJson(String string) {
|
||||||
|
return new BigDecimal(string);
|
||||||
|
}
|
||||||
|
|
||||||
|
@ToJson
|
||||||
|
String toJson(BigDecimal value) {
|
||||||
|
return value.toString();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static class NumberAdapter {
|
||||||
|
|
||||||
|
@FromJson
|
||||||
|
Number fromJson(String string) {
|
||||||
|
return new Double(string);
|
||||||
|
}
|
||||||
|
|
||||||
|
@ToJson
|
||||||
|
String toJson(Number value) {
|
||||||
|
return value.toString();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
19
src/main/java/biz/nellemann/hmci/VersionProvider.java
Normal file
19
src/main/java/biz/nellemann/hmci/VersionProvider.java
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
package biz.nellemann.hmci;
|
||||||
|
|
||||||
|
import picocli.CommandLine;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.util.jar.Attributes;
|
||||||
|
import java.util.jar.Manifest;
|
||||||
|
|
||||||
|
class VersionProvider implements CommandLine.IVersionProvider {
|
||||||
|
|
||||||
|
public String[] getVersion() throws IOException {
|
||||||
|
|
||||||
|
Manifest manifest = new Manifest(getClass().getResourceAsStream("/META-INF/MANIFEST.MF"));
|
||||||
|
Attributes attrs = manifest.getMainAttributes();
|
||||||
|
|
||||||
|
return new String[] { "${COMMAND-FULL-NAME} " + attrs.getValue("Build-Version") };
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,30 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import com.serjltt.moshi.adapters.FirstElement;
|
||||||
|
|
||||||
|
public class FiberChannelAdapter {
|
||||||
|
|
||||||
|
public String id;
|
||||||
|
public String wwpn;
|
||||||
|
public String physicalLocation;
|
||||||
|
public Integer numOfPorts;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number numOfReads;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number numOfWrites;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number readBytes;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number writeBytes;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number runningSpeed;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number transmittedBytes;
|
||||||
|
|
||||||
|
}
|
17
src/main/java/biz/nellemann/hmci/pcm/GenericAdapter.java
Normal file
17
src/main/java/biz/nellemann/hmci/pcm/GenericAdapter.java
Normal file
|
@ -0,0 +1,17 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
public class GenericAdapter {
|
||||||
|
|
||||||
|
public String id;
|
||||||
|
public String type;
|
||||||
|
public String physicalLocation;
|
||||||
|
public List<Number> receivedPackets;
|
||||||
|
public List<Number> sentPackets;
|
||||||
|
public List<Number> droppedPackets;
|
||||||
|
public List<Number> sentBytes;
|
||||||
|
public List<Number> receivedBytes;
|
||||||
|
public List<Number> transferredBytes;
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,26 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import com.serjltt.moshi.adapters.FirstElement;
|
||||||
|
|
||||||
|
public class GenericPhysicalAdapters {
|
||||||
|
|
||||||
|
public String id;
|
||||||
|
public String type;
|
||||||
|
public String physicalLocation;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number numOfReads;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number numOfWrites;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number readBytes;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number writeBytes;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number transmittedBytes;
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,28 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import com.serjltt.moshi.adapters.FirstElement;
|
||||||
|
|
||||||
|
|
||||||
|
public class GenericVirtualAdapter {
|
||||||
|
|
||||||
|
public String id;
|
||||||
|
public String type;
|
||||||
|
public Integer viosId;
|
||||||
|
public String physicalLocation;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number numOfReads;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number numOfWrites;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number readBytes;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number writeBytes;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number transmittedBytes;
|
||||||
|
|
||||||
|
}
|
13
src/main/java/biz/nellemann/hmci/pcm/LparMemory.java
Normal file
13
src/main/java/biz/nellemann/hmci/pcm/LparMemory.java
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import com.serjltt.moshi.adapters.FirstElement;
|
||||||
|
|
||||||
|
public class LparMemory {
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number logicalMem;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number backedPhysicalMem;
|
||||||
|
|
||||||
|
}
|
44
src/main/java/biz/nellemann/hmci/pcm/LparProcessor.java
Normal file
44
src/main/java/biz/nellemann/hmci/pcm/LparProcessor.java
Normal file
|
@ -0,0 +1,44 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import com.serjltt.moshi.adapters.FirstElement;
|
||||||
|
|
||||||
|
public class LparProcessor {
|
||||||
|
|
||||||
|
public Integer poolId;
|
||||||
|
public Integer weight;
|
||||||
|
public String mode;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number maxVirtualProcessors;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number currentVirtualProcessors;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number maxProcUnits;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number entitledProcUnits;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number utilizedProcUnits;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number utilizedCappedProcUnits;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number utilizedUncappedProcUnits;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number idleProcUnits;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number donatedProcUnits;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number timeSpentWaitingForDispatch;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number timePerInstructionExecution;
|
||||||
|
|
||||||
|
}
|
18
src/main/java/biz/nellemann/hmci/pcm/LparUtil.java
Normal file
18
src/main/java/biz/nellemann/hmci/pcm/LparUtil.java
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
public class LparUtil {
|
||||||
|
|
||||||
|
public Integer id;
|
||||||
|
public String uuid;
|
||||||
|
public String name;
|
||||||
|
public String state;
|
||||||
|
public String type;
|
||||||
|
public String osType;
|
||||||
|
public Number affinityScore;
|
||||||
|
|
||||||
|
public LparMemory memory;
|
||||||
|
public LparProcessor processor;
|
||||||
|
public Network network = new Network();
|
||||||
|
public Storage storage = new Storage();
|
||||||
|
|
||||||
|
}
|
12
src/main/java/biz/nellemann/hmci/pcm/Network.java
Normal file
12
src/main/java/biz/nellemann/hmci/pcm/Network.java
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
public class Network {
|
||||||
|
|
||||||
|
public List<GenericAdapter> genericAdapters = new ArrayList<>();
|
||||||
|
public List<SharedAdapter> sharedAdapters = new ArrayList<>();
|
||||||
|
public List<VirtualEthernetAdapter> virtualEthernetAdapters = new ArrayList<>();
|
||||||
|
|
||||||
|
}
|
7
src/main/java/biz/nellemann/hmci/pcm/PcmData.java
Normal file
7
src/main/java/biz/nellemann/hmci/pcm/PcmData.java
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
public class PcmData {
|
||||||
|
|
||||||
|
public SystemUtil systemUtil;
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,13 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
public class PhysicalProcessorPool {
|
||||||
|
|
||||||
|
public List<Number> assignedProcUnits;
|
||||||
|
public List<Number> utilizedProcUnits;
|
||||||
|
public List<Number> availableProcUnits;
|
||||||
|
public List<Number> configuredProcUnits;
|
||||||
|
public List<Number> borrowedProcUnits;
|
||||||
|
|
||||||
|
}
|
8
src/main/java/biz/nellemann/hmci/pcm/SampleInfo.java
Normal file
8
src/main/java/biz/nellemann/hmci/pcm/SampleInfo.java
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
/**
 * DTO for the "sampleInfo" element of a PCM sample; carries the sample
 * timestamp (parsed by MetaSystem.getTimestamp()) and a status code.
 */
public class SampleInfo {

    public String timeStamp;
    public Integer status;

}
|
19
src/main/java/biz/nellemann/hmci/pcm/ServerMemory.java
Normal file
19
src/main/java/biz/nellemann/hmci/pcm/ServerMemory.java
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import com.serjltt.moshi.adapters.FirstElement;
|
||||||
|
|
||||||
|
public class ServerMemory {
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number totalMem;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number availableMem;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number configurableMem;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number assignedMemToLpars;
|
||||||
|
|
||||||
|
}
|
19
src/main/java/biz/nellemann/hmci/pcm/ServerProcessor.java
Normal file
19
src/main/java/biz/nellemann/hmci/pcm/ServerProcessor.java
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import com.serjltt.moshi.adapters.FirstElement;
|
||||||
|
|
||||||
|
public class ServerProcessor {
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number totalProcUnits;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number utilizedProcUnits;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number availableProcUnits;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number configurableProcUnits;
|
||||||
|
|
||||||
|
}
|
12
src/main/java/biz/nellemann/hmci/pcm/ServerUtil.java
Normal file
12
src/main/java/biz/nellemann/hmci/pcm/ServerUtil.java
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
public class ServerUtil {
|
||||||
|
|
||||||
|
public ServerProcessor processor;
|
||||||
|
public ServerMemory memory;
|
||||||
|
public PhysicalProcessorPool physicalProcessorPool;
|
||||||
|
public List<SharedProcessorPool> sharedProcessorPool;
|
||||||
|
|
||||||
|
}
|
32
src/main/java/biz/nellemann/hmci/pcm/SharedAdapter.java
Normal file
32
src/main/java/biz/nellemann/hmci/pcm/SharedAdapter.java
Normal file
|
@ -0,0 +1,32 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import com.serjltt.moshi.adapters.FirstElement;
|
||||||
|
|
||||||
|
public class SharedAdapter {
|
||||||
|
|
||||||
|
public String id;
|
||||||
|
public String type;
|
||||||
|
public String physicalLocation;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number receivedPackets;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number sentPackets;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number droppedPackets;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number sentBytes;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number receivedBytes;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number transferredBytes;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public String bridgedAdapters;
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,25 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import com.serjltt.moshi.adapters.FirstElement;
|
||||||
|
|
||||||
|
public class SharedProcessorPool {
|
||||||
|
|
||||||
|
public String id;
|
||||||
|
public String name;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number assignedProcUnits;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number utilizedProcUnits;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number availableProcUnits;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number configuredProcUnits;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number borrowedProcUnits;
|
||||||
|
|
||||||
|
}
|
14
src/main/java/biz/nellemann/hmci/pcm/Storage.java
Normal file
14
src/main/java/biz/nellemann/hmci/pcm/Storage.java
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
public class Storage {
|
||||||
|
|
||||||
|
public List<String> clientLpars = new ArrayList<>();
|
||||||
|
public List<GenericPhysicalAdapters> genericPhysicalAdapters = new ArrayList<>();
|
||||||
|
public List<GenericVirtualAdapter> genericVirtualAdapters = new ArrayList<>();
|
||||||
|
public List<FiberChannelAdapter> fiberChannelAdapters = new ArrayList<>();
|
||||||
|
public List<VirtualFiberChannelAdapter> virtualFiberChannelAdapters = new ArrayList<>();
|
||||||
|
|
||||||
|
}
|
14
src/main/java/biz/nellemann/hmci/pcm/SystemUtil.java
Normal file
14
src/main/java/biz/nellemann/hmci/pcm/SystemUtil.java
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import com.serjltt.moshi.adapters.FirstElement;
|
||||||
|
import com.squareup.moshi.Json;
|
||||||
|
|
||||||
|
public class SystemUtil {
|
||||||
|
|
||||||
|
public UtilInfo utilInfo;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
@Json(name = "utilSamples")
|
||||||
|
public UtilSample sample;
|
||||||
|
|
||||||
|
}
|
17
src/main/java/biz/nellemann/hmci/pcm/UtilInfo.java
Normal file
17
src/main/java/biz/nellemann/hmci/pcm/UtilInfo.java
Normal file
|
@ -0,0 +1,17 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
public class UtilInfo {
|
||||||
|
|
||||||
|
public String version;
|
||||||
|
public String metricType;
|
||||||
|
public Integer frequency;
|
||||||
|
public String startTimeStamp;
|
||||||
|
public String endTimeStamp;
|
||||||
|
public String mtms;
|
||||||
|
public String name;
|
||||||
|
public String uuid;
|
||||||
|
public List<String> metricArrayOrder;
|
||||||
|
|
||||||
|
}
|
28
src/main/java/biz/nellemann/hmci/pcm/UtilSample.java
Normal file
28
src/main/java/biz/nellemann/hmci/pcm/UtilSample.java
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import com.serjltt.moshi.adapters.FirstElement;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
public class UtilSample {
|
||||||
|
|
||||||
|
public String sampleType;
|
||||||
|
public SampleInfo sampleInfo;
|
||||||
|
public ServerUtil serverUtil;
|
||||||
|
public List<ViosUtil> viosUtil = new ArrayList<>();
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public LparUtil lparsUtil;
|
||||||
|
|
||||||
|
/*
|
||||||
|
public LparUtil getLparsUtil() {
|
||||||
|
if(lparsUtil == null || lparsUtil.isEmpty()) {
|
||||||
|
return new LparUtil();
|
||||||
|
} else {
|
||||||
|
return lparsUtil.get(0);
|
||||||
|
}
|
||||||
|
}*/
|
||||||
|
|
||||||
|
|
||||||
|
}
|
13
src/main/java/biz/nellemann/hmci/pcm/ViosMemory.java
Normal file
13
src/main/java/biz/nellemann/hmci/pcm/ViosMemory.java
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import com.serjltt.moshi.adapters.FirstElement;
|
||||||
|
|
||||||
|
public class ViosMemory {
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number assignedMem;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number utilizedMem;
|
||||||
|
|
||||||
|
}
|
16
src/main/java/biz/nellemann/hmci/pcm/ViosUtil.java
Normal file
16
src/main/java/biz/nellemann/hmci/pcm/ViosUtil.java
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
public class ViosUtil {
|
||||||
|
|
||||||
|
public String id;
|
||||||
|
public String uuid;
|
||||||
|
public String name;
|
||||||
|
public String state;
|
||||||
|
public Integer affinityScore;
|
||||||
|
|
||||||
|
public ViosMemory memory;
|
||||||
|
public LparProcessor processor;
|
||||||
|
public Network network;
|
||||||
|
public Storage storage;
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,50 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import com.serjltt.moshi.adapters.FirstElement;
|
||||||
|
|
||||||
|
public class VirtualEthernetAdapter {
|
||||||
|
|
||||||
|
public String physicalLocation;
|
||||||
|
public Integer vlanId;
|
||||||
|
public Integer vswitchId;
|
||||||
|
public Boolean isPortVlanId;
|
||||||
|
public Integer viosId;
|
||||||
|
public String sharedEthernetAdapterId;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number receivedPackets;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number sentPackets;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number droppedPackets;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number sentBytes;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number receivedBytes;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number receivedPhysicalPackets;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number sentPhysicalPackets;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number droppedPhysicalPackets;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number sentPhysicalBytes;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number receivedPhysicalBytes;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number transferredBytes;
|
||||||
|
|
||||||
|
@FirstElement
|
||||||
|
public Number transferredPhysicalBytes;
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,19 @@
|
||||||
|
package biz.nellemann.hmci.pcm;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
public class VirtualFiberChannelAdapter {
|
||||||
|
|
||||||
|
public String wwpn;
|
||||||
|
public String wwpn2;
|
||||||
|
public String physicalLocation;
|
||||||
|
public String physicalPortWWPN;
|
||||||
|
public Integer viosId;
|
||||||
|
public List<Number> numOfReads;
|
||||||
|
public List<Number> numOfWrites;
|
||||||
|
public List<Number> readBytes;
|
||||||
|
public List<Number> writeBytes;
|
||||||
|
public List<Number> runningSpeed;
|
||||||
|
public List<Number> transmittedBytes;
|
||||||
|
|
||||||
|
}
|
|
@ -3,7 +3,7 @@
|
||||||
|
|
||||||
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
|
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
|
||||||
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
|
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
|
||||||
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
|
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{16} - %msg%n</pattern>
|
||||||
</encoder>
|
</encoder>
|
||||||
</appender>
|
</appender>
|
||||||
|
|
||||||
|
|
30
src/test/groovy/biz/nellemann/hmci/ConfigurationTest.groovy
Normal file
30
src/test/groovy/biz/nellemann/hmci/ConfigurationTest.groovy
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
package biz.nellemann.hmci
|
||||||
|
|
||||||
|
import spock.lang.Specification
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigurationTest extends Specification {
|
||||||
|
|
||||||
|
String testConfigurationFile = new File(getClass().getResource('/hmci.toml').toURI()).absolutePath
|
||||||
|
|
||||||
|
void "test parsing"() {
|
||||||
|
|
||||||
|
when:
|
||||||
|
Configuration conf = new Configuration(testConfigurationFile)
|
||||||
|
|
||||||
|
then:
|
||||||
|
conf != null
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
void "test lookup influx"() {
|
||||||
|
|
||||||
|
when:
|
||||||
|
Configuration conf = new Configuration(testConfigurationFile)
|
||||||
|
|
||||||
|
then:
|
||||||
|
conf != null
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -7,12 +7,17 @@ import spock.lang.Specification
|
||||||
class HmcClientTest extends Specification {
|
class HmcClientTest extends Specification {
|
||||||
|
|
||||||
HmcClient hmc
|
HmcClient hmc
|
||||||
MockWebServer mockServer = new MockWebServer();
|
MockWebServer mockServer = new MockWebServer()
|
||||||
|
|
||||||
|
|
||||||
def setup() {
|
def setup() {
|
||||||
mockServer.start();
|
mockServer.start()
|
||||||
hmc = new HmcClient("site", mockServer.url("/").toString(), "testUser", "testPassword")
|
Configuration.HmcObject configHmc = new Configuration.HmcObject()
|
||||||
|
configHmc.name = "site1"
|
||||||
|
configHmc.url = mockServer.url("/").toString()
|
||||||
|
configHmc.username = "testUser"
|
||||||
|
configHmc.password = "testPassword"
|
||||||
|
hmc = new HmcClient(configHmc)
|
||||||
hmc.authToken = "blaBla"
|
hmc.authToken = "blaBla"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -24,7 +29,7 @@ class HmcClientTest extends Specification {
|
||||||
void "test against empty xml"() {
|
void "test against empty xml"() {
|
||||||
setup:
|
setup:
|
||||||
def testXml = ""
|
def testXml = ""
|
||||||
mockServer.enqueue(new MockResponse().setBody(testXml));
|
mockServer.enqueue(new MockResponse().setBody(testXml))
|
||||||
|
|
||||||
when:
|
when:
|
||||||
Map<String, ManagedSystem> systems = hmc.getManagedSystems()
|
Map<String, ManagedSystem> systems = hmc.getManagedSystems()
|
||||||
|
@ -38,7 +43,7 @@ class HmcClientTest extends Specification {
|
||||||
setup:
|
setup:
|
||||||
def testFile = new File(getClass().getResource('/managed-systems.xml').toURI())
|
def testFile = new File(getClass().getResource('/managed-systems.xml').toURI())
|
||||||
def testXml = testFile.getText('UTF-8')
|
def testXml = testFile.getText('UTF-8')
|
||||||
mockServer.enqueue(new MockResponse().setBody(testXml));
|
mockServer.enqueue(new MockResponse().setBody(testXml))
|
||||||
|
|
||||||
when:
|
when:
|
||||||
Map<String, ManagedSystem> systems = hmc.getManagedSystems()
|
Map<String, ManagedSystem> systems = hmc.getManagedSystems()
|
||||||
|
@ -53,7 +58,7 @@ class HmcClientTest extends Specification {
|
||||||
setup:
|
setup:
|
||||||
def testFile = new File(getClass().getResource('/logical-partitions.xml').toURI())
|
def testFile = new File(getClass().getResource('/logical-partitions.xml').toURI())
|
||||||
def testXml = testFile.getText('UTF-8')
|
def testXml = testFile.getText('UTF-8')
|
||||||
mockServer.enqueue(new MockResponse().setBody(testXml));
|
mockServer.enqueue(new MockResponse().setBody(testXml))
|
||||||
|
|
||||||
when:
|
when:
|
||||||
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
|
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
|
||||||
|
@ -70,7 +75,7 @@ class HmcClientTest extends Specification {
|
||||||
setup:
|
setup:
|
||||||
def testFile = new File(getClass().getResource('/pcm-data-managed-system.json').toURI())
|
def testFile = new File(getClass().getResource('/pcm-data-managed-system.json').toURI())
|
||||||
def testJson = testFile.getText('UTF-8')
|
def testJson = testFile.getText('UTF-8')
|
||||||
mockServer.enqueue(new MockResponse().setBody(testJson));
|
mockServer.enqueue(new MockResponse().setBody(testJson))
|
||||||
|
|
||||||
when:
|
when:
|
||||||
String jsonString = hmc.getResponseBody(new URL(mockServer.url("/rest/api/pcm/ProcessedMetrics/ManagedSystem_e09834d1-c930-3883-bdad-405d8e26e166_20200807T122600+0200_20200807T122600+0200_30.json") as String))
|
String jsonString = hmc.getResponseBody(new URL(mockServer.url("/rest/api/pcm/ProcessedMetrics/ManagedSystem_e09834d1-c930-3883-bdad-405d8e26e166_20200807T122600+0200_20200807T122600+0200_30.json") as String))
|
||||||
|
@ -84,7 +89,7 @@ class HmcClientTest extends Specification {
|
||||||
setup:
|
setup:
|
||||||
def testFile = new File(getClass().getResource('/pcm-data-logical-partition.json').toURI())
|
def testFile = new File(getClass().getResource('/pcm-data-logical-partition.json').toURI())
|
||||||
def testJson = testFile.getText('UTF-8')
|
def testJson = testFile.getText('UTF-8')
|
||||||
mockServer.enqueue(new MockResponse().setBody(testJson));
|
mockServer.enqueue(new MockResponse().setBody(testJson))
|
||||||
|
|
||||||
when:
|
when:
|
||||||
String jsonString = hmc.getResponseBody(new URL(mockServer.url("/rest/api/pcm/ProcessedMetrics/LogicalPartition_2DE05DB6-8AD5-448F-8327-0F488D287E82_20200807T123730+0200_20200807T123730+0200_30.json") as String))
|
String jsonString = hmc.getResponseBody(new URL(mockServer.url("/rest/api/pcm/ProcessedMetrics/LogicalPartition_2DE05DB6-8AD5-448F-8327-0F488D287E82_20200807T123730+0200_20200807T123730+0200_30.json") as String))
|
||||||
|
|
|
@ -9,7 +9,7 @@ class InfluxClientTest extends Specification {
|
||||||
InfluxClient influxClient
|
InfluxClient influxClient
|
||||||
|
|
||||||
def setup() {
|
def setup() {
|
||||||
influxClient = new InfluxClient("http://localhost:8086", "root", "", "hmci")
|
influxClient = new InfluxClient(new Configuration.InfluxObject("http://localhost:8086", "root", "", "hmci"))
|
||||||
influxClient.login()
|
influxClient.login()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -5,6 +5,6 @@ package biz.nellemann.hmci
|
||||||
|
|
||||||
import spock.lang.Specification
|
import spock.lang.Specification
|
||||||
|
|
||||||
class AppTest extends Specification {
|
class InsightsTest extends Specification {
|
||||||
|
|
||||||
}
|
}
|
|
@ -17,9 +17,9 @@ class LogicalPartitionTest extends Specification {
|
||||||
lpar.processMetrics(testJson)
|
lpar.processMetrics(testJson)
|
||||||
|
|
||||||
then:
|
then:
|
||||||
lpar.metrics.systemUtil.utilSamples.first().lparsUtil.first().memory.logicalMem.first() == 8192.000
|
lpar.metrics.systemUtil.sample.lparsUtil.memory.logicalMem == 8192.000
|
||||||
lpar.metrics.systemUtil.utilSamples.first().lparsUtil.first().processor.utilizedProcUnits.first() == 0.001
|
lpar.metrics.systemUtil.sample.lparsUtil.processor.utilizedProcUnits == 0.001
|
||||||
lpar.metrics.systemUtil.utilSamples.first().lparsUtil.first().network.virtualEthernetAdapters.first().receivedBytes.first() == 276.467
|
lpar.metrics.systemUtil.sample.lparsUtil.network.virtualEthernetAdapters.first().receivedBytes == 276.467
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -34,12 +34,12 @@ class LogicalPartitionTest extends Specification {
|
||||||
|
|
||||||
when:
|
when:
|
||||||
lpar.processMetrics(testJson)
|
lpar.processMetrics(testJson)
|
||||||
List<Map> listOfMaps = lpar.getMemoryMetrics()
|
List<Measurement> listOfMeasurements = lpar.getMemoryMetrics()
|
||||||
|
|
||||||
then:
|
then:
|
||||||
listOfMaps.size() == 1
|
listOfMeasurements.size() == 1
|
||||||
listOfMaps.first().get("fields")['logicalMem'] == 8192.000
|
listOfMeasurements.first().fields['logicalMem'] == 8192.000
|
||||||
listOfMaps.first().get("tags")['partition'] == '9Flash01'
|
listOfMeasurements.first().tags['partition'] == '9Flash01'
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -53,12 +53,12 @@ class LogicalPartitionTest extends Specification {
|
||||||
|
|
||||||
when:
|
when:
|
||||||
lpar.processMetrics(testJson)
|
lpar.processMetrics(testJson)
|
||||||
List<Map> listOfMaps = lpar.getProcessorMetrics()
|
List<Measurement> listOfMeasurements = lpar.getProcessorMetrics()
|
||||||
|
|
||||||
then:
|
then:
|
||||||
listOfMaps.size() == 1
|
listOfMeasurements.size() == 1
|
||||||
listOfMaps.first().get("fields")['utilizedProcUnits'] == 0.001
|
listOfMeasurements.first().fields['utilizedProcUnits'] == 0.001
|
||||||
listOfMaps.first().get("tags")['partition'] == '9Flash01'
|
listOfMeasurements.first().tags['partition'] == '9Flash01'
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -72,12 +72,12 @@ class LogicalPartitionTest extends Specification {
|
||||||
|
|
||||||
when:
|
when:
|
||||||
lpar.processMetrics(testJson)
|
lpar.processMetrics(testJson)
|
||||||
List<Map> listOfMaps = lpar.getVirtualEthernetAdapterMetrics()
|
List<Measurement> listOfMeasurements = lpar.getVirtualEthernetAdapterMetrics()
|
||||||
|
|
||||||
then:
|
then:
|
||||||
listOfMaps.size() == 1
|
listOfMeasurements.size() == 1
|
||||||
listOfMaps.first().get("fields")['receivedBytes'] == 276.467
|
listOfMeasurements.first().fields['receivedBytes'] == 276.467
|
||||||
listOfMaps.first().get("tags")['sea'] == 'ent5'
|
listOfMeasurements.first().tags['sea'] == 'ent5'
|
||||||
}
|
}
|
||||||
|
|
||||||
void "test getVirtualFiberChannelAdaptersMetrics"() {
|
void "test getVirtualFiberChannelAdaptersMetrics"() {
|
||||||
|
@ -90,12 +90,12 @@ class LogicalPartitionTest extends Specification {
|
||||||
|
|
||||||
when:
|
when:
|
||||||
lpar.processMetrics(testJson)
|
lpar.processMetrics(testJson)
|
||||||
List<Map> listOfMaps = lpar.getVirtualFiberChannelAdaptersMetrics()
|
List<Measurement> listOfMeasurements = lpar.getVirtualFiberChannelAdaptersMetrics()
|
||||||
|
|
||||||
then:
|
then:
|
||||||
listOfMaps.size() == 4
|
listOfMeasurements.size() == 4
|
||||||
listOfMaps.first().get("fields")['writeBytes'] == 6690.133
|
listOfMeasurements.first().fields['writeBytes'] == 6690.133
|
||||||
listOfMaps.first().get("tags")['viosId'] == '1'
|
listOfMeasurements.first().tags['viosId'] == '1'
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -15,12 +15,12 @@ class ManagedSystemTest extends Specification {
|
||||||
system.processMetrics(testJson)
|
system.processMetrics(testJson)
|
||||||
|
|
||||||
then:
|
then:
|
||||||
system.metrics.systemUtil.utilSamples.first().serverUtil.memory.assignedMemToLpars.first() == 40960.000
|
system.metrics.systemUtil.sample.serverUtil.memory.assignedMemToLpars == 40960.000
|
||||||
system.metrics.systemUtil.utilSamples.first().serverUtil.processor.totalProcUnits.first() == 24.000
|
system.metrics.systemUtil.sample.serverUtil.processor.totalProcUnits == 24.000
|
||||||
system.metrics.systemUtil.utilSamples.first().viosUtil.first().name == "VIOS1"
|
system.metrics.systemUtil.sample.viosUtil.first().name == "VIOS1"
|
||||||
system.metrics.systemUtil.utilSamples.first().viosUtil.first().memory.assignedMem.first() == 8192.000
|
system.metrics.systemUtil.sample.viosUtil.first().memory.assignedMem == 8192.0
|
||||||
system.metrics.systemUtil.utilSamples.first().viosUtil.first().storage.genericPhysicalAdapters.first().transmittedBytes.first() == 9966.933
|
system.metrics.systemUtil.sample.viosUtil.first().storage.genericPhysicalAdapters.first().transmittedBytes == 9966.933
|
||||||
system.metrics.systemUtil.utilSamples.first().viosUtil.first().storage.fiberChannelAdapters.first().numOfPorts == 3
|
system.metrics.systemUtil.sample.viosUtil.first().storage.fiberChannelAdapters.first().numOfPorts == 3
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -33,11 +33,11 @@ class ManagedSystemTest extends Specification {
|
||||||
|
|
||||||
when:
|
when:
|
||||||
system.processMetrics(testJson)
|
system.processMetrics(testJson)
|
||||||
List<Map> listOfMaps = system.getMemoryMetrics()
|
List<Measurement> listOfMeasurements = system.getMemoryMetrics()
|
||||||
|
|
||||||
then:
|
then:
|
||||||
listOfMaps.size() == 1
|
listOfMeasurements.size() == 1
|
||||||
listOfMaps.first().get("fields")['totalMem'] == 1048576.000
|
listOfMeasurements.first().fields['totalMem'] == 1048576.000
|
||||||
}
|
}
|
||||||
|
|
||||||
void "test getProcessorMetrics"() {
|
void "test getProcessorMetrics"() {
|
||||||
|
@ -49,11 +49,11 @@ class ManagedSystemTest extends Specification {
|
||||||
|
|
||||||
when:
|
when:
|
||||||
system.processMetrics(testJson)
|
system.processMetrics(testJson)
|
||||||
List<Map> listOfMaps = system.getProcessorMetrics()
|
List<Measurement> listOfMeasurements = system.getProcessorMetrics()
|
||||||
|
|
||||||
then:
|
then:
|
||||||
listOfMaps.size() == 1
|
listOfMeasurements.size() == 1
|
||||||
listOfMaps.first().get("fields")['availableProcUnits'] == 16.000
|
listOfMeasurements.first().fields['availableProcUnits'] == 16.000
|
||||||
}
|
}
|
||||||
|
|
||||||
void "test getSystemSharedProcessorPools"() {
|
void "test getSystemSharedProcessorPools"() {
|
||||||
|
@ -65,11 +65,11 @@ class ManagedSystemTest extends Specification {
|
||||||
|
|
||||||
when:
|
when:
|
||||||
system.processMetrics(testJson)
|
system.processMetrics(testJson)
|
||||||
List<Map> listOfMaps = system.getSharedProcessorPools()
|
List<Measurement> listOfMeasurements = system.getSharedProcessorPools()
|
||||||
|
|
||||||
then:
|
then:
|
||||||
listOfMaps.size() == 1
|
listOfMeasurements.size() == 1
|
||||||
listOfMaps.first().get("fields")['assignedProcUnits'] == 23.767
|
listOfMeasurements.first().fields['assignedProcUnits'] == 23.767
|
||||||
}
|
}
|
||||||
|
|
||||||
void "test VIOS data"() {
|
void "test VIOS data"() {
|
||||||
|
@ -80,11 +80,11 @@ class ManagedSystemTest extends Specification {
|
||||||
|
|
||||||
when:
|
when:
|
||||||
system.processMetrics(testJson)
|
system.processMetrics(testJson)
|
||||||
List<Map> listOfMaps = system.getSharedProcessorPools()
|
List<Measurement> listOfMeasurements = system.getSharedProcessorPools()
|
||||||
|
|
||||||
then:
|
then:
|
||||||
listOfMaps.size() == 1
|
listOfMeasurements.size() == 1
|
||||||
listOfMaps.first().get("fields")['assignedProcUnits'] == 23.767
|
listOfMeasurements.first().fields['assignedProcUnits'] == 23.767
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
32
src/test/resources/hmci.toml
Normal file
32
src/test/resources/hmci.toml
Normal file
|
@ -0,0 +1,32 @@
|
||||||
|
# HMCi Configuration
|
||||||
|
|
||||||
|
# How often to query HMC's for data - in seconds
|
||||||
|
hmci.refresh = 30
|
||||||
|
|
||||||
|
# Rescan HMC's for new systems and partitions - every x refresh
|
||||||
|
hmci.rescan = 60
|
||||||
|
|
||||||
|
# InfluxDB to save metrics
|
||||||
|
[influx]
|
||||||
|
url = "http://localhost:8086"
|
||||||
|
username = "root"
|
||||||
|
password = ""
|
||||||
|
database = "hmci"
|
||||||
|
|
||||||
|
|
||||||
|
# One or more HMC's to query for data and metrics
|
||||||
|
[hmc]
|
||||||
|
|
||||||
|
# HMC on our primary site
|
||||||
|
[hmc.site1]
|
||||||
|
url = "https://10.10.10.10:12443"
|
||||||
|
username = "hmci"
|
||||||
|
password = "hmcihmci"
|
||||||
|
unsafe = true # Ignore SSL cert. errors
|
||||||
|
|
||||||
|
# Example
|
||||||
|
#[hmc.site2]
|
||||||
|
#url = "https://10.10.20.20:12443"
|
||||||
|
#username = "viewer"
|
||||||
|
#password = "someSecret"
|
||||||
|
#unsafe = false
|
Loading…
Reference in a new issue