Refactoring work for more Java compatibility and static compilation.

Mark Nellemann 2020-10-09 10:20:50 +02:00
parent c94b01aae9
commit 6a1742e72a
20 changed files with 837 additions and 526 deletions

View file

@@ -1,5 +1,4 @@
image: adoptopenjdk:8-openj9
#image: openjdk:8
image: openjdk:8
pipelines:
branches:

View file

@@ -1,8 +1,6 @@
plugins {
// Apply the groovy plugin to add support for Groovy
id 'java'
id 'groovy'
// Apply the application plugin to add support for building a CLI application.
id 'application'
// Code coverage of tests
@@ -18,6 +16,9 @@ repositories {
}
dependencies {
implementation 'info.picocli:picocli:4.5.1'
annotationProcessor 'info.picocli:picocli-codegen:4.5.1'
implementation 'org.tomlj:tomlj:1.0.0'
implementation 'org.codehaus.groovy:groovy-all:3.0.5'
implementation 'com.squareup.okhttp3:okhttp:4.8.0'
implementation 'org.influxdb:influxdb-java:2.19'
@@ -32,7 +33,7 @@ dependencies {
}
application {
mainClassName = 'biz.nellemann.hmci.App'
mainClassName = 'biz.nellemann.hmci.Main'
}
test {
@@ -68,7 +69,6 @@ ospackage {
buildRpm {
dependsOn startShadowScripts
//requires('java-1.8.0-openjdk-headless')
os = LINUX
}
@@ -77,7 +77,6 @@ buildDeb {
requires('default-jre-headless')
}
jacoco {
toolVersion = "0.8.5"
}
@@ -103,22 +102,16 @@ jacocoTestCoverageVerification {
}
check.dependsOn jacocoTestCoverageVerification
processResources.dependsOn.add("versionFile")
versionFile {
// Path to the file to be written
file = new File(project.buildDir, 'resources/main/version.properties')
}
jar {
manifest {
attributes(
'Built-By' : System.properties['user.name'],
'Build-Timestamp': new Date().format("yyyy-MM-dd'T'HH:mm:ss.SSSZ").toString(),
'Build-Revision' : versioning.info.commit,
'Created-By' : "Gradle ${gradle.gradleVersion}",
'Build-OS' : "${System.properties['os.name']} ${System.properties['os.arch']} ${System.properties['os.version']}",
'Build-Jdk' : "${System.properties['java.version']} (${System.properties['java.vendor']} ${System.properties['java.vm.version']})",
'Build-OS' : "${System.properties['os.name']} ${System.properties['os.arch']} ${System.properties['os.version']}"
'Build-User' : System.properties['user.name'],
'Build-Version' : versioning.info.tag ?: (versioning.info.branch + "-" + versioning.info.build),
'Build-Revision' : versioning.info.commit,
'Build-Timestamp': new Date().format("yyyy-MM-dd'T'HH:mm:ss.SSSZ").toString(),
)
}
}

View file

@@ -1,246 +0,0 @@
/**
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package biz.nellemann.hmci
import groovy.cli.picocli.CliBuilder
import groovy.cli.picocli.OptionAccessor
import groovy.util.logging.Slf4j
@Slf4j
class App implements Runnable {
final ConfigObject configuration
final Integer refreshEverySec
final Integer rescanHmcEvery
InfluxClient influxClient
Map<String, HmcClient> hmcClients = new HashMap<>()
Map<String,ManagedSystem> systems = new HashMap<String, ManagedSystem>()
Map<String, LogicalPartition> partitions = new HashMap<String, LogicalPartition>()
App(ConfigObject configuration) {
this.configuration = configuration
log.debug configuration.toString()
refreshEverySec = (Integer)configuration.get('hmci.refresh') ?: 60
rescanHmcEvery = (Integer)configuration.get('hmci.rescan') ?: 15
String influxUrl = configuration.get('influx')['url']
String influxUsername = configuration.get('influx')['username']
String influxPassword = configuration.get('influx')['password']
String influxDatabase = configuration.get('influx')['database']
try {
influxClient = new InfluxClient(influxUrl, influxUsername, influxPassword, influxDatabase)
influxClient.login()
} catch(Exception e) {
System.exit(1)
}
// Initial scan
discover()
run()
}
void discover() {
configuration.get('hmc').each { Object key, Object hmc ->
if(!hmcClients?.containsKey(key)) {
log.info("Adding HMC: " + hmc.toString())
String hmcKey = key
String hmcUrl = hmc['url']
String hmcUsername = hmc['username']
String hmcPassword = hmc['password']
Boolean hmcUnsafe = hmc['unsafe']
HmcClient hmcClient = new HmcClient(hmcKey, hmcUrl, hmcUsername, hmcPassword, hmcUnsafe)
hmcClients.put(hmcKey, hmcClient)
}
}
hmcClients.each { hmcId, hmcClient ->
try {
hmcClient.login()
hmcClient.getManagedSystems().each { systemId, system ->
// Add to list of known systems
systems.putIfAbsent(systemId, system)
// Get LPAR's for this system
hmcClient.getLogicalPartitionsForManagedSystem(system).each { partitionId, partition ->
// Add to list of known partitions
partitions.putIfAbsent(partitionId, partition)
}
}
} catch(Exception e) {
log.error("discover() - " + hmcId + " error: " + e.message)
//hmcClients.remove(hmcId)
}
}
}
void getMetricsForSystems() {
try {
systems.each {systemId, system ->
HmcClient hmcClient = hmcClients.get(system.hmcId)
// Get and process metrics for this system
String tmpJsonString = hmcClient.getPcmDataForManagedSystem(system)
if(tmpJsonString && !tmpJsonString.empty) {
system.processMetrics(tmpJsonString)
}
}
} catch(Exception e) {
log.error(e.message)
}
}
void getMetricsForPartitions() {
try {
// Get LPAR's for this system
partitions.each { partitionId, partition ->
HmcClient hmcClient = hmcClients.get(partition.system.hmcId)
// Get and process metrics for this partition
String tmpJsonString2 = hmcClient.getPcmDataForLogicalPartition(partition)
if(tmpJsonString2 && !tmpJsonString2.empty) {
partition.processMetrics(tmpJsonString2)
}
}
} catch(Exception e) {
log.error(e.message)
}
}
void writeMetricsForManagedSystems() {
systems.each {systemId, system ->
influxClient.writeManagedSystem(system)
}
}
void writeMetricsForLogicalPartitions() {
partitions.each {partitionId, partition ->
influxClient.writeLogicalPartition(partition)
}
}
static String getVersion() {
URL url = getClass().getResource("/version.properties");
if (url == null) {
return "No version.txt file found in the classpath."
}
Properties properties = new Properties();
properties.load(url.openStream());
return properties.getProperty("VERSION_GRADLE") + "-" + properties.getProperty("VERSION_BUILD")
}
static void main(String... args) {
def cli = new CliBuilder(name: "hmci")
cli.h(longOpt: 'help', usageHelp: true, 'display usage information')
cli.v(longOpt: 'version', versionHelp: true, 'display version information')
cli.c(longOpt: 'config', args: 1, required: true, paramLabel: "FILE", defaultValue: '/etc/hmci.groovy', 'configuration file')
OptionAccessor options = cli.parse(args)
if (options.h) {
cli.usage()
return
}
if(options.v) {
println("Version " + getVersion())
return
}
ConfigObject configuration
if(options.c) {
File configurationFile = new File((String)options.config)
if(!configurationFile.exists()) {
println("Error - No configuration file found at: " + configurationFile.toString())
System.exit(1)
}
configuration = new ConfigSlurper("development").parse(configurationFile.toURI().toURL());
}
if(configuration == null || configuration.isEmpty()) {
println("Error - Empty or faulty configuration")
System.exit(1)
}
new App(configuration)
}
@Override
void run() {
log.debug("run()")
boolean keepRunning = true
int executions = 0
while(keepRunning) {
try {
getMetricsForSystems()
getMetricsForPartitions()
writeMetricsForManagedSystems()
writeMetricsForLogicalPartitions()
influxClient.writeBatchPoints()
// Refresh HMCs
if(executions > rescanHmcEvery) {
executions = 0
discover()
}
} catch(Exception e) {
log.error(e.message, e)
}
executions++
Thread.sleep(refreshEverySec * 1000)
}
}
}

View file

@@ -15,6 +15,9 @@
*/
package biz.nellemann.hmci
import biz.nellemann.hmci.Configuration.HmcObject
import groovy.transform.CompileDynamic
import groovy.transform.CompileStatic
import groovy.util.logging.Slf4j
import groovy.xml.XmlSlurper
import okhttp3.MediaType
@@ -34,6 +37,7 @@ import java.security.cert.CertificateException
import java.security.cert.X509Certificate;
@Slf4j
@CompileStatic
class HmcClient {
private final MediaType MEDIA_TYPE_IBM_XML_LOGIN = MediaType.parse("application/vnd.ibm.powervm.web+xml; type=LogonRequest");
@@ -48,18 +52,21 @@ class HmcClient {
protected String authToken
private final OkHttpClient client
HmcClient(String hmcId, String baseUrl, String username, String password, Boolean unsafe = false) {
this.hmcId = hmcId
this.baseUrl = baseUrl
this.username = username
this.password = password
this.unsafe = unsafe
HmcClient(HmcObject configHmc) {
this.hmcId = configHmc.name
this.baseUrl = configHmc.url
this.username = configHmc.username
this.password = configHmc.password
this.unsafe = configHmc.unsafe
if(unsafe) {
this.client = getUnsafeOkHttpClient()
} else {
this.client = new OkHttpClient()
}
}
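For reference, the getUnsafeOkHttpClient() helper called above falls outside this hunk. A minimal sketch of the conventional OkHttp trust-all pattern such a method typically follows — the body below is an assumption, not code from this commit, and additionally needs javax.net.ssl.SSLContext, javax.net.ssl.TrustManager and javax.net.ssl.X509TrustManager imports:
private static OkHttpClient getUnsafeOkHttpClient() {
    try {
        // Trust manager that accepts any certificate chain (e.g. self-signed HMC certs)
        X509TrustManager trustAll = new X509TrustManager() {
            public void checkClientTrusted(X509Certificate[] chain, String authType) { }
            public void checkServerTrusted(X509Certificate[] chain, String authType) { }
            public X509Certificate[] getAcceptedIssuers() { return new X509Certificate[0]; }
        };
        SSLContext sslContext = SSLContext.getInstance("TLS");
        sslContext.init(null, new TrustManager[] { trustAll }, new java.security.SecureRandom());
        return new OkHttpClient.Builder()
                .sslSocketFactory(sslContext.getSocketFactory(), trustAll)
                .hostnameVerifier((hostname, session) -> true)   // skip hostname checks as well
                .build();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}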
@@ -69,12 +76,15 @@ class HmcClient {
*
* @throws IOException
*/
//@CompileDynamic
void login(Boolean force = false) throws IOException {
if(authToken && !force) {
return
}
log.info("Connecting to HMC - " + baseUrl);
String payload = """\
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<LogonRequest xmlns="http://www.ibm.com/xmlns/systems/power/firmware/web/mc/2012_10/" schemaVersion="V1_0">
@@ -96,7 +106,7 @@ class HmcClient {
if (!response.isSuccessful()) throw new IOException("Unexpected code " + response);
// Get response body and parse
String responseBody = response.body.string()
String responseBody = response.body().string();
response.body().close()
def xml = new XmlSlurper().parseText(responseBody)
@@ -144,6 +154,7 @@ class HmcClient {
*
* @return
*/
@CompileDynamic
Map<String, ManagedSystem> getManagedSystems() {
URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem", baseUrl))
Response response = getResponse(url)
@@ -185,6 +196,7 @@ class HmcClient {
* @param UUID of managed system
* @return
*/
@CompileDynamic
Map<String, LogicalPartition> getLogicalPartitionsForManagedSystem(ManagedSystem system) {
URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem/%s/LogicalPartition", baseUrl, system.id))
Response response = getResponse(url)
@@ -225,6 +237,7 @@ class HmcClient {
* @param systemId
* @return
*/
@CompileDynamic
String getPcmDataForManagedSystem(ManagedSystem system) {
log.debug("getPcmDataForManagedSystem() - " + system.id)
URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/ProcessedMetrics?NoOfSamples=1", baseUrl, system.id))
@@ -257,6 +270,7 @@ class HmcClient {
* @param partitionId
* @return
*/
@CompileDynamic
String getPcmDataForLogicalPartition(LogicalPartition partition) {
log.debug(String.format("getPcmDataForLogicalPartition() - %s @ %s", partition.id, partition.system.id))
@@ -305,6 +319,7 @@ class HmcClient {
* @param url
* @return
*/
//@CompileDynamic
private Response getResponse(URL url, Integer retry = 0) {
if(responseErrors > 2) {
@@ -324,18 +339,18 @@ class HmcClient {
if (!response.isSuccessful()) {
response.body().close()
if(response.code == 401) {
if(response.code() == 401) {
login(true)
return getResponse(url, retry++)
}
if(retry < 2) {
log.warn("getResponse() - Retrying due to unexpected response: " + response.code)
log.warn("getResponse() - Retrying due to unexpected response: " + response.code())
return getResponse(url, retry++)
}
log.error("getResponse() - Unexpected response: " + response.code)
throw new IOException("getResponse() - Unexpected response: " + response.code)
log.error("getResponse() - Unexpected response: " + response.code())
throw new IOException("getResponse() - Unexpected response: " + response.code())
};
return response

View file

@@ -13,63 +13,74 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package biz.nellemann.hmci
package biz.nellemann.hmci;
import groovy.util.logging.Slf4j
import org.influxdb.BatchOptions
import org.influxdb.InfluxDB
import org.influxdb.InfluxDBFactory
import org.influxdb.dto.BatchPoints
import org.influxdb.dto.Point
import org.influxdb.dto.Query
import biz.nellemann.hmci.Configuration.InfluxObject
import groovy.transform.CompileStatic;
import org.influxdb.BatchOptions;
import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.influxdb.dto.Query;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Instant
import java.util.concurrent.TimeUnit
import java.util.concurrent.TimeUnit;
@Slf4j
@CompileStatic
class InfluxClient {
final String url
final String username
final String password
final String database
private final static Logger log = LoggerFactory.getLogger(InfluxClient.class);
InfluxDB influxDB
BatchPoints batchPoints
final private String url;
final private String username;
final private String password;
final private String database;
private InfluxDB influxDB;
private BatchPoints batchPoints;
InfluxClient(String url, String username, String password, String database) {
this.url = url
this.username = username
this.password = password
this.database = database
InfluxClient(InfluxObject config) {
this.url = config.url;
this.username = config.username;
this.password = config.password;
this.database = config.database;
}
void login() {
if(!influxDB) {
try {
influxDB = InfluxDBFactory.connect(url, username, password);
createDatabase()
void login() throws Exception {
// Enable batch writes to get better performance.
//BatchOptions options = BatchOptions.DEFAULTS.actions(300).flushDuration(500);
influxDB.enableBatch(BatchOptions.DEFAULTS);
//influxDB.setLogLevel(InfluxDB.LogLevel.BASIC);
if(influxDB != null) {
return
}
batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build();
try {
log.info("Connecting to InfluxDB - " + url);
influxDB = InfluxDBFactory.connect(url, username, password);
createDatabase();
} catch(Exception e) {
log.error(e.message)
throw new Exception(e)
}
// Enable batch writes to get better performance.
//BatchOptions options = BatchOptions.DEFAULTS.actions(300).flushDuration(500);
influxDB.enableBatch(BatchOptions.DEFAULTS);
//influxDB.setLogLevel(InfluxDB.LogLevel.BASIC);
batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build();
} catch(Exception e) {
log.error(e.getMessage());
throw new Exception(e);
}
}
void logoff() {
influxDB?.close();
influxDB = null
if(influxDB != null) {
influxDB.close();
}
influxDB = null;
}
@@ -81,13 +92,13 @@ class InfluxClient {
void writeBatchPoints() {
log.debug("writeBatchPoints()")
log.debug("writeBatchPoints()");
try {
influxDB.write(batchPoints);
} catch(Exception e) {
log.error("writeBatchPoints() error - " + e.message)
logoff()
login()
log.error("writeBatchPoints() error - " + e.getMessage());
logoff();
login();
}
}
@@ -101,81 +112,81 @@ class InfluxClient {
void writeManagedSystem(ManagedSystem system) {
if(system.metrics == null) {
log.warn("writeManagedSystem() - null metrics, skipping")
return
log.warn("writeManagedSystem() - null metrics, skipping");
return;
}
Instant timestamp = system.getTimestamp()
if(!timestamp) {
log.warn("writeManagedSystem() - no timestamp, skipping")
return
Instant timestamp = system.getTimestamp();
if(timestamp == null) {
log.warn("writeManagedSystem() - no timestamp, skipping");
return;
}
//BatchPoints batchPoints = BatchPoints.database(database).build();
getSystemMemory(system, timestamp).each {
batchPoints.point(it)
}
getSystemMemory(system, timestamp).forEach( it -> {
batchPoints.point(it);
});
getSystemProcessor(system, timestamp).each {
batchPoints.point(it)
}
getSystemProcessor(system, timestamp).forEach( it -> {
batchPoints.point(it);
});
getSystemSharedProcessorPools(system, timestamp).each {
batchPoints.point(it)
}
getSystemSharedProcessorPools(system, timestamp).forEach( it -> {
batchPoints.point(it);
});
getSystemSharedAdapters(system, timestamp).each {
batchPoints.point(it)
}
getSystemSharedAdapters(system, timestamp).forEach( it -> {
batchPoints.point(it);
});
getSystemFiberChannelAdapters(system, timestamp).each {
batchPoints.point(it)
}
getSystemFiberChannelAdapters(system, timestamp).forEach( it -> {
batchPoints.point(it);
});
getSystemGenericPhysicalAdapters(system, timestamp).each {
batchPoints.point(it)
}
getSystemGenericPhysicalAdapters(system, timestamp).forEach( it -> {
batchPoints.point(it);
});
getSystemGenericVirtualAdapters(system, timestamp).each {
batchPoints.point(it)
}
getSystemGenericVirtualAdapters(system, timestamp).forEach( it -> {
batchPoints.point(it);
});
}
private static List<Point> getSystemMemory(ManagedSystem system, Instant timestamp) {
List<Map> metrics = system.getMemoryMetrics()
return processMeasurementMap(metrics, timestamp, "SystemMemory")
List<Measurement> metrics = system.getMemoryMetrics();
return processMeasurementMap(metrics, timestamp, "SystemMemory");
}
private static List<Point> getSystemProcessor(ManagedSystem system, Instant timestamp) {
List<Map> metrics = system.getProcessorMetrics()
return processMeasurementMap(metrics, timestamp, "SystemProcessor")
List<Measurement> metrics = system.getProcessorMetrics();
return processMeasurementMap(metrics, timestamp, "SystemProcessor");
}
private static List<Point> getSystemSharedProcessorPools(ManagedSystem system, Instant timestamp) {
List<Map> metrics = system.getSharedProcessorPools()
return processMeasurementMap(metrics, timestamp, "SystemSharedProcessorPool")
List<Measurement> metrics = system.getSharedProcessorPools();
return processMeasurementMap(metrics, timestamp, "SystemSharedProcessorPool");
}
private static List<Point> getSystemSharedAdapters(ManagedSystem system, Instant timestamp) {
List<Map> metrics = system.getSystemSharedAdapters()
return processMeasurementMap(metrics, timestamp, "SystemSharedAdapters")
List<Measurement> metrics = system.getSystemSharedAdapters();
return processMeasurementMap(metrics, timestamp, "SystemSharedAdapters");
}
private static List<Point> getSystemFiberChannelAdapters(ManagedSystem system, Instant timestamp) {
List<Map> metrics = system.getSystemFiberChannelAdapters()
return processMeasurementMap(metrics, timestamp, "SystemFiberChannelAdapters")
List<Measurement> metrics = system.getSystemFiberChannelAdapters();
return processMeasurementMap(metrics, timestamp, "SystemFiberChannelAdapters");
}
private static List<Point> getSystemGenericPhysicalAdapters(ManagedSystem system, Instant timestamp) {
List<Map> metrics = system.getSystemGenericPhysicalAdapters()
return processMeasurementMap(metrics, timestamp, "SystemGenericPhysicalAdapters")
List<Measurement> metrics = system.getSystemGenericPhysicalAdapters();
return processMeasurementMap(metrics, timestamp, "SystemGenericPhysicalAdapters");
}
private static List<Point> getSystemGenericVirtualAdapters(ManagedSystem system, Instant timestamp) {
List<Map> metrics = system.getSystemGenericVirtualAdapters()
return processMeasurementMap(metrics, timestamp, "SystemGenericVirtualAdapters")
List<Measurement> metrics = system.getSystemGenericVirtualAdapters();
return processMeasurementMap(metrics, timestamp, "SystemGenericVirtualAdapters");
}
@@ -186,64 +197,64 @@ class InfluxClient {
void writeLogicalPartition(LogicalPartition partition) {
if(partition.metrics == null) {
log.warn("writeLogicalPartition() - null metrics, skipping")
return
log.warn("writeLogicalPartition() - null metrics, skipping");
return;
}
Instant timestamp = partition.getTimestamp()
if(!timestamp) {
log.warn("writeLogicalPartition() - no timestamp, skipping")
return
Instant timestamp = partition.getTimestamp();
if(timestamp == null) {
log.warn("writeLogicalPartition() - no timestamp, skipping");
return;
}
//BatchPoints batchPoints = BatchPoints.database(database).build();
getPartitionAffinityScore(partition, timestamp).each {
batchPoints.point(it)
}
getPartitionAffinityScore(partition, timestamp).forEach( it -> {
batchPoints.point(it);
});
getPartitionMemory(partition, timestamp).each {
batchPoints.point(it)
}
getPartitionMemory(partition, timestamp).forEach( it -> {
batchPoints.point(it);
});
getPartitionProcessor(partition, timestamp).each {
batchPoints.point(it)
}
getPartitionProcessor(partition, timestamp).forEach( it -> {
batchPoints.point(it);
});
getPartitionVirtualEthernetAdapter(partition, timestamp).each {
batchPoints.point(it)
}
getPartitionVirtualEthernetAdapter(partition, timestamp).forEach( it -> {
batchPoints.point(it);
});
getPartitionVirtualFiberChannelAdapter(partition, timestamp).each {
batchPoints.point(it)
}
getPartitionVirtualFiberChannelAdapter(partition, timestamp).forEach( it -> {
batchPoints.point(it);
});
//influxDB.write(batchPoints);
}
private static List<Point> getPartitionAffinityScore(LogicalPartition partition, Instant timestamp) {
List<Map> metrics = partition.getAffinityScore()
return processMeasurementMap(metrics, timestamp, "PartitionAffinityScore")
List<Measurement> metrics = partition.getAffinityScore();
return processMeasurementMap(metrics, timestamp, "PartitionAffinityScore");
}
private static List<Point> getPartitionMemory(LogicalPartition partition, Instant timestamp) {
List<Map> metrics = partition.getMemoryMetrics()
return processMeasurementMap(metrics, timestamp, "PartitionMemory")
List<Measurement> metrics = partition.getMemoryMetrics();
return processMeasurementMap(metrics, timestamp, "PartitionMemory");
}
private static List<Point> getPartitionProcessor(LogicalPartition partition, Instant timestamp) {
List<Map> metrics = partition.getProcessorMetrics()
return processMeasurementMap(metrics, timestamp, "PartitionProcessor")
List<Measurement> metrics = partition.getProcessorMetrics();
return processMeasurementMap(metrics, timestamp, "PartitionProcessor");
}
private static List<Point> getPartitionVirtualEthernetAdapter(LogicalPartition partition, Instant timestamp) {
List<Map> metrics = partition.getVirtualEthernetAdapterMetrics()
return processMeasurementMap(metrics, timestamp, "PartitionVirtualEthernetAdapters")
List<Measurement> metrics = partition.getVirtualEthernetAdapterMetrics();
return processMeasurementMap(metrics, timestamp, "PartitionVirtualEthernetAdapters");
}
private static List<Point> getPartitionVirtualFiberChannelAdapter(LogicalPartition partition, Instant timestamp) {
List<Map> metrics = partition.getVirtualFiberChannelAdaptersMetrics()
return processMeasurementMap(metrics, timestamp, "PartitionVirtualFiberChannelAdapters")
List<Measurement> metrics = partition.getVirtualFiberChannelAdaptersMetrics();
return processMeasurementMap(metrics, timestamp, "PartitionVirtualFiberChannelAdapters");
}
@@ -252,33 +263,34 @@
Shared
*/
private static List<Point> processMeasurementMap(List<Map> listOfMaps, Instant timestamp, String measurement) {
private static List<Point> processMeasurementMap(List<Measurement> measurements, Instant timestamp, String measurement) {
List<Point> list = new ArrayList<>()
listOfMaps.each { map ->
List<Point> listOfPoints = new ArrayList<>();
measurements.forEach( m -> {
// Iterate fields
map.get("fields").each { String fieldName, BigDecimal fieldValue ->
//Map<String, BigDecimal> fieldsMap = m.get("fields");
m.fields.forEach((fieldName, fieldValue) -> {
log.debug("processMeasurementMap() " + measurement + " - fieldName: " + fieldName + ", fieldValue: " + fieldValue)
Point.Builder builder = Point.measurement(measurement)
.time(timestamp.toEpochMilli(), TimeUnit.MILLISECONDS)
.tag("name", fieldName)
.addField("value", fieldValue)
.addField("value", fieldValue);
// For each field, we add all tags
map.get("tags").each { String tagName, String tagValue ->
builder.tag(tagName, tagValue)
log.debug("processMeasurementMap() " + measurement + " - tagName: " + tagName + ", tagValue: " + tagValue)
}
//Map<String, String> tagsMap = m.get("tags");
m.tags.forEach((tagName, tagValue) -> {
builder.tag(tagName, tagValue);
log.debug("processMeasurementMap() " + measurement + " - tagName: " + tagName + ", tagValue: " + tagValue);
});
list.add(builder.build())
}
listOfPoints.add(builder.build());
});
}
});
return list
return listOfPoints;
}

View file

@@ -0,0 +1,176 @@
/**
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package biz.nellemann.hmci
import groovy.transform.CompileStatic
import groovy.util.logging.Slf4j;
@Slf4j
@CompileStatic
class Insights {
final Configuration configuration;
InfluxClient influxClient;
Map<String, HmcClient> hmcClients = new HashMap<>();
Map<String,ManagedSystem> systems = new HashMap<String, ManagedSystem>();
Map<String, LogicalPartition> partitions = new HashMap<String, LogicalPartition>();
Insights(Configuration configuration) {
this.configuration = configuration;
try {
influxClient = new InfluxClient(configuration.influx);
influxClient.login();
} catch(Exception e) {
System.exit(1);
}
// Initial scan
discover();
}
void discover() {
configuration.hmc.forEach( configHmc -> {
if(!hmcClients?.containsKey(configHmc.name)) {
log.debug("Adding HMC: " + configHmc.toString())
HmcClient hmcClient = new HmcClient(configHmc)
hmcClients.put(configHmc.name, hmcClient)
}
});
hmcClients.forEach(( hmcId, hmcClient) -> {
try {
hmcClient.login()
hmcClient.getManagedSystems().each { systemId, system ->
// Add to list of known systems
systems.putIfAbsent(systemId, system)
// Get LPAR's for this system
hmcClient.getLogicalPartitionsForManagedSystem(system).forEach((partitionId, partition) -> {
// Add to list of known partitions
partitions.putIfAbsent(partitionId, partition)
});
}
} catch(Exception e) {
log.error("discover() - " + hmcId + " error: " + e.message)
//hmcClients.remove(hmcId)
}
});
}
void getMetricsForSystems() {
try {
systems.forEach((systemId, system) -> {
HmcClient hmcClient = hmcClients.get(system.hmcId)
// Get and process metrics for this system
String tmpJsonString = hmcClient.getPcmDataForManagedSystem(system)
if(tmpJsonString && !tmpJsonString.empty) {
system.processMetrics(tmpJsonString)
}
});
} catch(Exception e) {
log.error(e.message)
}
}
void getMetricsForPartitions() {
try {
// Get LPAR's for this system
partitions.forEach((partitionId, partition) -> {
HmcClient hmcClient = hmcClients.get(partition.system.hmcId)
// Get and process metrics for this partition
String tmpJsonString2 = hmcClient.getPcmDataForLogicalPartition(partition)
if(tmpJsonString2 && !tmpJsonString2.empty) {
partition.processMetrics(tmpJsonString2)
}
});
} catch(Exception e) {
log.error(e.message)
}
}
void writeMetricsForManagedSystems() {
systems.forEach((systemId, system) -> {
influxClient.writeManagedSystem(system)
});
}
void writeMetricsForLogicalPartitions() {
partitions.each {partitionId, partition ->
influxClient.writeLogicalPartition(partition)
}
}
void run() {
log.debug("run()")
boolean keepRunning = true
int executions = 0
while(keepRunning) {
try {
getMetricsForSystems()
getMetricsForPartitions()
writeMetricsForManagedSystems()
writeMetricsForLogicalPartitions()
influxClient.writeBatchPoints()
// Refresh HMCs
if(executions > configuration.rescan) {
executions = 0
discover()
}
} catch(Exception e) {
log.error(e.message, e)
}
executions++
Thread.sleep(configuration.refresh * 1000)
}
}
}

View file

@@ -15,9 +15,12 @@
*/
package biz.nellemann.hmci
import groovy.transform.CompileDynamic
import groovy.transform.CompileStatic
import groovy.util.logging.Slf4j
@Slf4j
@CompileStatic
class LogicalPartition extends MetaSystem {
public String id
@@ -36,64 +39,68 @@ class LogicalPartition extends MetaSystem {
return "[${id}] ${name} (${type})"
}
@CompileDynamic
List<Measurement> getAffinityScore() {
List<Map> getAffinityScore() {
List<Map> list = new ArrayList<>()
Map<String, Map> map = new HashMap<String, Map>()
List<Measurement> list = new ArrayList<>()
//Map<String, Map> map = new HashMap<String, Map>()
HashMap<String, String> tagsMap = [
system: system.name,
partition: name,
]
map.put("tags", tagsMap)
//map.put("tags", tagsMap)
log.debug("getAffinityScore() - tags: " + tagsMap.toString())
HashMap<String, BigDecimal> fieldsMap = [
affinityScore: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.affinityScore,
]
map.put("fields", fieldsMap)
//map.put("fields", fieldsMap)
log.debug("getAffinityScore() - fields: " + fieldsMap.toString())
list.add(map)
Measurement measurement = new Measurement(tagsMap, fieldsMap);
list.add(measurement);
return list
}
List<Map> getMemoryMetrics() {
@CompileDynamic
List<Measurement> getMemoryMetrics() {
List<Map> list = new ArrayList<>()
Map<String, Map> map = new HashMap<String, Map>()
List<Measurement> list = new ArrayList<>()
//Map<String, Map> map = new HashMap<String, Map>()
HashMap<String, String> tagsMap = [
system: system.name,
partition: name,
]
map.put("tags", tagsMap)
//map.put("tags", tagsMap)
log.debug("getMemoryMetrics() - tags: " + tagsMap.toString())
HashMap<String, BigDecimal> fieldsMap = [
logicalMem: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.memory?.logicalMem?.first(),
backedPhysicalMem: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.memory?.backedPhysicalMem?.first(),
]
map.put("fields", fieldsMap)
//map.put("fields", fieldsMap)
log.debug("getMemoryMetrics() - fields: " + fieldsMap.toString())
list.add(map)
Measurement measurement = new Measurement(tagsMap, fieldsMap);
list.add(measurement);
return list
}
@CompileDynamic
List<Measurement> getProcessorMetrics() {
List<Map> getProcessorMetrics() {
List<Map> list = new ArrayList<>()
Map<String, Map> map = new HashMap<String, Map>()
List<Measurement> list = new ArrayList<>()
//Map<String, Map> map = new HashMap<String, Map>()
HashMap<String, String> tagsMap = [
system: system.name,
partition: name,
]
map.put("tags", tagsMap)
//map.put("tags", tagsMap)
log.debug("getProcessorMetrics() - tags: " + tagsMap.toString())
HashMap<String, BigDecimal> fieldsMap = [
@@ -109,19 +116,21 @@ class LogicalPartition extends MetaSystem {
timePerInstructionExecution: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.processor?.timePerInstructionExecution?.first(),
timeSpentWaitingForDispatch: metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.processor?.timeSpentWaitingForDispatch?.first(),
]
map.put("fields", fieldsMap)
//map.put("fields", fieldsMap)
log.debug("getProcessorMetrics() - fields: " + fieldsMap.toString())
list.add(map)
Measurement measurement = new Measurement(tagsMap, fieldsMap);
list.add(measurement);
return list
}
@CompileDynamic
List<Measurement> getVirtualEthernetAdapterMetrics() {
List<Map> getVirtualEthernetAdapterMetrics() {
List<Map> list = new ArrayList<>()
List<Measurement> list = new ArrayList<>()
metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.network?.virtualEthernetAdapters?.each {
Map<String, Map> map = new HashMap<String, Map>()
//Map<String, Map> map = new HashMap<String, Map>()
HashMap<String, String> tagsMap = [
system: system.name,
@@ -131,7 +140,7 @@ class LogicalPartition extends MetaSystem {
vlanId: it.vlanId as String,
vswitchId: it.vswitchId as String,
]
map.put("tags", tagsMap)
//map.put("tags", tagsMap)
log.debug("getVirtualEthernetAdapterMetrics() - tags: " + tagsMap.toString())
HashMap<String, BigDecimal> fieldsMap = [
@@ -140,10 +149,11 @@ class LogicalPartition extends MetaSystem {
receivedBytes: it.receivedBytes.first(),
sentBytes: it.sentBytes.first(),
]
map.put("fields", fieldsMap)
//map.put("fields", fieldsMap)
log.debug("getVirtualEthernetAdapterMetrics() - fields: " + fieldsMap.toString())
list.add(map)
Measurement measurement = new Measurement(tagsMap, fieldsMap);
list.add(measurement);
}
return list
@@ -151,11 +161,12 @@ class LogicalPartition extends MetaSystem {
//PartitionVirtualFiberChannelAdapters
List<Map> getVirtualFiberChannelAdaptersMetrics() {
@CompileDynamic
List<Measurement> getVirtualFiberChannelAdaptersMetrics() {
List<Map> list = new ArrayList<>()
List<Measurement> list = new ArrayList<>()
metrics.systemUtil?.utilSamples?.first()?.lparsUtil?.first()?.storage?.virtualFiberChannelAdapters?.each {
Map<String, Map> map = new HashMap<String, Map>()
//Map<String, Map> map = new HashMap<String, Map>()
HashMap<String, String> tagsMap = [
system: system.name,
@@ -163,7 +174,7 @@ class LogicalPartition extends MetaSystem {
viosId: it.viosId as String,
wwpn: it.wwpn,
]
map.put("tags", tagsMap)
//map.put("tags", tagsMap)
log.debug("getVirtualFiberChannelAdaptersMetrics() - tags: " + tagsMap.toString())
HashMap<String, BigDecimal> fieldsMap = [
@@ -171,10 +182,11 @@ class LogicalPartition extends MetaSystem {
writeBytes: it.writeBytes.first(),
readBytes: it.readBytes.first(),
]
map.put("fields", fieldsMap)
//map.put("fields", fieldsMap)
log.debug("getVirtualFiberChannelAdaptersMetrics() - fields: " + fieldsMap.toString())
list.add(map)
Measurement measurement = new Measurement(tagsMap, fieldsMap);
list.add(measurement);
}
return list

View file

@@ -0,0 +1,59 @@
/*
Copyright 2020 mark.nellemann@gmail.com
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package biz.nellemann.hmci
import groovy.transform.CompileStatic;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine;
import picocli.CommandLine.Command;
import java.util.concurrent.Callable;
@CompileStatic
@Command(name = "hmci",
mixinStandardHelpOptions = true,
description = "HMC Insights.",
versionProvider = biz.nellemann.hmci.VersionProvider.class)
public class Main implements Callable<Integer> {
private final static Logger log = LoggerFactory.getLogger(Main.class);
@CommandLine.Option(names = ["-c", "--conf"], description = "Configuration file [default: '/etc/hmci.toml'].")
private String configurationFile = "/etc/hmci.toml";
public static void main(String... args) {
int exitCode = new CommandLine(new Main()).execute(args);
System.exit(exitCode);
}
@Override
public Integer call() throws IOException {
File file = new File(configurationFile);
if(!file.exists()) {
System.err.println("Error - No configuration file found at: " + file.toString());
return -1;
}
Configuration configuration = new Configuration(configurationFile);
Insights insights = new Insights(configuration);
insights.run();
return 0;
}
}

View file

@@ -15,10 +15,13 @@
*/
package biz.nellemann.hmci
import groovy.transform.CompileDynamic
import groovy.transform.CompileStatic
import groovy.util.logging.Slf4j
@Slf4j
@CompileStatic
class ManagedSystem extends MetaSystem {
public final String hmcId
@@ -43,40 +46,46 @@ class ManagedSystem extends MetaSystem {
}
List<Map> getMemoryMetrics() {
@CompileDynamic
List<Measurement> getMemoryMetrics() {
List<Map> list = new ArrayList<>()
Map<String, Map> map = new HashMap<String, Map>()
List<Measurement> list = new ArrayList<>()
//Map<String, Map> map = new HashMap<String, Map>()
HashMap<String, String> tagsMap = [
system: name,
]
map.put("tags", tagsMap)
//map.put("tags", tagsMap)
log.debug("getMemoryMetrics() - tags: " + tagsMap.toString())
HashMap<String, BigDecimal> fieldsMap = [
totalMem: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.totalMem?.first(),
availableMem: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.availableMem?.first(),
configurableMem: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.configurableMem?.first(),
assignedMemToLpars: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.assignedMemToLpars?.first(),
Map<String, BigDecimal> fieldsMap = [
"totalMem": metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.totalMem?.first(),
"availableMem": metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.availableMem?.first(),
"configurableMem": metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.configurableMem?.first(),
"assignedMemToLpars": metrics.systemUtil?.utilSamples?.first()?.serverUtil?.memory?.assignedMemToLpars?.first(),
]
map.put("fields", fieldsMap)
//map.put("fields", fieldsMap)
log.debug("getMemoryMetrics() - fields: " + fieldsMap.toString())
list.add(map)
Measurement measurement = new Measurement(tagsMap, fieldsMap);
list.add(measurement)
return list
}
List<Map> getProcessorMetrics() {
@CompileDynamic
List<Measurement> getProcessorMetrics() {
List<Map> list = new ArrayList<>()
Map<String, Map> map = new HashMap<String, Map>()
List<Measurement> list = new ArrayList<>()
//Map<String, Map> map = new HashMap<>()
HashMap<String, String> tagsMap = [
system: name,
]
map.put("tags", tagsMap)
//map.put("tags", tagsMap)
//measurement.tags = tagsMap;
log.debug("getProcessorMetrics() - tags: " + tagsMap.toString())
HashMap<String, BigDecimal> fieldsMap = [
@@ -85,55 +94,62 @@ class ManagedSystem extends MetaSystem {
availableProcUnits: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.processor?.availableProcUnits?.first(),
configurableProcUnits: metrics.systemUtil?.utilSamples?.first()?.serverUtil?.processor?.configurableProcUnits?.first(),
]
map.put("fields", fieldsMap)
//map.put("fields", fieldsMap)
//measurement.fields = fieldsMap;
log.debug("getProcessorMetrics() - fields: " + fieldsMap.toString())
list.add(map)
Measurement measurement = new Measurement(tagsMap, fieldsMap);
list.add(measurement)
return list
}
List<Map> getSharedProcessorPools() {
@CompileDynamic
List<Measurement> getSharedProcessorPools() {
List<Map> list = new ArrayList<>()
List<Measurement> list = new ArrayList<>()
metrics.systemUtil?.utilSamples?.first()?.serverUtil?.sharedProcessorPool?.each {
Map<String, Map> map = new HashMap<String, Map>()
//Map<String, Map> map = new HashMap<String, Map>()
HashMap<String, String> tagsMap = [
system: name,
pool: it.name,
]
map.put("tags", tagsMap)
//map.put("tags", tagsMap)
log.debug("getSharedProcessorPools() - tags: " + tagsMap.toString())
HashMap<String, BigDecimal> fieldsMap = [
assignedProcUnits: it.assignedProcUnits.first(),
availableProcUnits: it.availableProcUnits.first(),
]
map.put("fields", fieldsMap)
//map.put("fields", fieldsMap)
log.debug("getSharedProcessorPools() - fields: " + fieldsMap.toString())
list.add(map)
Measurement measurement = new Measurement(tagsMap, fieldsMap);
list.add(measurement)
}
return list
}
List<Map> getSystemSharedAdapters() {
@CompileDynamic
List<Measurement> getSystemSharedAdapters() {
List<Map> list = new ArrayList<>()
List<Measurement> list = new ArrayList<>()
metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each {vios ->
vios.network.sharedAdapters.each {
Map<String, Map> map = new HashMap<String, Map>()
//Map<String, Map> map = new HashMap<String, Map>()
Measurement measurement = new Measurement();
HashMap<String, String> tagsMap = [
system: name,
type: it.type,
vios: vios.name,
]
map.put("tags", tagsMap)
//map.put("tags", tagsMap)
measurement.tags = tagsMap;
log.debug("getSystemSharedAdapters() - tags: " + tagsMap.toString())
HashMap<String, BigDecimal> fieldsMap = [
@@ -141,25 +157,27 @@ class ManagedSystem extends MetaSystem {
receivedBytes: it.receivedBytes.first(),
transferredBytes: it.transferredBytes.first(),
]
map.put("fields", fieldsMap)
//map.put("fields", fieldsMap)
measurement.fields = fieldsMap;
log.debug("getSystemSharedAdapters() - fields: " + fieldsMap.toString())
list.add(map)
list.add(measurement)
}
}
return list
}
List<Map> getSystemFiberChannelAdapters() {
@CompileDynamic
List<Measurement> getSystemFiberChannelAdapters() {
List<Map> list = new ArrayList<>()
List<Measurement> list = new ArrayList<>()
metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each { vios ->
log.debug("getSystemFiberChannelAdapters() - VIOS: " + vios.name)
vios.storage?.fiberChannelAdapters?.each {
Map<String, Map> map = new HashMap<String, Map>()
//HashMap<String, Map> map = new HashMap<>()
Measurement measurement = new Measurement();
HashMap<String, String> tagsMap = [
id: it.id,
@@ -168,7 +186,8 @@ class ManagedSystem extends MetaSystem {
vios: vios.name,
device: it.physicalLocation,
]
map.put("tags", tagsMap)
//map.put("tags", tagsMap)
measurement.tags = tagsMap;
log.debug("getSystemFiberChannelAdapters() - tags: " + tagsMap.toString())
HashMap<String, BigDecimal> fieldsMap = [
@@ -176,23 +195,25 @@ class ManagedSystem extends MetaSystem {
readBytes: it.readBytes.first(),
transmittedBytes: it.transmittedBytes.first(),
]
map.put("fields", fieldsMap)
//map.put("fields", fieldsMap)
measurement.fields = fieldsMap;
log.debug("getSystemFiberChannelAdapters() - fields: " + fieldsMap.toString())
list.add(map)
list.add(measurement)
}
}
return list
}
List<Map> getSystemGenericPhysicalAdapters() {
List<Map> list = new ArrayList<>()
@CompileDynamic
List<Measurement> getSystemGenericPhysicalAdapters() {
List<Measurement> list = new ArrayList<>()
metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each { vios ->
vios.storage?.genericPhysicalAdapters?.each {
Map<String, Map> map = new HashMap<String, Map>()
//Map<String, Map> map = new HashMap<String, Map>()
Measurement measurement = new Measurement();
HashMap<String, String> tagsMap = [
id: it.id,
@@ -200,7 +221,8 @@ class ManagedSystem extends MetaSystem {
vios: vios.name,
device: it.physicalLocation,
]
map.put("tags", tagsMap)
//map.put("tags", tagsMap)
measurement.tags = tagsMap;
log.debug("getSystemGenericPhysicalAdapters() - tags: " + tagsMap.toString())
HashMap<String, BigDecimal> fieldsMap = [
@@ -208,24 +230,25 @@ class ManagedSystem extends MetaSystem {
readBytes: it.readBytes.first(),
transmittedBytes: it.transmittedBytes.first(),
]
map.put("fields", fieldsMap)
//map.put("fields", fieldsMap)
measurement.fields = fieldsMap;
log.debug("getSystemGenericPhysicalAdapters() - fields: " + fieldsMap.toString())
list.add(map)
list.add(measurement)
}
}
return list
}
List<Map> getSystemGenericVirtualAdapters() {
List<Map> list = new ArrayList<>()
@CompileDynamic
List<Measurement> getSystemGenericVirtualAdapters() {
List<Measurement> list = new ArrayList<>()
metrics.systemUtil?.utilSamples?.first()?.viosUtil?.each { vios ->
vios.storage?.genericVirtualAdapters?.each {
Map<String, Map> map = new HashMap<String, Map>()
//Map<String, Map> map = new HashMap<String, Map>()
Measurement measurement = new Measurement();
HashMap<String, String> tagsMap = [
id: it.id,
@@ -233,7 +256,8 @@ class ManagedSystem extends MetaSystem {
vios: vios.name,
device: it.physicalLocation,
]
map.put("tags", tagsMap)
//map.put("tags", tagsMap)
measurement.tags = tagsMap;
log.debug("getSystemGenericVirtualAdapters() - tags: " + tagsMap.toString())
HashMap<String, BigDecimal> fieldsMap = [
@@ -241,13 +265,12 @@ class ManagedSystem extends MetaSystem {
readBytes: it.readBytes.first(),
transmittedBytes: it.transmittedBytes.first(),
]
map.put("fields", fieldsMap)
//map.put("fields", fieldsMap)
measurement.fields = fieldsMap;
log.debug("getSystemGenericVirtualAdapters() - fields: " + fieldsMap.toString())
list.add(map)
list.add(measurement);
}
}
return list

View file

@@ -17,6 +17,8 @@ package biz.nellemann.hmci
import biz.nellemann.hmci.pcm.PcmData
import groovy.json.JsonSlurper
import groovy.transform.CompileDynamic
import groovy.transform.CompileStatic
import groovy.util.logging.Slf4j
import java.time.Instant
@@ -24,20 +26,22 @@ import java.time.format.DateTimeFormatter
import java.time.format.DateTimeParseException
@Slf4j
@CompileStatic
abstract class MetaSystem {
protected PcmData metrics
@CompileDynamic
void processMetrics(String json) {
def pcmMap = new JsonSlurper().parseText(json)
metrics = new PcmData(pcmMap as Map)
Map pcmMap = new JsonSlurper().parseText(json) as Map
metrics = new PcmData(pcmMap)
}
@CompileDynamic
Instant getTimestamp() {
String timestamp = metrics.systemUtil.utilSamples.first().sampleInfo.timeStamp
Instant instant
Instant instant = null
try {
log.debug("getTimeStamp() - PMC Timestamp: " + timestamp)
DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[XXX][X]");

View file

@@ -0,0 +1,161 @@
package biz.nellemann.hmci;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tomlj.Toml;
import org.tomlj.TomlParseResult;
import org.tomlj.TomlTable;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
public class Configuration {
private final static Logger log = LoggerFactory.getLogger(Configuration.class);
final public Long refresh;
final public Long rescan;
final public InfluxObject influx;
final public List<HmcObject> hmc;
Configuration(String configurationFile) throws IOException {
Path source = Paths.get(configurationFile);
TomlParseResult result = Toml.parse(source);
result.errors().forEach(error -> System.err.println(error.toString()));
//System.out.println(result.toJson());
if(result.contains("refresh")) {
refresh = result.getLong("refresh");
} else {
refresh = 15L;
}
if(result.contains("rescan")) {
rescan = result.getLong("rescan");
} else {
rescan = 60L;
}
hmc = getHmc(result);
influx = getInflux(result);
}
List<HmcObject> getHmc(TomlParseResult result) {
ArrayList<HmcObject> list = new ArrayList<>();
if(result.contains("hmc") && result.isTable("hmc")) {
TomlTable hmcTable = result.getTable("hmc");
for(String key : hmcTable.keySet()) {
HmcObject c = new HmcObject();
c.name = key;
if(hmcTable.contains(key+".url")) {
c.url = hmcTable.getString(key+".url");
}
if(hmcTable.contains(key+".username")) {
c.username = hmcTable.getString(key+".username");
}
if(hmcTable.contains(key+".password")) {
c.password = hmcTable.getString(key+".password");
}
if(hmcTable.contains(key+".unsafe")) {
c.unsafe = hmcTable.getBoolean(key+".unsafe");
} else {
c.unsafe = false;
}
list.add(c);
}
}
return list;
}
InfluxObject getInflux(TomlParseResult result) {
InfluxObject c = new InfluxObject();
if(result.contains("influx")) {
TomlTable t = result.getTable("influx");
if(t != null && t.contains("url")) {
c.url = t.getString("url");
}
if(t != null && t.contains("username")) {
c.username = t.getString("username");
}
if(t != null && t.contains("password")) {
c.password = t.getString("password");
}
if(t != null && t.contains("database")) {
c.database = t.getString("database");
}
}
return c;
}
static class InfluxObject {
String url = "http://localhost:8086";
String username = "root";
String password = "";
String database = "hmci";
private boolean isValid = false;
Boolean isValid() {
return isValid;
}
// TODO: Fixme
void validate() {
isValid = true;
}
}
static class HmcObject {
String name;
String url;
String username;
String password;
Boolean unsafe;
private boolean isValid = false;
Boolean isValid() {
return isValid;
}
// TODO: Fixme
void validate() {
isValid = true;
}
@Override
public String toString() {
return name;
}
}
}
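To illustrate the dotted-key lookups in getHmc() above: with the sample hmci.toml at the bottom of this commit, result.getTable("hmc") returns a table whose top-level keys are the site names, and key + ".url" descends into the nested [hmc.site1] table. A small sketch (the file path is assumed):
TomlParseResult result = Toml.parse(Paths.get("/etc/hmci.toml"));
TomlTable hmcTable = result.getTable("hmc");
for (String key : hmcTable.keySet()) {                 // e.g. "site1"
    String url = hmcTable.getString(key + ".url");     // resolves url inside [hmc.site1]
    System.out.println(key + " -> " + url);
}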

View file

@@ -0,0 +1,17 @@
package biz.nellemann.hmci;
import java.util.Map;
public class Measurement {
Map<String, String> tags;
Map<String, Number> fields;
Measurement() {
}
Measurement(Map<String, String> tags, Map<String, Number> fields) {
this.tags = tags;
this.fields = fields;
}
}
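For context, this is the shape processMeasurementMap() in InfluxClient consumes: one Influx point is built per field, carrying all of the measurement's tags. A small usage sketch with made-up values (batchPoints as in InfluxClient):
Map<String, String> tags = new HashMap<>();
tags.put("system", "exampleSystem");                   // hypothetical tag
Map<String, Number> fields = new HashMap<>();
fields.put("logicalMem", 8192.0);                      // hypothetical field
Measurement m = new Measurement(tags, fields);

m.fields.forEach((fieldName, fieldValue) -> {
    Point.Builder builder = Point.measurement("PartitionMemory")
            .time(Instant.now().toEpochMilli(), TimeUnit.MILLISECONDS)
            .tag("name", fieldName)
            .addField("value", fieldValue);
    m.tags.forEach(builder::tag);                      // copy every tag onto the point
    batchPoints.point(builder.build());
});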

View file

@@ -0,0 +1,19 @@
package biz.nellemann.hmci;
import picocli.CommandLine;
import java.io.IOException;
import java.util.jar.Attributes;
import java.util.jar.Manifest;
class VersionProvider implements CommandLine.IVersionProvider {
public String[] getVersion() throws IOException {
Manifest manifest = new Manifest(getClass().getResourceAsStream("/META-INF/MANIFEST.MF"));
Attributes attrs = manifest.getMainAttributes();
return new String[] { "${COMMAND-FULL-NAME} " + attrs.getValue("Build-Version") };
}
}
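Picocli calls this provider when the standard --version option (enabled through mixinStandardHelpOptions in Main above) is used, expanding ${COMMAND-FULL-NAME} to the command name. A hypothetical invocation:
// Prints e.g. "hmci <Build-Version>", where Build-Version is stamped by the jar manifest block in build.gradle
new CommandLine(new Main()).execute("--version");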

View file

@@ -3,7 +3,7 @@
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{12} - %msg%n</pattern>
</encoder>
</appender>

View file

@@ -0,0 +1,30 @@
package biz.nellemann.hmci
import spock.lang.Specification
class ConfigurationTest extends Specification {
String testConfigurationFile = new File(getClass().getResource('/hmci.toml').toURI()).absolutePath
void "test parsing"() {
when:
Configuration conf = new Configuration(testConfigurationFile);
then:
conf != null
}
void "test lookup influx"() {
when:
Configuration conf = new Configuration(testConfigurationFile);
then:
conf.influx != null
conf.influx.url == "http://localhost:8086"
}
}

View file

@@ -12,7 +12,12 @@ class HmcClientTest extends Specification {
def setup() {
mockServer.start();
hmc = new HmcClient("site", mockServer.url("/").toString(), "testUser", "testPassword")
Configuration.HmcObject configHmc = new Configuration.HmcObject()
configHmc.name = "site1"
configHmc.url = mockServer.url("/").toString()
configHmc.username = "testUser"
configHmc.password = "testPassword"
hmc = new HmcClient(configHmc)
hmc.authToken = "blaBla"
}

View file

@@ -5,6 +5,6 @@ package biz.nellemann.hmci
import spock.lang.Specification
class AppTest extends Specification {
class InsightsTest extends Specification {
}

View file

@@ -34,12 +34,12 @@ class LogicalPartitionTest extends Specification {
when:
lpar.processMetrics(testJson)
List<Map> listOfMaps = lpar.getMemoryMetrics()
List<Measurement> listOfMeasurements = lpar.getMemoryMetrics()
then:
listOfMaps.size() == 1
listOfMaps.first().get("fields")['logicalMem'] == 8192.000
listOfMaps.first().get("tags")['partition'] == '9Flash01'
listOfMeasurements.size() == 1
listOfMeasurements.first().fields['logicalMem'] == 8192.000
listOfMeasurements.first().tags['partition'] == '9Flash01'
}
@@ -53,12 +53,12 @@ class LogicalPartitionTest extends Specification {
when:
lpar.processMetrics(testJson)
List<Map> listOfMaps = lpar.getProcessorMetrics()
List<Measurement> listOfMeasurements = lpar.getProcessorMetrics()
then:
listOfMaps.size() == 1
listOfMaps.first().get("fields")['utilizedProcUnits'] == 0.001
listOfMaps.first().get("tags")['partition'] == '9Flash01'
listOfMeasurements.size() == 1
listOfMeasurements.first().fields['utilizedProcUnits'] == 0.001
listOfMeasurements.first().tags['partition'] == '9Flash01'
}
@@ -72,12 +72,12 @@ class LogicalPartitionTest extends Specification {
when:
lpar.processMetrics(testJson)
List<Map> listOfMaps = lpar.getVirtualEthernetAdapterMetrics()
List<Measurement> listOfMeasurements = lpar.getVirtualEthernetAdapterMetrics()
then:
listOfMaps.size() == 1
listOfMaps.first().get("fields")['receivedBytes'] == 276.467
listOfMaps.first().get("tags")['sea'] == 'ent5'
listOfMeasurements.size() == 1
listOfMeasurements.first().fields['receivedBytes'] == 276.467
listOfMeasurements.first().tags['sea'] == 'ent5'
}
void "test getVirtualFiberChannelAdaptersMetrics"() {
@@ -90,12 +90,12 @@ class LogicalPartitionTest extends Specification {
when:
lpar.processMetrics(testJson)
List<Map> listOfMaps = lpar.getVirtualFiberChannelAdaptersMetrics()
List<Measurement> listOfMeasurements = lpar.getVirtualFiberChannelAdaptersMetrics()
then:
listOfMaps.size() == 4
listOfMaps.first().get("fields")['writeBytes'] == 6690.133
listOfMaps.first().get("tags")['viosId'] == '1'
listOfMeasurements.size() == 4
listOfMeasurements.first().fields['writeBytes'] == 6690.133
listOfMeasurements.first().tags['viosId'] == '1'
}

View file

@@ -33,11 +33,11 @@ class ManagedSystemTest extends Specification {
when:
system.processMetrics(testJson)
List<Map> listOfMaps = system.getMemoryMetrics()
List<Measurement> listOfMeasurements = system.getMemoryMetrics()
then:
listOfMaps.size() == 1
listOfMaps.first().get("fields")['totalMem'] == 1048576.000
listOfMeasurements.size() == 1
listOfMeasurements.first().fields['totalMem'] == 1048576.000
}
void "test getProcessorMetrics"() {
@@ -49,11 +49,11 @@ class ManagedSystemTest extends Specification {
when:
system.processMetrics(testJson)
List<Map> listOfMaps = system.getProcessorMetrics()
List<Measurement> listOfMeasurements = system.getProcessorMetrics()
then:
listOfMaps.size() == 1
listOfMaps.first().get("fields")['availableProcUnits'] == 16.000
listOfMeasurements.size() == 1
listOfMeasurements.first().fields['availableProcUnits'] == 16.000
}
void "test getSystemSharedProcessorPools"() {
@@ -65,11 +65,11 @@ class ManagedSystemTest extends Specification {
when:
system.processMetrics(testJson)
List<Map> listOfMaps = system.getSharedProcessorPools()
List<Measurement> listOfMeasurements = system.getSharedProcessorPools()
then:
listOfMaps.size() == 1
listOfMaps.first().get("fields")['assignedProcUnits'] == 23.767
listOfMeasurements.size() == 1
listOfMeasurements.first().fields['assignedProcUnits'] == 23.767
}
void "test VIOS data"() {
@@ -80,11 +80,11 @@ class ManagedSystemTest extends Specification {
when:
system.processMetrics(testJson)
List<Map> listOfMaps = system.getSharedProcessorPools()
List<Measurement> listOfMeasurements = system.getSharedProcessorPools()
then:
listOfMaps.size() == 1
listOfMaps.first().get("fields")['assignedProcUnits'] == 23.767
listOfMeasurements.size() == 1
listOfMeasurements.first().fields['assignedProcUnits'] == 23.767
}
}

View file

@@ -0,0 +1,32 @@
# HMCi Configuration
# How often to query HMCs for data - in seconds
hmci.refresh = 30
# Rescan HMCs for new systems and partitions - every N refreshes
hmci.rescan = 60
# InfluxDB to save metrics
[influx]
url = "http://localhost:8086"
username = "root"
password = ""
database = "hmci"
# One or more HMCs to query for data and metrics
[hmc]
# HMC on our primary site
[hmc.site1]
url = "https://10.10.10.10:12443"
username = "hmci"
password = "hmcihmci"
unsafe = true # Ignore SSL cert. errors
# Example
#[hmc.site2]
#url = "https://10.10.20.20:12443"
#username = "viewer"
#password = "someSecret"
#unsafe = false