Merged in development (pull request #12)

Development
Mark Nellemann 2021-01-14 13:26:46 +00:00
commit fb0b465f38
45 changed files with 457 additions and 376 deletions

View file

@ -1,8 +1,12 @@
# HMC Insights
**HMCi** is a utility that collects metrics from one or more *IBM Power HMC* systems. The metric data is processed and saved into an InfluxDB time-series database. Grafana is used to visualize the metrics from InfluxDB.
**HMCi** is a utility that collects metrics from one or more *IBM Power HMC* systems. The metric data is processed and saved into an InfluxDB time-series database. Grafana can be used to visualize the metrics from InfluxDB.
Metrics includes *Managed Systems* (the physical Power servers) and *Logical Partitions* (the virtualized servers) running AIX, Linux and IBM-i (AS/400).
Metrics include:
- *Managed Systems* - the physical Power servers
- *Logical Partitions* - the virtualized servers running AIX, Linux and IBM-i (AS/400)
- *Virtual I/O Servers* - the I/O partition(s) handling network and storage
- *Energy* - power consumption and temperatures
![architecture](https://bitbucket.org/mnellemann/hmci/downloads/HMCi.png)
@ -30,7 +34,7 @@ Install InfluxDB on an *LPAR* or other server, which is network accessible by th
### HMCi Installation Instructions
- Ensure you have correct date/time and NTPd running to keep it accurate!
- Ensure you have **correct date/time** and NTPd running to keep it accurate!
- The only requirement for **hmci** is the Java runtime, version 8 (or later)
- Install **HMCi** from [downloads](https://bitbucket.org/mnellemann/hmci/downloads/) (rpm, deb or jar) or build from source
- Copy the *doc/hmci.toml* configuration example into */etc/hmci.toml* and edit the configuration to suit your environment. The location of the configuration file can be changed with a flag when running hmci.
@ -49,6 +53,21 @@ Below are screenshots of the provided Grafana dashboards (found in the **doc/**
- [hmci-lpars](https://bitbucket.org/mnellemann/hmci/downloads/hmci-lpars.png)
## Known problems
### Naming collision
You can't have partitions with the same name on different HMCs, as these cannot be distinguished when metrics are
written to InfluxDB (which uses the partition name as a key).
### Renaming partitions
If you rename a partition, the metrics already in InfluxDB remain available under the old name, while new metrics are
written under the new name. There is no easy way to migrate the old data, but you can delete it easily:
DELETE WHERE partition = 'lpar-name';
## Notes
### Start InfluxDB and Grafana at boot on RedHat 7+

View file

@ -78,7 +78,7 @@ buildDeb {
}
jacoco {
toolVersion = "0.8.5"
toolVersion = "0.8.6"
}
jacocoTestReport {

View file

@ -1,17 +1,17 @@
# HMCi Configuration
# How often to query HMC's for data - in seconds
hmci.refresh = 30
hmci.update = 25
# Rescan HMC's for new systems and partitions - every x refresh
hmci.rescan = 60
# Rescan HMC's for new systems and partitions - every x update
hmci.rescan = 90
# InfluxDB to save metrics
[influx]
url = "http://localhost:8086"
username = "root"
password = ""
database = "hmci"
url = "http://localhost:8086"
username = "root"
password = ""
database = "hmci"
# One or more HMC's to query for data and metrics
[hmc]
@ -23,6 +23,7 @@ database = "hmci"
password = "hmcihmci"
unsafe = true # Ignore SSL cert. errors
# Example
#[hmc.site2]
#url = "https://10.10.20.20:12443"

View file

@ -1,3 +1,3 @@
id = hmci
group = biz.nellemann.hmci
version = 1.0.1
version = 1.1.0

View file

@ -23,6 +23,8 @@ import picocli.CommandLine.Command;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
@Command(name = "hmci",
@ -45,18 +47,34 @@ public class Application implements Callable<Integer> {
@Override
public Integer call() throws IOException {
Configuration configuration;
InfluxClient influxClient;
List<Thread> threadList = new ArrayList<>();
File file = new File(configurationFile);
if(!file.exists()) {
System.err.println("Error - No configuration file found at: " + file.toString());
return -1;
}
Configuration configuration = new Configuration(configurationFile);
Insights insights = new Insights(configuration);
try {
insights.run();
} catch (InterruptedException e) {
configuration = new Configuration(configurationFile);
influxClient = new InfluxClient(configuration.getInflux());
influxClient.login();
for(Configuration.HmcObject configHmc : configuration.getHmc()) {
Thread t = new Thread(new HmcInstance(configHmc, influxClient));
threadList.add(t);
t.start();
}
for (Thread thread : threadList) {
thread.join();
}
} catch (InterruptedException | RuntimeException e) {
log.error(e.getMessage());
return 1;
}
return 0;
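
The refactored call() above starts one HmcInstance thread per configured HMC and joins them all. The JVM shutdown hook that the old single-threaded Insights.run() registered (visible among the removed lines further down) is not part of this hunk; the sketch below shows how it could be re-wired against the new per-instance keepRunning flag, assuming a hypothetical HmcInstance.stop() method that sets the flag to false.

```java
// Sketch only - HmcInstance.stop() is hypothetical; this commit only shows the keepRunning flag.
import java.util.ArrayList;
import java.util.List;

class GracefulRunner {
    static void runAllAndWait(List<HmcInstance> instances) throws InterruptedException {
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            System.out.println("Stopping HMCi, please wait ...");
            instances.forEach(HmcInstance::stop);   // would set keepRunning to false
        }));
        List<Thread> threads = new ArrayList<>();
        for (HmcInstance instance : instances) {
            Thread t = new Thread(instance);
            threads.add(t);
            t.start();
        }
        for (Thread t : threads) {
            t.join();                               // wait until every HMC loop has exited
        }
    }
}
```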

View file

@ -10,14 +10,15 @@ import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
public class Configuration {
public final class Configuration {
//private final static Logger log = LoggerFactory.getLogger(Configuration.class);
final public Long refresh;
final public Long rescan;
final public InfluxObject influx;
final public List<HmcObject> hmc;
final private Long update;
final private Long rescan;
final private InfluxObject influx;
final private List<HmcObject> hmcList;
Configuration(String configurationFile) throws IOException {
@ -25,25 +26,25 @@ public class Configuration {
TomlParseResult result = Toml.parse(source);
result.errors().forEach(error -> System.err.println(error.toString()));
if(result.contains("refresh")) {
refresh = result.getLong("refresh");
if(result.contains("hmci.update")) {
update = result.getLong("hmci.update");
} else {
refresh = 15L;
update = 30L;
}
if(result.contains("rescan")) {
rescan = result.getLong("rescan");
if(result.contains("hmci.rescan")) {
rescan = result.getLong("hmci.rescan");
} else {
rescan = 60L;
}
hmc = getHmc(result);
influx = getInflux(result);
hmcList = parseConfigurationForHmc(result);
influx = parseConfigurationForInflux(result);
}
List<HmcObject> getHmc(TomlParseResult result) {
private List<HmcObject> parseConfigurationForHmc(TomlParseResult result) {
ArrayList<HmcObject> list = new ArrayList<>();
@ -56,6 +57,8 @@ public class Configuration {
HmcObject c = new HmcObject();
c.name = key;
c.update = update;
c.rescan = rescan;
if(hmcTable.contains(key+".url")) {
c.url = hmcTable.getString(key+".url");
@ -83,7 +86,7 @@ public class Configuration {
}
InfluxObject getInflux(TomlParseResult result) {
private InfluxObject parseConfigurationForInflux(TomlParseResult result) {
InfluxObject c = new InfluxObject();
@ -112,6 +115,16 @@ public class Configuration {
}
public List<HmcObject> getHmc() {
return hmcList;
}
public InfluxObject getInflux() {
return influx;
}
static class InfluxObject {
String url = "http://localhost:8086";
@ -134,7 +147,7 @@ public class Configuration {
return validated;
}
// TODO: Fixme
// TODO: Implement validation
void validate() {
validated = true;
}
@ -153,16 +166,20 @@ public class Configuration {
String username;
String password;
Boolean unsafe = false;
Long update = 30L;
Long rescan = 60L;
private boolean validated = false;
HmcObject() { }
HmcObject(String name, String url, String username, String password, Boolean unsafe) {
HmcObject(String name, String url, String username, String password, Boolean unsafe, Long update, Long rescan) {
this.url = url;
this.username = username;
this.password = password;
this.unsafe = unsafe;
this.update = update;
this.rescan = rescan;
}
@ -170,7 +187,7 @@ public class Configuration {
return validated;
}
// TODO: Fixme
// TODO: Implement validation
void validate() {
validated = true;
}
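
The configuration keys move from the bare refresh/rescan names to the dotted hmci.update and hmci.rescan form, with defaults of 30 and 60 when a key is absent. Below is a minimal, self-contained sketch of how those dotted keys resolve, assuming the tomlj library (org.tomlj) behind the Toml.parse/TomlParseResult calls above; with the example values from doc/hmci.toml a rescan then happens roughly every 90 × 25 s ≈ 37.5 minutes.

```java
// Sketch only - resolves the new dotted keys with the same defaults as Configuration above.
import org.tomlj.Toml;
import org.tomlj.TomlParseResult;

public class ConfigKeyDemo {
    public static void main(String[] args) {
        TomlParseResult result = Toml.parse("hmci.update = 25\nhmci.rescan = 90\n");
        long update = result.contains("hmci.update") ? result.getLong("hmci.update") : 30L;
        long rescan = result.contains("hmci.rescan") ? result.getLong("hmci.rescan") : 60L;
        System.out.printf("update=%ds, rescan every %d updates (~%.1f min)%n",
                update, rescan, update * rescan / 60.0);
    }
}
```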

View file

@ -15,86 +15,133 @@
*/
package biz.nellemann.hmci;
import biz.nellemann.hmci.Configuration.HmcObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.time.Instant;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import static java.lang.Thread.*;
import static java.lang.Thread.sleep;
class Insights {
class HmcInstance implements Runnable {
private final static Logger log = LoggerFactory.getLogger(Insights.class);
private final static Logger log = LoggerFactory.getLogger(HmcInstance.class);
final Configuration configuration;
private final String hmcId;
private final Long updateValue;
private final Long rescanValue;
private final Map<String,ManagedSystem> systems = new HashMap<>();
private final Map<String, LogicalPartition> partitions = new HashMap<>();
InfluxClient influxClient;
final Map<String, HmcClient> hmcClients = new HashMap<>();
final Map<String,ManagedSystem> systems = new HashMap<>();
final Map<String, LogicalPartition> partitions = new HashMap<>();
private final HmcRestClient hmcRestClient;
private final InfluxClient influxClient;
private final AtomicBoolean keepRunning = new AtomicBoolean(true);
Insights(Configuration configuration) {
this.configuration = configuration;
try {
influxClient = new InfluxClient(configuration.influx);
influxClient.login();
} catch (Exception e) {
System.exit(1);
HmcInstance(HmcObject configHmc, InfluxClient influxClient) {
this.hmcId = configHmc.name;
this.updateValue = configHmc.update;
this.rescanValue = configHmc.rescan;
this.influxClient = influxClient;
hmcRestClient = new HmcRestClient(configHmc.url, configHmc.username, configHmc.password, configHmc.unsafe);
log.debug(String.format("HmcInstance() - id: %s, update: %s, rescan: %s", hmcId, updateValue, rescanValue));
}
// Initial scan
@Override
public String toString() {
return hmcId;
}
@Override
public void run() {
log.debug("run() - " + hmcId);
int executions = 0;
discover();
do {
Instant instantStart = Instant.now();
try {
getMetricsForSystems();
getMetricsForPartitions();
getMetricsForEnergy();
writeMetricsForManagedSystems();
writeMetricsForLogicalPartitions();
writeMetricsForSystemEnergy();
influxClient.writeBatchPoints();
// Refresh
if (++executions > rescanValue) {
executions = 0;
discover();
}
} catch (Exception e) {
log.error("run()", e);
}
Instant instantEnd = Instant.now();
long timeSpend = Duration.between(instantStart, instantEnd).getSeconds();
log.debug("run() - duration sec: " + timeSpend);
if(timeSpend < updateValue) {
try {
log.debug("run() - sleep sec: " + (updateValue - timeSpend));
//noinspection BusyWait
sleep((updateValue - timeSpend) * 1000);
} catch (InterruptedException e) {
log.error("run() - sleep interrupted", e);
}
}
} while (keepRunning.get());
}
void discover() {
configuration.hmc.forEach( configHmc -> {
if(!hmcClients.containsKey(configHmc.name)) {
HmcClient hmcClient = new HmcClient(configHmc);
hmcClients.put(configHmc.name, hmcClient);
log.info("discover() - Adding HMC: " + hmcClient);
}
});
hmcClients.forEach(( hmcId, hmcClient) -> {
log.debug("discover() - " + hmcId);
try {
hmcClient.logoff();
hmcClient.login();
hmcClient.getManagedSystems().forEach((systemId, system) -> {
hmcRestClient.logoff();
hmcRestClient.login();
hmcRestClient.getManagedSystems().forEach((systemId, system) -> {
// Add to list of known systems
if(!systems.containsKey(systemId)) {
systems.put(systemId, system);
log.info("discover() - Found ManagedSystem: " + system);
log.info("discover() - Found ManagedSystem: " + system + " @" + hmcId);
}
// Get LPAR's for this system
try {
hmcClient.getLogicalPartitionsForManagedSystem(system).forEach((partitionId, partition) -> {
hmcRestClient.getLogicalPartitionsForManagedSystem(system).forEach((partitionId, partition) -> {
// Add to list of known partitions
if(!partitions.containsKey(partitionId)) {
partitions.put(partitionId, partition);
log.info("discover() - Found LogicalPartition: " + partition);
log.info("discover() - Found LogicalPartition: " + partition + " @" + hmcId);
}
});
} catch (Exception e) {
log.error("discover()", e);
log.error("discover() - getLogicalPartitions", e);
}
});
} catch(Exception e) {
log.error("discover() - " + hmcId + " error: " + e.getMessage());
log.error("discover() - getManagedSystems: " + e.getMessage());
}
});
}
@ -102,12 +149,10 @@ class Insights {
systems.forEach((systemId, system) -> {
HmcClient hmcClient = hmcClients.get(system.hmcId);
// Get and process metrics for this system
String tmpJsonString = null;
try {
tmpJsonString = hmcClient.getPcmDataForManagedSystem(system);
tmpJsonString = hmcRestClient.getPcmDataForManagedSystem(system);
} catch (Exception e) {
log.error("getMetricsForSystems()", e);
}
@ -128,12 +173,10 @@ class Insights {
// Get LPAR's for this system
partitions.forEach((partitionId, partition) -> {
HmcClient hmcClient = hmcClients.get(partition.system.hmcId);
// Get and process metrics for this partition
String tmpJsonString2 = null;
try {
tmpJsonString2 = hmcClient.getPcmDataForLogicalPartition(partition);
tmpJsonString2 = hmcRestClient.getPcmDataForLogicalPartition(partition);
} catch (Exception e) {
log.error("getMetricsForPartitions() - getPcmDataForLogicalPartition", e);
}
@ -153,12 +196,10 @@ class Insights {
systems.forEach((systemId, system) -> {
HmcClient hmcClient = hmcClients.get(system.hmcId);
// Get and process metrics for this system
String tmpJsonString = null;
try {
tmpJsonString = hmcClient.getPcmDataForEnergy(system.energy);
tmpJsonString = hmcRestClient.getPcmDataForEnergy(system.energy);
} catch (Exception e) {
log.error("getMetricsForEnergy()", e);
}
@ -176,7 +217,7 @@ class Insights {
try {
systems.forEach((systemId, system) -> influxClient.writeManagedSystem(system));
} catch (NullPointerException npe) {
log.warn("writeMetricsForManagedSystems() - NPE: " + npe.toString());
log.warn("writeMetricsForManagedSystems() - NPE: " + npe.toString(), npe);
}
}
@ -185,7 +226,7 @@ class Insights {
try {
partitions.forEach((partitionId, partition) -> influxClient.writeLogicalPartition(partition));
} catch (NullPointerException npe) {
log.warn("writeMetricsForLogicalPartitions() - NPE: " + npe.toString());
log.warn("writeMetricsForLogicalPartitions() - NPE: " + npe.toString(), npe);
}
}
@ -194,49 +235,9 @@ class Insights {
try {
systems.forEach((systemId, system) -> influxClient.writeSystemEnergy(system.energy));
} catch (NullPointerException npe) {
log.warn("writeMetricsForSystemEnergy() - NPE: " + npe.toString());
log.warn("writeMetricsForSystemEnergy() - NPE: " + npe.toString(), npe);
}
}
void run() throws InterruptedException {
log.debug("run()");
int executions = 0;
AtomicBoolean keepRunning = new AtomicBoolean(true);
Thread shutdownHook = new Thread(() -> {
keepRunning.set(false);
System.out.println("Stopping HMCi, please wait ...");
});
Runtime.getRuntime().addShutdownHook(shutdownHook);
do {
try {
getMetricsForSystems();
getMetricsForPartitions();
getMetricsForEnergy();
writeMetricsForManagedSystems();
writeMetricsForLogicalPartitions();
writeMetricsForSystemEnergy();
influxClient.writeBatchPoints();
// Refresh HMC's
if (executions > configuration.rescan) {
executions = 0;
discover();
}
} catch (Exception e) {
log.error("run()", e);
}
executions++;
//noinspection BusyWait
sleep(configuration.refresh * 1000);
} while (keepRunning.get());
}
}

View file

@ -15,7 +15,6 @@
*/
package biz.nellemann.hmci;
import biz.nellemann.hmci.Configuration.HmcObject;
import okhttp3.*;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
@ -28,47 +27,54 @@ import javax.net.ssl.*;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.cert.X509Certificate;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
class HmcClient {
private final static Logger log = LoggerFactory.getLogger(HmcClient.class);
public class HmcRestClient {
private final static Logger log = LoggerFactory.getLogger(HmcRestClient.class);
private final MediaType MEDIA_TYPE_IBM_XML_LOGIN = MediaType.parse("application/vnd.ibm.powervm.web+xml; type=LogonRequest");
private final String hmcId;
private final String baseUrl;
private final String username;
private final String password;
protected Integer responseErrors = 0;
protected String authToken;
private final OkHttpClient client;
// OkHttpClient timeouts
private final static int CONNECT_TIMEOUT = 2;
private final static int WRITE_TIMEOUT = 3;
private final static int READ_TIMEOUT = 3;
HmcClient(HmcObject configHmc) {
private final String baseUrl;
private final String username;
private final String password;
this.hmcId = configHmc.name;
this.baseUrl = configHmc.url;
this.username = configHmc.username;
this.password = configHmc.password;
Boolean unsafe = configHmc.unsafe;
HmcRestClient(String url, String username, String password, Boolean unsafe) {
this.baseUrl = url;
this.username = username;
this.password = password;
if(unsafe) {
this.client = getUnsafeOkHttpClient();
} else {
this.client = new OkHttpClient();
this.client = getSafeOkHttpClient();
}
}
@Override
public String toString() {
return hmcId + " (" + baseUrl + ")";
return baseUrl;
}
@ -165,7 +171,6 @@ class HmcClient {
Elements managedSystems = doc.select("ManagedSystem|ManagedSystem"); // doc.select("img[src$=.png]");
for(Element el : managedSystems) {
ManagedSystem system = new ManagedSystem(
hmcId,
el.select("Metadata > Atom > AtomID").text(),
el.select("SystemName").text(),
el.select("MachineTypeModelAndSerialNumber > MachineType").text(),
@ -193,7 +198,7 @@ class HmcClient {
Map<String, LogicalPartition> getLogicalPartitionsForManagedSystem(ManagedSystem system) throws Exception {
URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem/%s/LogicalPartition", baseUrl, system.id));
String responseBody = getResponse(url);
Map<String, LogicalPartition> partitionMap = new HashMap<String, LogicalPartition>();
Map<String, LogicalPartition> partitionMap = new HashMap<>();
// Do not try to parse empty response
if(responseBody == null || responseBody.isEmpty() || responseBody.length() <= 1) {
@ -300,7 +305,8 @@ class HmcClient {
/**
* Parse XML feed to get PCM Data in JSON format
* Parse XML feed to get PCM Data in JSON format.
* Does not work for older HMC (pre v9) and older Power server (pre Power 8).
* @param systemEnergy a valid SystemEnergy
* @return JSON string with PCM data for this SystemEnergy
*/
@ -315,7 +321,7 @@ class HmcClient {
// Do not try to parse empty response
if(responseBody == null || responseBody.isEmpty() || responseBody.length() <= 1) {
responseErrors++;
log.warn("getPcmDataForEnergy() - empty response");
log.debug("getPcmDataForEnergy() - empty response");
return null;
}
@ -379,7 +385,7 @@ class HmcClient {
/**
* Provide an unsafe (ignoring SSL problems) OkHttpClient
*
* @return unsafe OkHttpClient
* @return OkHttpClient ignoring SSL/TLS errors
*/
private static OkHttpClient getUnsafeOkHttpClient() {
try {
@ -387,8 +393,7 @@ class HmcClient {
final TrustManager[] trustAllCerts = new TrustManager[] {
new X509TrustManager() {
@Override
public void checkClientTrusted(X509Certificate[] chain, String authType) {
}
public void checkClientTrusted(X509Certificate[] chain, String authType) { }
@Override
public void checkServerTrusted(X509Certificate[] chain, String authType) {
@ -411,11 +416,27 @@ class HmcClient {
OkHttpClient.Builder builder = new OkHttpClient.Builder();
builder.sslSocketFactory(sslSocketFactory, (X509TrustManager)trustAllCerts[0]);
builder.hostnameVerifier((hostname, session) -> true);
builder.connectTimeout(CONNECT_TIMEOUT, TimeUnit.SECONDS);
builder.writeTimeout(WRITE_TIMEOUT, TimeUnit.SECONDS);
builder.readTimeout(READ_TIMEOUT, TimeUnit.SECONDS);
return builder.build();
} catch (Exception e) {
} catch (KeyManagementException | NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
/**
* Get OkHttpClient with our preferred timeout values.
* @return OkHttpClient
*/
private static OkHttpClient getSafeOkHttpClient() {
OkHttpClient.Builder builder = new OkHttpClient.Builder();
builder.connectTimeout(CONNECT_TIMEOUT, TimeUnit.SECONDS);
builder.writeTimeout(WRITE_TIMEOUT, TimeUnit.SECONDS);
builder.readTimeout(READ_TIMEOUT, TimeUnit.SECONDS);
return builder.build();
}
}
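
The reworked client is constructed from the connection details directly rather than from an HmcObject, as the updated HmcRestClientTest further down also shows. A rough usage sketch, where the URL is the commented site2 example from doc/hmci.toml and the credentials are placeholders:

```java
// Sketch only - URL taken from the commented doc/hmci.toml example; credentials are placeholders.
HmcRestClient hmc = new HmcRestClient("https://10.10.20.20:12443", "user", "password", true);
try {
    hmc.login();
    hmc.getManagedSystems().forEach((systemId, system) ->
            System.out.println("Found: " + systemId + " -> " + system));
    hmc.logoff();
} catch (Exception e) {
    System.err.println("HMC query failed: " + e.getMessage());
}
```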

View file

@ -36,6 +36,10 @@ class InfluxClient {
private final static Logger log = LoggerFactory.getLogger(InfluxClient.class);
private static final int BATCH_ACTIONS_LIMIT = 5000;
private static final int BATCH_INTERVAL_DURATION = 1000;
final private String url;
final private String username;
final private String password;
@ -53,7 +57,7 @@ class InfluxClient {
}
synchronized void login() throws Exception {
synchronized void login() throws RuntimeException, InterruptedException {
if(influxDB != null) {
return;
@ -67,19 +71,13 @@ class InfluxClient {
log.debug("Connecting to InfluxDB - " + url);
influxDB = InfluxDBFactory.connect(url, username, password);
createDatabase();
// Enable batch writes to get better performance.
BatchOptions options = BatchOptions.DEFAULTS.actions(1000).flushDuration(5000).precision(TimeUnit.SECONDS);
influxDB.enableBatch(options);
batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build();
connected = true;
} catch(Exception e) {
sleep(15*1000);
sleep(15 * 1000);
if(errors++ > 3) {
log.error("login() error, giving up - " + e.getMessage());
throw new Exception(e);
throw new RuntimeException(e);
} else {
log.warn("login() error, retrying - " + e.getMessage());
}
@ -107,7 +105,7 @@ class InfluxClient {
synchronized void writeBatchPoints() throws Exception {
log.debug("writeBatchPoints()");
try {
influxDB.write(batchPoints);
influxDB.writeWithRetry(batchPoints);
} catch(Exception e) {
log.error("writeBatchPoints() error - " + e.getMessage());
logoff();
@ -125,7 +123,7 @@ class InfluxClient {
void writeManagedSystem(ManagedSystem system) {
if(system.metrics == null) {
log.warn("writeManagedSystem() - null metrics, skipping");
log.debug("writeManagedSystem() - null metrics, skipping");
return;
}
@ -242,13 +240,14 @@ class InfluxClient {
/*
System Energy
Not supported on older HMC (pre v8) or older Power server (pre Power 8)
*/
void writeSystemEnergy(SystemEnergy system) {
if(system.metrics == null) {
log.warn("writeSystemEnergy() - null metrics, skipping");
log.debug("writeSystemEnergy() - null metrics, skipping");
return;
}

View file

@ -41,6 +41,7 @@ class LogicalPartition extends MetaSystem {
}
@Override
public String toString() {
return String.format("[%s] %s (%s)", id, name, type);
}
@ -50,12 +51,12 @@ class LogicalPartition extends MetaSystem {
List<Measurement> list = new ArrayList<>();
Map<String, String> tagsMap = new HashMap<String, String>();
Map<String, String> tagsMap = new HashMap<>();
tagsMap.put("system", system.name);
tagsMap.put("partition", name);
log.debug("getAffinityScore() - tags: " + tagsMap.toString());
Map<String, Number> fieldsMap = new HashMap<String, Number>();
Map<String, Number> fieldsMap = new HashMap<>();
fieldsMap.put("affinityScore", metrics.systemUtil.sample.lparsUtil.affinityScore);
log.debug("getAffinityScore() - fields: " + fieldsMap.toString());
@ -68,12 +69,12 @@ class LogicalPartition extends MetaSystem {
List<Measurement> list = new ArrayList<>();
Map<String, String> tagsMap = new HashMap<String, String>();
Map<String, String> tagsMap = new HashMap<>();
tagsMap.put("system", system.name);
tagsMap.put("partition", name);
log.debug("getMemoryMetrics() - tags: " + tagsMap.toString());
Map<String, Number> fieldsMap = new HashMap<String, Number>();
Map<String, Number> fieldsMap = new HashMap<>();
fieldsMap.put("logicalMem", metrics.systemUtil.sample.lparsUtil.memory.logicalMem);
fieldsMap.put("backedPhysicalMem", metrics.systemUtil.sample.lparsUtil.memory.backedPhysicalMem);
log.debug("getMemoryMetrics() - fields: " + fieldsMap.toString());
@ -87,12 +88,12 @@ class LogicalPartition extends MetaSystem {
List<Measurement> list = new ArrayList<>();
HashMap<String, String> tagsMap = new HashMap<String, String>();
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("system", system.name);
tagsMap.put("partition", name);
log.debug("getProcessorMetrics() - tags: " + tagsMap.toString());
HashMap<String, Number> fieldsMap = new HashMap<String, Number>();
HashMap<String, Number> fieldsMap = new HashMap<>();
fieldsMap.put("utilizedProcUnits", metrics.systemUtil.sample.lparsUtil.processor.utilizedProcUnits);
fieldsMap.put("maxVirtualProcessors", metrics.systemUtil.sample.lparsUtil.processor.maxVirtualProcessors);
fieldsMap.put("currentVirtualProcessors", metrics.systemUtil.sample.lparsUtil.processor.currentVirtualProcessors);
@ -114,9 +115,10 @@ class LogicalPartition extends MetaSystem {
List<Measurement> getVirtualEthernetAdapterMetrics() {
List<Measurement> list = new ArrayList<>();
metrics.systemUtil.sample.lparsUtil.network.virtualEthernetAdapters.forEach( adapter -> {
HashMap<String, String> tagsMap = new HashMap<String, String>();
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("system", system.name);
tagsMap.put("partition", name);
tagsMap.put("sea", adapter.sharedEthernetAdapterId);
@ -125,7 +127,7 @@ class LogicalPartition extends MetaSystem {
tagsMap.put("vswitchId", adapter.vswitchId.toString());
log.debug("getVirtualEthernetAdapterMetrics() - tags: " + tagsMap.toString());
HashMap<String, Number> fieldsMap = new HashMap<String, Number>();
HashMap<String, Number> fieldsMap = new HashMap<>();
fieldsMap.put("receivedPhysicalBytes", adapter.receivedPhysicalBytes);
fieldsMap.put("sentPhysicalBytes", adapter.sentPhysicalBytes);
fieldsMap.put("receivedBytes", adapter.receivedBytes);
@ -144,14 +146,14 @@ class LogicalPartition extends MetaSystem {
List<Measurement> list = new ArrayList<>();
metrics.systemUtil.sample.lparsUtil.storage.virtualFiberChannelAdapters.forEach( adapter -> {
HashMap<String, String> tagsMap = new HashMap<String, String>();
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("system", system.name);
tagsMap.put("partition", name);
tagsMap.put("viosId", adapter.viosId.toString());
tagsMap.put("wwpn", adapter.wwpn);
log.debug("getVirtualFiberChannelAdaptersMetrics() - tags: " + tagsMap.toString());
HashMap<String, Number> fieldsMap = new HashMap<String, Number>();
HashMap<String, Number> fieldsMap = new HashMap<>();
fieldsMap.put("transmittedBytes", adapter.transmittedBytes.get(0));
fieldsMap.put("writeBytes", adapter.writeBytes.get(0));
fieldsMap.put("readBytes", adapter.readBytes.get(0));

View file

@ -27,7 +27,7 @@ class ManagedSystem extends MetaSystem {
private final static Logger log = LoggerFactory.getLogger(ManagedSystem.class);
public final String hmcId;
//public final String hmcId;
public final String id;
public final String name;
public final String type;
@ -37,8 +37,7 @@ class ManagedSystem extends MetaSystem {
public final SystemEnergy energy;
ManagedSystem(String hmcId, String id, String name, String type, String model, String serialNumber) {
this.hmcId = hmcId;
ManagedSystem(String id, String name, String type, String model, String serialNumber) {
this.id = id;
this.name = name;
this.type = type;
@ -47,6 +46,7 @@ class ManagedSystem extends MetaSystem {
this.energy = new SystemEnergy(this);
}
@Override
public String toString() {
return String.format("[%s] %s (%s-%s %s)", id, name, type, model, serialNumber);
}
@ -56,11 +56,11 @@ class ManagedSystem extends MetaSystem {
List<Measurement> list = new ArrayList<>();
HashMap<String, String> tagsMap = new HashMap<String, String>();
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("system", name);
log.debug("getMemoryMetrics() - tags: " + tagsMap.toString());
Map<String, Number> fieldsMap = new HashMap<String, Number>();
Map<String, Number> fieldsMap = new HashMap<>();
fieldsMap.put("totalMem", metrics.systemUtil.sample.serverUtil.memory.totalMem);
fieldsMap.put("availableMem", metrics.systemUtil.sample.serverUtil.memory.availableMem);
fieldsMap.put("configurableMem", metrics.systemUtil.sample.serverUtil.memory.configurableMem);
@ -77,11 +77,11 @@ class ManagedSystem extends MetaSystem {
List<Measurement> list = new ArrayList<>();
HashMap<String, String> tagsMap = new HashMap<String, String>();
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("system", name);
log.debug("getProcessorMetrics() - tags: " + tagsMap.toString());
HashMap<String, Number> fieldsMap = new HashMap<String, Number>();
HashMap<String, Number> fieldsMap = new HashMap<>();
fieldsMap.put("totalProcUnits", metrics.systemUtil.sample.serverUtil.processor.totalProcUnits);
fieldsMap.put("utilizedProcUnits", metrics.systemUtil.sample.serverUtil.processor.utilizedProcUnits);
fieldsMap.put("availableProcUnits", metrics.systemUtil.sample.serverUtil.processor.availableProcUnits);
@ -98,12 +98,12 @@ class ManagedSystem extends MetaSystem {
List<Measurement> list = new ArrayList<>();
metrics.systemUtil.sample.serverUtil.sharedProcessorPool.forEach(adapter -> {
HashMap<String, String> tagsMap = new HashMap<String, String>();
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("system", name);
tagsMap.put("pool", adapter.name);
log.debug("getSharedProcessorPools() - tags: " + tagsMap.toString());
HashMap<String, Number> fieldsMap = new HashMap<String, Number>();
HashMap<String, Number> fieldsMap = new HashMap<>();
fieldsMap.put("assignedProcUnits", adapter.assignedProcUnits);
fieldsMap.put("availableProcUnits", adapter.availableProcUnits);
log.debug("getSharedProcessorPools() - fields: " + fieldsMap.toString());
@ -121,12 +121,12 @@ class ManagedSystem extends MetaSystem {
List<Measurement> list = new ArrayList<>();
metrics.systemUtil.sample.viosUtil.forEach(vios -> {
HashMap<String, String> tagsMap = new HashMap<String, String>();
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("system", name);
tagsMap.put("vios", vios.name);
log.debug("getViosMemoryMetrics() - tags: " + tagsMap.toString());
HashMap<String, Number> fieldsMap = new HashMap<String, Number>();
HashMap<String, Number> fieldsMap = new HashMap<>();
Number assignedMem = getNumberMetricObject(vios.memory.assignedMem);
Number utilizedMem = getNumberMetricObject(vios.memory.utilizedMem);
if(assignedMem != null) {
@ -154,12 +154,12 @@ class ManagedSystem extends MetaSystem {
List<Measurement> list = new ArrayList<>();
metrics.systemUtil.sample.viosUtil.forEach(vios -> {
HashMap<String, String> tagsMap = new HashMap<String, String>();
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("system", name);
tagsMap.put("vios", vios.name);
log.debug("getViosProcessorMetrics() - tags: " + tagsMap.toString());
HashMap<String, Number> fieldsMap = new HashMap<String, Number>();
HashMap<String, Number> fieldsMap = new HashMap<>();
fieldsMap.put("utilizedProcUnits", vios.processor.utilizedProcUnits);
fieldsMap.put("maxVirtualProcessors", vios.processor.maxVirtualProcessors);
fieldsMap.put("currentVirtualProcessors", vios.processor.currentVirtualProcessors);
@ -185,14 +185,14 @@ class ManagedSystem extends MetaSystem {
vios.network.sharedAdapters.forEach(adapter -> {
HashMap<String, String> tagsMap = new HashMap<String, String>();
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("system", name);
tagsMap.put("type", adapter.type);
tagsMap.put("vios", vios.name);
tagsMap.put("device", adapter.physicalLocation);
log.debug("getSystemSharedAdapters() - tags: " + tagsMap.toString());
HashMap<String, Number> fieldsMap = new HashMap<String, Number>();
HashMap<String, Number> fieldsMap = new HashMap<>();
fieldsMap.put("sentBytes", adapter.sentBytes);
fieldsMap.put("receivedBytes", adapter.receivedBytes);
fieldsMap.put("transferredBytes", adapter.transferredBytes);
@ -215,7 +215,7 @@ class ManagedSystem extends MetaSystem {
vios.storage.fiberChannelAdapters.forEach( adapter -> {
HashMap<String, String> tagsMap = new HashMap<String, String>();
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("id", adapter.id);
tagsMap.put("system", name);
tagsMap.put("wwpn", adapter.wwpn);
@ -223,7 +223,7 @@ class ManagedSystem extends MetaSystem {
tagsMap.put("device", adapter.physicalLocation);
log.debug("getSystemFiberChannelAdapters() - tags: " + tagsMap.toString());
HashMap<String, Number> fieldsMap = new HashMap<String, Number>();
HashMap<String, Number> fieldsMap = new HashMap<>();
fieldsMap.put("writeBytes", adapter.writeBytes);
fieldsMap.put("readBytes", adapter.readBytes);
fieldsMap.put("transmittedBytes", adapter.transmittedBytes);
@ -312,13 +312,13 @@ class ManagedSystem extends MetaSystem {
vios.network.virtualEthernetAdapters.forEach( adapter -> {
HashMap<String, String> tagsMap = new HashMap<String, String>();
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("system", name);
tagsMap.put("vios", vios.name);
tagsMap.put("device", adapter.physicalLocation);
log.debug("getSystemGenericVirtualAdapters() - tags: " + tagsMap.toString());
HashMap<String, Number> fieldsMap = new HashMap<String, Number>();
HashMap<String, Number> fieldsMap = new HashMap<>();
fieldsMap.put("sentBytes", adapter.sentBytes);
fieldsMap.put("receivedBytes", adapter.receivedBytes);
log.debug("getSystemGenericVirtualAdapters() - fields: " + fieldsMap.toString());

View file

@ -1,3 +1,18 @@
/*
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package biz.nellemann.hmci;
import java.util.Map;

View file

@ -21,6 +21,7 @@ import com.squareup.moshi.FromJson;
import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.Moshi;
import com.squareup.moshi.ToJson;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -52,7 +53,7 @@ abstract class MetaSystem {
try {
metrics = jsonAdapter.nullSafe().fromJson(json);
} catch(Exception e) {
} catch(IOException e) {
log.warn("processMetrics() error", e);
}
//System.out.println(jsonAdapter.toJson(metrics));
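
The catch now targets the checked IOException that Moshi's fromJson(String) declares, instead of a blanket Exception. Below is a minimal sketch of that parsing path against the plain SampleInfo POJO from the pcm package; the real MetaSystem also registers extra Moshi adapters (for example for the @FirstElement qualifier), which are omitted here.

```java
// Sketch only - parses a plain pcm POJO; MetaSystem wires additional Moshi adapters.
import biz.nellemann.hmci.pcm.SampleInfo;
import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.Moshi;
import java.io.IOException;

class PcmJsonSketch {
    static SampleInfo parse(String json) {
        JsonAdapter<SampleInfo> adapter = new Moshi.Builder().build().adapter(SampleInfo.class);
        try {
            return adapter.nullSafe().fromJson(json);
        } catch (IOException e) {
            // MetaSystem logs a warning here and leaves the metrics unset.
            return null;
        }
    }
}
```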

View file

@ -36,6 +36,7 @@ class SystemEnergy extends MetaSystem {
}
@Override
public String toString() {
return system.name;
}
@ -45,11 +46,11 @@ class SystemEnergy extends MetaSystem {
List<Measurement> list = new ArrayList<>();
HashMap<String, String> tagsMap = new HashMap<String, String>();
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("system", system.name);
log.debug("getPowerMetrics() - tags: " + tagsMap.toString());
Map<String, Number> fieldsMap = new HashMap<String, Number>();
Map<String, Number> fieldsMap = new HashMap<>();
fieldsMap.put("powerReading", metrics.systemUtil.sample.energyUtil.powerUtil.powerReading);
log.debug("getPowerMetrics() - fields: " + fieldsMap.toString());
@ -62,11 +63,11 @@ class SystemEnergy extends MetaSystem {
List<Measurement> list = new ArrayList<>();
HashMap<String, String> tagsMap = new HashMap<String, String>();
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("system", system.name);
log.debug("getThermalMetrics() - tags: " + tagsMap.toString());
Map<String, Number> fieldsMap = new HashMap<String, Number>();
Map<String, Number> fieldsMap = new HashMap<>();
for(Temperature t : metrics.systemUtil.sample.energyUtil.thermalUtil.cpuTemperatures) {
fieldsMap.put("cpuTemperature_" + t.entityInstance, t.temperatureReading);

View file

@ -8,6 +8,7 @@ import java.util.jar.Manifest;
class VersionProvider implements CommandLine.IVersionProvider {
@Override
public String[] getVersion() throws IOException {
Manifest manifest = new Manifest(getClass().getResourceAsStream("/META-INF/MANIFEST.MF"));

View file

@ -4,10 +4,10 @@ import com.serjltt.moshi.adapters.FirstElement;
public class FiberChannelAdapter {
public String id;
public String wwpn;
public String physicalLocation;
public Integer numOfPorts;
public String id = "";
public String wwpn = "";
public String physicalLocation = "";
public Integer numOfPorts = 0;
@FirstElement
public Number numOfReads;

View file

@ -1,17 +1,18 @@
package biz.nellemann.hmci.pcm;
import java.util.ArrayList;
import java.util.List;
public class GenericAdapter {
public String id;
public String type;
public String physicalLocation;
public List<Number> receivedPackets;
public List<Number> sentPackets;
public List<Number> droppedPackets;
public List<Number> sentBytes;
public List<Number> receivedBytes;
public List<Number> transferredBytes;
public String id = "";
public String type = "";
public String physicalLocation = "";
public List<Number> receivedPackets = new ArrayList<>();
public List<Number> sentPackets = new ArrayList<>();
public List<Number> droppedPackets = new ArrayList<>();
public List<Number> sentBytes = new ArrayList<>();
public List<Number> receivedBytes = new ArrayList<>();
public List<Number> transferredBytes = new ArrayList<>();
}

View file

@ -4,9 +4,9 @@ import com.serjltt.moshi.adapters.FirstElement;
public class GenericPhysicalAdapters {
public String id;
public String type;
public String physicalLocation;
public String id = "";
public String type = "";
public String physicalLocation = "";
@FirstElement
public Number numOfReads;

View file

@ -5,10 +5,10 @@ import com.serjltt.moshi.adapters.FirstElement;
public class GenericVirtualAdapter {
public String id;
public String type;
public Integer viosId;
public String physicalLocation;
public String id = "";
public String type = "";
public Integer viosId = 0;
public String physicalLocation = "";
@FirstElement
public Number numOfReads;

View file

@ -4,9 +4,9 @@ import com.serjltt.moshi.adapters.FirstElement;
public class LparProcessor {
public Integer poolId;
public Integer weight;
public String mode;
public Integer poolId = 0;
public Integer weight = 0;
public String mode = "";
@FirstElement
public Number maxVirtualProcessors;

View file

@ -2,16 +2,16 @@ package biz.nellemann.hmci.pcm;
public class LparUtil {
public Integer id;
public String uuid;
public String name;
public String state;
public String type;
public String osType;
public Number affinityScore;
public Integer id = 0;
public String uuid = "";
public String name = "";
public String state = "";
public String type = "";
public String osType = "";
public Number affinityScore = 0.0f;
public LparMemory memory;
public LparProcessor processor;
public LparMemory memory = new LparMemory();
public LparProcessor processor = new LparProcessor();
public Network network = new Network();
public Storage storage = new Storage();

View file

@ -2,6 +2,6 @@ package biz.nellemann.hmci.pcm;
public class PcmData {
public SystemUtil systemUtil;
public SystemUtil systemUtil = new SystemUtil();
}

View file

@ -1,13 +1,14 @@
package biz.nellemann.hmci.pcm;
import java.util.ArrayList;
import java.util.List;
public class PhysicalProcessorPool {
public List<Number> assignedProcUnits;
public List<Number> utilizedProcUnits;
public List<Number> availableProcUnits;
public List<Number> configuredProcUnits;
public List<Number> borrowedProcUnits;
public List<Number> assignedProcUnits = new ArrayList<>();
public List<Number> utilizedProcUnits = new ArrayList<>();
public List<Number> availableProcUnits = new ArrayList<>();
public List<Number> configuredProcUnits = new ArrayList<>();
public List<Number> borrowedProcUnits = new ArrayList<>();
}

View file

@ -1,3 +1,4 @@
package biz.nellemann.hmci.pcm;
import com.serjltt.moshi.adapters.FirstElement;

View file

@ -2,7 +2,7 @@ package biz.nellemann.hmci.pcm;
public class SampleInfo {
public String timeStamp;
public Integer status;
public String timeStamp = "";
public Integer status = 0;
}

View file

@ -5,15 +5,15 @@ import com.serjltt.moshi.adapters.FirstElement;
public class ServerMemory {
@FirstElement
public Number totalMem;
public Number totalMem = 0;
@FirstElement
public Number availableMem;
public Number availableMem = 0;
@FirstElement
public Number configurableMem;
public Number configurableMem = 0;
@FirstElement
public Number assignedMemToLpars;
public Number assignedMemToLpars = 0;
}

View file

@ -5,15 +5,15 @@ import com.serjltt.moshi.adapters.FirstElement;
public class ServerProcessor {
@FirstElement
public Number totalProcUnits;
public Number totalProcUnits = 0;
@FirstElement
public Number utilizedProcUnits;
public Number utilizedProcUnits = 0;
@FirstElement
public Number availableProcUnits;
public Number availableProcUnits = 0;
@FirstElement
public Number configurableProcUnits;
public Number configurableProcUnits = 0;
}

View file

@ -1,12 +1,13 @@
package biz.nellemann.hmci.pcm;
import java.util.ArrayList;
import java.util.List;
public class ServerUtil {
public ServerProcessor processor;
public ServerMemory memory;
public PhysicalProcessorPool physicalProcessorPool;
public List<SharedProcessorPool> sharedProcessorPool;
public ServerProcessor processor = new ServerProcessor();
public ServerMemory memory = new ServerMemory();
public PhysicalProcessorPool physicalProcessorPool = new PhysicalProcessorPool();
public List<SharedProcessorPool> sharedProcessorPool = new ArrayList<>();
}

View file

@ -4,29 +4,29 @@ import com.serjltt.moshi.adapters.FirstElement;
public class SharedAdapter {
public String id;
public String type;
public String physicalLocation;
public String id = "";
public String type = "";
public String physicalLocation = "";
@FirstElement
public Number receivedPackets;
public Number receivedPackets = 0;
@FirstElement
public Number sentPackets;
public Number sentPackets = 0;
@FirstElement
public Number droppedPackets;
public Number droppedPackets = 0;
@FirstElement
public Number sentBytes;
public Number sentBytes = 0;
@FirstElement
public Number receivedBytes;
public Number receivedBytes = 0;
@FirstElement
public Number transferredBytes;
public Number transferredBytes = 0;
@FirstElement
public String bridgedAdapters;
public String bridgedAdapters = "";
}

View file

@ -4,22 +4,22 @@ import com.serjltt.moshi.adapters.FirstElement;
public class SharedProcessorPool {
public String id;
public String name;
public String id = "";
public String name = "";
@FirstElement
public Number assignedProcUnits;
public Number assignedProcUnits = 0;
@FirstElement
public Number utilizedProcUnits;
public Number utilizedProcUnits = 0;
@FirstElement
public Number availableProcUnits;
public Number availableProcUnits = 0;
@FirstElement
public Number configuredProcUnits;
public Number configuredProcUnits = 0;
@FirstElement
public Number borrowedProcUnits;
public Number borrowedProcUnits = 0;
}

View file

@ -4,10 +4,10 @@ import com.serjltt.moshi.adapters.FirstElement;
public class Temperature {
public String entityId;
public String entityInstance;
public String entityId = "";
public String entityInstance = "";
@FirstElement
public Float temperatureReading;
public Float temperatureReading = 0.0f;
}

View file

@ -1,17 +1,18 @@
package biz.nellemann.hmci.pcm;
import java.util.ArrayList;
import java.util.List;
public class UtilInfo {
public String version;
public String metricType;
public Integer frequency;
public String startTimeStamp;
public String endTimeStamp;
public String mtms;
public String name;
public String uuid;
public List<String> metricArrayOrder;
public String version = "";
public String metricType = "";
public Integer frequency = 0;
public String startTimeStamp = "";
public String endTimeStamp = "";
public String mtms = "";
public String name = "";
public String uuid = "";
public List<String> metricArrayOrder = new ArrayList<>();
}

View file

@ -7,23 +7,13 @@ import java.util.List;
public class UtilSample {
public String sampleType;
public SampleInfo sampleInfo;
public ServerUtil serverUtil;
public String sampleType = "";
public SampleInfo sampleInfo = new SampleInfo();
public ServerUtil serverUtil = new ServerUtil();
public EnergyUtil energyUtil = new EnergyUtil();
public List<ViosUtil> viosUtil = new ArrayList<>();
@FirstElement
public LparUtil lparsUtil;
/*
public LparUtil getLparsUtil() {
if(lparsUtil == null || lparsUtil.isEmpty()) {
return new LparUtil();
} else {
return lparsUtil.get(0);
}
}*/
public LparUtil lparsUtil = new LparUtil();
}
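
Throughout the pcm package the previously null fields are now initialised with safe defaults, so a partially populated PCM response no longer leaves nulls behind for the metric collectors to trip over. A tiny sketch using only classes changed in this commit (the demo class itself is hypothetical):

```java
// Sketch only - freshly constructed pcm objects now carry usable defaults instead of nulls.
import biz.nellemann.hmci.pcm.UtilSample;

class PcmDefaultsDemo {
    public static void main(String[] args) {
        UtilSample sample = new UtilSample();
        // Before this change these chained accesses could throw NullPointerException.
        System.out.println("sampleType='" + sample.sampleType + "'");
        System.out.println("lpar poolId=" + sample.lparsUtil.processor.poolId);
        System.out.println("vios count=" + sample.viosUtil.size());
    }
}
```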

View file

@ -5,9 +5,9 @@ import com.serjltt.moshi.adapters.FirstElement;
public class ViosMemory {
@FirstElement
public Number assignedMem;
public Number assignedMem = 0;
@FirstElement
public Number utilizedMem;
public Number utilizedMem = 0;
}

View file

@ -2,15 +2,15 @@ package biz.nellemann.hmci.pcm;
public class ViosUtil {
public String id;
public String uuid;
public String name;
public String state;
public Integer affinityScore;
public String id = "";
public String uuid = "";
public String name = "";
public String state = "";
public Integer affinityScore = 0;
public ViosMemory memory;
public LparProcessor processor;
public Network network;
public Storage storage;
public ViosMemory memory = new ViosMemory();
public LparProcessor processor = new LparProcessor();
public Network network = new Network();
public Storage storage = new Storage();
}

View file

@ -2,49 +2,50 @@ package biz.nellemann.hmci.pcm;
import com.serjltt.moshi.adapters.FirstElement;
public class VirtualEthernetAdapter {
public String physicalLocation;
public Integer vlanId;
public Integer vswitchId;
public Boolean isPortVlanId;
public Integer viosId;
public String sharedEthernetAdapterId;
public String physicalLocation = "";
public Integer vlanId = 0;
public Integer vswitchId = 0;
public Boolean isPortVlanId = false;
public Integer viosId = 0;
public String sharedEthernetAdapterId = "";
@FirstElement
public Number receivedPackets;
public Number receivedPackets = 0;
@FirstElement
public Number sentPackets;
public Number sentPackets = 0;
@FirstElement
public Number droppedPackets;
public Number droppedPackets = 0;
@FirstElement
public Number sentBytes;
public Number sentBytes = 0;
@FirstElement
public Number receivedBytes;
public Number receivedBytes = 0;
@FirstElement
public Number receivedPhysicalPackets;
public Number receivedPhysicalPackets = 0;
@FirstElement
public Number sentPhysicalPackets;
public Number sentPhysicalPackets = 0;
@FirstElement
public Number droppedPhysicalPackets;
public Number droppedPhysicalPackets = 0;
@FirstElement
public Number sentPhysicalBytes;
public Number sentPhysicalBytes = 0;
@FirstElement
public Number receivedPhysicalBytes;
public Number receivedPhysicalBytes = 0;
@FirstElement
public Number transferredBytes;
public Number transferredBytes = 0;
@FirstElement
public Number transferredPhysicalBytes;
public Number transferredPhysicalBytes = 0;
}

View file

@ -1,19 +1,20 @@
package biz.nellemann.hmci.pcm;
import java.util.ArrayList;
import java.util.List;
public class VirtualFiberChannelAdapter {
public String wwpn;
public String wwpn2;
public String physicalLocation;
public String physicalPortWWPN;
public Integer viosId;
public List<Number> numOfReads;
public List<Number> numOfWrites;
public List<Number> readBytes;
public List<Number> writeBytes;
public List<Number> runningSpeed;
public List<Number> transmittedBytes;
public String wwpn = "";
public String wwpn2 = "";
public String physicalLocation = "";
public String physicalPortWWPN = "";
public Integer viosId = 0;
public List<Number> numOfReads = new ArrayList<>();
public List<Number> numOfWrites = new ArrayList<>();
public List<Number> readBytes = new ArrayList<>();
public List<Number> writeBytes = new ArrayList<>();
public List<Number> runningSpeed = new ArrayList<>();
public List<Number> transmittedBytes = new ArrayList<>();
}

View file

@ -3,3 +3,4 @@ org.slf4j.simpleLogger.showDateTime=true
org.slf4j.simpleLogger.showShortLogName=true
org.slf4j.simpleLogger.dateTimeFormat=yyyy-MM-dd HH:mm:ss.SSS
org.slf4j.simpleLogger.levelInBrackets=true
#org.slf4j.simpleLogger.defaultLogLevel=debug

View file

@ -4,16 +4,15 @@ import okhttp3.mockwebserver.MockResponse
import okhttp3.mockwebserver.MockWebServer
import spock.lang.Specification
class HmcClientTest extends Specification {
class HmcRestClientTest extends Specification {
HmcClient hmc
HmcRestClient hmc
MockWebServer mockServer = new MockWebServer()
def setup() {
mockServer.start()
Configuration.HmcObject configHmc = new Configuration.HmcObject("site1", mockServer.url("/").toString(), "testUser", "testPassword", true);
hmc = new HmcClient(configHmc)
hmc = new HmcRestClient(mockServer.url("/").toString(), "testUser", "testPassword", true)
hmc.authToken = "blaBla"
}
@ -57,7 +56,7 @@ class HmcClientTest extends Specification {
mockServer.enqueue(new MockResponse().setBody(testXml))
when:
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
Map<String, LogicalPartition> partitions = hmc.getLogicalPartitionsForManagedSystem(system)
then:

View file

@ -17,7 +17,6 @@ class InfluxClientTest extends Specification {
influxClient.logoff()
}
@Ignore
void "write ManagedSystem data to influx"() {
setup:
@ -25,7 +24,7 @@ class InfluxClientTest extends Specification {
def testJson = testFile.getText('UTF-8')
when:
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "TestSystem", "TestType", "TestModel", "Test s/n")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "TestSystem", "TestType", "TestModel", "Test s/n")
system.processMetrics(testJson)
influxClient.writeManagedSystem(system)
@ -34,7 +33,6 @@ class InfluxClientTest extends Specification {
}
@Ignore
void "write LogicalPartition data to influx"() {
setup:
@ -42,7 +40,7 @@ class InfluxClientTest extends Specification {
def testJson = testFile.getText('UTF-8')
when:
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "TestSystem", "TestType", "TestModel", "Test s/n")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "TestSystem", "TestType", "TestModel", "Test s/n")
LogicalPartition lpar = new LogicalPartition("2DE05DB6-8AD5-448F-8327-0F488D287E82", "9Flash01", "OS400", system)
lpar.processMetrics(testJson)

View file

@ -1,10 +0,0 @@
/*
* This Spock specification was generated by the Gradle 'init' task.
*/
package biz.nellemann.hmci
import spock.lang.Specification
class InsightsTest extends Specification {
}

View file

@ -12,7 +12,7 @@ class LogicalPartitionTest extends Specification {
def testJson = testFile.getText('UTF-8')
when:
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
LogicalPartition lpar = new LogicalPartition("2DE05DB6-8AD5-448F-8327-0F488D287E82", "9Flash01", "OS400", system)
lpar.processMetrics(testJson)
@ -29,7 +29,7 @@ class LogicalPartitionTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-logical-partition.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
LogicalPartition lpar = new LogicalPartition("2DE05DB6-8AD5-448F-8327-0F488D287E82", "9Flash01", "OS400", system)
when:
@ -48,7 +48,7 @@ class LogicalPartitionTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-logical-partition.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
LogicalPartition lpar = new LogicalPartition("2DE05DB6-8AD5-448F-8327-0F488D287E82", "9Flash01", "OS400", system)
when:
@ -67,7 +67,7 @@ class LogicalPartitionTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-logical-partition.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
LogicalPartition lpar = new LogicalPartition("2DE05DB6-8AD5-448F-8327-0F488D287E82", "9Flash01", "OS400", system)
when:
@ -85,7 +85,7 @@ class LogicalPartitionTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-logical-partition.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
LogicalPartition lpar = new LogicalPartition("2DE05DB6-8AD5-448F-8327-0F488D287E82", "9Flash01", "OS400", system)
when:

View file

@ -11,7 +11,7 @@ class ManagedSystemTest extends Specification {
def testJson = testFile.getText('UTF-8')
when:
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
system.processMetrics(testJson)
then:
@ -30,7 +30,7 @@ class ManagedSystemTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-managed-system.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
when:
system.processMetrics(testJson)
@ -46,7 +46,7 @@ class ManagedSystemTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-managed-system.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
when:
system.processMetrics(testJson)
@ -62,7 +62,7 @@ class ManagedSystemTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-managed-system.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
when:
system.processMetrics(testJson)
@ -77,7 +77,7 @@ class ManagedSystemTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-managed-system.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
when:
system.processMetrics(testJson)
@ -93,7 +93,7 @@ class ManagedSystemTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-managed-system.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
when:
system.processMetrics(testJson)
@ -111,7 +111,7 @@ class ManagedSystemTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-managed-system.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
when:
system.processMetrics(testJson)

View file

@ -13,7 +13,7 @@ class MetaSystemTest extends Specification {
def testJson = testFile.getText('UTF-8')
when:
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
system.processMetrics(testJson)
Instant instant = system.getTimestamp()

View file

@ -11,7 +11,7 @@ class SystemEnergyTest extends Specification {
def testJson = testFile.getText('UTF-8')
when:
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
system.energy.processMetrics(testJson)
then: