Merged in threading (pull request #11)

Threading
Mark Nellemann 2021-01-14 13:25:32 +00:00
commit 1c69214da7
19 changed files with 227 additions and 231 deletions

View File

@ -1,8 +1,12 @@
# HMC Insights
**HMCi** is a utility that collects metrics from one or more *IBM Power HMC* systems. The metric data is processed and saved into an InfluxDB time-series database. Grafana is used to visualize the metrics from InfluxDB.
**HMCi** is a utility that collects metrics from one or more *IBM Power HMC* systems. The metric data is processed and saved into an InfluxDB time-series database. Grafana can be used to visualize the metrics from InfluxDB.
Metrics includes *Managed Systems* (the physical Power servers) and *Logical Partitions* (the virtualized servers) running AIX, Linux and IBM-i (AS/400).
Metrics includes:
- *Managed Systems* - the physical Power servers
- *Logical Partitions* - the virtualized servers running AIX, Linux and IBM-i (AS/400)
- *Virtual I/O Servers* - the i/o partition(s) taking care of network and storage
- *Energy* - power consumption and temperatures
![architecture](https://bitbucket.org/mnellemann/hmci/downloads/HMCi.png)
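As a rough illustration of that pipeline, the sketch below writes one hand-made sample to InfluxDB with the influxdb-java client that hmci uses; the measurement, tag and field names are made up for the example, and in the real tool the values come from PCM data fetched over the HMC REST API:

```java
import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.Point;
import java.util.concurrent.TimeUnit;

public class FlowSketch {
    public static void main(String[] args) {
        // 1) Connect to the InfluxDB instance that Grafana will later query.
        InfluxDB influxDB = InfluxDBFactory.connect("http://localhost:8086", "root", "");
        influxDB.setDatabase("hmci");

        // 2) Pretend we parsed one CPU sample out of the PCM JSON for an LPAR.
        double cpuUtil = 42.0; // hypothetical value

        // 3) Write it as a time-series point; the Grafana dashboards read these back.
        influxDB.write(Point.measurement("PartitionProcessor")   // illustrative name
                .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
                .tag("partition", "lpar01")
                .addField("utilizedProcUnits", cpuUtil)
                .build());

        influxDB.close();
    }
}
```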
@ -30,7 +34,7 @@ Install InfluxDB on an *LPAR* or other server, which is network accessible by th
### HMCi Installation Instructions
- Ensure you have correct date/time and NTPd running to keep it accurate!
- Ensure you have **correct date/time** and NTPd running to keep it accurate!
- The only requirement for **hmci** is the Java runtime, version 8 (or later)
- Install **HMCi** from [downloads](https://bitbucket.org/mnellemann/hmci/downloads/) (rpm, deb or jar) or build from source
- Copy the *doc/hmci.toml* configuration example into */etc/hmci.toml* and edit the configuration to suit your environment. The location of the configuration file can be changed with a flag when running hmci.
@ -54,7 +58,7 @@ Below are screenshots of the provided Grafana dashboards (found in the **doc/**
### Naming collision
You can't have partitions on different HMC's with the same name, as these cannot be distinguished when metrics are
written to InfluxDB (which uses the name is key).
written to InfluxDB (which uses the name as key).
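A minimal sketch of why that collision happens, using the influxdb-java `Point` builder the project relies on; the measurement name, tag key and field below are illustrative, not necessarily the exact ones hmci writes:

```java
import org.influxdb.dto.Point;
import java.util.concurrent.TimeUnit;

public class CollisionSketch {
    public static void main(String[] args) {
        // Two different partitions, one per HMC, but carrying the same name.
        Point fromHmc1 = Point.measurement("PartitionMemory")
                .time(1610629532000L, TimeUnit.MILLISECONDS)
                .tag("partition", "lpar01")        // the name is the only key
                .addField("logicalMem", 8192.0)
                .build();

        Point fromHmc2 = Point.measurement("PartitionMemory")
                .time(1610629532000L, TimeUnit.MILLISECONDS)
                .tag("partition", "lpar01")        // same tag set => same series
                .addField("logicalMem", 4096.0)
                .build();

        // Identical measurement + tag set + timestamp: the later write simply
        // replaces the earlier one, so the two LPARs can no longer be told
        // apart in InfluxDB or Grafana.
        System.out.println(fromHmc1.lineProtocol());
        System.out.println(fromHmc2.lineProtocol());
    }
}
```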
### Renaming partitions

View File

@ -78,7 +78,7 @@ buildDeb {
}
jacoco {
toolVersion = "0.8.5"
toolVersion = "0.8.6"
}
jacocoTestReport {

View File

@ -1,17 +1,17 @@
# HMCi Configuration
# How often to query HMC's for data - in seconds
hmci.refresh = 30
hmci.update = 25
# Rescan HMC's for new systems and partitions - every x refresh
hmci.rescan = 60
# Rescan HMC's for new systems and partitions - every x update
hmci.rescan = 90
# InfluxDB to save metrics
[influx]
url = "http://localhost:8086"
username = "root"
password = ""
database = "hmci"
url = "http://localhost:8086"
username = "root"
password = ""
database = "hmci"
# One or more HMC's to query for data and metrics
[hmc]
@ -23,6 +23,7 @@ database = "hmci"
password = "hmcihmci"
unsafe = true # Ignore SSL cert. errors
# Example
#[hmc.site2]
#url = "https://10.10.20.20:12443"

View File

@ -1,3 +1,3 @@
id = hmci
group = biz.nellemann.hmci
version = 1.0.2
version = 1.1.0

View File

@ -23,6 +23,8 @@ import picocli.CommandLine.Command;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
@Command(name = "hmci",
@ -45,18 +47,34 @@ public class Application implements Callable<Integer> {
@Override
public Integer call() throws IOException {
Configuration configuration;
InfluxClient influxClient;
List<Thread> threadList = new ArrayList<>();
File file = new File(configurationFile);
if(!file.exists()) {
System.err.println("Error - No configuration file found at: " + file.toString());
return -1;
}
Configuration configuration = new Configuration(configurationFile);
Insights insights = new Insights(configuration);
try {
insights.run();
} catch (InterruptedException e) {
configuration = new Configuration(configurationFile);
influxClient = new InfluxClient(configuration.getInflux());
influxClient.login();
for(Configuration.HmcObject configHmc : configuration.getHmc()) {
Thread t = new Thread(new HmcInstance(configHmc, influxClient));
threadList.add(t);
t.start();
}
for (Thread thread : threadList) {
thread.join();
}
} catch (InterruptedException | RuntimeException e) {
log.error(e.getMessage());
return 1;
}
return 0;

View File

@ -14,10 +14,11 @@ public final class Configuration {
//private final static Logger log = LoggerFactory.getLogger(Configuration.class);
final public Long refresh;
final public Long rescan;
final public InfluxObject influx;
final public List<HmcObject> hmc;
final private Long update;
final private Long rescan;
final private InfluxObject influx;
final private List<HmcObject> hmcList;
Configuration(String configurationFile) throws IOException {
@ -25,27 +26,25 @@ public final class Configuration {
TomlParseResult result = Toml.parse(source);
result.errors().forEach(error -> System.err.println(error.toString()));
if(result.contains("hmci.refresh")) {
refresh = result.getLong("hmci.refresh");
if(result.contains("hmci.update")) {
update = result.getLong("hmci.update");
} else {
refresh = 30L;
update = 30L;
}
System.err.println("Refresh: " + refresh);
if(result.contains("hmci.rescan")) {
rescan = result.getLong("hmci.rescan");
} else {
rescan = 60L;
}
System.err.println("Rescan: " + refresh);
hmc = getHmc(result);
influx = getInflux(result);
hmcList = parseConfigurationForHmc(result);
influx = parseConfigurationForInflux(result);
}
List<HmcObject> getHmc(TomlParseResult result) {
private List<HmcObject> parseConfigurationForHmc(TomlParseResult result) {
ArrayList<HmcObject> list = new ArrayList<>();
@ -58,6 +57,8 @@ public final class Configuration {
HmcObject c = new HmcObject();
c.name = key;
c.update = update;
c.rescan = rescan;
if(hmcTable.contains(key+".url")) {
c.url = hmcTable.getString(key+".url");
@ -85,7 +86,7 @@ public final class Configuration {
}
InfluxObject getInflux(TomlParseResult result) {
private InfluxObject parseConfigurationForInflux(TomlParseResult result) {
InfluxObject c = new InfluxObject();
@ -113,7 +114,17 @@ public final class Configuration {
return c;
}
public List<HmcObject> getHmc() {
return hmcList;
}
public InfluxObject getInflux() {
return influx;
}
static class InfluxObject {
String url = "http://localhost:8086";
@ -136,7 +147,7 @@ public final class Configuration {
return validated;
}
// TODO: Fixme
// TODO: Implement validation
void validate() {
validated = true;
}
@ -155,16 +166,20 @@ public final class Configuration {
String username;
String password;
Boolean unsafe = false;
Long update = 30L;
Long rescan = 60L;
private boolean validated = false;
HmcObject() { }
HmcObject(String name, String url, String username, String password, Boolean unsafe) {
HmcObject(String name, String url, String username, String password, Boolean unsafe, Long update, Long rescan) {
this.url = url;
this.username = username;
this.password = password;
this.unsafe = unsafe;
this.update = update;
this.rescan = rescan;
}
@ -172,7 +187,7 @@ public final class Configuration {
return validated;
}
// TODO: Fixme
// TODO: Implement validation
void validate() {
validated = true;
}

View File

@ -15,6 +15,7 @@
*/
package biz.nellemann.hmci;
import biz.nellemann.hmci.Configuration.HmcObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -24,79 +25,122 @@ import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import static java.lang.Thread.*;
import static java.lang.Thread.sleep;
class Insights {
class HmcInstance implements Runnable {
private final static Logger log = LoggerFactory.getLogger(Insights.class);
private final static Logger log = LoggerFactory.getLogger(HmcInstance.class);
final Configuration configuration;
private final String hmcId;
private final Long updateValue;
private final Long rescanValue;
private final Map<String,ManagedSystem> systems = new HashMap<>();
private final Map<String, LogicalPartition> partitions = new HashMap<>();
InfluxClient influxClient;
final Map<String, HmcClient> hmcClients = new HashMap<>();
final Map<String,ManagedSystem> systems = new HashMap<>();
final Map<String, LogicalPartition> partitions = new HashMap<>();
private final HmcRestClient hmcRestClient;
private final InfluxClient influxClient;
private final AtomicBoolean keepRunning = new AtomicBoolean(true);
Insights(Configuration configuration) {
this.configuration = configuration;
try {
influxClient = new InfluxClient(configuration.influx);
influxClient.login();
} catch (Exception e) {
System.exit(1);
}
HmcInstance(HmcObject configHmc, InfluxClient influxClient) {
this.hmcId = configHmc.name;
this.updateValue = configHmc.update;
this.rescanValue = configHmc.rescan;
this.influxClient = influxClient;
hmcRestClient = new HmcRestClient(configHmc.url, configHmc.username, configHmc.password, configHmc.unsafe);
log.debug(String.format("HmcInstance() - id: %s, update: %s, refresh %s", hmcId, updateValue, rescanValue));
}
@Override
public String toString() {
return hmcId;
}
@Override
public void run() {
log.debug("run() - " + hmcId);
int executions = 0;
// Initial scan
discover();
do {
Instant instantStart = Instant.now();
try {
getMetricsForSystems();
getMetricsForPartitions();
getMetricsForEnergy();
writeMetricsForManagedSystems();
writeMetricsForLogicalPartitions();
writeMetricsForSystemEnergy();
influxClient.writeBatchPoints();
// Refresh
if (++executions > rescanValue) {
executions = 0;
discover();
}
} catch (Exception e) {
log.error("run()", e);
}
Instant instantEnd = Instant.now();
long timeSpend = Duration.between(instantStart, instantEnd).getSeconds();
log.debug("run() - duration sec: " + timeSpend);
if(timeSpend < updateValue) {
try {
log.debug("run() - sleep sec: " + (updateValue - timeSpend));
//noinspection BusyWait
sleep((updateValue - timeSpend) * 1000);
} catch (InterruptedException e) {
log.error("run() - sleep interrupted", e);
}
}
} while (keepRunning.get());
}
void discover() {
configuration.hmc.forEach( configHmc -> {
if(!hmcClients.containsKey(configHmc.name)) {
HmcClient hmcClient = new HmcClient(configHmc);
hmcClients.put(configHmc.name, hmcClient);
log.info("discover() - Adding HMC: " + hmcClient);
}
});
log.debug("discover() - " + hmcId);
hmcClients.forEach(( hmcId, hmcClient) -> {
try {
hmcRestClient.logoff();
hmcRestClient.login();
hmcRestClient.getManagedSystems().forEach((systemId, system) -> {
try {
hmcClient.logoff();
hmcClient.login();
hmcClient.getManagedSystems().forEach((systemId, system) -> {
// Add to list of known systems
if(!systems.containsKey(systemId)) {
systems.put(systemId, system);
log.info("discover() - Found ManagedSystem: " + system + " @" + hmcId);
}
// Add to list of known systems
if(!systems.containsKey(systemId)) {
systems.put(systemId, system);
log.info(hmcId + " discover() - Found ManagedSystem: " + system);
}
// Get LPAR's for this system
try {
hmcRestClient.getLogicalPartitionsForManagedSystem(system).forEach((partitionId, partition) -> {
// Get LPAR's for this system
try {
hmcClient.getLogicalPartitionsForManagedSystem(system).forEach((partitionId, partition) -> {
// Add to list of known partitions
if(!partitions.containsKey(partitionId)) {
partitions.put(partitionId, partition);
log.info("discover() - Found LogicalPartition: " + partition + " @" + hmcId);
}
// Add to list of known partitions
if(!partitions.containsKey(partitionId)) {
partitions.put(partitionId, partition);
log.info(hmcId + " discover() - Found LogicalPartition: " + partition);
}
});
} catch (Exception e) {
log.error("discover() - getLogicalPartitions", e);
}
});
} catch (Exception e) {
log.error(hmcId + " discover() - getLogicalPartitions", e);
}
});
});
} catch(Exception e) {
log.error(hmcId + " discover() - getManagedSystems: " + e.getMessage());
}
});
} catch(Exception e) {
log.error("discover() - getManagedSystems: " + e.getMessage());
}
}
@ -105,12 +149,10 @@ class Insights {
systems.forEach((systemId, system) -> {
HmcClient hmcClient = hmcClients.get(system.hmcId);
// Get and process metrics for this system
String tmpJsonString = null;
try {
tmpJsonString = hmcClient.getPcmDataForManagedSystem(system);
tmpJsonString = hmcRestClient.getPcmDataForManagedSystem(system);
} catch (Exception e) {
log.error("getMetricsForSystems()", e);
}
@ -131,12 +173,10 @@ class Insights {
// Get LPAR's for this system
partitions.forEach((partitionId, partition) -> {
HmcClient hmcClient = hmcClients.get(partition.system.hmcId);
// Get and process metrics for this partition
String tmpJsonString2 = null;
try {
tmpJsonString2 = hmcClient.getPcmDataForLogicalPartition(partition);
tmpJsonString2 = hmcRestClient.getPcmDataForLogicalPartition(partition);
} catch (Exception e) {
log.error("getMetricsForPartitions() - getPcmDataForLogicalPartition", e);
}
@ -156,12 +196,10 @@ class Insights {
systems.forEach((systemId, system) -> {
HmcClient hmcClient = hmcClients.get(system.hmcId);
// Get and process metrics for this system
String tmpJsonString = null;
try {
tmpJsonString = hmcClient.getPcmDataForEnergy(system.energy);
tmpJsonString = hmcRestClient.getPcmDataForEnergy(system.energy);
} catch (Exception e) {
log.error("getMetricsForEnergy()", e);
}
@ -202,50 +240,4 @@ class Insights {
}
void run() throws InterruptedException {
log.debug("run()");
int executions = 0;
AtomicBoolean keepRunning = new AtomicBoolean(true);
Thread shutdownHook = new Thread(() -> {
keepRunning.set(false);
System.out.println("Stopping HMCi, please wait ...");
});
Runtime.getRuntime().addShutdownHook(shutdownHook);
do {
Instant start = Instant.now();
try {
getMetricsForSystems();
getMetricsForPartitions();
getMetricsForEnergy();
writeMetricsForManagedSystems();
writeMetricsForLogicalPartitions();
writeMetricsForSystemEnergy();
influxClient.writeBatchPoints();
} catch (Exception e) {
log.error("run()", e);
}
// Refresh
if (++executions > configuration.rescan) {
executions = 0;
discover();
}
Instant end = Instant.now();
long timeSpend = Duration.between(start, end).getSeconds();
log.debug("run() - duration sec: " + timeSpend);
if(timeSpend < configuration.refresh) {
//noinspection BusyWait
sleep((configuration.refresh - timeSpend) * 1000);
}
} while (keepRunning.get());
}
}

View File

@ -15,7 +15,6 @@
*/
package biz.nellemann.hmci;
import biz.nellemann.hmci.Configuration.HmcObject;
import okhttp3.*;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
@ -37,34 +36,32 @@ import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
class HmcClient {
private final static Logger log = LoggerFactory.getLogger(HmcClient.class);
public class HmcRestClient {
private final static Logger log = LoggerFactory.getLogger(HmcRestClient.class);
private final MediaType MEDIA_TYPE_IBM_XML_LOGIN = MediaType.parse("application/vnd.ibm.powervm.web+xml; type=LogonRequest");
private final String hmcId;
private final String baseUrl;
private final String username;
private final String password;
protected Integer responseErrors = 0;
protected String authToken;
private final OkHttpClient client;
// OkHttpClient timeouts
private final static int connectTimeout = 2;
private final static int writeTimeout = 3;
private final static int readTimeout = 3;
private final static int CONNECT_TIMEOUT = 2;
private final static int WRITE_TIMEOUT = 3;
private final static int READ_TIMEOUT = 3;
private final String baseUrl;
private final String username;
private final String password;
HmcClient(HmcObject configHmc) {
HmcRestClient(String url, String username, String password, Boolean unsafe) {
this.hmcId = configHmc.name;
this.baseUrl = configHmc.url;
this.username = configHmc.username;
this.password = configHmc.password;
Boolean unsafe = configHmc.unsafe;
this.baseUrl = url;
this.username = username;
this.password = password;
if(unsafe) {
this.client = getUnsafeOkHttpClient();
@ -74,9 +71,10 @@ class HmcClient {
}
@Override
public String toString() {
return hmcId + " (" + baseUrl + ")";
return baseUrl;
}
@ -173,7 +171,6 @@ class HmcClient {
Elements managedSystems = doc.select("ManagedSystem|ManagedSystem"); // doc.select("img[src$=.png]");
for(Element el : managedSystems) {
ManagedSystem system = new ManagedSystem(
hmcId,
el.select("Metadata > Atom > AtomID").text(),
el.select("SystemName").text(),
el.select("MachineTypeModelAndSerialNumber > MachineType").text(),
@ -419,9 +416,9 @@ class HmcClient {
OkHttpClient.Builder builder = new OkHttpClient.Builder();
builder.sslSocketFactory(sslSocketFactory, (X509TrustManager)trustAllCerts[0]);
builder.hostnameVerifier((hostname, session) -> true);
builder.connectTimeout(connectTimeout, TimeUnit.SECONDS);
builder.writeTimeout(writeTimeout, TimeUnit.SECONDS);
builder.readTimeout(readTimeout, TimeUnit.SECONDS);
builder.connectTimeout(CONNECT_TIMEOUT, TimeUnit.SECONDS);
builder.writeTimeout(WRITE_TIMEOUT, TimeUnit.SECONDS);
builder.readTimeout(READ_TIMEOUT, TimeUnit.SECONDS);
return builder.build();
} catch (KeyManagementException | NoSuchAlgorithmException e) {
@ -436,9 +433,9 @@ class HmcClient {
*/
private static OkHttpClient getSafeOkHttpClient() {
OkHttpClient.Builder builder = new OkHttpClient.Builder();
builder.connectTimeout(connectTimeout, TimeUnit.SECONDS);
builder.writeTimeout(writeTimeout, TimeUnit.SECONDS);
builder.readTimeout(readTimeout, TimeUnit.SECONDS);
builder.connectTimeout(CONNECT_TIMEOUT, TimeUnit.SECONDS);
builder.writeTimeout(WRITE_TIMEOUT, TimeUnit.SECONDS);
builder.readTimeout(READ_TIMEOUT, TimeUnit.SECONDS);
return builder.build();
}

View File

@ -36,6 +36,10 @@ class InfluxClient {
private final static Logger log = LoggerFactory.getLogger(InfluxClient.class);
private static final int BATCH_ACTIONS_LIMIT = 5000;
private static final int BATCH_INTERVAL_DURATION = 1000;
final private String url;
final private String username;
final private String password;
@ -53,7 +57,7 @@ class InfluxClient {
}
synchronized void login() throws Exception {
synchronized void login() throws RuntimeException, InterruptedException {
if(influxDB != null) {
return;
@ -67,19 +71,13 @@ class InfluxClient {
log.debug("Connecting to InfluxDB - " + url);
influxDB = InfluxDBFactory.connect(url, username, password);
createDatabase();
// Enable batch writes to get better performance.
BatchOptions options = BatchOptions.DEFAULTS.actions(1000).flushDuration(5000).precision(TimeUnit.SECONDS);
influxDB.enableBatch(options);
batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build();
connected = true;
} catch(Exception e) {
sleep(15*1000);
sleep(15 * 1000);
if(errors++ > 3) {
log.error("login() error, giving up - " + e.getMessage());
throw new Exception(e);
throw new RuntimeException(e);
} else {
log.warn("login() error, retrying - " + e.getMessage());
}
@ -107,7 +105,7 @@ class InfluxClient {
synchronized void writeBatchPoints() throws Exception {
log.debug("writeBatchPoints()");
try {
influxDB.write(batchPoints);
influxDB.writeWithRetry(batchPoints);
} catch(Exception e) {
log.error("writeBatchPoints() error - " + e.getMessage());
logoff();

View File

@ -41,6 +41,7 @@ class LogicalPartition extends MetaSystem {
}
@Override
public String toString() {
return String.format("[%s] %s (%s)", id, name, type);
}
@ -113,10 +114,8 @@ class LogicalPartition extends MetaSystem {
List<Measurement> getVirtualEthernetAdapterMetrics() {
//List<VirtualEthernetAdapter> metricsList = getListObject(metrics.systemUtil.sample.lparsUtil.network.virtualEthernetAdapters);
List<Measurement> list = new ArrayList<>();
//metricsList.forEach( adapter -> {
metrics.systemUtil.sample.lparsUtil.network.virtualEthernetAdapters.forEach( adapter -> {
HashMap<String, String> tagsMap = new HashMap<>();

View File

@ -27,7 +27,7 @@ class ManagedSystem extends MetaSystem {
private final static Logger log = LoggerFactory.getLogger(ManagedSystem.class);
public final String hmcId;
//public final String hmcId;
public final String id;
public final String name;
public final String type;
@ -37,8 +37,7 @@ class ManagedSystem extends MetaSystem {
public final SystemEnergy energy;
ManagedSystem(String hmcId, String id, String name, String type, String model, String serialNumber) {
this.hmcId = hmcId;
ManagedSystem(String id, String name, String type, String model, String serialNumber) {
this.id = id;
this.name = name;
this.type = type;

View File

@ -21,6 +21,7 @@ import com.squareup.moshi.FromJson;
import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.Moshi;
import com.squareup.moshi.ToJson;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -28,8 +29,6 @@ import java.math.BigDecimal;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.ArrayList;
import java.util.List;
abstract class MetaSystem {
@ -54,7 +53,7 @@ abstract class MetaSystem {
try {
metrics = jsonAdapter.nullSafe().fromJson(json);
} catch(Exception e) {
} catch(IOException e) {
log.warn("processMetrics() error", e);
}
//System.out.println(jsonAdapter.toJson(metrics));
@ -130,18 +129,5 @@ abstract class MetaSystem {
}
}
static ArrayList getListObject(Object obj) {
ArrayList list;
try {
list = (ArrayList) obj;
} catch (NullPointerException npe) {
log.warn("getListObject()", npe);
list = new ArrayList();
}
return list;
}
}

View File

@ -4,16 +4,15 @@ import okhttp3.mockwebserver.MockResponse
import okhttp3.mockwebserver.MockWebServer
import spock.lang.Specification
class HmcClientTest extends Specification {
class HmcRestClientTest extends Specification {
HmcClient hmc
HmcRestClient hmc
MockWebServer mockServer = new MockWebServer()
def setup() {
mockServer.start()
Configuration.HmcObject configHmc = new Configuration.HmcObject("site1", mockServer.url("/").toString(), "testUser", "testPassword", true);
hmc = new HmcClient(configHmc)
hmc = new HmcRestClient(mockServer.url("/").toString(), "testUser", "testPassword", true)
hmc.authToken = "blaBla"
}
@ -57,7 +56,7 @@ class HmcClientTest extends Specification {
mockServer.enqueue(new MockResponse().setBody(testXml))
when:
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
Map<String, LogicalPartition> partitions = hmc.getLogicalPartitionsForManagedSystem(system)
then:

View File

@ -17,7 +17,6 @@ class InfluxClientTest extends Specification {
influxClient.logoff()
}
@Ignore
void "write ManagedSystem data to influx"() {
setup:
@ -25,7 +24,7 @@ class InfluxClientTest extends Specification {
def testJson = testFile.getText('UTF-8')
when:
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "TestSystem", "TestType", "TestModel", "Test s/n")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "TestSystem", "TestType", "TestModel", "Test s/n")
system.processMetrics(testJson)
influxClient.writeManagedSystem(system)
@ -34,7 +33,6 @@ class InfluxClientTest extends Specification {
}
@Ignore
void "write LogicalPartition data to influx"() {
setup:
@ -42,7 +40,7 @@ class InfluxClientTest extends Specification {
def testJson = testFile.getText('UTF-8')
when:
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "TestSystem", "TestType", "TestModel", "Test s/n")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "TestSystem", "TestType", "TestModel", "Test s/n")
LogicalPartition lpar = new LogicalPartition("2DE05DB6-8AD5-448F-8327-0F488D287E82", "9Flash01", "OS400", system)
lpar.processMetrics(testJson)

View File

@ -1,10 +0,0 @@
/*
* This Spock specification was generated by the Gradle 'init' task.
*/
package biz.nellemann.hmci
import spock.lang.Specification
class InsightsTest extends Specification {
}

View File

@ -12,7 +12,7 @@ class LogicalPartitionTest extends Specification {
def testJson = testFile.getText('UTF-8')
when:
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
LogicalPartition lpar = new LogicalPartition("2DE05DB6-8AD5-448F-8327-0F488D287E82", "9Flash01", "OS400", system)
lpar.processMetrics(testJson)
@ -29,7 +29,7 @@ class LogicalPartitionTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-logical-partition.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
LogicalPartition lpar = new LogicalPartition("2DE05DB6-8AD5-448F-8327-0F488D287E82", "9Flash01", "OS400", system)
when:
@ -48,7 +48,7 @@ class LogicalPartitionTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-logical-partition.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
LogicalPartition lpar = new LogicalPartition("2DE05DB6-8AD5-448F-8327-0F488D287E82", "9Flash01", "OS400", system)
when:
@ -67,7 +67,7 @@ class LogicalPartitionTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-logical-partition.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
LogicalPartition lpar = new LogicalPartition("2DE05DB6-8AD5-448F-8327-0F488D287E82", "9Flash01", "OS400", system)
when:
@ -85,7 +85,7 @@ class LogicalPartitionTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-logical-partition.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
LogicalPartition lpar = new LogicalPartition("2DE05DB6-8AD5-448F-8327-0F488D287E82", "9Flash01", "OS400", system)
when:

View File

@ -11,7 +11,7 @@ class ManagedSystemTest extends Specification {
def testJson = testFile.getText('UTF-8')
when:
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
system.processMetrics(testJson)
then:
@ -30,7 +30,7 @@ class ManagedSystemTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-managed-system.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
when:
system.processMetrics(testJson)
@ -46,7 +46,7 @@ class ManagedSystemTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-managed-system.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
when:
system.processMetrics(testJson)
@ -62,7 +62,7 @@ class ManagedSystemTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-managed-system.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
when:
system.processMetrics(testJson)
@ -77,7 +77,7 @@ class ManagedSystemTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-managed-system.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
when:
system.processMetrics(testJson)
@ -93,7 +93,7 @@ class ManagedSystemTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-managed-system.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
when:
system.processMetrics(testJson)
@ -111,7 +111,7 @@ class ManagedSystemTest extends Specification {
setup:
def testFile = new File(getClass().getResource('/pcm-data-managed-system.json').toURI())
def testJson = testFile.getText('UTF-8')
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
when:
system.processMetrics(testJson)

View File

@ -13,7 +13,7 @@ class MetaSystemTest extends Specification {
def testJson = testFile.getText('UTF-8')
when:
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
system.processMetrics(testJson)
Instant instant = system.getTimestamp()

View File

@ -11,7 +11,7 @@ class SystemEnergyTest extends Specification {
def testJson = testFile.getText('UTF-8')
when:
ManagedSystem system = new ManagedSystem("site1", "e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
ManagedSystem system = new ManagedSystem("e09834d1-c930-3883-bdad-405d8e26e166", "Test Name","Test Type", "Test Model", "Test S/N")
system.energy.processMetrics(testJson)
then: