More cleanup.

This commit is contained in:
Mark Nellemann 2020-10-15 15:23:16 +02:00
parent 3265f0721c
commit fe5a516cd3
6 changed files with 29 additions and 69 deletions

View File: Configuration.java

@@ -1,7 +1,5 @@
 package biz.nellemann.hmci;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.tomlj.Toml;
 import org.tomlj.TomlParseResult;
 import org.tomlj.TomlTable;
@@ -14,7+12,7 @@ import java.util.List;
 public class Configuration {
-    private final static Logger log = LoggerFactory.getLogger(Configuration.class);
+    //private final static Logger log = LoggerFactory.getLogger(Configuration.class);
     final public Long refresh;
     final public Long rescan;
@@ -160,7 +158,7 @@ public class Configuration {
     HmcObject() { }
-    HmcObject(String url, String username, String password, Boolean unsafe) {
+    HmcObject(String name, String url, String username, String password, Boolean unsafe) {
         this.url = url;
         this.username = username;
         this.password = password;
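Note on the widened constructor: only the existing assignments appear as context above. A minimal sketch of what the new constructor presumably ends up as, assuming the name and unsafe fields already exist on HmcObject (the two extra assignments are assumptions, not shown in this diff):

    HmcObject(String name, String url, String username, String password, Boolean unsafe) {
        this.name = name;            // assumed: new parameter wired to an existing field
        this.url = url;
        this.username = username;
        this.password = password;
        this.unsafe = unsafe;        // assumed: the old signature already accepted unsafe
    }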

View File: HmcClient.java

@@ -29,7 +29,6 @@ import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.security.SecureRandom;
-import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
 import java.util.HashMap;
 import java.util.Map;
@@ -156,7 +155,7 @@ class HmcClient {
         Map<String,ManagedSystem> managedSystemsMap = new HashMap<>();
         // Do not try to parse empty response
-        if(responseBody.isEmpty() || responseBody.length() <= 1) {
+        if(responseBody == null || responseBody.isEmpty() || responseBody.length() <= 1) {
             responseErrors++;
             return managedSystemsMap;
         }
@@ -197,7 +196,7 @@ class HmcClient {
         Map<String, LogicalPartition> partitionMap = new HashMap<String, LogicalPartition>() {};
         // Do not try to parse empty response
-        if(responseBody.isEmpty() || responseBody.length() <= 1) {
+        if(responseBody == null || responseBody.isEmpty() || responseBody.length() <= 1) {
             responseErrors++;
             return partitionMap;
         }
@@ -238,7 +237,7 @@ class HmcClient {
         String jsonBody = null;
         // Do not try to parse empty response
-        if(responseBody.isEmpty() || responseBody.length() <= 1) {
+        if(responseBody == null || responseBody.isEmpty() || responseBody.length() <= 1) {
             responseErrors++;
             log.warn("getPcmDataForManagedSystem() - empty response");
             return null;
@@ -276,7 +275,7 @@ class HmcClient {
         String jsonBody = null;
         // Do not try to parse empty response
-        if(responseBody.isEmpty() || responseBody.length() <= 1) {
+        if(responseBody == null || responseBody.isEmpty() || responseBody.length() <= 1) {
             responseErrors++;
             log.warn("getPcmDataForLogicalPartition() - empty response");
             return null;
@@ -351,11 +350,11 @@ class HmcClient {
         final TrustManager[] trustAllCerts = new TrustManager[] {
             new X509TrustManager() {
                 @Override
-                public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
+                public void checkClientTrusted(X509Certificate[] chain, String authType) {
                 }
                 @Override
-                public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
+                public void checkServerTrusted(X509Certificate[] chain, String authType) {
                 }
                 @Override
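The dropped throws CertificateException clauses belong to a trust-all TrustManager used when the unsafe flag is set. A self-contained sketch of that pattern, assuming standard javax.net.ssl wiring (the SSLContext setup and the getAcceptedIssuers body are not part of this diff):

    import javax.net.ssl.SSLContext;
    import javax.net.ssl.TrustManager;
    import javax.net.ssl.X509TrustManager;
    import java.security.SecureRandom;
    import java.security.cert.X509Certificate;

    class TrustAllSketch {
        // Builds an SSLContext that accepts any certificate; only sensible behind an explicit "unsafe" option.
        static SSLContext trustAllContext() throws Exception {
            TrustManager[] trustAllCerts = new TrustManager[] {
                new X509TrustManager() {
                    @Override
                    public void checkClientTrusted(X509Certificate[] chain, String authType) { }
                    @Override
                    public void checkServerTrusted(X509Certificate[] chain, String authType) { }
                    @Override
                    public X509Certificate[] getAcceptedIssuers() { return new X509Certificate[0]; }  // assumed body
                }
            };
            SSLContext sslContext = SSLContext.getInstance("TLS");
            sslContext.init(null, trustAllCerts, new SecureRandom());
            return sslContext;
        }
    }

With OkHttp, such a context is typically handed over via OkHttpClient.Builder.sslSocketFactory(sslContext.getSocketFactory(), trustManager) together with a permissive hostnameVerifier.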

View File: InfluxClient.java

@@ -63,8 +63,7 @@ class InfluxClient {
         createDatabase();
         // Enable batch writes to get better performance.
-        //influxDB.enableBatch(BatchOptions.DEFAULTS);
-        BatchOptions options = BatchOptions.DEFAULTS.actions(100).flushDuration(500);
+        BatchOptions options = BatchOptions.DEFAULTS.actions(1000).flushDuration(5000).precision(TimeUnit.SECONDS);
         influxDB.enableBatch(options);
         batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build();
@@ -121,33 +120,20 @@ class InfluxClient {
             return;
         }
-        getSystemMemory(system, timestamp).forEach( it -> {
-            batchPoints.point(it);
-        });
+        getSystemMemory(system, timestamp).forEach( it -> batchPoints.point(it) );
-        getSystemProcessor(system, timestamp).forEach( it -> {
-            batchPoints.point(it);
-        });
+        getSystemProcessor(system, timestamp).forEach( it -> batchPoints.point(it) );
-        getSystemSharedProcessorPools(system, timestamp).forEach( it -> {
-            batchPoints.point(it);
-        });
+        getSystemSharedProcessorPools(system, timestamp).forEach( it -> batchPoints.point(it) );
-        getSystemSharedAdapters(system, timestamp).forEach( it -> {
-            batchPoints.point(it);
-        });
+        getSystemSharedAdapters(system, timestamp).forEach( it -> batchPoints.point(it) );
-        getSystemFiberChannelAdapters(system, timestamp).forEach( it -> {
-            batchPoints.point(it);
-        });
+        getSystemFiberChannelAdapters(system, timestamp).forEach( it -> batchPoints.point(it) );
-        getSystemGenericPhysicalAdapters(system, timestamp).forEach( it -> {
-            batchPoints.point(it);
-        });
+        getSystemGenericPhysicalAdapters(system, timestamp).forEach( it -> batchPoints.point(it) );
-        getSystemGenericVirtualAdapters(system, timestamp).forEach( it -> {
-            batchPoints.point(it);
-        });
+        getSystemGenericVirtualAdapters(system, timestamp).forEach( it -> batchPoints.point(it) );
     }
@@ -204,29 +190,18 @@ class InfluxClient {
             return;
         }
-        getPartitionAffinityScore(partition, timestamp).forEach( it -> {
-            batchPoints.point(it);
-        });
+        getPartitionAffinityScore(partition, timestamp).forEach( it -> batchPoints.point(it));
-        getPartitionMemory(partition, timestamp).forEach( it -> {
-            batchPoints.point(it);
-        });
+        getPartitionMemory(partition, timestamp).forEach( it -> batchPoints.point(it));
-        getPartitionProcessor(partition, timestamp).forEach( it -> {
-            batchPoints.point(it);
-        });
+        getPartitionProcessor(partition, timestamp).forEach( it -> batchPoints.point(it));
-        getPartitionVirtualEthernetAdapter(partition, timestamp).forEach( it -> {
-            batchPoints.point(it);
-        });
+        getPartitionVirtualEthernetAdapter(partition, timestamp).forEach( it -> batchPoints.point(it));
-        getPartitionVirtualFiberChannelAdapter(partition, timestamp).forEach( it -> {
-            batchPoints.point(it);
-        });
+        getPartitionVirtualFiberChannelAdapter(partition, timestamp).forEach( it -> batchPoints.point(it));
-        //influxDB.write(batchPoints);
     }
     private static List<Point> getPartitionAffinityScore(LogicalPartition partition, Instant timestamp) {
         List<Measurement> metrics = partition.getAffinityScore();
         return processMeasurementMap(metrics, timestamp, "PartitionAffinityScore");
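The batching change flushes at 1000 points or every 5000 ms instead of 100 points / 500 ms, and sets the write precision to seconds. A minimal sketch of how the influxdb-java client is wired with those settings (the connection details and database name are placeholders, not taken from the commit):

    import org.influxdb.BatchOptions;
    import org.influxdb.InfluxDB;
    import org.influxdb.InfluxDBFactory;
    import org.influxdb.dto.BatchPoints;
    import org.influxdb.dto.Point;

    import java.util.concurrent.TimeUnit;

    class InfluxSketch {
        public static void main(String[] args) {
            // Placeholder connection details.
            InfluxDB influxDB = InfluxDBFactory.connect("http://localhost:8086", "root", "root");

            // Same batch settings as the new line in the diff: flush after 1000 points or 5000 ms.
            BatchOptions options = BatchOptions.DEFAULTS.actions(1000).flushDuration(5000).precision(TimeUnit.SECONDS);
            influxDB.enableBatch(options);

            // Points are collected into a BatchPoints container and written in one go.
            BatchPoints batchPoints = BatchPoints.database("hmci").precision(TimeUnit.SECONDS).build();
            batchPoints.point(Point.measurement("example")
                    .time(System.currentTimeMillis() / 1000, TimeUnit.SECONDS)
                    .addField("value", 1L)
                    .build());
            influxDB.write(batchPoints);
            influxDB.close();
        }
    }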

View File: Insights.java

@@ -64,7 +64,7 @@ class Insights {
         hmcClients.forEach(( hmcId, hmcClient) -> {
             try {
-                hmcClient.logoff();;
+                hmcClient.logoff();
                 hmcClient.login();
                 hmcClient.getManagedSystems().forEach((systemId, system) -> {
@@ -152,16 +152,12 @@ class Insights {
     void writeMetricsForManagedSystems() {
-        systems.forEach((systemId, system) -> {
-            influxClient.writeManagedSystem(system);
-        });
+        systems.forEach((systemId, system) -> influxClient.writeManagedSystem(system));
     }
     void writeMetricsForLogicalPartitions() {
-        partitions.forEach((partitionId, partition) -> {
-            influxClient.writeLogicalPartition(partition);
-        });
+        partitions.forEach((partitionId, partition) -> influxClient.writeLogicalPartition(partition));
     }
@@ -197,6 +193,7 @@ class Insights {
         }
         executions++;
+        //noinspection BusyWait
         sleep(configuration.refresh * 1000);
     } while (keepRunning.get());
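The added //noinspection BusyWait only suppresses an IDE warning about sleeping inside a polling loop. A rough sketch of that loop shape, assuming a keepRunning flag and a refresh interval in seconds as in the surrounding context (the body is illustrative):

    import java.util.concurrent.atomic.AtomicBoolean;

    class PollingSketch {
        private final AtomicBoolean keepRunning = new AtomicBoolean(true);
        private final long refresh = 30;   // seconds; placeholder value

        void run() throws InterruptedException {
            int executions = 0;
            do {
                // ... refresh metrics and write them to InfluxDB ...
                executions++;
                //noinspection BusyWait
                Thread.sleep(refresh * 1000);
            } while (keepRunning.get());
        }
    }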

View File: ManagedSystem.java

@@ -242,7 +242,7 @@ class ManagedSystem extends MetaSystem {
         metrics.systemUtil.sample.viosUtil.forEach( vios -> {
             vios.storage.genericPhysicalAdapters.forEach( adapter -> {
-                //Map<String, Map> map = new HashMap<String, Map>()
                 Measurement measurement = new Measurement();
                 HashMap<String, String> tagsMap = new HashMap<String, String>() {
@@ -254,7 +254,6 @@
                     }
                 };
-                //map.put("tags", tagsMap)
                 measurement.tags = tagsMap;
                 log.debug("getSystemGenericPhysicalAdapters() - tags: " + tagsMap.toString());
@@ -266,7 +265,6 @@
                     }
                 };
-                //map.put("fields", fieldsMap)
                 measurement.fields = fieldsMap;
                 log.debug("getSystemGenericPhysicalAdapters() - fields: " + fieldsMap.toString());
@@ -287,7 +285,6 @@ class ManagedSystem extends MetaSystem {
             vios.storage.genericVirtualAdapters.forEach( adapter -> {
-                //Map<String, Map> map = new HashMap<String, Map>()
                 Measurement measurement = new Measurement();
                 HashMap<String, String> tagsMap = new HashMap<String, String>() {
@@ -299,7 +296,6 @@
                     }
                 };
-                //map.put("tags", tagsMap)
                 measurement.tags = tagsMap;
                 log.debug("getSystemGenericVirtualAdapters() - tags: " + tagsMap.toString());
@@ -311,7 +307,6 @@
                     }
                 };
-                //map.put("fields", fieldsMap)
                 measurement.fields = fieldsMap;
                 log.debug("getSystemGenericVirtualAdapters() - fields: " + fieldsMap.toString());
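The removed //map.put(...) lines are leftovers from an earlier Map-of-Maps approach; the current code fills a Measurement with one tags map and one fields map. As a rough sketch of how such a tags/fields pair typically becomes an InfluxDB point (this helper is illustrative, not part of the commit, and the measurement and tag names are placeholders):

    import org.influxdb.dto.Point;

    import java.time.Instant;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.TimeUnit;

    class MeasurementSketch {
        // Mirrors the shape used in ManagedSystem: one map of tags, one map of fields.
        static Point toPoint(String name, Map<String, String> tags, Map<String, Object> fields, Instant timestamp) {
            return Point.measurement(name)
                    .time(timestamp.getEpochSecond(), TimeUnit.SECONDS)
                    .tag(tags)        // tags identify the series, e.g. system and vios names
                    .fields(fields)   // fields carry the numeric readings
                    .build();
        }

        public static void main(String[] args) {
            Map<String, String> tags = new HashMap<>();
            tags.put("system", "site1");          // placeholder tag value
            Map<String, Object> fields = new HashMap<>();
            fields.put("writeBytes", 1024L);      // placeholder field value
            System.out.println(toPoint("GenericPhysicalAdapters", tags, fields, Instant.now()));
        }
    }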

View File: HmcClientTest.groovy

@@ -12,11 +12,7 @@ class HmcClientTest extends Specification {
     def setup() {
         mockServer.start()
-        Configuration.HmcObject configHmc = new Configuration.HmcObject()
-        configHmc.name = "site1"
-        configHmc.url = mockServer.url("/").toString()
-        configHmc.username = "testUser"
-        configHmc.password = "testPassword"
+        Configuration.HmcObject configHmc = new Configuration.HmcObject("site1", mockServer.url("/").toString(), "testUser", "testPassword", true);
         hmc = new HmcClient(configHmc)
         hmc.authToken = "blaBla"
     }