Merged in refactoring-cleanup (pull request #5)
Refactoring cleanup

Approved-by: Mark Nellemann <mark.nellemann@gmail.com>
commit 350ff8c354
@@ -19,18 +19,17 @@ dependencies {
annotationProcessor 'info.picocli:picocli-codegen:4.5.1'
implementation 'info.picocli:picocli:4.5.1'
implementation 'org.jsoup:jsoup:1.13.1'
implementation 'com.squareup.okhttp3:okhttp:4.8.0'
implementation 'com.squareup.okhttp3:okhttp:4.9.0'
implementation 'com.squareup.moshi:moshi:1.11.0'
implementation 'com.serjltt.moshi:moshi-lazy-adapters:2.2'
implementation 'org.tomlj:tomlj:1.0.0'
implementation 'org.influxdb:influxdb-java:2.19'
implementation 'org.influxdb:influxdb-java:2.20'
implementation 'org.slf4j:slf4j-api:1.7.+'
runtimeOnly 'ch.qos.logback:logback-classic:1.+'

testImplementation 'org.codehaus.groovy:groovy-all:3.0.5'
testImplementation('org.spockframework:spock-core:2.0-M3-groovy-3.0')
testImplementation('com.squareup.okhttp3:mockwebserver:4.9.0')
testImplementation("org.slf4j:slf4j-simple:1.7.+")
testImplementation('com.squareup.okhttp3:mockwebserver:4.8.0')
//implementation platform('org.testcontainers:testcontainers-bom:1.14.3') //import bom
//testCompile "org.testcontainers:influxdb:1.14.3"
}

@@ -98,7 +97,7 @@ jacocoTestCoverageVerification {
violationRules {
rule {
limit {
minimum = 0.3 // FIXME: Raise when more tests are implemented
minimum = 0.4 // TODO: Raise when more tests are implemented
}
}
}

@@ -1,3 +1,3 @@
id = hmci
group = biz.nellemann.hmci
version = 0.2.1
version = 0.2.2

@@ -1,7 +1,5 @@
package biz.nellemann.hmci;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tomlj.Toml;
import org.tomlj.TomlParseResult;
import org.tomlj.TomlTable;

@@ -14,7 +12,7 @@ import java.util.List;

public class Configuration {

private final static Logger log = LoggerFactory.getLogger(Configuration.class);
//private final static Logger log = LoggerFactory.getLogger(Configuration.class);

final public Long refresh;
final public Long rescan;

@@ -160,7 +158,7 @@ public class Configuration {

HmcObject() { }

HmcObject(String url, String username, String password, Boolean unsafe) {
HmcObject(String name, String url, String username, String password, Boolean unsafe) {
this.url = url;
this.username = username;
this.password = password;

@@ -29,7 +29,6 @@ import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.SecureRandom;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.HashMap;
import java.util.Map;

@@ -67,26 +66,18 @@ class HmcClient {

}


/**
* Logon to the HMC and get an authentication token for further requests.
*/
void login() throws Exception {
this.login(false);
@Override
public String toString() {
return hmcId + " (" + baseUrl + ")";
}


/**
* Logon to the HMC and get an authentication token for further requests.
* @param force
*/
void login(Boolean force) throws Exception {
synchronized void login() throws Exception {

if(authToken != null && !force) {
return;
}

log.info("Connecting to HMC - " + baseUrl);
log.debug("Connecting to HMC - " + baseUrl);

StringBuilder payload = new StringBuilder();
payload.append("<?xml version='1.0' encoding='UTF-8' standalone='yes'?>");

@@ -99,29 +90,24 @@ class HmcClient {
URL url = new URL(String.format("%s/rest/api/web/Logon", baseUrl));
Request request = new Request.Builder()
.url(url)
//.addHeader("Content-Type", "application/vnd.ibm.powervm.web+xml; type=LogonRequest")
.addHeader("Accept", "application/vnd.ibm.powervm.web+xml; type=LogonResponse")
.addHeader("X-Audit-Memento", "hmci")
.put(RequestBody.create(payload.toString(), MEDIA_TYPE_IBM_XML_LOGIN))
.build();

Response response = client.newCall(request).execute();
if (!response.isSuccessful()) throw new IOException("Unexpected code " + response);

// Get response body and parse
String responseBody = Objects.requireNonNull(response.body()).string();
Objects.requireNonNull(response.body()).close();
if (!response.isSuccessful()) {
log.warn("login() - Unexpected response: " + response.code());
throw new IOException("Unexpected code: " + response);
}

Document doc = Jsoup.parse(responseBody);
authToken = doc.select("X-API-Session").text();

log.debug("login() - Auth Token: " + authToken);
} catch (MalformedURLException e) {
log.error("login() - url error", e);
throw new Exception(new Throwable("Login URL Error: " + e.getMessage()));
} catch(Exception e) {
log.error("login() - general error", e);
throw new Exception(new Throwable("Login General Error: " + e.getMessage()));
log.error("login() - URL Error: " + e.getMessage());
throw e;
}

}

@@ -132,7 +118,7 @@ class HmcClient {
* Logoff from the HMC and remove any session
*
*/
void logoff() throws IOException {
synchronized void logoff() throws IOException {

if(authToken == null) {
return;

@@ -145,12 +131,14 @@ class HmcClient {
.addHeader("X-API-Session", authToken)
.delete()
.build();

Response response = client.newCall(request).execute();
if (!response.isSuccessful()) throw new IOException("Unexpected code " + response);

try {
client.newCall(request).execute();
} catch (IOException e) {
log.warn("logoff() error: " + e.getMessage());
} finally {
authToken = null;
log.debug("logoff()");
}

}

@@ -163,12 +151,11 @@ class HmcClient {
Map<String, ManagedSystem> getManagedSystems() throws Exception {

URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem", baseUrl));
Response response = getResponse(url);
String responseBody = Objects.requireNonNull(response.body()).string();
String responseBody = getResponse(url);
Map<String,ManagedSystem> managedSystemsMap = new HashMap<>();

// Do not try to parse empty response
if(responseBody.isEmpty() || responseBody.length() <= 1) {
if(responseBody == null || responseBody.isEmpty() || responseBody.length() <= 1) {
responseErrors++;
return managedSystemsMap;
}

@@ -186,7 +173,7 @@ class HmcClient {
el.select("MachineTypeModelAndSerialNumber > SerialNumber").text()
);
managedSystemsMap.put(system.id, system);
log.debug("getManagedSystems() - Found system: " + system.toString());
log.debug("getManagedSystems() - Found system: " + system);
}

} catch(Exception e) {

@@ -205,12 +192,11 @@ class HmcClient {
*/
Map<String, LogicalPartition> getLogicalPartitionsForManagedSystem(ManagedSystem system) throws Exception {
URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem/%s/LogicalPartition", baseUrl, system.id));
Response response = getResponse(url);
String responseBody = Objects.requireNonNull(response.body()).string();
String responseBody = getResponse(url);
Map<String, LogicalPartition> partitionMap = new HashMap<String, LogicalPartition>() {};

// Do not try to parse empty response
if(responseBody.isEmpty() || responseBody.length() <= 1) {
if(responseBody == null || responseBody.isEmpty() || responseBody.length() <= 1) {
responseErrors++;
return partitionMap;
}

@@ -226,7 +212,7 @@ class HmcClient {
system
);
partitionMap.put(logicalPartition.id, logicalPartition);
log.debug("getLogicalPartitionsForManagedSystem() - Found partition: " + logicalPartition.toString());
log.debug("getLogicalPartitionsForManagedSystem() - Found partition: " + logicalPartition);
}

} catch(Exception e) {

@@ -247,12 +233,11 @@ class HmcClient {

log.debug("getPcmDataForManagedSystem() - " + system.id);
URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/ProcessedMetrics?NoOfSamples=1", baseUrl, system.id));
Response response = getResponse(url);
String responseBody = Objects.requireNonNull(response.body()).string();
String responseBody = getResponse(url);
String jsonBody = null;

// Do not try to parse empty response
if(responseBody.isEmpty() || responseBody.length() <= 1) {
if(responseBody == null || responseBody.isEmpty() || responseBody.length() <= 1) {
responseErrors++;
log.warn("getPcmDataForManagedSystem() - empty response");
return null;

@@ -266,7 +251,7 @@ class HmcClient {
if(link.attr("type").equals("application/json")) {
String href = link.attr("href");
log.debug("getPcmDataForManagedSystem() - json url: " + href);
jsonBody = getResponseBody(new URL(href));
jsonBody = getResponse(new URL(href));
}

} catch(Exception e) {

@@ -286,12 +271,11 @@ class HmcClient {

log.debug(String.format("getPcmDataForLogicalPartition() - %s @ %s", partition.id, partition.system.id));
URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/LogicalPartition/%s/ProcessedMetrics?NoOfSamples=1", baseUrl, partition.system.id, partition.id));
Response response = getResponse(url);
String responseBody = Objects.requireNonNull(response.body()).string();
String responseBody = getResponse(url);
String jsonBody = null;

// Do not try to parse empty response
if(responseBody.isEmpty() || responseBody.length() <= 1) {
if(responseBody == null || responseBody.isEmpty() || responseBody.length() <= 1) {
responseErrors++;
log.warn("getPcmDataForLogicalPartition() - empty response");
return null;

@@ -305,7 +289,7 @@ class HmcClient {
if(link.attr("type").equals("application/json")) {
String href = link.attr("href");
log.debug("getPcmDataForLogicalPartition() - json url: " + href);
jsonBody = getResponseBody(new URL(href));
jsonBody = getResponse(new URL(href));
}

} catch(Exception e) {

@@ -316,72 +300,41 @@ class HmcClient {
}


/**
* Return body text from a HTTP response from the HMC
*
* @param url URL to get response body as String
* @return String with http reponse body
*/
protected String getResponseBody(URL url) throws Exception {
Response response = getResponse(url);
String body = Objects.requireNonNull(response.body()).string();
Objects.requireNonNull(response.body()).close();
return body;
}


/**
* Return a Response from the HMC
* @param url to get Response from
* @return Response object
* @return Response body string
*/
private Response getResponse(URL url) throws Exception {
return getResponse(url, 0);
}


/**
* Return a Response from the HMC
* @param url to get Response from
* @param retry number of retries for this call
* @return Response object
*/
private Response getResponse(URL url, Integer retry) throws Exception {
private String getResponse(URL url) throws Exception {

log.debug("getResponse() - " + url.toString());

if(responseErrors > 2) {
responseErrors = 0;
login(true);
return getResponse(url, retry++);
}

Request request = new Request.Builder()
.url(url)
.addHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
.addHeader("X-API-Session", authToken)
.get()
.build();
.get().build();

Response response = client.newCall(request).execute();
String body = Objects.requireNonNull(response.body()).string();

if (!response.isSuccessful()) {
Objects.requireNonNull(response.body()).close();

response.close();

if(response.code() == 401) {
login(true);
return getResponse(url, retry++);
}

if(retry < 2) {
log.warn("getResponse() - Retrying due to unexpected response: " + response.code());
return getResponse(url, retry++);
log.warn("getResponse() - 401 - login and retry.");
authToken = null;
login();
return null;
}

log.error("getResponse() - Unexpected response: " + response.code());
throw new IOException("getResponse() - Unexpected response: " + response.code());
}

return response;
return body;
}

@@ -389,7 +342,7 @@ class HmcClient {
/**
* Provide an unsafe (ignoring SSL problems) OkHttpClient
*
* @return
* @return unsafe OkHttpClient
*/
private static OkHttpClient getUnsafeOkHttpClient() {
try {

@@ -397,11 +350,11 @@ class HmcClient {
final TrustManager[] trustAllCerts = new TrustManager[] {
new X509TrustManager() {
@Override
public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
public void checkClientTrusted(X509Certificate[] chain, String authType) {
}

@Override
public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
public void checkServerTrusted(X509Certificate[] chain, String authType) {
}

@Override

@@ -51,32 +51,30 @@ class InfluxClient {
}


void login() throws Exception {
synchronized void login() throws Exception {

if(influxDB != null) {
return;
}

try {
log.info("Connecting to InfluxDB - " + url);
log.debug("Connecting to InfluxDB - " + url);
influxDB = InfluxDBFactory.connect(url, username, password);
createDatabase();

// Enable batch writes to get better performance.
//BatchOptions options = BatchOptions.DEFAULTS.actions(300).flushDuration(500);
influxDB.enableBatch(BatchOptions.DEFAULTS);
//influxDB.setLogLevel(InfluxDB.LogLevel.BASIC);

BatchOptions options = BatchOptions.DEFAULTS.actions(1000).flushDuration(5000).precision(TimeUnit.SECONDS);
influxDB.enableBatch(options);
batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build();

} catch(Exception e) {
log.error(e.getMessage());
log.error("login() error - " + e.getMessage());
throw new Exception(e);
}
}


void logoff() {
synchronized void logoff() {
if(influxDB != null) {
influxDB.close();
}

@@ -91,7 +89,7 @@ class InfluxClient {
}


void writeBatchPoints() throws Exception {
synchronized void writeBatchPoints() throws Exception {
log.debug("writeBatchPoints()");
try {
influxDB.write(batchPoints);

@@ -122,35 +120,20 @@ class InfluxClient {
return;
}

//BatchPoints batchPoints = BatchPoints.database(database).build();
getSystemMemory(system, timestamp).forEach( it -> batchPoints.point(it) );

getSystemMemory(system, timestamp).forEach( it -> {
batchPoints.point(it);
});
getSystemProcessor(system, timestamp).forEach( it -> batchPoints.point(it) );

getSystemProcessor(system, timestamp).forEach( it -> {
batchPoints.point(it);
});
getSystemSharedProcessorPools(system, timestamp).forEach( it -> batchPoints.point(it) );

getSystemSharedProcessorPools(system, timestamp).forEach( it -> {
batchPoints.point(it);
});
getSystemSharedAdapters(system, timestamp).forEach( it -> batchPoints.point(it) );

getSystemSharedAdapters(system, timestamp).forEach( it -> {
batchPoints.point(it);
});
getSystemFiberChannelAdapters(system, timestamp).forEach( it -> batchPoints.point(it) );

getSystemFiberChannelAdapters(system, timestamp).forEach( it -> {
batchPoints.point(it);
});
getSystemGenericPhysicalAdapters(system, timestamp).forEach( it -> batchPoints.point(it) );

getSystemGenericPhysicalAdapters(system, timestamp).forEach( it -> {
batchPoints.point(it);
});
getSystemGenericVirtualAdapters(system, timestamp).forEach( it -> batchPoints.point(it) );

getSystemGenericVirtualAdapters(system, timestamp).forEach( it -> {
batchPoints.point(it);
});
}

@@ -207,29 +190,16 @@ class InfluxClient {
return;
}

//BatchPoints batchPoints = BatchPoints.database(database).build();
getPartitionAffinityScore(partition, timestamp).forEach( it -> batchPoints.point(it));

getPartitionAffinityScore(partition, timestamp).forEach( it -> {
batchPoints.point(it);
});
getPartitionMemory(partition, timestamp).forEach( it -> batchPoints.point(it));

getPartitionMemory(partition, timestamp).forEach( it -> {
batchPoints.point(it);
});
getPartitionProcessor(partition, timestamp).forEach( it -> batchPoints.point(it));

getPartitionProcessor(partition, timestamp).forEach( it -> {
batchPoints.point(it);
});
getPartitionVirtualEthernetAdapter(partition, timestamp).forEach( it -> batchPoints.point(it));

getPartitionVirtualEthernetAdapter(partition, timestamp).forEach( it -> {
batchPoints.point(it);
});
getPartitionVirtualFiberChannelAdapter(partition, timestamp).forEach( it -> batchPoints.point(it));

getPartitionVirtualFiberChannelAdapter(partition, timestamp).forEach( it -> {
batchPoints.point(it);
});

//influxDB.write(batchPoints);
}

private static List<Point> getPartitionAffinityScore(LogicalPartition partition, Instant timestamp) {

@@ -54,27 +54,36 @@ class Insights {
void discover() {

configuration.hmc.forEach( configHmc -> {
if(hmcClients != null && !hmcClients.containsKey(configHmc.name)) {
if(!hmcClients.containsKey(configHmc.name)) {
HmcClient hmcClient = new HmcClient(configHmc);
hmcClients.put(configHmc.name, hmcClient);
log.info("discover() - Adding HMC: " + hmcClient);
}
});

hmcClients.forEach(( hmcId, hmcClient) -> {

try {
hmcClient.logoff();
hmcClient.login();
hmcClient.getManagedSystems().forEach((systemId, system) -> {

// Add to list of known systems
systems.putIfAbsent(systemId, system);
if(!systems.containsKey(systemId)) {
systems.put(systemId, system);
log.info("discover() - Found ManagedSystem: " + system);
}

// Get LPAR's for this system
try {
hmcClient.getLogicalPartitionsForManagedSystem(system).forEach((partitionId, partition) -> {

// Add to list of known partitions
partitions.putIfAbsent(partitionId, partition);
if(!partitions.containsKey(partitionId)) {
partitions.put(partitionId, partition);
log.info("discover() - Found LogicalPartition: " + partition);
}

});
} catch (Exception e) {
log.error("discover()", e);

@@ -143,16 +152,12 @@ class Insights {


void writeMetricsForManagedSystems() {
systems.forEach((systemId, system) -> {
influxClient.writeManagedSystem(system);
});
systems.forEach((systemId, system) -> influxClient.writeManagedSystem(system));
}


void writeMetricsForLogicalPartitions() {
partitions.forEach((partitionId, partition) -> {
influxClient.writeLogicalPartition(partition);
});
partitions.forEach((partitionId, partition) -> influxClient.writeLogicalPartition(partition));
}


@@ -162,7 +167,10 @@ class Insights {
int executions = 0;
AtomicBoolean keepRunning = new AtomicBoolean(true);

Thread shutdownHook = new Thread(() -> keepRunning.set(false));
Thread shutdownHook = new Thread(() -> {
keepRunning.set(false);
System.out.println("Stopping HMCi, please wait ...");
});
Runtime.getRuntime().addShutdownHook(shutdownHook);

do {

@@ -185,6 +193,7 @@ class Insights {
}

executions++;
//noinspection BusyWait
sleep(configuration.refresh * 1000);

} while (keepRunning.get());

@@ -18,6 +18,7 @@ package biz.nellemann.hmci;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine;
import picocli.CommandLine.Option;
import picocli.CommandLine.Command;

import java.io.File;

@@ -32,9 +33,8 @@ public class Main implements Callable<Integer> {

private final static Logger log = LoggerFactory.getLogger(Main.class);

@SuppressWarnings("FieldMayBeFinal")
@CommandLine.Option(names = { "-c", "--conf" }, description = "Configuration file [default: '/etc/hmci.toml'].")
private String configurationFile = "/etc/hmci.toml";
@Option(names = { "-c", "--conf" }, description = "Configuration file [default: '/etc/hmci.toml'].", defaultValue = "/etc/hmci.toml", paramLabel = "<file>")
private String configurationFile;

public static void main(String... args) {
int exitCode = new CommandLine(new Main()).execute(args);

@@ -242,7 +242,7 @@ class ManagedSystem extends MetaSystem {
metrics.systemUtil.sample.viosUtil.forEach( vios -> {

vios.storage.genericPhysicalAdapters.forEach( adapter -> {
//Map<String, Map> map = new HashMap<String, Map>()

Measurement measurement = new Measurement();

HashMap<String, String> tagsMap = new HashMap<String, String>() {

@@ -254,7 +254,6 @@ class ManagedSystem extends MetaSystem {
}
};

//map.put("tags", tagsMap)
measurement.tags = tagsMap;
log.debug("getSystemGenericPhysicalAdapters() - tags: " + tagsMap.toString());


@@ -266,7 +265,6 @@ class ManagedSystem extends MetaSystem {
}
};

//map.put("fields", fieldsMap)
measurement.fields = fieldsMap;
log.debug("getSystemGenericPhysicalAdapters() - fields: " + fieldsMap.toString());


@@ -287,7 +285,6 @@ class ManagedSystem extends MetaSystem {

vios.storage.genericVirtualAdapters.forEach( adapter -> {

//Map<String, Map> map = new HashMap<String, Map>()
Measurement measurement = new Measurement();

HashMap<String, String> tagsMap = new HashMap<String, String>() {

@@ -299,7 +296,6 @@ class ManagedSystem extends MetaSystem {
}
};

//map.put("tags", tagsMap)
measurement.tags = tagsMap;
log.debug("getSystemGenericVirtualAdapters() - tags: " + tagsMap.toString());


@@ -311,7 +307,6 @@ class ManagedSystem extends MetaSystem {
}
};

//map.put("fields", fieldsMap)
measurement.fields = fieldsMap;
log.debug("getSystemGenericVirtualAdapters() - fields: " + fieldsMap.toString());

@@ -12,11 +12,7 @@ class HmcClientTest extends Specification {

def setup() {
mockServer.start()
Configuration.HmcObject configHmc = new Configuration.HmcObject()
configHmc.name = "site1"
configHmc.url = mockServer.url("/").toString()
configHmc.username = "testUser"
configHmc.password = "testPassword"
Configuration.HmcObject configHmc = new Configuration.HmcObject("site1", mockServer.url("/").toString(), "testUser", "testPassword", true);
hmc = new HmcClient(configHmc)
hmc.authToken = "blaBla"
}

@@ -78,7 +74,7 @@ class HmcClientTest extends Specification {
mockServer.enqueue(new MockResponse().setBody(testJson))

when:
String jsonString = hmc.getResponseBody(new URL(mockServer.url("/rest/api/pcm/ProcessedMetrics/ManagedSystem_e09834d1-c930-3883-bdad-405d8e26e166_20200807T122600+0200_20200807T122600+0200_30.json") as String))
String jsonString = hmc.getResponse(new URL(mockServer.url("/rest/api/pcm/ProcessedMetrics/ManagedSystem_e09834d1-c930-3883-bdad-405d8e26e166_20200807T122600+0200_20200807T122600+0200_30.json") as String))

then:
jsonString.contains('"uuid": "e09834d1-c930-3883-bdad-405d8e26e166"')

@@ -92,7 +88,7 @@ class HmcClientTest extends Specification {
mockServer.enqueue(new MockResponse().setBody(testJson))

when:
String jsonString = hmc.getResponseBody(new URL(mockServer.url("/rest/api/pcm/ProcessedMetrics/LogicalPartition_2DE05DB6-8AD5-448F-8327-0F488D287E82_20200807T123730+0200_20200807T123730+0200_30.json") as String))
String jsonString = hmc.getResponse(new URL(mockServer.url("/rest/api/pcm/ProcessedMetrics/LogicalPartition_2DE05DB6-8AD5-448F-8327-0F488D287E82_20200807T123730+0200_20200807T123730+0200_30.json") as String))

then:
jsonString.contains('"uuid": "b597e4da-2aab-3f52-8616-341d62153559"')

@@ -49,7 +49,7 @@ class InfluxClientTest extends Specification {
influxClient.writeLogicalPartition(lpar)

then:
lpar.metrics.systemUtil.utilSamples.first().sampleInfo.status == 2
lpar.metrics.systemUtil.sample.sampleInfo.status == 2

}