diff --git a/README.md b/README.md
index c80fab6..4a4d86e 100644
--- a/README.md
+++ b/README.md
@@ -2,24 +2,16 @@
 
 Small utility to fetch metrics from one or more HMC's and push those to an InfluxDB time-series database.
 
-## Known Problems
-
-- When running on Windows, the data is collected and written to InfluxDB, but in Grafana there is no data.
-
 ## Usage Instructions
 
-### Create Configuration
+- Ensure the date/time is correct and NTP is running to keep it accurate.
 
-Modify the **/opt/hmci/conf/hmci.groovy** configuration file to suit your environment.
-
-
-### Run HMCi Tool
-
-Requires Java 8+ runtime
+Modify the */opt/hmci/conf/hmci.groovy* configuration file to suit your environment and run the program:
 
     /opt/hmci/bin/hmci
 
+Configure Grafana to communicate with your InfluxDB and import the dashboards from *doc/*. The dashboards are slightly modified versions of those provided by the nmon2influxdb tool.
 
 ## Development Information
 
diff --git a/conf/hmci.groovy b/conf/hmci.groovy
index 61d6c8f..eed8efd 100644
--- a/conf/hmci.groovy
+++ b/conf/hmci.groovy
@@ -2,8 +2,8 @@
     Configuration for HMCi
  */
 
-hmci.refresh = 60
-hmci.rescan = 15
+hmci.refresh = 30
+hmci.rescan = 60
 
 // InfluxDB to save metrics
 influx {
diff --git a/src/main/groovy/biz/nellemann/hmci/App.groovy b/src/main/groovy/biz/nellemann/hmci/App.groovy
index a4bfcad..d44e716 100644
--- a/src/main/groovy/biz/nellemann/hmci/App.groovy
+++ b/src/main/groovy/biz/nellemann/hmci/App.groovy
@@ -60,9 +60,11 @@ class App implements Runnable {
 
         hmcClients.each { hmcId, hmcClient ->
 
-            log.info("Loggin in to HMC " + hmcId)
+            hmcClient.logoff()
+            hmcClient.login()
+
+            log.info("Logging in to HMC " + hmcId)
             try {
-                hmcClient.login()
                 hmcClient.getManagedSystems().each { systemId, system ->
 
                     // Add to list of known systems
@@ -185,13 +187,15 @@ class App implements Runnable {
         while(keepRunning) {
 
             getMetricsForSystems()
-            writeMetricsForManagedSystems()
-
             getMetricsForPartitions()
+
+            writeMetricsForManagedSystems()
             writeMetricsForLogicalPartitions()
+            influxClient.writeBatchPoints()
 
             // Refresh HMC's
-            if(executions % rescanHmcEvery) {
+            if(executions > rescanHmcEvery) {
+                executions = 0
                 discover()
             }
 
diff --git a/src/main/groovy/biz/nellemann/hmci/HmcClient.groovy b/src/main/groovy/biz/nellemann/hmci/HmcClient.groovy
index 3408910..c80edc4 100644
--- a/src/main/groovy/biz/nellemann/hmci/HmcClient.groovy
+++ b/src/main/groovy/biz/nellemann/hmci/HmcClient.groovy
@@ -55,10 +55,6 @@ class HmcClient {
      */
     void login() throws IOException {
 
-        if(authToken) {
-            return
-        }
-
         String payload = """\
@@ -100,6 +96,11 @@ class HmcClient {
      *
      */
    void logoff() {
+
+        if(!authToken) {
+            return
+        }
+
        URL absUrl = new URL(String.format("%s/rest/api/web/Logon", baseUrl))
        Request request = new Request.Builder()
                .url(absUrl)
@@ -272,7 +273,13 @@ class HmcClient {
                .build();
 
        Response response = client.newCall(request).execute();
-        if (!response.isSuccessful()) throw new IOException("Unexpected code " + response);
+        if (!response.isSuccessful()) {
+            if(response.code == 401) {
+                login()
+            } else {
+                throw new IOException("Unexpected code " + response)
+            }
+        };
 
        return response
    }
diff --git a/src/main/groovy/biz/nellemann/hmci/InfluxClient.groovy b/src/main/groovy/biz/nellemann/hmci/InfluxClient.groovy
index b47d565..1e514b0 100644
--- a/src/main/groovy/biz/nellemann/hmci/InfluxClient.groovy
+++ b/src/main/groovy/biz/nellemann/hmci/InfluxClient.groovy
@@ -20,6 +20,7 @@ class InfluxClient {
    final String database
 
    InfluxDB influxDB
+    BatchPoints batchPoints
 
    InfluxClient(String url, String username, String password, String database) {
        this.url = url
@@ -33,6 +34,15 @@ class InfluxClient {
        try {
            influxDB = InfluxDBFactory.connect(url, username, password);
            createDatabase()
+
+            // Enable batch writes to get better performance.
+            BatchOptions options = BatchOptions.DEFAULTS.actions(300).flushDuration(500);
+            influxDB.enableBatch(options);
+
+            influxDB.setLogLevel(InfluxDB.LogLevel.BASIC);
+
+            batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build();
+
        } catch(Exception e) {
            log.error(e.message)
            throw new Exception(e)
@@ -50,34 +60,21 @@ class InfluxClient {
        influxDB.query(new Query("CREATE DATABASE " + database));
        influxDB.setDatabase(database);
 
-/*
+        /*
        // ... and a retention policy, if necessary.
        String retentionPolicyName = "HMCI_ONE_YEAR";
        influxDB.query(new Query("CREATE RETENTION POLICY " + retentionPolicyName + " ON " + database + " DURATION 365d REPLICATION 1 DEFAULT"));
        influxDB.setRetentionPolicy(retentionPolicyName);
-        */
-        // Enable batch writes to get better performance.
-        influxDB.enableBatch(BatchOptions.DEFAULTS);
+        */
+
    }
 
-    void write() {
-        // Write points to InfluxDB.
-        influxDB.write(Point.measurement("h2o_feet")
-                .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
-                .tag("location", "santa_monica")
-                .addField("level description", "below 3 feet")
-                .addField("water_level", 2.064d)
-                .build());
-
-        influxDB.write(Point.measurement("h2o_feet")
-                .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
-                .tag("location", "coyote_creek")
-                .addField("level description", "between 6 and 9 feet")
-                .addField("water_level", 8.12d)
-                .build());
-
+    void writeBatchPoints() {
+        log.debug("writeBatchPoints()")
+        influxDB.write(batchPoints);
+        //influxDB.flush()
    }
@@ -100,7 +97,7 @@ class InfluxClient {
            return
        }
 
-        BatchPoints batchPoints = BatchPoints.database(database).build();
+        //BatchPoints batchPoints = BatchPoints.database(database).build();
 
        getSystemMemory(system, timestamp).each {
            batchPoints.point(it)
@@ -122,7 +119,6 @@ class InfluxClient {
            batchPoints.point(it)
        }
 
-        influxDB.write(batchPoints);
    }
 
    private static List getSystemMemory(ManagedSystem system, Instant timestamp) {
@@ -168,7 +164,7 @@ class InfluxClient {
            return
        }
 
-        BatchPoints batchPoints = BatchPoints.database(database).build();
+        //BatchPoints batchPoints = BatchPoints.database(database).build();
 
        getPartitionMemory(partition, timestamp).each {
            batchPoints.point(it)
@@ -186,7 +182,7 @@ class InfluxClient {
            batchPoints.point(it)
        }
 
-        influxDB.write(batchPoints);
+        //influxDB.write(batchPoints);
    }
 
    private static List getPartitionMemory(LogicalPartition partition, Instant timestamp) {
diff --git a/src/main/groovy/biz/nellemann/hmci/MetaSystem.groovy b/src/main/groovy/biz/nellemann/hmci/MetaSystem.groovy
index 162ca75..26ae197 100644
--- a/src/main/groovy/biz/nellemann/hmci/MetaSystem.groovy
+++ b/src/main/groovy/biz/nellemann/hmci/MetaSystem.groovy
@@ -21,15 +21,18 @@ abstract class MetaSystem {
 
    Instant getTimestamp() {
 
-        String timeStamp = metrics.systemUtil.utilSamples.first().sampleInfo.timeStamp
+        String timestamp = metrics.systemUtil.utilSamples.first().sampleInfo.timeStamp
        Instant instant
        try {
+            log.debug("getTimeStamp() - PMC Timestamp: " + timestamp)
            DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[XXX][X]");
-            instant = Instant.from(dateTimeFormatter.parse(timeStamp))
+            instant = Instant.from(dateTimeFormatter.parse(timestamp))
+            log.debug("getTimestamp() - Instant: " + instant.toString())
        } catch(DateTimeParseException e) {
-            log.warn("getTimestamp() - parse error: " + timeStamp)
+            log.warn("getTimestamp() - parse error: " + timestamp)
        }
-        return instant
+
+        return instant ?: Instant.now()
    }
 
 }
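Note on the MetaSystem.groovy change above: it parses the PCM sample timestamp and now falls back to the current time when parsing fails. The snippet below is a minimal stand-alone sketch of that behaviour for reference only; the method name `parseSampleTimestamp` and the sample values are made up for illustration, while the formatter pattern and the `?: Instant.now()` fallback come from the diff itself.

```groovy
import java.time.Instant
import java.time.format.DateTimeFormatter
import java.time.format.DateTimeParseException

// Minimal sketch of the timestamp handling above (method name and sample
// values are illustrative only). Parses a PCM-style timestamp with an
// optional zone offset and falls back to "now" when parsing fails.
Instant parseSampleTimestamp(String timestamp) {
    Instant instant
    try {
        DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[XXX][X]")
        instant = Instant.from(fmt.parse(timestamp))
    } catch (DateTimeParseException e) {
        // Unparsable input is tolerated; the caller still gets a usable Instant.
    }
    return instant ?: Instant.now()
}

assert parseSampleTimestamp("2020-07-25T23:30:00+02:00") == Instant.parse("2020-07-25T21:30:00Z")
assert parseSampleTimestamp("not-a-timestamp") != null
```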