Changes to InfluxDB batch writes.

This commit is contained in:
Mark Nellemann 2020-08-14 09:34:44 +02:00
parent 80138b8d57
commit bd4cb89892
6 changed files with 53 additions and 51 deletions

View file

@ -2,24 +2,16 @@
Small utility to fetch metrics from one or more HMC's and push those to an InfluxDB time-series database. Small utility to fetch metrics from one or more HMC's and push those to an InfluxDB time-series database.
## Known Problems
- When running on Windows, the data is collected and written to InfluxDB, but no data appears in Grafana.
## Usage Instructions ## Usage Instructions
### Create Configuration - Ensure you have correct date/time and NTP running to keep it accurate.
Modify the **/opt/hmci/conf/hmci.groovy** configuration file to suit your environment. Modify the */opt/hmci/conf/hmci.groovy* configuration file to suit your environment and run the program:
### Run HMCi Tool
Requires Java 8+ runtime
/opt/hmci/bin/hmci /opt/hmci/bin/hmci
Configure Grafana to communicate with your InfluxDB and import the dashboards from *doc/*. The dashboards are slightly modified versions of those provided by the nmon2influxdb tool.
## Development Information ## Development Information

View file

@ -2,8 +2,8 @@
Configuration for HMCi Configuration for HMCi
*/ */
hmci.refresh = 60 hmci.refresh = 30
hmci.rescan = 15 hmci.rescan = 60
// InfluxDB to save metrics // InfluxDB to save metrics
influx { influx {

View file

@ -60,9 +60,11 @@ class App implements Runnable {
hmcClients.each { hmcId, hmcClient -> hmcClients.each { hmcId, hmcClient ->
log.info("Loggin in to HMC " + hmcId) hmcClient.logoff()
try {
hmcClient.login() hmcClient.login()
log.info("Logging in to HMC " + hmcId)
try {
hmcClient.getManagedSystems().each { systemId, system -> hmcClient.getManagedSystems().each { systemId, system ->
// Add to list of known systems // Add to list of known systems
@ -185,13 +187,15 @@ class App implements Runnable {
while(keepRunning) { while(keepRunning) {
getMetricsForSystems() getMetricsForSystems()
writeMetricsForManagedSystems()
getMetricsForPartitions() getMetricsForPartitions()
writeMetricsForManagedSystems()
writeMetricsForLogicalPartitions() writeMetricsForLogicalPartitions()
influxClient.writeBatchPoints()
// Refresh HMC's // Refresh HMC's
if(executions % rescanHmcEvery) { if(executions > rescanHmcEvery) {
executions = 0
discover() discover()
} }

View file

@ -55,10 +55,6 @@ class HmcClient {
*/ */
void login() throws IOException { void login() throws IOException {
if(authToken) {
return
}
String payload = """\ String payload = """\
<?xml version="1.0" encoding="UTF-8" standalone="yes"?> <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<LogonRequest xmlns="http://www.ibm.com/xmlns/systems/power/firmware/web/mc/2012_10/" schemaVersion="V1_0"> <LogonRequest xmlns="http://www.ibm.com/xmlns/systems/power/firmware/web/mc/2012_10/" schemaVersion="V1_0">
@ -100,6 +96,11 @@ class HmcClient {
* *
*/ */
void logoff() { void logoff() {
if(!authToken) {
return
}
URL absUrl = new URL(String.format("%s/rest/api/web/Logon", baseUrl)) URL absUrl = new URL(String.format("%s/rest/api/web/Logon", baseUrl))
Request request = new Request.Builder() Request request = new Request.Builder()
.url(absUrl) .url(absUrl)
@ -272,7 +273,13 @@ class HmcClient {
.build(); .build();
Response response = client.newCall(request).execute(); Response response = client.newCall(request).execute();
if (!response.isSuccessful()) throw new IOException("Unexpected code " + response); if (!response.isSuccessful()) {
if(response.code == 401) {
login()
} else {
throw new IOException("Unexpected code " + response)
}
};
return response return response
} }

View file

@ -20,6 +20,7 @@ class InfluxClient {
final String database final String database
InfluxDB influxDB InfluxDB influxDB
BatchPoints batchPoints
InfluxClient(String url, String username, String password, String database) { InfluxClient(String url, String username, String password, String database) {
this.url = url this.url = url
@ -33,6 +34,15 @@ class InfluxClient {
try { try {
influxDB = InfluxDBFactory.connect(url, username, password); influxDB = InfluxDBFactory.connect(url, username, password);
createDatabase() createDatabase()
// Enable batch writes to get better performance.
BatchOptions options = BatchOptions.DEFAULTS.actions(300).flushDuration(500);
influxDB.enableBatch(options);
influxDB.setLogLevel(InfluxDB.LogLevel.BASIC);
batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build();
} catch(Exception e) { } catch(Exception e) {
log.error(e.message) log.error(e.message)
throw new Exception(e) throw new Exception(e)
@ -50,34 +60,21 @@ class InfluxClient {
influxDB.query(new Query("CREATE DATABASE " + database)); influxDB.query(new Query("CREATE DATABASE " + database));
influxDB.setDatabase(database); influxDB.setDatabase(database);
/* /*
// ... and a retention policy, if necessary. // ... and a retention policy, if necessary.
String retentionPolicyName = "HMCI_ONE_YEAR"; String retentionPolicyName = "HMCI_ONE_YEAR";
influxDB.query(new Query("CREATE RETENTION POLICY " + retentionPolicyName influxDB.query(new Query("CREATE RETENTION POLICY " + retentionPolicyName
+ " ON " + database + " DURATION 365d REPLICATION 1 DEFAULT")); + " ON " + database + " DURATION 365d REPLICATION 1 DEFAULT"));
influxDB.setRetentionPolicy(retentionPolicyName); influxDB.setRetentionPolicy(retentionPolicyName);
*/ */
// Enable batch writes to get better performance.
influxDB.enableBatch(BatchOptions.DEFAULTS);
} }
void write() { void writeBatchPoints() {
// Write points to InfluxDB. log.debug("writeBatchPoints()")
influxDB.write(Point.measurement("h2o_feet") influxDB.write(batchPoints);
.time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) //influxDB.flush()
.tag("location", "santa_monica")
.addField("level description", "below 3 feet")
.addField("water_level", 2.064d)
.build());
influxDB.write(Point.measurement("h2o_feet")
.time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
.tag("location", "coyote_creek")
.addField("level description", "between 6 and 9 feet")
.addField("water_level", 8.12d)
.build());
} }
@ -100,7 +97,7 @@ class InfluxClient {
return return
} }
BatchPoints batchPoints = BatchPoints.database(database).build(); //BatchPoints batchPoints = BatchPoints.database(database).build();
getSystemMemory(system, timestamp).each { getSystemMemory(system, timestamp).each {
batchPoints.point(it) batchPoints.point(it)
@ -122,7 +119,6 @@ class InfluxClient {
batchPoints.point(it) batchPoints.point(it)
} }
influxDB.write(batchPoints);
} }
private static List<Point> getSystemMemory(ManagedSystem system, Instant timestamp) { private static List<Point> getSystemMemory(ManagedSystem system, Instant timestamp) {
@ -168,7 +164,7 @@ class InfluxClient {
return return
} }
BatchPoints batchPoints = BatchPoints.database(database).build(); //BatchPoints batchPoints = BatchPoints.database(database).build();
getPartitionMemory(partition, timestamp).each { getPartitionMemory(partition, timestamp).each {
batchPoints.point(it) batchPoints.point(it)
@ -186,7 +182,7 @@ class InfluxClient {
batchPoints.point(it) batchPoints.point(it)
} }
influxDB.write(batchPoints); //influxDB.write(batchPoints);
} }
private static List<Point> getPartitionMemory(LogicalPartition partition, Instant timestamp) { private static List<Point> getPartitionMemory(LogicalPartition partition, Instant timestamp) {

View file

@ -21,15 +21,18 @@ abstract class MetaSystem {
Instant getTimestamp() { Instant getTimestamp() {
String timeStamp = metrics.systemUtil.utilSamples.first().sampleInfo.timeStamp String timestamp = metrics.systemUtil.utilSamples.first().sampleInfo.timeStamp
Instant instant Instant instant
try { try {
log.debug("getTimeStamp() - PMC Timestamp: " + timestamp)
DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[XXX][X]"); DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[XXX][X]");
instant = Instant.from(dateTimeFormatter.parse(timeStamp)) instant = Instant.from(dateTimeFormatter.parse(timestamp))
log.debug("getTimestamp() - Instant: " + instant.toString())
} catch(DateTimeParseException e) { } catch(DateTimeParseException e) {
log.warn("getTimestamp() - parse error: " + timeStamp) log.warn("getTimestamp() - parse error: " + timestamp)
} }
return instant
return instant ?: Instant.now()
} }
} }