// Source: hmci/src/main/groovy/biz/nellemann/hmci/InfluxClient.groovy
package biz.nellemann.hmci

import groovy.util.logging.Slf4j
import org.influxdb.BatchOptions
import org.influxdb.InfluxDB
import org.influxdb.InfluxDBFactory
import org.influxdb.dto.BatchPoints
import org.influxdb.dto.Point
import org.influxdb.dto.Query

import java.time.Instant
import java.util.concurrent.TimeUnit
@Slf4j
class InfluxClient {

    final String url
    final String username
    final String password
    final String database

    // Lazily initialised by login(); null until then.
    InfluxDB influxDB
    BatchPoints batchPoints

    InfluxClient(String url, String username, String password, String database) {
        this.url = url
        this.username = username
        this.password = password
        this.database = database
    }

    /**
     * Connect to InfluxDB, create the target database and enable client-side
     * batching. Idempotent: a no-op when already connected.
     *
     * @throws Exception if connecting or creating the database fails; the
     *         original exception is preserved as the cause.
     */
    void login() {
        if (influxDB) {
            return  // already connected
        }
        try {
            influxDB = InfluxDBFactory.connect(url, username, password)
            createDatabase()

            // Enable batch writes to get better performance.
            BatchOptions options = BatchOptions.DEFAULTS.actions(300).flushDuration(500)
            influxDB.enableBatch(options)
            //influxDB.setLogLevel(InfluxDB.LogLevel.BASIC);

            batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build()
        } catch (Exception e) {
            // Log with context and rethrow with the cause preserved so callers
            // see both our message and the underlying stack trace.
            log.error("login() - error connecting to InfluxDB at {}: {}", url, e.message)
            throw new Exception("Could not connect to InfluxDB at " + url, e)
        }
    }

    /**
     * Close the InfluxDB connection (flushes any batched points).
     * Resets state so a subsequent login() reconnects instead of silently
     * reusing a closed client.
     */
    void logoff() {
        influxDB?.close()
        influxDB = null
        batchPoints = null
    }

    /**
     * Create the configured database (CREATE DATABASE is idempotent in
     * InfluxDB 1.x) and select it for subsequent writes.
     * NOTE(review): a retention policy could be created and selected here as
     * well if unbounded growth becomes a problem.
     */
    void createDatabase() {
        influxDB.query(new Query("CREATE DATABASE " + database))
        influxDB.setDatabase(database)
    }

    /** Flush the accumulated batch of points to InfluxDB. */
    void writeBatchPoints() {
        log.debug("writeBatchPoints()")
        if (influxDB == null || batchPoints == null) {
            // Guard against use before login() instead of throwing NPE.
            log.warn("writeBatchPoints() - not logged in, skipping")
            return
        }
        influxDB.write(batchPoints)
    }

    /*
        Managed System
     */

    /**
     * Queue all measurements for one managed system into the current batch.
     * Skips (with a warning) when the system has no metrics or no timestamp.
     */
    void writeManagedSystem(ManagedSystem system) {
        if (system.metrics == null) {
            log.warn("writeManagedSystem() - null metrics, skipping")
            return
        }

        Instant timestamp = system.getTimestamp()
        if (!timestamp) {
            log.warn("writeManagedSystem() - no timestamp, skipping")
            return
        }

        // Concatenate every measurement group and add each point to the batch.
        List<Point> points = getSystemMemory(system, timestamp) +
            getSystemProcessor(system, timestamp) +
            getSystemSharedProcessorPools(system, timestamp) +
            getSystemSharedAdapters(system, timestamp) +
            getSystemFiberChannelAdapters(system, timestamp)
        points.each { batchPoints.point(it) }
    }

    private static List<Point> getSystemMemory(ManagedSystem system, Instant timestamp) {
        List<Map> metrics = system.getMemoryMetrics()
        return processMeasurementMap(metrics, timestamp, "SystemMemory")
    }

    private static List<Point> getSystemProcessor(ManagedSystem system, Instant timestamp) {
        List<Map> metrics = system.getProcessorMetrics()
        return processMeasurementMap(metrics, timestamp, "SystemProcessor")
    }

    private static List<Point> getSystemSharedProcessorPools(ManagedSystem system, Instant timestamp) {
        List<Map> metrics = system.getSharedProcessorPools()
        return processMeasurementMap(metrics, timestamp, "SystemSharedProcessorPool")
    }

    private static List<Point> getSystemSharedAdapters(ManagedSystem system, Instant timestamp) {
        List<Map> metrics = system.getSystemSharedAdapters()
        return processMeasurementMap(metrics, timestamp, "SystemSharedAdapters")
    }

    private static List<Point> getSystemFiberChannelAdapters(ManagedSystem system, Instant timestamp) {
        List<Map> metrics = system.getSystemFiberChannelAdapters()
        return processMeasurementMap(metrics, timestamp, "SystemFiberChannelAdapters")
    }

    /*
        Logical Partitions
     */

    /**
     * Queue all measurements for one logical partition into the current batch.
     * Skips (with a warning) when the partition has no metrics or no timestamp.
     */
    void writeLogicalPartition(LogicalPartition partition) {
        if (partition.metrics == null) {
            log.warn("writeLogicalPartition() - null metrics, skipping")
            return
        }

        Instant timestamp = partition.getTimestamp()
        if (!timestamp) {
            log.warn("writeLogicalPartition() - no timestamp, skipping")
            return
        }

        // Concatenate every measurement group and add each point to the batch.
        List<Point> points = getPartitionMemory(partition, timestamp) +
            getPartitionProcessor(partition, timestamp) +
            getPartitionVirtualEthernetAdapter(partition, timestamp) +
            getPartitionVirtualFiberChannelAdapter(partition, timestamp)
        points.each { batchPoints.point(it) }
    }

    private static List<Point> getPartitionMemory(LogicalPartition partition, Instant timestamp) {
        List<Map> metrics = partition.getMemoryMetrics()
        return processMeasurementMap(metrics, timestamp, "PartitionMemory")
    }

    private static List<Point> getPartitionProcessor(LogicalPartition partition, Instant timestamp) {
        List<Map> metrics = partition.getProcessorMetrics()
        return processMeasurementMap(metrics, timestamp, "PartitionProcessor")
    }

    private static List<Point> getPartitionVirtualEthernetAdapter(LogicalPartition partition, Instant timestamp) {
        List<Map> metrics = partition.getVirtualEthernetAdapterMetrics()
        return processMeasurementMap(metrics, timestamp, "PartitionVirtualEthernetAdapters")
    }

    private static List<Point> getPartitionVirtualFiberChannelAdapter(LogicalPartition partition, Instant timestamp) {
        List<Map> metrics = partition.getVirtualFiberChannelAdaptersMetrics()
        return processMeasurementMap(metrics, timestamp, "PartitionVirtualFiberChannelAdapters")
    }

    /*
        Shared
     */

    /**
     * Convert a list of measurement maps into InfluxDB points.
     *
     * Each map is expected to contain a "fields" map (name -> BigDecimal) and
     * a "tags" map (name -> String) — assumed from the closure signatures;
     * confirm against ManagedSystem/LogicalPartition. One Point is built per
     * field, tagged with the field name plus all map-level tags, timestamped
     * at millisecond precision.
     */
    private static List<Point> processMeasurementMap(List<Map> listOfMaps, Instant timestamp, String measurement) {
        List<Point> list = new ArrayList<>()
        listOfMaps.each { map ->
            // Iterate fields
            map.get("fields").each { String fieldName, BigDecimal fieldValue ->
                Point.Builder builder = Point.measurement(measurement)
                        .time(timestamp.toEpochMilli(), TimeUnit.MILLISECONDS)
                        .tag("name", fieldName)
                        .addField("value", fieldValue)

                // For each field, we add all tags
                map.get("tags").each { String tagName, String tagValue ->
                    builder.tag(tagName, tagValue)
                }
                list.add(builder.build())
            }
        }
        return list
    }
}