Merge pull request 'influxdb2 support' (#1) from influxdb2 into main
continuous-integration/drone/tag Build is passing

Reviewed-on: #1
This commit is contained in:
Mark Nellemann 2023-05-19 18:39:20 +00:00
commit 5b2a3ff9ea
15 changed files with 478 additions and 725 deletions

View File

@ -2,6 +2,11 @@
All notable changes to this project will be documented in this file.
## 1.4.4 - 2023-05-20
- Support for InfluxDB v2; now requires InfluxDB 1.8 or later
- Increase influx writer buffer limit
- Various dashboard improvements
## 1.4.3 - 2023-03-21
- Fix and improve processor utilization dashboards.
- Minor code cleanup.

View File

@ -9,7 +9,7 @@ Metrics includes:
- *Managed Systems* - the physical Power servers
- *Logical Partitions* - the virtualized servers running AIX, Linux and/or IBM-i (AS/400)
- *Virtual I/O Servers* - the I/O partition(s) virtualizing network and storage
- *Energy* - watts and temperatures (needs to be enabled and is not available on multi-chassis systems)
- *Energy* - watts and temperatures (needs to be enabled and is not available on all systems)
![architecture](doc/HMCi.png)
@ -66,6 +66,13 @@ Read the [readme-grafana.md](doc/readme-grafana.md) file for instructions and he
This is most likely due to timezone, date and/or NTP not being configured correctly on the HMC and/or host running HMCi.
You can check the timestamp of the most recent data by querying InfluxDB with the ```influx``` CLI client, and take note of the timezone when comparing:
```sql
use hmci;
precision rfc3339;
SELECT * FROM server_details GROUP BY * ORDER BY DESC LIMIT 1;
```
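If you are on InfluxDB v2.x, the same check can be done with a Flux query through the v2 `influx` CLI; the org, token and bucket names below are placeholders for your own setup:
```shell
influx query --org myOrg --token $INFLUX_TOKEN '
from(bucket: "hmci")
  |> range(start: -24h)
  |> filter(fn: (r) => r._measurement == "server_details")
  |> last()'
```
Timestamps in the Flux output are RFC3339 in UTC, so remember the timezone offset when comparing with the HMC.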
### Compatibility with nextract Plus
@ -126,6 +133,7 @@ If you rename a partition, the metrics in InfluxDB will still be available by th
DELETE WHERE lparname = 'name';
```
## Development Information
You need Java (JDK) version 8 or later to build hmci.
@ -141,7 +149,7 @@ Use the gradle build tool, which will download all required dependencies:
### Local Testing
#### InfluxDB
#### InfluxDB v1.x
Start an InfluxDB container:
@ -155,6 +163,18 @@ Create the *hmci* database:
docker exec -i influxdb influx -execute "CREATE DATABASE hmci"
```
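As an optional sanity check, you can confirm that the database exists:
```shell
docker exec -i influxdb influx -execute "SHOW DATABASES"
```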
#### InfluxDB v2.x
Start an InfluxDB container:
```shell
docker pull influxdb:latest
docker run --name=influxdb --rm -d -p 8086:8086 influxdb:latest
```
- Then open the Web UI at http://localhost:8086/ and create an initial user, an organization and a bucket.
- Then create an API token with read/write access to your bucket (or use the CLI sketch below).
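If you prefer to skip the Web UI, a minimal sketch of the same setup with the bundled `influx` CLI looks like this; all names and credentials are placeholders:
```shell
docker exec influxdb influx setup \
  --username admin --password 'changemeplease' \
  --org myOrg --bucket hmci \
  --retention 0 --force

# List the generated token(s) afterwards
docker exec influxdb influx auth list
```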
#### Grafana
@ -166,4 +186,7 @@ docker run --name grafana --link influxdb:influxdb --rm -d -p 3000:3000 grafana/
Set up Grafana to connect to the InfluxDB container by defining a new datasource on URL *http://influxdb:8086* named *hmci*.
If you are [connecting](https://docs.influxdata.com/influxdb/v2.7/tools/grafana/) to InfluxDB v2.x, add a custom HTTP header, enter the bucket as the database and disable authorization (a scripted alternative is sketched below).
- Authorization = Token abcdef_random_token_from_influxdb==
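If you script the Grafana side instead of using the UI, the same datasource (including the custom header) can be created through Grafana's HTTP API; URL, credentials and the token value below are placeholders:
```shell
curl -X POST http://admin:admin@localhost:3000/api/datasources \
  -H 'Content-Type: application/json' \
  -d '{
    "name": "hmci",
    "type": "influxdb",
    "access": "proxy",
    "url": "http://influxdb:8086",
    "database": "hmci",
    "jsonData": { "httpHeaderName1": "Authorization" },
    "secureJsonData": { "httpHeaderValue1": "Token abcdef_random_token_from_influxdb==" }
  }'
```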
Import dashboards from the [doc/dashboards/](doc/dashboards/) folder.

View File

@ -1,13 +1,10 @@
plugins {
id 'java'
id 'jacoco'
id 'groovy'
id 'application'
// Code coverage of tests
id 'jacoco'
id "net.nemerosa.versioning" version "2.15.1"
id "com.netflix.nebula.ospackage" version "10.0.0"
id "com.netflix.nebula.ospackage" version "11.2.0"
id "com.github.johnrengelman.shadow" version "7.1.2"
}
@ -20,19 +17,18 @@ group = projectGroup
version = projectVersion
dependencies {
annotationProcessor 'info.picocli:picocli-codegen:4.7.1'
implementation 'info.picocli:picocli:4.7.1'
implementation 'org.influxdb:influxdb-java:2.23'
//implementation 'com.influxdb:influxdb-client-java:6.7.0'
implementation 'org.slf4j:slf4j-api:2.0.6'
implementation 'org.slf4j:slf4j-simple:2.0.6'
annotationProcessor 'info.picocli:picocli-codegen:4.7.3'
implementation 'info.picocli:picocli:4.7.3'
implementation 'org.slf4j:slf4j-api:2.0.7'
implementation 'org.slf4j:slf4j-simple:2.0.7'
implementation 'com.squareup.okhttp3:okhttp:4.10.0' // Also used by InfluxDB Client
implementation 'com.fasterxml.jackson.core:jackson-databind:2.14.2'
implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-xml:2.14.2'
implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-toml:2.14.2'
implementation 'com.influxdb:influxdb-client-java:6.8.0'
implementation 'com.fasterxml.jackson.core:jackson-databind:2.14.3'
implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-xml:2.14.3'
implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-toml:2.14.3'
testImplementation 'junit:junit:4.13.2'
testImplementation 'org.spockframework:spock-core:2.3-groovy-3.0'
testImplementation 'org.spockframework:spock-core:2.3-groovy-4.0'
testImplementation "org.mock-server:mockserver-netty-no-dependencies:5.14.0"
}
@ -87,7 +83,7 @@ buildDeb {
}
jacoco {
toolVersion = "0.8.8"
toolVersion = "0.8.9"
}
jacocoTestReport {

File diff suppressed because one or more lines are too long

Binary image file changed (109 KiB before, 163 KiB after).

View File

@ -529,6 +529,137 @@
"title": "Processors",
"type": "row"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 3,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineStyle": {
"fill": "solid"
},
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "normal"
},
"thresholdsStyle": {
"mode": "line"
}
},
"decimals": 2,
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
}
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": {
"h": 11,
"w": 12,
"x": 0,
"y": 12
},
"id": 2,
"links": [],
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "8.1.4",
"targets": [
{
"alias": "$tag_lparname",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"hide": false,
"measurement": "/^$ServerName$/",
"orderByTime": "ASC",
"policy": "default",
"query": "SELECT mean(\"utilizedProcUnits\") AS \"usage\" FROM \"lpar_processor\" WHERE (\"servername\" =~ /^$ServerName$/ AND \"lparname\" =~ /^$LPAR$/) AND $timeFilter GROUP BY time($interval), \"lparname\", \"servername\" fill(linear)",
"rawQuery": true,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": []
}
],
"title": "Processor Units - Utilization Stacked",
"transformations": [],
"type": "timeseries"
},
{
"datasource": {
"type": "influxdb",
@ -597,11 +728,11 @@
},
"gridPos": {
"h": 11,
"w": 24,
"x": 0,
"w": 12,
"x": 12,
"y": 12
},
"id": 2,
"id": 40,
"links": [],
"options": {
"legend": {
@ -662,7 +793,7 @@
"tags": []
}
],
"title": "Processor Units - Utilization / Entitled Percentage",
"title": "Processor Units - Utilization / Entitled",
"transformations": [],
"type": "timeseries"
},
@ -2510,7 +2641,7 @@
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "percent"
"mode": "normal"
},
"thresholdsStyle": {
"mode": "off"
@ -2522,10 +2653,6 @@
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
},
@ -2617,11 +2744,11 @@
]
}
],
"title": "Memory Assigned",
"title": "Memory Assigned - Stacked",
"type": "timeseries"
}
],
"refresh": false,
"refresh": "30s",
"schemaVersion": 37,
"style": "dark",
"tags": [
@ -2660,7 +2787,7 @@
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"definition": "SHOW TAG VALUES FROM \"lpar_processor\" WITH KEY = \"lparname\" WHERE servername =~ /$ServerName/",
"definition": "SHOW TAG VALUES FROM \"lpar_processor\" WITH KEY = \"lparname\" WHERE servername =~ /$ServerName/ ",
"hide": 0,
"includeAll": true,
"label": "Logical Partition",
@ -2668,7 +2795,7 @@
"multiFormat": "regex values",
"name": "LPAR",
"options": [],
"query": "SHOW TAG VALUES FROM \"lpar_processor\" WITH KEY = \"lparname\" WHERE servername =~ /$ServerName/",
"query": "SHOW TAG VALUES FROM \"lpar_processor\" WITH KEY = \"lparname\" WHERE servername =~ /$ServerName/ ",
"refresh": 1,
"refresh_on_load": false,
"regex": "",
@ -2711,6 +2838,6 @@
"timezone": "browser",
"title": "HMCi - Power LPAR Overview",
"uid": "Xl7oHESGz",
"version": 3,
"version": 9,
"weekStart": ""
}

View File

@ -107,6 +107,21 @@
"transparent": true,
"type": "text"
},
{
"collapsed": false,
"gridPos": {
"h": 1,
"w": 24,
"x": 0,
"y": 3
},
"id": 15,
"panels": [],
"repeat": "ServerName",
"repeatDirection": "h",
"title": "$ServerName",
"type": "row"
},
{
"datasource": {
"type": "influxdb",
@ -140,7 +155,7 @@
"h": 7,
"w": 24,
"x": 0,
"y": 3
"y": 4
},
"id": 7,
"options": {
@ -250,7 +265,7 @@
"h": 11,
"w": 8,
"x": 0,
"y": 10
"y": 11
},
"id": 4,
"options": {
@ -453,7 +468,7 @@
"h": 11,
"w": 16,
"x": 8,
"y": 10
"y": 11
},
"id": 12,
"options": {
@ -629,7 +644,7 @@
"h": 10,
"w": 8,
"x": 0,
"y": 21
"y": 22
},
"id": 13,
"options": {
@ -779,7 +794,7 @@
"h": 10,
"w": 16,
"x": 8,
"y": 21
"y": 22
},
"id": 5,
"options": {
@ -874,13 +889,13 @@
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"definition": "SHOW TAG VALUES FROM \"server_processor\" WITH KEY = \"servername\" WHERE time > now() - 24h",
"definition": "SHOW TAG VALUES FROM \"server_energy_power\" WITH KEY = \"servername\" WHERE time > now() - 24h",
"hide": 0,
"includeAll": false,
"multi": false,
"includeAll": true,
"multi": true,
"name": "ServerName",
"options": [],
"query": "SHOW TAG VALUES FROM \"server_processor\" WITH KEY = \"servername\" WHERE time > now() - 24h",
"query": "SHOW TAG VALUES FROM \"server_energy_power\" WITH KEY = \"servername\" WHERE time > now() - 24h",
"refresh": 1,
"regex": "",
"skipUrlSync": false,
@ -912,6 +927,6 @@
"timezone": "",
"title": "HMCi - Power System Energy",
"uid": "oHcrgD1Mk",
"version": 2,
"version": 7,
"weekStart": ""
}

File diff suppressed because it is too large.

View File

@ -1390,7 +1390,8 @@
"mode": "absolute",
"steps": [
{
"color": "green"
"color": "green",
"value": null
}
]
},
@ -1563,7 +1564,8 @@
"mode": "absolute",
"steps": [
{
"color": "green"
"color": "green",
"value": null
},
{
"color": "red",
@ -1706,7 +1708,7 @@
},
"definition": "SHOW TAG VALUES FROM \"server_processor\" WITH KEY = \"servername\" WHERE time > now() - 24h",
"hide": 0,
"includeAll": false,
"includeAll": true,
"label": "Server",
"multi": true,
"multiFormat": "regex values",
@ -1779,6 +1781,6 @@
"timezone": "browser",
"title": "HMCi - Power VIO Overview",
"uid": "DDNEv5vGz",
"version": 2,
"version": 3,
"weekStart": ""
}

View File

@ -3,14 +3,23 @@
###
### Define one InfluxDB to save metrics into
### There must be only one and it should be named [influx]
###
# InfluxDB v1.x example
#[influx]
#url = "http://localhost:8086"
#username = "root"
#password = ""
#database = "hmci"
# InfluxDB v2.x example
[influx]
url = "http://localhost:8086"
username = "root"
password = ""
database = "hmci"
org = "myOrg"
token = "rAnd0mT0k3nG3neRaT3dByInF1uxDb=="
bucket = "hmci"
###

View File

@ -7,6 +7,7 @@ When installed Grafana listens on [http://localhost:3000](http://localhost:3000)
- Configure Grafana to use InfluxDB as a new datasource
- Name the datasource **hmci** to make it obvious what it contains.
- You would typically use *http://localhost:8086* without any credentials.
- For InfluxDB 2.x add a custom header: Authorization = Token myTokenFromInfluxDB
- The name of the database would be *hmci* (or another name you used when creating it)
- **NOTE:** set *Min time interval* to *30s* or *1m* depending on your HMCi *refresh* setting.

doc/readme-influxdb.md (new file, 10 lines)
View File

@ -0,0 +1,10 @@
# InfluxDB Notes
## Delete data
To delete *all* data before a specific date, run:
```sql
DELETE WHERE time < '2023-01-01'
```
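On InfluxDB v2.x the same cleanup is done with the `influx delete` CLI against the bucket; the org and bucket names below are examples, and a `--predicate` can optionally narrow the delete to specific tags:
```shell
influx delete --org myOrg --bucket hmci \
  --start 1970-01-01T00:00:00Z \
  --stop 2023-01-01T00:00:00Z
# e.g. add: --predicate 'lparname="name"' to only delete one partition's data
```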

View File

@ -1,3 +1,3 @@
projectId = hmci
projectGroup = biz.nellemann.hmci
projectVersion = 1.4.3
projectVersion = 1.4.4

View File

@ -16,65 +16,82 @@
package biz.nellemann.hmci;
import biz.nellemann.hmci.dto.toml.InfluxConfiguration;
import org.influxdb.BatchOptions;
import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.Point;
import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.InfluxDBClientFactory;
import com.influxdb.client.WriteApi;
import com.influxdb.client.WriteOptions;
import com.influxdb.client.write.Point;
import com.influxdb.client.domain.WritePrecision;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static java.lang.Thread.sleep;
public final class InfluxClient {
private final static Logger log = LoggerFactory.getLogger(InfluxClient.class);
final private String url;
final private String username;
final private String password;
final private String database;
final private String org; // v2 only
final private String token;
final private String bucket; // Bucket in v2, Database in v1
private InfluxDBClient influxDBClient;
private WriteApi writeApi;
private InfluxDB influxDB;
InfluxClient(InfluxConfiguration config) {
this.url = config.url;
this.username = config.username;
this.password = config.password;
this.database = config.database;
if(config.org != null) {
this.org = config.org;
} else {
this.org = "hmci"; // In InfluxDB 1.x, there is no concept of organization.
}
if(config.token != null) {
this.token = config.token;
} else {
this.token = config.username + ":" + config.password;
}
if(config.bucket != null) {
this.bucket = config.bucket;
} else {
this.bucket = config.database;
}
}
synchronized void login() throws RuntimeException, InterruptedException {
if(influxDB != null) {
if(influxDBClient != null) {
return;
}
boolean connected = false;
int loginErrors = 0;
do {
try {
log.debug("Connecting to InfluxDB - {}", url);
influxDB = InfluxDBFactory.connect(url, username, password).setDatabase(database);
influxDB.version(); // This ensures that we actually try to connect to the db
influxDBClient = InfluxDBClientFactory.create(url, token.toCharArray(), org, bucket);
influxDBClient.version(); // This ensures that we actually try to connect to the db
Runtime.getRuntime().addShutdownHook(new Thread(influxDBClient::close));
influxDB.enableBatch(
BatchOptions.DEFAULTS
.flushDuration(5000)
.threadFactory(runnable -> {
Thread thread = new Thread(runnable);
thread.setDaemon(true);
return thread;
})
);
Runtime.getRuntime().addShutdownHook(new Thread(influxDB::close));
// Todo: Handle events - https://github.com/influxdata/influxdb-client-java/tree/master/client#handle-the-events
//writeApi = influxDBClient.makeWriteApi();
writeApi = influxDBClient.makeWriteApi(
WriteOptions.builder()
.bufferLimit(20_000)
.flushInterval(5_000)
.build());
connected = true;
} catch(Exception e) {
sleep(15 * 1000);
if(loginErrors++ > 3) {
@ -90,10 +107,10 @@ public final class InfluxClient {
synchronized void logoff() {
if(influxDB != null) {
influxDB.close();
if(influxDBClient != null) {
influxDBClient.close();
}
influxDB = null;
influxDBClient = null;
}
@ -101,7 +118,7 @@ public final class InfluxClient {
log.debug("write() - measurement: {} {}", name, measurements.size());
if(!measurements.isEmpty()) {
processMeasurementMap(measurements, name).forEach((point) -> {
influxDB.write(point);
writeApi.writePoint(point);
});
}
}
@ -111,11 +128,11 @@ public final class InfluxClient {
List<Point> listOfPoints = new ArrayList<>();
measurements.forEach( (m) -> {
log.trace("processMeasurementMap() - timestamp: {}, tags: {}, fields: {}", m.timestamp, m.tags, m.fields);
Point.Builder builder = Point.measurement(name)
.time(m.timestamp.getEpochSecond(), TimeUnit.SECONDS)
.tag(m.tags)
.fields(m.fields);
listOfPoints.add(builder.build());
Point point = new Point(name)
.time(m.timestamp.getEpochSecond(), WritePrecision.S)
.addTags(m.tags)
.addFields(m.fields);
listOfPoints.add(point);
});
return listOfPoints;
}
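For reference, a minimal standalone sketch of the v2 client API used above (host, token, org, bucket and the measurement values are placeholders, not the project's actual configuration):
```java
import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.InfluxDBClientFactory;
import com.influxdb.client.WriteApi;
import com.influxdb.client.domain.WritePrecision;
import com.influxdb.client.write.Point;

import java.time.Instant;

public class InfluxWriteSketch {
    public static void main(String[] args) {
        // Connect with token, organization and bucket (InfluxDB v2 style)
        InfluxDBClient client = InfluxDBClientFactory.create(
            "http://localhost:8086", "myToken".toCharArray(), "myOrg", "hmci");

        // Buffered, asynchronous write API - same role as makeWriteApi() in InfluxClient
        try (WriteApi writeApi = client.makeWriteApi()) {
            // Build a point the same way processMeasurementMap() does
            Point point = new Point("lpar_processor")
                .time(Instant.now().getEpochSecond(), WritePrecision.S)
                .addTag("lparname", "lpar01")
                .addField("utilizedProcUnits", 0.42);
            writeApi.writePoint(point);
        } // closing the WriteApi flushes any buffered points

        client.close();
    }
}
```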

View File

@ -3,6 +3,10 @@ package biz.nellemann.hmci.dto.toml;
public class InfluxConfiguration {
public String url;
public String org;
public String token;
public String bucket;
public String username;
public String password;
public String database;