Compare commits

...

49 Commits
v1.3.0 ... main

Author SHA1 Message Date
Mark Nellemann 706f0c7038 Update README.md 2024-05-17 06:19:44 +00:00
Mark Nellemann 2c1921564b Merge pull request 'Avoid HMC session timeouts.' (#2) from sessiontimeouts into main
Reviewed-on: #2
2023-11-13 13:16:45 +00:00
Mark Nellemann a24b03f4ad Update 3rd party dependencies.
2023-11-13 14:14:27 +01:00
Mark Nellemann d59079e6da Update gradle ospackage plugin.
2023-08-10 18:31:04 +02:00
Mark Nellemann bdfa535b75 Work on avoiding lingering sessions on the HMC.
2023-08-10 11:02:53 +02:00
Mark Nellemann 41decccc82 Merge remote-tracking branch 'origin/main' into influxdb2 2023-08-10 10:22:01 +02:00
Mark Nellemann 24d1701ab3 Update hmci version
2023-06-28 12:00:19 +00:00
Mark Nellemann 5b2a3ff9ea Merge pull request 'influxdb2 support' (#1) from influxdb2 into main
Reviewed-on: #1
2023-05-19 18:39:20 +00:00
Mark Nellemann ec9586f870 Updated dashboards, docs and 3rd party deps. 2023-05-19 20:37:24 +02:00
Mark Nellemann 46fd9d7671 Modifications to support InfluxDB v2.x 2023-05-17 20:36:40 +02:00
Mark Nellemann 8f4fbc6a93 Update dashboards. 2023-05-17 18:26:13 +02:00
Mark Nellemann 39af1e3c00 Increase influx writer buffer limit. 2023-05-17 16:29:37 +02:00
Mark Nellemann 6b9b78f32c Switch to updated influxdb client.
2023-04-04 22:22:10 +02:00
Mark Nellemann 2967f6ef75 Cleanup and dashboard fixes and improvements.
2023-03-21 14:57:00 +01:00
Mark Nellemann 6699566fba Update documentation.
2023-03-08 10:24:21 +01:00
Mark Nellemann 55e7fe2b90 Update documentation.
2023-03-08 10:02:43 +01:00
Mark Nellemann e30d290f07 Update documentation.
2023-03-08 09:49:55 +01:00
Mark Nellemann f461b40321 Update dashboard links.
2023-02-06 19:46:10 +01:00
Mark Nellemann c64bf66d9d Update dependencies and provide screenshot in README.
2023-02-06 19:14:44 +01:00
Mark Nellemann 2e363f0a39 Update links.
2023-01-18 15:40:58 +01:00
Mark Nellemann aa36e51367 Update gradle and fix typo
2023-01-05 14:32:57 +01:00
Mark Nellemann 5952a21714 Fix error in SR-IOV type being null.
2023-01-05 14:28:12 +01:00
Mark Nellemann 985b9100c3 Update links
2023-01-04 15:46:37 +01:00
Mark Nellemann 2d3f304fb0 Preparations for migration. 2023-01-04 15:41:59 +01:00
Mark Nellemann e941fe81f5 Add screenshots. 2022-12-17 11:26:39 +01:00
Mark Nellemann 78ff6783aa Update README with links to other related projects. 2022-12-17 10:34:09 +01:00
Mark Nellemann bce1d08c0b Merged in samples (pull request #23)
Multiple samples
2022-12-16 07:09:18 +00:00
Mark Nellemann 5806277266 Go from millisec. to sec. precision in influx timestamps.
Do not go to minNumberOfSamples at first processing.
2022-12-16 08:06:40 +01:00
Mark Nellemann 1b5a91c776 Go from millisec. to sec. precision in influx timestamps.
Do not go to minNumberOfSamples at first processing.
2022-12-07 16:33:56 +01:00
Mark Nellemann fb5bfd532b More work on multiple samples. 2022-12-07 16:12:29 +01:00
Mark Nellemann a0cfff18ef Fetch multiple samples. 2022-12-05 15:18:42 +01:00
Mark Nellemann 7786f5182f Merged in jackson (pull request #22)
Jackson
2022-12-01 15:17:47 +00:00
Mark Nellemann 08c0235925 Remove excess logging when energy metrics are not available. 2022-12-01 16:16:00 +01:00
Mark Nellemann ce42989f1e Improve logging on discovery. 2022-11-28 16:51:29 +01:00
Mark Nellemann 2d13bddde1 More work on reading and updating PCM preferences to enable energy monitoring. 2022-11-28 09:12:33 +01:00
Mark Nellemann bcd2b84e9f Work on reading and updating PCM preferences to enable energy monitoring. 2022-11-26 10:47:10 +01:00
Mark Nellemann 930a1b982d Major refactoring of xml+json deserialization.
Initial work for optionally moving HMC-specific code into its own library.
2022-11-24 12:35:49 +01:00
Mark Nellemann 4f341e0909 Update dashboards. 2022-10-28 17:36:42 +02:00
Mark Nellemann 647517eb98 Update dashboards. 2022-10-28 13:16:05 +02:00
Mark Nellemann 8a4d6d0ca5 Update 3rd party dependencies. 2022-10-24 15:30:56 +02:00
Mark Nellemann eb4df748e0 - Default configuration location on Windows platform.
- Process LPAR SR-IOV logical network ports data
- Update default dashboards
- Update documentation
2022-09-20 17:55:40 +02:00
Mark Nellemann ec5ab3f706 Update documentation. 2022-09-08 08:45:39 +02:00
Mark Nellemann 8bf6f3dfbd Update dependencies 2022-08-20 09:59:15 +02:00
Mark Nellemann 86ce966b4a Add information on RedHat firewall port openings. 2022-08-09 10:05:30 +02:00
Mark Nellemann 3e7d3bec97 Increase HTTP read timeout value from 30 to 180 seconds. Helps when querying for managed systems on busy HMCs with many systems. 2022-05-23 13:57:20 +02:00
Mark Nellemann 470c9e4c9d Improve documentation and fix typos. 2022-05-17 09:19:56 +02:00
Mark Nellemann 907721b112 Update architecture drawing and move into doc/ folder. 2022-05-16 16:33:27 +02:00
Mark Nellemann 1274e37c51 Update dashboards. 2022-04-27 10:36:07 +02:00
Mark Nellemann 2c84a2478e Support Windows default location for config file. 2022-03-29 13:31:43 +02:00
154 changed files with 29740 additions and 4904 deletions

23
.drone.yml Normal file

@ -0,0 +1,23 @@
---
kind: pipeline
name: default
type: docker
steps:
- name: test
image: eclipse-temurin:8-jdk
commands:
- ./gradlew test
- name: build
image: eclipse-temurin:8-jdk
environment:
AUTH_TOKEN: # Gitea access token ENV variable
from_secret: auth # Name of DroneCI secret exposed above
commands:
- ./gradlew build packages
- for file in build/libs/*-all.jar ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in build/distributions/*.deb ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in build/distributions/*.rpm ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
when:
event:
- tag

CHANGELOG.md

@ -2,21 +2,44 @@
All notable changes to this project will be documented in this file.
## [1.3.0] - 2022-02-xx
### Changed
## 1.4.5 - 2023-11-13
- Adjust timeout to not have lingering sessions on HMC
- Update 3rd party dependencies
## 1.4.4 - 2023-05-20
- Support for InfluxDB v2, now requires InfluxDB 1.8 or later
- Increase influx writer buffer limit
- Various dashboard improvements
## 1.4.3 - 2023-03-21
- Fix and improve processor utilization dashboards.
- Minor code cleanup.
## 1.4.2 - 2023-01-05
- Fix error in SR-IOV port type being null.
## 1.4.1 - 2022-12-15
- Retrieve multiple PCM samples and keep track of processing.
- Rename VIOS metric 'vFC' (storage adapter) to 'virtual'.
## 1.4.0 - 2022-12-01
- Rewrite of toml+xml+json de-serialization code (uses jackson now).
- Changes to configuration file format - please look at [doc/hmci.toml](doc/hmci.toml) as an example.
- Logging (write to file) JSON output from HMC is currently not possible.
## 1.3.3 - 2022-09-20
- Default configuration location on Windows platform.
- Process LPAR SR-IOV logical network ports data
- Update default dashboards
- Update documentation
## 1.3.0 - 2022-02-04
- Correct use of InfluxDB batch writing.
## [1.2.8] - 2022-02-28
### Changed
## 1.2.8 - 2022-02-28
- Sort measurement tags before writing to InfluxDB.
- Update 3rd party dependencies.
## [1.2.7] - 2022-02-24
### Added
## 1.2.7 - 2022-02-24
- Options to include/exclude Managed Systems and/or Logical Partitions.
[1.3.0]: https://bitbucket.org/mnellemann/hmci/branches/compare/v1.3.0%0Dv1.2.8
[1.2.8]: https://bitbucket.org/mnellemann/hmci/branches/compare/v1.2.8%0Dv1.2.7
[1.2.7]: https://bitbucket.org/mnellemann/hmci/branches/compare/v1.2.7%0Dv1.2.6
[1.2.6]: https://bitbucket.org/mnellemann/hmci/branches/compare/v1.2.6%0Dv1.2.5

217
README.md

@ -1,216 +1,3 @@
# HMC Insights
# Repository moved
**HMCi** is a utility that collects metrics from one or more *IBM Power HMCs*, without the need to install any agents. The metric data is processed and saved into an InfluxDB time-series database, and Grafana can be used to visualize the metrics from InfluxDB. This software is free to use and is licensed under the [Apache 2.0 License](https://bitbucket.org/mnellemann/syslogd/src/master/LICENSE), but is not supported or endorsed by International Business Machines (IBM). There is an optional [companion agent](https://bitbucket.org/mnellemann/sysmon/) application, which provides more metrics from within AIX and Linux.
Metrics include:
- *Managed Systems* - the physical Power servers
- *Logical Partitions* - the virtualized servers running AIX, Linux and IBM-i (AS/400)
- *Virtual I/O Servers* - the I/O partition(s) virtualizing network and storage
- *Energy* - power consumption and temperatures (needs to be enabled and is not available for P7, E870, E880 and E980)
![architecture](https://bitbucket.org/mnellemann/hmci/downloads/HMCi.png)
## Installation and Setup
There are a few steps in the installation.
1) Preparations on the Hardware Management Console (HMC)
2) Installation of InfluxDB and Grafana software on a Linux LPAR or VM
3) Installation and configuration of the HMCi software
4) Configure Grafana and import example dashboards
### 1 - Power HMC Setup Instructions
- Login to your HMC
- Navigate to *Console Settings*
- Go to *Change Date and Time*
- Set correct timezone, if not done already
- Configure one or more NTP servers, if not done already
- Enable the NTP client, if not done already
- Navigate to *Users and Security*
- Create a new read-only **hmci** user, which will be used to connect to the REST API.
- Click *Manage User Profiles and Access*, edit the newly created *hmci* user and click *User Properties*:
- **Enable** *Allow remote access via the web*
- Set *Session timeout minutes* to **120**
- Set *Verify timeout minutes* to **15**
- Set *Idle timeout minutes* to **15**
- Set *Minimum time in days between password changes* to **0**
- Navigate to *HMC Management* and *Console Settings*
- Click *Change Performance Monitoring Settings*:
- Enable *Performance Monitoring Data Collection for Managed Servers*: **All On**
- Set *Performance Data Storage* to **1** day or preferably more
If you do not enable *Performance Monitoring Data Collection for Managed Servers*, you will see errors such as *Unexpected response: 403*. Use the *hmci* debug flag to get more details about what is going on.
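As an illustration, you can run *hmci* in the foreground with debugging enabled while verifying the HMC setup (the *--conf* and *-d* flags are the ones described elsewhere in this README; the paths are examples):

```shell
# Run in the foreground with debug output, e.g. to diagnose 403 errors
/opt/hmci/bin/hmci --conf /etc/hmci.toml -d -d
```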
### 2 - InfluxDB and Grafana Installation
Install InfluxDB (v. **1.8** for best compatibility with Grafana) on an LPAR or VM that is network accessible by the *HMCi* utility (the default InfluxDB port is 8086). You can install Grafana on the same server or on any server that can connect to the InfluxDB database. The Grafana installation needs to be accessible from your browser (default on port 3000). The default settings for both InfluxDB and Grafana will work fine as a start.
- You can download [Grafana ppc64le](https://www.power-devops.com/grafana) and [InfluxDB ppc64le](https://www.power-devops.com/influxdb) packages for most Linux distributions and AIX on the [Power DevOps](https://www.power-devops.com/) site.
- Binaries for amd64/x86 are available from the [Grafana website](https://grafana.com/grafana/download) and [InfluxDB website](https://portal.influxdata.com/downloads/) and most likely directly from your Linux distributions repositories.
- Create the empty *hmci* database through the **influx** CLI command:
```text
CREATE DATABASE "hmci" WITH DURATION 365d REPLICATION 1;
```
See the [Influx documentation](https://docs.influxdata.com/influxdb/v1.8/query_language/manage-database/#create-database) for more information on duration and replication.
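You can verify the database and its retention policy from the same **influx** session:

```text
SHOW DATABASES;
SHOW RETENTION POLICIES ON "hmci";
```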
### 3 - HMCi Installation & Configuration
Install *HMCi* on a host that can connect to the Power HMC through HTTPS and to the InfluxDB service. This *can be* the same LPAR/VM as used for the InfluxDB installation.
- Ensure you have **correct date/time** and NTPd running to keep it accurate!
- The only requirement for **hmci** is the Java runtime, version 8 (or later)
- Install **HMCi** from [downloads](https://bitbucket.org/mnellemann/hmci/downloads/) (rpm, deb or jar) or build from source
- On RPM based systems: **sudo rpm -i hmci-x.y.z-n.noarch.rpm**
- On DEB based systems: **sudo dpkg -i hmci_x.y.z-n_all.deb**
- Copy the **/opt/hmci/doc/hmci.toml** configuration example into **/etc/hmci.toml** and edit the configuration to suit your environment (a sketch is shown below). The location of the configuration file can be changed with the *--conf* option.
- Run the **/opt/hmci/bin/hmci** program in a shell, as a @reboot cron task, or configure it as a proper service - there are instructions in the *doc/readme-service.md* file.
- When started, *hmci* expects the InfluxDB database to have been created by you.
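For orientation, a configuration sketch is shown below. The section and key names here are hypothetical - treat the bundled **doc/hmci.toml** as the authoritative example for your version:

```toml
# Hypothetical sketch - see /opt/hmci/doc/hmci.toml for the real format
[influx]
url      = "http://localhost:8086"
database = "hmci"

# One section per HMC to poll (the HMC REST API listens on port 12443)
[hmc.site1]
url      = "https://hmc1.example.com:12443"
username = "hmci"
password = "secret"
```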
### 4 - Grafana Configuration
- Configure Grafana to use InfluxDB as a new datasource (this step can also be scripted - see the example below)
- **NOTE:** set *Min time interval* to *30s* or *1m* depending on your HMCi *refresh* setting.
- Import example dashboards from the *doc/* folder into Grafana as a starting point and get creative making your own cool dashboards :)
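If you prefer to script the datasource step, Grafana's HTTP API can create it - a minimal sketch, assuming default admin credentials and InfluxDB on localhost (the payload follows the classic InfluxDB datasource format; adjust names and URLs to your setup):

```shell
curl -X POST http://admin:admin@localhost:3000/api/datasources \
  -H 'Content-Type: application/json' \
  -d '{"name":"hmci","type":"influxdb","access":"proxy","url":"http://localhost:8086","database":"hmci"}'
```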
## Notes
### No data (or past/future data) shown in Grafana
This is most likely due to timezone, date and/or NTP not being configured correctly on the HMC and/or host running HMCi.
An example showing how to configure the related settings through the HMC CLI:
```shell
chhmc -c date -s modify --datetime MMDDhhmm # Set current date/time: MMDDhhmm[[CC]YY][.ss]
chhmc -c date -s modify --timezone Europe/Copenhagen # Configure your timezone
chhmc -c xntp -s enable # Enable the NTP service
chhmc -c xntp -s add -a IP_Addr # Add a remote NTP server
```
Remember to reboot your HMC after changing the timezone.
### Compatibility with nextract Plus
From version 1.2, *HMCi* is compatible with the similar [nextract Plus](https://www.ibm.com/support/pages/nextract-plus-hmc-rest-api-performance-statistics) tool from Nigel Griffiths. This means that the Grafana [dashboards](https://grafana.com/grafana/dashboards/13819) made by Nigel are compatible with *HMCi*, and vice versa.
### Start InfluxDB and Grafana at boot (systemd compatible Linux)
```shell
systemctl enable influxdb
systemctl start influxdb
systemctl enable grafana-server
systemctl start grafana-server
```
### InfluxDB Retention Policy
Examples for changing the default InfluxDB retention policy for the hmci database:
```text
ALTER RETENTION POLICY "autogen" ON "hmci" DURATION 156w
ALTER RETENTION POLICY "autogen" ON "hmci" DURATION 90d
```
### Upgrading HMCi
On RPM based systems (RedHat, Suse, CentOS), download the latest *hmci-x.y.z-n.noarch.rpm* file and upgrade:
```shell
sudo rpm -Uvh hmci-x.y.z-n.noarch.rpm
```
On DEB based systems (Debian, Ubuntu and derivatives), download the latest *hmci_x.y.z-n_all.deb* file and upgrade:
```shell
sudo dpkg -i hmci_x.y.z-n_all.deb
```
Restart the HMCi service on *systemd* based Linux systems:
```shell
systemctl restart hmci
journalctl -f -u hmci # to check log output
```
### AIX Notes
To install (or upgrade) on AIX, you need to pass the *--ignoreos* flag to the *rpm* command:
```shell
rpm -Uvh --ignoreos hmci-x.y.z-n.noarch.rpm
```
## Grafana Screenshots
Below are screenshots of the provided Grafana dashboards (found in the **doc/** folder), which can be used as a starting point.
- [hmci-systems.png](https://bitbucket.org/mnellemann/hmci/downloads/hmci-systems-dashboard.png)
- [hmci-vios.png](https://bitbucket.org/mnellemann/hmci/downloads/hmci-vios-dashboard.png)
- [hmci-lpars.png](https://bitbucket.org/mnellemann/hmci/downloads/hmci-lpars-dashboard.png)
## Known problems
### Incomplete test of metrics
I have not been able to test and verify all types of metric data. If you encounter any missing or wrong data, please contact me, so I can try to fix it. It is possible to run **hmci** with *-d -d* to log the JSON data received from the HMC, which can help me implement the missing data.
### Naming collision
You can't have partitions (or Virtual I/O Servers) on different systems with the same name, as these cannot be distinguished when metrics are written to InfluxDB (which uses the name as key).
### Renaming partitions
If you rename a partition, the metrics in InfluxDB will still be available by the old name, and new metrics will be available by the new name of the partition. There is no easy way to migrate the old data, but you can delete it easily:
```text
DELETE WHERE lparname = 'name';
```
## Development Information
You need Java (JDK) version 8 or later to build hmci.
### Build & Test
Use the Gradle build tool, which will download all required dependencies:
```shell
./gradlew clean build
```
### Local Testing
#### InfluxDB container
Start the InfluxDB container:
```shell
docker run --name=influxdb --rm -d -p 8086:8086 influxdb:1.8-alpine
```
To execute the Influx client from within the container:
```shell
docker exec -it influxdb influx
```
#### Grafana container
Start the Grafana container, linking it to the InfluxDB container:
```shell
docker run --name grafana --link influxdb:influxdb --rm -d -p 3000:3000 grafana/grafana:7.1.3
```
Set up Grafana to connect to the InfluxDB container by defining a new datasource named *hmci* on the URL *http://influxdb:8086*.
The *hmci* database must exist beforehand; it can be created by running the hmci tool first, or by hand as shown below.
Grafana dashboards can be imported from the *doc/* folder.
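If you would rather create the database by hand than run the hmci tool first, a sketch using the container from above:

```shell
# Create the hmci database inside the running InfluxDB container
docker exec -it influxdb influx -execute 'CREATE DATABASE "hmci"'
```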
Please visit [github.com/mnellemann/hmci](https://github.com/mnellemann/hmci)

bitbucket-pipelines.yml

@ -1,4 +1,4 @@
image: openjdk:8
image: eclipse-temurin:8-jdk
pipelines:
branches:

build.gradle

@ -1,14 +1,11 @@
plugins {
id 'java'
id 'jacoco'
id 'groovy'
id 'application'
// Code coverage of tests
id 'jacoco'
id "com.github.johnrengelman.shadow" version "7.1.2"
id "net.nemerosa.versioning" version "2.15.1"
id "nebula.ospackage" version "9.1.1"
id "com.netflix.nebula.ospackage" version "11.5.0"
id "com.github.johnrengelman.shadow" version "7.1.2"
}
repositories {
@ -18,36 +15,38 @@ repositories {
group = projectGroup
version = projectVersion
sourceCompatibility = projectJavaVersion
targetCompatibility = projectJavaVersion
dependencies {
annotationProcessor 'info.picocli:picocli-codegen:4.6.2'
implementation 'info.picocli:picocli:4.6.3'
implementation 'org.jsoup:jsoup:1.14.3'
implementation 'com.squareup.okhttp3:okhttp:4.9.3'
implementation 'com.squareup.moshi:moshi:1.13.0'
implementation 'com.serjltt.moshi:moshi-lazy-adapters:2.2'
implementation 'org.tomlj:tomlj:1.0.0'
implementation 'org.influxdb:influxdb-java:2.22'
implementation 'org.slf4j:slf4j-api:1.7.36'
implementation 'org.slf4j:slf4j-simple:1.7.36'
annotationProcessor 'info.picocli:picocli-codegen:4.7.5'
implementation 'info.picocli:picocli:4.7.5'
implementation 'org.slf4j:slf4j-api:2.0.9'
implementation 'org.slf4j:slf4j-simple:2.0.9'
implementation 'com.squareup.okhttp3:okhttp:4.11.0' // Also used by InfluxDB Client
implementation 'com.influxdb:influxdb-client-java:6.10.0'
implementation 'com.fasterxml.jackson.core:jackson-databind:2.15.2'
implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-xml:2.15.2'
implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-toml:2.15.2'
testImplementation 'org.spockframework:spock-core:2.0-groovy-3.0'
testImplementation 'com.squareup.okhttp3:mockwebserver:4.9.3'
testImplementation 'org.slf4j:slf4j-simple:1.7.36'
testImplementation 'junit:junit:4.13.2'
testImplementation 'org.spockframework:spock-core:2.3-groovy-4.0'
testImplementation "org.mock-server:mockserver-netty-no-dependencies:5.14.0"
}
application {
mainClass.set('biz.nellemann.hmci.Application')
applicationDefaultJvmArgs = [ "-server", "-Xms64m", "-Xmx512m", "-XX:+UseG1GC" ]
applicationDefaultJvmArgs = [ "-server", "-Xms64m", "-Xmx256m", "-XX:+UseG1GC", "-XX:+ExitOnOutOfMemoryError", "-XX:+AlwaysPreTouch" ]
}
java {
sourceCompatibility = JavaVersion.VERSION_1_8
targetCompatibility = JavaVersion.VERSION_1_8
}
test {
useJUnitPlatform()
}
apply plugin: 'nebula.ospackage'
ospackage {
packageName = 'hmci'
release = '1'
@ -84,7 +83,7 @@ buildDeb {
}
jacoco {
toolVersion = "0.8.7"
toolVersion = "0.8.9"
}
jacocoTestReport {
@ -101,7 +100,7 @@ jacocoTestCoverageVerification {
violationRules {
rule {
limit {
minimum = 0.5 // TODO: Raise when more tests are implemented
minimum = 0.4
}
}
}
@ -123,7 +122,7 @@ jar {
}
}
tasks.create("packages") {
tasks.register("packages") {
group "build"
dependsOn ":build"
dependsOn ":buildDeb"

1
doc/HMCi.drawio Normal file

File diff suppressed because one or more lines are too long

BIN
doc/HMCi.png Normal file

Binary file not shown (new image, 163 KiB).

Grafana dashboard (new file): HMCi - Power LPAR Utilization

@ -0,0 +1,702 @@
{
"__inputs": [
{
"name": "DS_HMCI",
"label": "Database",
"description": "",
"type": "datasource",
"pluginId": "influxdb",
"pluginName": "InfluxDB"
}
],
"__elements": {},
"__requires": [
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "9.1.6"
},
{
"type": "datasource",
"id": "influxdb",
"name": "InfluxDB",
"version": "1.0.0"
},
{
"type": "panel",
"id": "text",
"name": "Text",
"version": ""
},
{
"type": "panel",
"id": "timeseries",
"name": "Time series",
"version": ""
}
],
"annotations": {
"enable": false,
"list": [
{
"builtIn": 1,
"datasource": {
"type": "datasource",
"uid": "grafana"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"description": "https://git.data.coop/nellemann/hmci/ - Metrics from IBM Power Systems",
"editable": true,
"fiscalYearStartMonth": 0,
"gnetId": 1510,
"graphTooltip": 0,
"id": null,
"links": [],
"liveNow": false,
"panels": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"gridPos": {
"h": 3,
"w": 24,
"x": 0,
"y": 0
},
"id": 37,
"options": {
"content": "## Metrics collected from IBM Power HMC\n \nFor more information visit: [git.data.coop/nellemann/hmci](https://git.data.coop/nellemann/hmci)\n ",
"mode": "markdown"
},
"pluginVersion": "9.1.6",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"refId": "A"
}
],
"transparent": true,
"type": "text"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "area"
}
},
"decimals": 2,
"links": [],
"mappings": [],
"min": 0,
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "#EAB839",
"value": 0.8
},
{
"color": "red",
"value": 0.9
}
]
},
"unit": "percentunit"
},
"overrides": []
},
"gridPos": {
"h": 11,
"w": 24,
"x": 0,
"y": 3
},
"id": 2,
"links": [],
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": false
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "8.1.4",
"targets": [
{
"alias": "$tag_lparname",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"hide": false,
"measurement": "/^$ServerName$/",
"orderByTime": "ASC",
"policy": "default",
"query": "SELECT mean(\"utilizedProcUnits\") / mean(\"currentVirtualProcessors\") AS \"usage\" FROM \"lpar_processor\" WHERE (\"servername\" =~ /^$ServerName$/ AND \"lparname\" =~ /^$LPAR$/) AND $timeFilter GROUP BY time($interval), \"lparname\", \"servername\" fill(none)",
"rawQuery": true,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": []
}
],
"title": "Processor Units - Utilized / Max",
"transformations": [],
"type": "timeseries"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": true,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 20,
"gradientMode": "opacity",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "binBps"
},
"overrides": []
},
"gridPos": {
"h": 19,
"w": 12,
"x": 0,
"y": 14
},
"id": 8,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": false
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "8.1.4",
"targets": [
{
"alias": "$tag_servername - $tag_lparname ($col)",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"lparname"
],
"type": "tag"
},
{
"params": [
"servername"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
],
"hide": false,
"measurement": "lpar_net_virtual",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"receivedPhysicalBytes"
],
"type": "field"
},
{
"params": [],
"type": "sum"
},
{
"params": [
"read"
],
"type": "alias"
}
],
[
{
"params": [
"sentPhysicalBytes"
],
"type": "field"
},
{
"params": [],
"type": "sum"
},
{
"params": [
"*-1"
],
"type": "math"
},
{
"params": [
"write"
],
"type": "alias"
}
]
],
"tags": [
{
"key": "lparname",
"operator": "=~",
"value": "/^$LPAR$/"
},
{
"condition": "AND",
"key": "servername",
"operator": "=~",
"value": "/^$ServerName$/"
}
]
}
],
"title": "Virtual Network Adapters",
"type": "timeseries"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": true,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "opacity",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"decimals": 2,
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
}
]
},
"unit": "Bps"
},
"overrides": []
},
"gridPos": {
"h": 19,
"w": 12,
"x": 12,
"y": 14
},
"id": 34,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": false
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "8.1.4",
"targets": [
{
"alias": "$tag_servername - $tag_lparname ($col)",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"lparname"
],
"type": "tag"
},
{
"params": [
"servername"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
],
"hide": false,
"measurement": "lpar_storage_vFC",
"orderByTime": "ASC",
"policy": "default",
"query": "SELECT mean(\"value\") FROM \"PartitionVirtualFiberChannelAdapters\" WHERE (\"system\" =~ /^$ManagedSystem$/ AND \"name\" != 'transmittedBytes' AND \"partition\" =~ /^$Partition$/) AND $timeFilter GROUP BY time($interval), \"wwpn\", \"partition\", \"name\" fill(null)",
"rawQuery": false,
"refId": "B",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"readBytes"
],
"type": "field"
},
{
"params": [],
"type": "sum"
},
{
"params": [
5
],
"type": "moving_average"
},
{
"params": [
"read"
],
"type": "alias"
}
],
[
{
"params": [
"writeBytes"
],
"type": "field"
},
{
"params": [],
"type": "sum"
},
{
"params": [
5
],
"type": "moving_average"
},
{
"params": [
"*-1"
],
"type": "math"
},
{
"params": [
"write"
],
"type": "alias"
}
]
],
"tags": [
{
"key": "servername",
"operator": "=~",
"value": "/^$ServerName$/"
},
{
"condition": "AND",
"key": "lparname",
"operator": "=~",
"value": "/^$LPAR$/"
}
]
}
],
"title": "Virtual Fiber Channel Adapters",
"type": "timeseries"
}
],
"refresh": "30s",
"schemaVersion": 37,
"style": "dark",
"tags": [],
"templating": {
"list": [
{
"allFormat": "regex values",
"current": {},
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"definition": "SHOW TAG VALUES FROM \"server_processor\" WITH KEY = \"servername\" WHERE time > now() - 24h",
"hide": 0,
"includeAll": true,
"label": "Server Name",
"multi": true,
"multiFormat": "regex values",
"name": "ServerName",
"options": [],
"query": "SHOW TAG VALUES FROM \"server_processor\" WITH KEY = \"servername\" WHERE time > now() - 24h",
"refresh": 1,
"refresh_on_load": false,
"regex": "",
"skipUrlSync": false,
"sort": 5,
"type": "query",
"useTags": false
},
{
"allFormat": "regex values",
"current": {},
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"definition": "SHOW TAG VALUES FROM \"lpar_processor\" WITH KEY = \"lparname\" WHERE servername =~ /$ServerName/",
"hide": 0,
"includeAll": true,
"label": "Logical Partition",
"multi": true,
"multiFormat": "regex values",
"name": "LPAR",
"options": [],
"query": "SHOW TAG VALUES FROM \"lpar_processor\" WITH KEY = \"lparname\" WHERE servername =~ /$ServerName/",
"refresh": 1,
"refresh_on_load": false,
"regex": "",
"skipUrlSync": false,
"sort": 5,
"type": "query",
"useTags": false
}
]
},
"time": {
"from": "now-6h",
"now": false,
"to": "now-30s"
},
"timepicker": {
"nowDelay": "30s",
"refresh_intervals": [
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
],
"time_options": [
"5m",
"15m",
"1h",
"6h",
"12h",
"24h",
"2d",
"7d",
"30d"
]
},
"timezone": "browser",
"title": "HMCi - Power LPAR Utilization",
"uid": "jFsbpTH4k",
"version": 2,
"weekStart": ""
}

Grafana dashboard: HMCi - Power System Energy

@ -1,20 +1,27 @@
{
"__inputs": [
{
"name": "DS_INFLUXDB-HMCI",
"label": "InfluxDB-hmci",
"name": "DS_HMCI",
"label": "Database",
"description": "",
"type": "datasource",
"pluginId": "influxdb",
"pluginName": "InfluxDB"
}
],
"__elements": {},
"__requires": [
{
"type": "panel",
"id": "bargauge",
"name": "Bar gauge",
"version": ""
},
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "8.1.4"
"version": "9.1.6"
},
{
"type": "datasource",
@ -28,6 +35,12 @@
"name": "Stat",
"version": ""
},
{
"type": "panel",
"id": "text",
"name": "Text",
"version": ""
},
{
"type": "panel",
"id": "timeseries",
@ -39,7 +52,10 @@
"list": [
{
"builtIn": 1,
"datasource": "-- Grafana --",
"datasource": {
"type": "datasource",
"uid": "grafana"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
@ -54,145 +70,63 @@
}
]
},
"description": "https://bitbucket.org/mnellemann/hmci/",
"description": "https://git.data.coop/nellemann/hmci/ - Metrics from IBM Power Systems",
"editable": true,
"gnetId": null,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": null,
"iteration": 1635428793809,
"links": [],
"liveNow": false,
"panels": [
{
"datasource": "${DS_INFLUXDB-HMCI}",
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": true,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "watt"
},
"overrides": []
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"gridPos": {
"h": 9,
"w": 7,
"h": 3,
"w": 24,
"x": 0,
"y": 0
},
"id": 2,
"id": 11,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom"
},
"tooltip": {
"mode": "multi"
}
"content": "## Metrics collected from IBM Power HMC\n \nFor more information visit: [git.data.coop/nellemann/hmci](https://git.data.coop/nellemann/hmci)\n ",
"mode": "markdown"
},
"pluginVersion": "8.1.4",
"pluginVersion": "9.1.6",
"targets": [
{
"alias": "$tag_servername",
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"servername"
],
"type": "tag"
},
{
"params": [
"linear"
],
"type": "fill"
}
],
"measurement": "server_energy_power",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"powerReading"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "servername",
"operator": "=~",
"value": "/^$ServerName$/"
}
]
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"refId": "A"
}
],
"timeFrom": null,
"timeShift": null,
"title": "Power Consumption",
"type": "timeseries"
"transparent": true,
"type": "text"
},
{
"datasource": "${DS_INFLUXDB-HMCI}",
"collapsed": false,
"gridPos": {
"h": 1,
"w": 24,
"x": 0,
"y": 3
},
"id": 15,
"panels": [],
"repeat": "ServerName",
"repeatDirection": "h",
"title": "$ServerName",
"type": "row"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"fieldConfig": {
"defaults": {
"mappings": [],
@ -218,10 +152,10 @@
"overrides": []
},
"gridPos": {
"h": 9,
"w": 17,
"x": 7,
"y": 0
"h": 7,
"w": 24,
"x": 0,
"y": 4
},
"id": 7,
"options": {
@ -239,10 +173,14 @@
"text": {},
"textMode": "auto"
},
"pluginVersion": "8.1.4",
"pluginVersion": "9.1.6",
"targets": [
{
"alias": "$tag_servername",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"groupBy": [
{
"params": [
@ -258,7 +196,7 @@
},
{
"params": [
"linear"
"none"
],
"type": "fill"
}
@ -291,35 +229,20 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Power Consumption",
"type": "stat"
},
{
"collapsed": false,
"datasource": "${DS_INFLUXDB-HMCI}",
"gridPos": {
"h": 1,
"w": 24,
"x": 0,
"y": 9
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"id": 9,
"panels": [],
"repeat": "ServerName",
"title": "$ServerName Thermal",
"type": "row"
},
{
"datasource": "${DS_INFLUXDB-HMCI}",
"description": "Inlet air temperature.",
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
"mode": "continuous-BlYlRd"
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
@ -328,13 +251,9 @@
"color": "green",
"value": null
},
{
"color": "#EAB839",
"value": 25
},
{
"color": "red",
"value": 30
"value": 80
}
]
},
@ -343,31 +262,34 @@
"overrides": []
},
"gridPos": {
"h": 9,
"w": 7,
"h": 11,
"w": 8,
"x": 0,
"y": 10
"y": 11
},
"id": 5,
"id": 4,
"options": {
"colorMode": "value",
"graphMode": "none",
"justifyMode": "auto",
"orientation": "auto",
"displayMode": "lcd",
"minVizHeight": 10,
"minVizWidth": 0,
"orientation": "horizontal",
"reduceOptions": {
"calcs": [
"mean"
"lastNotNull"
],
"fields": "",
"values": false
},
"text": {},
"textMode": "auto"
"showUnfilled": true
},
"pluginVersion": "8.1.4",
"pluginVersion": "9.1.6",
"targets": [
{
"alias": "$tag_servername - $tag_name",
"alias": "$col",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"groupBy": [
{
"params": [
@ -377,13 +299,13 @@
},
{
"params": [
"servername"
"system"
],
"type": "tag"
},
{
"params": [
"linear"
"null"
],
"type": "fill"
}
@ -391,19 +313,81 @@
"measurement": "server_energy_thermal",
"orderByTime": "ASC",
"policy": "default",
"query": "SELECT mean(\"cpuTemperature*\") FROM \"server_energy_thermal\" WHERE (\"servername\" =~ /^$ServerName$/) AND $timeFilter GROUP BY time($__interval), \"system\", \"servername\" fill(linear)",
"rawQuery": false,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"inletTemperature_1"
"cpuTemperature_1"
],
"type": "field"
},
{
"params": [],
"type": "mean"
"type": "last"
},
{
"params": [
"CPU-1"
],
"type": "alias"
}
],
[
{
"params": [
"cpuTemperature_2"
],
"type": "field"
},
{
"params": [],
"type": "last"
},
{
"params": [
"CPU-2"
],
"type": "alias"
}
],
[
{
"params": [
"cpuTemperature_3"
],
"type": "field"
},
{
"params": [],
"type": "last"
},
{
"params": [
"CPU-3"
],
"type": "alias"
}
],
[
{
"params": [
"cpuTemperature_4"
],
"type": "field"
},
{
"params": [],
"type": "last"
},
{
"params": [
"CPU-4"
],
"type": "alias"
}
]
],
@ -416,13 +400,14 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "$ServerName - Inlet",
"type": "stat"
"title": "$ServerName - CPU Temperature",
"type": "bargauge"
},
{
"datasource": "${DS_INFLUXDB-HMCI}",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
@ -430,6 +415,8 @@
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@ -478,28 +465,34 @@
"overrides": []
},
"gridPos": {
"h": 9,
"w": 17,
"x": 7,
"y": 10
"h": 11,
"w": 16,
"x": 8,
"y": 11
},
"id": 4,
"id": 12,
"options": {
"legend": {
"calcs": [
"lastNotNull"
],
"displayMode": "list",
"placement": "bottom"
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi"
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "8.1.4",
"targets": [
{
"alias": "$col",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"groupBy": [
{
"params": [
@ -610,14 +603,280 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "$ServerName - CPU Temperature",
"type": "timeseries"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "Inlet air temperature.",
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "#EAB839",
"value": 25
},
{
"color": "red",
"value": 30
}
]
},
"unit": "celsius"
},
"overrides": []
},
"gridPos": {
"h": 10,
"w": 8,
"x": 0,
"y": 22
},
"id": 13,
"options": {
"colorMode": "value",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"textMode": "auto"
},
"pluginVersion": "9.1.6",
"targets": [
{
"alias": "$col",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"servername"
],
"type": "tag"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "server_energy_thermal",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"inletTemperature_1"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
"inlet1"
],
"type": "alias"
}
]
],
"tags": [
{
"key": "servername",
"operator": "=~",
"value": "/^$ServerName$/"
}
]
}
],
"title": "$ServerName - Inlet Temperature",
"type": "stat"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "Inlet air temperature.",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "area"
}
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "transparent",
"value": null
},
{
"color": "#EAB839",
"value": 25
},
{
"color": "red",
"value": 30
}
]
},
"unit": "celsius"
},
"overrides": []
},
"gridPos": {
"h": 10,
"w": 16,
"x": 8,
"y": 22
},
"id": 5,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"pluginVersion": "9.1.6",
"targets": [
{
"alias": "$col",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"servername"
],
"type": "tag"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "server_energy_thermal",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"inletTemperature_1"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
"Inlet 1"
],
"type": "alias"
}
]
],
"tags": [
{
"key": "servername",
"operator": "=~",
"value": "/^$ServerName$/"
}
]
}
],
"title": "$ServerName - Inlet Temperature",
"type": "timeseries"
}
],
"refresh": "30s",
"schemaVersion": 30,
"schemaVersion": 37,
"style": "dark",
"tags": [
"Power"
@ -625,23 +884,22 @@
"templating": {
"list": [
{
"allValue": null,
"current": {},
"datasource": "${DS_INFLUXDB-HMCI}",
"definition": "SHOW TAG VALUES FROM \"server_processor\" WITH KEY = \"servername\" WHERE time > now() - 24h",
"description": null,
"error": null,
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"definition": "SHOW TAG VALUES FROM \"server_energy_power\" WITH KEY = \"servername\" WHERE time > now() - 24h",
"hide": 0,
"includeAll": false,
"label": null,
"includeAll": true,
"multi": true,
"name": "ServerName",
"options": [],
"query": "SHOW TAG VALUES FROM \"server_processor\" WITH KEY = \"servername\" WHERE time > now() - 24h",
"query": "SHOW TAG VALUES FROM \"server_energy_power\" WITH KEY = \"servername\" WHERE time > now() - 24h",
"refresh": 1,
"regex": "",
"skipUrlSync": false,
"sort": 0,
"sort": 5,
"tagValuesQuery": "",
"tagsQuery": "",
"type": "query",
@ -667,7 +925,8 @@
]
},
"timezone": "",
"title": "IBM Power - HMCi - System Energy",
"title": "HMCi - Power System Energy",
"uid": "oHcrgD1Mk",
"version": 7
}
"version": 7,
"weekStart": ""
}

Grafana dashboard (new file): HMCi - Power System Utilization

@ -0,0 +1,719 @@
{
"__inputs": [
{
"name": "DS_HMCI",
"label": "Database",
"description": "",
"type": "datasource",
"pluginId": "influxdb",
"pluginName": "InfluxDB"
}
],
"__elements": [],
"__requires": [
{
"type": "panel",
"id": "bargauge",
"name": "Bar gauge",
"version": ""
},
{
"type": "panel",
"id": "gauge",
"name": "Gauge",
"version": ""
},
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "8.3.5"
},
{
"type": "panel",
"id": "heatmap",
"name": "Heatmap",
"version": ""
},
{
"type": "datasource",
"id": "influxdb",
"name": "InfluxDB",
"version": "1.0.0"
},
{
"type": "panel",
"id": "stat",
"name": "Stat",
"version": ""
},
{
"type": "panel",
"id": "text",
"name": "Text",
"version": ""
}
],
"annotations": {
"enable": false,
"list": [
{
"builtIn": 1,
"datasource": {
"type": "datasource",
"uid": "grafana"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"description": "https://git.data.coop/nellemann/hmci/ - Metrics from IBM Power Systems",
"editable": true,
"fiscalYearStartMonth": 0,
"gnetId": 1465,
"graphTooltip": 0,
"id": null,
"iteration": 1669798059148,
"links": [],
"liveNow": false,
"panels": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"gridPos": {
"h": 3,
"w": 24,
"x": 0,
"y": 0
},
"id": 33,
"options": {
"content": "## Metrics collected from IBM Power HMC\n \nFor more information visit: [git.data.coop/nellemann/hmci](https://git.data.coop/nellemann/hmci)\n ",
"mode": "markdown"
},
"pluginVersion": "8.3.5",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"refId": "A"
}
],
"transparent": true,
"type": "text"
},
{
"cards": {},
"color": {
"cardColor": "#b4ff00",
"colorScale": "sqrt",
"colorScheme": "interpolateOranges",
"exponent": 0.5,
"mode": "opacity"
},
"dataFormat": "timeseries",
"description": "",
"gridPos": {
"h": 11,
"w": 24,
"x": 0,
"y": 3
},
"heatmap": {},
"hideZeroBuckets": true,
"highlightCards": true,
"id": 30,
"legend": {
"show": false
},
"pluginVersion": "8.3.5",
"reverseYBuckets": false,
"targets": [
{
"alias": "$tag_servername",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"poolname"
],
"type": "tag"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "server_sharedProcessorPool",
"orderByTime": "ASC",
"policy": "default",
"query": "SELECT mean(\"utilizedProcUnits\") / mean(\"configurableProcUnits\") AS \"Utilization\" FROM \"server_processor\" WHERE $timeFilter GROUP BY time($__interval), \"servername\" fill(none)",
"rawQuery": true,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"utilizedProcUnits"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "servername",
"operator": "=~",
"value": "/^$ServerName$/"
}
]
}
],
"title": "Processors - Utilized / Configurable",
"tooltip": {
"show": true,
"showHistogram": false
},
"transparent": true,
"type": "heatmap",
"xAxis": {
"show": true
},
"yAxis": {
"decimals": 1,
"format": "percentunit",
"logBase": 1,
"max": "1",
"min": "0",
"show": true
},
"yBucketBound": "auto"
},
{
"description": "",
"fieldConfig": {
"defaults": {
"decimals": 2,
"mappings": [],
"max": 1,
"min": 0,
"thresholds": {
"mode": "percentage",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "orange",
"value": 70
},
{
"color": "red",
"value": 85
}
]
},
"unit": "percentunit"
},
"overrides": []
},
"gridPos": {
"h": 11,
"w": 12,
"x": 0,
"y": 14
},
"id": 36,
"options": {
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"showThresholdLabels": false,
"showThresholdMarkers": true
},
"pluginVersion": "8.3.5",
"targets": [
{
"alias": "$tag_servername",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"poolname"
],
"type": "tag"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "server_sharedProcessorPool",
"orderByTime": "ASC",
"policy": "default",
"query": "SELECT mean(\"utilizedProcUnits\") / mean(\"configurableProcUnits\") AS \"Utilization\" FROM \"server_processor\" WHERE $timeFilter GROUP BY time($__interval), \"servername\" fill(none)",
"rawQuery": true,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"utilizedProcUnits"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "servername",
"operator": "=~",
"value": "/^$ServerName$/"
}
]
}
],
"title": "Processors - Utilized / Configurable",
"type": "gauge"
},
{
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "continuous-BlYlRd"
},
"decimals": 1,
"mappings": [],
"max": 1,
"min": 0,
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "percentunit"
},
"overrides": []
},
"gridPos": {
"h": 11,
"w": 12,
"x": 12,
"y": 14
},
"id": 37,
"options": {
"displayMode": "lcd",
"orientation": "horizontal",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"showUnfilled": true
},
"pluginVersion": "8.3.5",
"targets": [
{
"alias": "$tag_servername",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"poolname"
],
"type": "tag"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "server_sharedProcessorPool",
"orderByTime": "ASC",
"policy": "default",
"query": "SELECT mean(\"utilizedProcUnits\") / mean(\"configurableProcUnits\") AS \"Utilization\" FROM \"server_processor\" WHERE $timeFilter GROUP BY time($__interval), \"servername\" fill(none)",
"rawQuery": true,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"utilizedProcUnits"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "servername",
"operator": "=~",
"value": "/^$ServerName$/"
}
]
}
],
"title": "Processors - Utilized / Configurable",
"type": "bargauge"
},
{
"description": "Configurable processors are activated and available for use and assignment. The difference up to the total is \"dark cores\" which can be activated by code or used with PEP-2.0.",
"fieldConfig": {
"defaults": {
"color": {
"mode": "continuous-BlPu"
},
"mappings": [],
"max": 1,
"min": 0,
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
}
]
},
"unit": "percentunit"
},
"overrides": []
},
"gridPos": {
"h": 11,
"w": 12,
"x": 0,
"y": 25
},
"id": 35,
"options": {
"displayMode": "lcd",
"orientation": "horizontal",
"reduceOptions": {
"calcs": [],
"fields": "",
"values": false
},
"showUnfilled": true
},
"pluginVersion": "8.3.5",
"targets": [
{
"alias": "$tag_servername",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"poolname"
],
"type": "tag"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "server_sharedProcessorPool",
"orderByTime": "ASC",
"policy": "default",
"query": "SELECT mean(\"configurableProcUnits\") / mean(\"totalProcUnits\") AS \"Utilization\" FROM \"server_processor\" WHERE $timeFilter GROUP BY time($__interval), \"servername\" fill(none)",
"rawQuery": true,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"utilizedProcUnits"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "servername",
"operator": "=~",
"value": "/^$ServerName$/"
}
]
}
],
"title": "Processors - Configurable / Total",
"type": "bargauge"
},
{
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "percentage",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "#EAB839",
"value": 85
},
{
"color": "red",
"value": 95
}
]
},
"unit": "percentunit"
},
"overrides": []
},
"gridPos": {
"h": 11,
"w": 12,
"x": 12,
"y": 25
},
"id": 2,
"links": [],
"options": {
"colorMode": "value",
"graphMode": "area",
"justifyMode": "center",
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"text": {
"titleSize": 16
},
"textMode": "value_and_name"
},
"pluginVersion": "8.3.5",
"targets": [
{
"alias": "$tag_servername",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"servername"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
],
"hide": false,
"measurement": "server_memory",
"orderByTime": "ASC",
"policy": "default",
"query": "SELECT mean(\"assignedMemToLpars\") / mean(\"totalMem\") AS \"Utilization\" FROM \"server_memory\" WHERE $timeFilter GROUP BY time($__interval), \"servername\" fill(none)",
"rawQuery": true,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"assignedMemToLpars"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
"assigned"
],
"type": "alias"
}
],
[
{
"params": [
"availableMem"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
"available"
],
"type": "alias"
}
]
],
"tags": []
}
],
"title": "Memory Utilization - Assigned / Total",
"type": "stat"
}
],
"refresh": "30s",
"schemaVersion": 34,
"style": "dark",
"tags": [
"Power"
],
"templating": {
"list": []
},
"time": {
"from": "now-7d",
"now": false,
"to": "now-30s"
},
"timepicker": {
"nowDelay": "30s",
"refresh_intervals": [
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
],
"time_options": [
"5m",
"15m",
"1h",
"6h",
"12h",
"24h",
"2d",
"7d",
"30d"
]
},
"timezone": "browser",
"title": "HMCi - Power System Utilization",
"uid": "MZ7Q-4K4k",
"version": 3,
"weekStart": ""
}

Grafana dashboard: VIOS

@ -1,14 +1,15 @@
{
"__inputs": [
{
"name": "DS_INFLUXDB-HMCI",
"label": "InfluxDB-hmci",
"name": "DS_HMCI",
"label": "Database",
"description": "",
"type": "datasource",
"pluginId": "influxdb",
"pluginName": "InfluxDB"
}
],
"__elements": {},
"__requires": [
{
"type": "panel",
@ -20,7 +21,7 @@
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "8.1.4"
"version": "9.1.6"
},
{
"type": "datasource",
@ -34,6 +35,12 @@
"name": "Table",
"version": ""
},
{
"type": "panel",
"id": "text",
"name": "Text",
"version": ""
},
{
"type": "panel",
"id": "timeseries",
@ -46,7 +53,10 @@
"list": [
{
"builtIn": 1,
"datasource": "-- Grafana --",
"datasource": {
"type": "datasource",
"uid": "grafana"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
@ -61,16 +71,49 @@
}
]
},
"description": "https://bitbucket.org/mnellemann/hmci/",
"description": "https://git.data.coop/nellemann/hmci/ - Metrics from IBM Power Systems",
"editable": true,
"fiscalYearStartMonth": 0,
"gnetId": 1465,
"graphTooltip": 0,
"id": null,
"iteration": 1635485979203,
"links": [],
"liveNow": false,
"panels": [
{
"datasource": "${DS_INFLUXDB-HMCI}",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"gridPos": {
"h": 3,
"w": 24,
"x": 0,
"y": 0
},
"id": 29,
"options": {
"content": "## Metrics collected from IBM Power HMC\n \nFor more information visit: [git.data.coop/nellemann/hmci](https://git.data.coop/nellemann/hmci)\n ",
"mode": "markdown"
},
"pluginVersion": "9.1.6",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"refId": "A"
}
],
"transparent": true,
"type": "text"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
@ -80,7 +123,8 @@
"custom": {
"align": "center",
"displayMode": "auto",
"filterable": true
"filterable": true,
"inspect": false
},
"mappings": [],
"thresholds": {
@ -147,17 +191,28 @@
"h": 6,
"w": 10,
"x": 0,
"y": 0
"y": 3
},
"id": 21,
"options": {
"footer": {
"fields": "",
"reducer": [
"sum"
],
"show": false
},
"showHeader": true,
"sortBy": []
},
"pluginVersion": "8.1.4",
"pluginVersion": "9.1.6",
"targets": [
{
"alias": "Read",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"groupBy": [
{
"params": [
@ -219,8 +274,6 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "VIOS Details",
"transformations": [
{
@ -259,7 +312,10 @@
"type": "table"
},
{
"datasource": "${DS_INFLUXDB-HMCI}",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
@ -269,7 +325,8 @@
"custom": {
"align": "center",
"displayMode": "auto",
"filterable": true
"filterable": true,
"inspect": false
},
"mappings": [],
"thresholds": {
@ -342,7 +399,7 @@
"properties": [
{
"id": "custom.width",
"value": 87
"value": 149
}
]
},
@ -369,18 +426,6 @@
"value": 203
}
]
},
{
"matcher": {
"id": "byName",
"options": "viosname"
},
"properties": [
{
"id": "custom.width",
"value": 219
}
]
}
]
},
@ -388,22 +433,28 @@
"h": 6,
"w": 14,
"x": 10,
"y": 0
"y": 3
},
"id": 27,
"options": {
"footer": {
"fields": "",
"reducer": [
"sum"
],
"show": false
},
"showHeader": true,
"sortBy": [
{
"desc": true,
"displayName": "Utilization"
}
]
"sortBy": []
},
"pluginVersion": "8.1.4",
"pluginVersion": "9.1.6",
"targets": [
{
"alias": "Read",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"groupBy": [
{
"params": [
@ -416,7 +467,7 @@
"measurement": "lpar_details",
"orderByTime": "ASC",
"policy": "default",
"query": "SELECT last(\"weight\") AS \"Weight\", last(\"entitledProcUnits\") AS \"Entitled\", last(\"currentVirtualProcessors\") AS \"VP\", (last(\"utilizedProcUnits\") / last(\"maxProcUnits\")) * 100 AS \"Utilization\", last(\"mode\") AS \"Mode\" FROM \"vios_processor\" WHERE (\"servername\" =~ /^$ServerName$/) AND (\"viosname\" =~ /^$ViosName$/) AND $timeFilter GROUP BY \"viosname\" fill(previous)",
"query": "SELECT last(\"weight\") AS \"Weight\", last(\"entitledProcUnits\") AS \"Entitled\", last(\"currentVirtualProcessors\") AS \"VP\", (mean(\"utilizedProcUnits\") / mean(\"entitledProcUnits\")) * 100 AS \"Utilization\", last(\"mode\") AS \"Mode\" FROM \"vios_processor\" WHERE (\"servername\" =~ /^$ServerName$/) AND (\"viosname\" =~ /^$ViosName$/) AND $timeFilter GROUP BY \"viosname\" fill(previous)",
"queryType": "randomWalk",
"rawQuery": true,
"refId": "A",
@ -482,8 +533,6 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "VIOS Processor",
"transformations": [
{
@ -519,7 +568,10 @@
"type": "table"
},
{
"datasource": "${DS_INFLUXDB-HMCI}",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
@ -551,7 +603,7 @@
"h": 9,
"w": 6,
"x": 0,
"y": 6
"y": 9
},
"id": 16,
"links": [],
@ -568,10 +620,14 @@
"showThresholdMarkers": true,
"text": {}
},
"pluginVersion": "8.1.4",
"pluginVersion": "9.1.6",
"targets": [
{
"alias": "$tag_viosname",
"alias": "$tag_servername - $tag_viosname",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"dsType": "influxdb",
"groupBy": [
{
@ -588,7 +644,13 @@
},
{
"params": [
"linear"
"servername"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
@ -628,13 +690,14 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": " Memory - $ServerName - $ViosName",
"type": "gauge"
},
{
"datasource": "${DS_INFLUXDB-HMCI}",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
@ -642,6 +705,8 @@
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@ -669,6 +734,7 @@
"mode": "off"
}
},
"decimals": 2,
"links": [],
"mappings": [],
"thresholds": {
@ -692,7 +758,7 @@
"h": 9,
"w": 18,
"x": 6,
"y": 6
"y": 9
},
"id": 19,
"links": [],
@ -700,16 +766,22 @@
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom"
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi"
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "8.1.4",
"targets": [
{
"alias": "$tag_viosname - $col",
"alias": "$tag_servername - $tag_viosname ($col)",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"dsType": "influxdb",
"groupBy": [
{
@ -732,7 +804,7 @@
},
{
"params": [
"linear"
"none"
],
"type": "fill"
}
@ -796,13 +868,14 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Processors - $ServerName - $ViosName",
"type": "timeseries"
},
{
"datasource": "${DS_INFLUXDB-HMCI}",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
@ -810,6 +883,8 @@
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@ -856,7 +931,7 @@
"h": 10,
"w": 24,
"x": 0,
"y": 15
"y": 18
},
"id": 17,
"links": [],
@ -864,16 +939,22 @@
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom"
"placement": "bottom",
"showLegend": false
},
"tooltip": {
"mode": "multi"
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "8.1.4",
"targets": [
{
"alias": "$tag_viosname - $tag_location (shared)",
"alias": "$tag_servername - $tag_viosname - ($tag_location shared)",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"dsType": "influxdb",
"groupBy": [
{
@ -896,7 +977,13 @@
},
{
"params": [
"linear"
"servername"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
@ -918,6 +1005,12 @@
{
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
}
]
],
@ -936,7 +1029,11 @@
]
},
{
"alias": "$tag_viosname - $tag_location (generic)",
"alias": "$tag_servername - $tag_viosname - ($tag_location shared)",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"dsType": "influxdb",
"groupBy": [
{
@ -957,6 +1054,12 @@
],
"type": "tag"
},
{
"params": [
"servername"
],
"type": "tag"
},
{
"params": [
"linear"
@ -999,7 +1102,11 @@
]
},
{
"alias": "$tag_viosname - $tag_location (virtual)",
"alias": "$tag_systemname - $tag_viosname - ($tag_location shared)",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"dsType": "influxdb",
"groupBy": [
{
@ -1014,6 +1121,12 @@
],
"type": "tag"
},
{
"params": [
"systemname"
],
"type": "tag"
},
{
"params": [
"linear"
@ -1056,13 +1169,14 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Network - $ServerName - $ViosName",
"type": "timeseries"
},
{
"datasource": "${DS_INFLUXDB-HMCI}",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
@ -1070,6 +1184,8 @@
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@ -1120,7 +1236,7 @@
"h": 11,
"w": 24,
"x": 0,
"y": 25
"y": 28
},
"id": 18,
"links": [],
@ -1128,16 +1244,22 @@
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom"
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi"
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "8.1.4",
"targets": [
{
"alias": "$tag_viosname - $tag_id ($tag_location)",
"alias": "$tag_servername - $tag_viosname - ($tag_id / $tag_location)",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"dsType": "influxdb",
"groupBy": [
{
@ -1164,6 +1286,12 @@
],
"type": "tag"
},
{
"params": [
"servername"
],
"type": "tag"
},
{
"params": [
"linear"
@ -1188,6 +1316,12 @@
{
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
}
]
],
@ -1206,13 +1340,14 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Fiber Channel Adapters - $ServerName - $ViosName",
"type": "timeseries"
},
{
"datasource": "${DS_INFLUXDB-HMCI}",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
@ -1220,6 +1355,8 @@
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@ -1279,7 +1416,7 @@
"h": 11,
"w": 12,
"x": 0,
"y": 36
"y": 39
},
"id": 23,
"links": [],
@ -1287,16 +1424,22 @@
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom"
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi"
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "8.1.4",
"targets": [
{
"alias": "$tag_viosname - $tag_location",
"alias": "$tag_servername - $tag_viosname ($tag_location)",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"dsType": "influxdb",
"groupBy": [
{
@ -1317,6 +1460,12 @@
],
"type": "tag"
},
{
"params": [
"servername"
],
"type": "tag"
},
{
"params": [
"linear"
@ -1341,6 +1490,12 @@
{
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
}
]
],
@ -1359,13 +1514,14 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Physical Storage - $ServerName - $ViosName",
"type": "timeseries"
},
{
"datasource": "${DS_INFLUXDB-HMCI}",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
@ -1373,6 +1529,8 @@
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@ -1423,7 +1581,7 @@
"h": 11,
"w": 12,
"x": 12,
"y": 36
"y": 39
},
"id": 25,
"links": [],
@ -1431,16 +1589,22 @@
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom"
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi"
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "8.1.4",
"targets": [
{
"alias": "$tag_viosname - $tag_id ($tag_location)",
"alias": "$tag_servername - $tag_viosname - ($tag_id - $tag_location)",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"dsType": "influxdb",
"groupBy": [
{
@ -1469,13 +1633,19 @@
},
{
"params": [
"linear"
"servername"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
],
"hide": false,
"measurement": "vios_storage_vFC",
"measurement": "vios_storage_virtual",
"orderByTime": "ASC",
"policy": "default",
"refId": "B",
@ -1491,6 +1661,12 @@
{
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
}
]
],
@ -1509,14 +1685,12 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Virtual Fiber Channel Adapters - $ServerName - $ViosName",
"title": "Virtual Adapters - $ServerName - $ViosName",
"type": "timeseries"
}
],
"refresh": "30s",
"schemaVersion": 30,
"schemaVersion": 37,
"style": "dark",
"tags": [
"Power",
@ -1527,14 +1701,14 @@
"list": [
{
"allFormat": "regex values",
"allValue": null,
"current": {},
"datasource": "${DS_INFLUXDB-HMCI}",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"definition": "SHOW TAG VALUES FROM \"server_processor\" WITH KEY = \"servername\" WHERE time > now() - 24h",
"description": null,
"error": null,
"hide": 0,
"includeAll": false,
"includeAll": true,
"label": "Server",
"multi": true,
"multiFormat": "regex values",
@ -1545,20 +1719,18 @@
"refresh_on_load": false,
"regex": "",
"skipUrlSync": false,
"sort": 0,
"tagValuesQuery": null,
"tagsQuery": null,
"sort": 5,
"type": "query",
"useTags": false
},
{
"allFormat": "regex values",
"allValue": null,
"current": {},
"datasource": "${DS_INFLUXDB-HMCI}",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"definition": "SHOW TAG VALUES FROM \"vios_details\" WITH KEY = \"viosname\" WHERE servername =~ /$ServerName/ AND time > now() - 24h",
"description": null,
"error": null,
"hide": 0,
"includeAll": true,
"label": "Virtual I/O Server",
@ -1571,16 +1743,14 @@
"refresh_on_load": false,
"regex": "",
"skipUrlSync": false,
"sort": 0,
"tagValuesQuery": null,
"tagsQuery": null,
"sort": 5,
"type": "query",
"useTags": false
}
]
},
"time": {
"from": "now-6h",
"from": "now-7d",
"now": false,
"to": "now-30s"
},
@ -1609,7 +1779,8 @@
]
},
"timezone": "browser",
"title": "IBM Power - HMCi - Virtual I/O Servers",
"title": "HMCi - Power VIO Overview",
"uid": "DDNEv5vGz",
"version": 21
"version": 3,
"weekStart": ""
}

View File

@ -0,0 +1,942 @@
{
"__inputs": [
{
"name": "DS_HMCI",
"label": "Database",
"description": "",
"type": "datasource",
"pluginId": "influxdb",
"pluginName": "InfluxDB"
}
],
"__elements": {},
"__requires": [
{
"type": "panel",
"id": "gauge",
"name": "Gauge",
"version": ""
},
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "9.1.6"
},
{
"type": "datasource",
"id": "influxdb",
"name": "InfluxDB",
"version": "1.0.0"
},
{
"type": "panel",
"id": "text",
"name": "Text",
"version": ""
},
{
"type": "panel",
"id": "timeseries",
"name": "Time series",
"version": ""
}
],
"annotations": {
"enable": false,
"list": [
{
"builtIn": 1,
"datasource": {
"type": "datasource",
"uid": "grafana"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"description": "https://git.data.coop/nellemann/hmci/ - Metrics from IBM Power Systems",
"editable": true,
"fiscalYearStartMonth": 0,
"gnetId": 1465,
"graphTooltip": 0,
"id": null,
"links": [],
"liveNow": false,
"panels": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"gridPos": {
"h": 3,
"w": 24,
"x": 0,
"y": 0
},
"id": 29,
"options": {
"content": "## Metrics collected from IBM Power HMC\n \nFor more information visit: [git.data.coop/nellemann/hmci](https://git.data.coop/nellemann/hmci)\n ",
"mode": "markdown"
},
"pluginVersion": "9.1.6",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"refId": "A"
}
],
"transparent": true,
"type": "text"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"links": [],
"mappings": [],
"max": 1,
"min": 0,
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "percentunit"
},
"overrides": []
},
"gridPos": {
"h": 9,
"w": 6,
"x": 0,
"y": 3
},
"id": 30,
"links": [],
"options": {
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"showThresholdLabels": false,
"showThresholdMarkers": false
},
"pluginVersion": "9.1.6",
"targets": [
{
"alias": "$tag_servername - $tag_viosname",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"viosname"
],
"type": "tag"
},
{
"params": [
"servername"
],
"type": "tag"
},
{
"params": [
"linear"
],
"type": "fill"
}
],
"hide": false,
"measurement": "vios_processor",
"orderByTime": "ASC",
"policy": "default",
"query": "SELECT mean(\"utilizedProcUnits\") / mean(\"entitledProcUnits\") AS \"utilization\" FROM \"vios_processor\" WHERE (\"servername\" =~ /^$ServerName$/ AND \"viosname\" =~ /^$ViosName$/) AND $timeFilter GROUP BY time($interval), \"viosname\", \"servername\" fill(none)",
"rawQuery": true,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"utilizedProcUnits"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
"utilized"
],
"type": "alias"
}
],
[
{
"params": [
"maxProcUnits"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
"max"
],
"type": "alias"
}
]
],
"tags": [
{
"key": "servername",
"operator": "=~",
"value": "/^$ServerName$/"
},
{
"condition": "AND",
"key": "viosname",
"operator": "=~",
"value": "/^$ViosName$/"
}
]
}
],
"title": "Processor Utilization",
"type": "gauge"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "opacity",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "line+area"
}
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "transparent",
"value": null
},
{
"color": "#EAB839",
"value": 0.8
},
{
"color": "red",
"value": 0.9
}
]
},
"unit": "percentunit"
},
"overrides": []
},
"gridPos": {
"h": 9,
"w": 18,
"x": 6,
"y": 3
},
"id": 19,
"links": [],
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "8.1.4",
"targets": [
{
"alias": "$tag_servername - $tag_viosname",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"viosname"
],
"type": "tag"
},
{
"params": [
"servername"
],
"type": "tag"
},
{
"params": [
"linear"
],
"type": "fill"
}
],
"hide": false,
"measurement": "vios_processor",
"orderByTime": "ASC",
"policy": "default",
"query": "SELECT mean(\"utilizedProcUnits\") / mean(\"entitledProcUnits\") AS \"utilization\" FROM \"vios_processor\" WHERE (\"servername\" =~ /^$ServerName$/ AND \"viosname\" =~ /^$ViosName$/) AND $timeFilter GROUP BY time($interval), \"viosname\", \"servername\" fill(none)",
"rawQuery": true,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"utilizedProcUnits"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
"utilized"
],
"type": "alias"
}
],
[
{
"params": [
"maxProcUnits"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
"max"
],
"type": "alias"
}
]
],
"tags": [
{
"key": "servername",
"operator": "=~",
"value": "/^$ServerName$/"
},
{
"condition": "AND",
"key": "viosname",
"operator": "=~",
"value": "/^$ViosName$/"
}
]
}
],
"title": "Processor Utilization",
"type": "timeseries"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": true,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "opacity",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "line"
}
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "transparent",
"value": null
},
{
"color": "dark-green",
"value": 16000000000
},
{
"color": "dark-blue",
"value": 32000000000
}
]
},
"unit": "Bps"
},
"overrides": []
},
"gridPos": {
"h": 20,
"w": 12,
"x": 0,
"y": 12
},
"id": 18,
"links": [],
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": false
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "8.1.4",
"targets": [
{
"alias": "$tag_servername - $tag_viosname ($tag_location - $col)",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"viosname"
],
"type": "tag"
},
{
"params": [
"servername"
],
"type": "tag"
},
{
"params": [
"location"
],
"type": "tag"
},
{
"params": [
"linear"
],
"type": "fill"
}
],
"hide": false,
"measurement": "vios_storage_FC",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"readBytes"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
5
],
"type": "moving_average"
},
{
"params": [
"read"
],
"type": "alias"
}
],
[
{
"params": [
"writeBytes"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
5
],
"type": "moving_average"
},
{
"params": [
"*-1"
],
"type": "math"
},
{
"params": [
"write"
],
"type": "alias"
}
]
],
"tags": [
{
"key": "servername",
"operator": "=~",
"value": "/^$ServerName$/"
},
{
"condition": "AND",
"key": "viosname",
"operator": "=~",
"value": "/^$ViosName$/"
}
]
}
],
"title": "Fiber Channel Adapters",
"type": "timeseries"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": true,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "opacity",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineStyle": {
"fill": "solid"
},
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "line"
}
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "transparent",
"value": null
}
]
},
"unit": "Bps"
},
"overrides": []
},
"gridPos": {
"h": 20,
"w": 12,
"x": 12,
"y": 12
},
"id": 17,
"links": [],
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": false
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "8.1.4",
"targets": [
{
"alias": "$tag_servername - $tag_viosname ($tag_location - $col)",
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"viosname"
],
"type": "tag"
},
{
"params": [
"servername"
],
"type": "tag"
},
{
"params": [
"location"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
],
"hide": false,
"measurement": "vios_network_shared",
"orderByTime": "ASC",
"policy": "default",
"query": "SELECT moving_average(mean(\"transferredBytes\"), 10) FROM \"vios_network_shared\" WHERE (\"servername\" =~ /^$ServerName$/ AND \"viosname\" =~ /^$ViosName$/) AND $timeFilter GROUP BY time($interval), \"viosname\", \"servername\", \"location\" fill(none)",
"rawQuery": false,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"receivedBytes"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
5
],
"type": "moving_average"
},
{
"params": [
"recv"
],
"type": "alias"
}
],
[
{
"params": [
"sentBytes"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
5
],
"type": "moving_average"
},
{
"params": [
"*-1"
],
"type": "math"
},
{
"params": [
"sent"
],
"type": "alias"
}
]
],
"tags": [
{
"key": "servername",
"operator": "=~",
"value": "/^$ServerName$/"
},
{
"condition": "AND",
"key": "viosname",
"operator": "=~",
"value": "/^$ViosName$/"
}
]
}
],
"title": "Network SEA Traffic",
"type": "timeseries"
}
],
"refresh": "30s",
"schemaVersion": 37,
"style": "dark",
"tags": [
"Power",
"AIX",
"VIOS"
],
"templating": {
"list": [
{
"allFormat": "regex values",
"current": {},
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"definition": "SHOW TAG VALUES FROM \"server_processor\" WITH KEY = \"servername\" WHERE time > now() - 24h",
"hide": 0,
"includeAll": true,
"label": "Server",
"multi": true,
"multiFormat": "regex values",
"name": "ServerName",
"options": [],
"query": "SHOW TAG VALUES FROM \"server_processor\" WITH KEY = \"servername\" WHERE time > now() - 24h",
"refresh": 1,
"refresh_on_load": false,
"regex": "",
"skipUrlSync": false,
"sort": 5,
"type": "query",
"useTags": false
},
{
"allFormat": "regex values",
"current": {},
"datasource": {
"type": "influxdb",
"uid": "${DS_HMCI}"
},
"definition": "SHOW TAG VALUES FROM \"vios_details\" WITH KEY = \"viosname\" WHERE servername =~ /$ServerName/ AND time > now() - 24h",
"hide": 0,
"includeAll": true,
"label": "Virtual I/O Server",
"multi": true,
"multiFormat": "regex values",
"name": "ViosName",
"options": [],
"query": "SHOW TAG VALUES FROM \"vios_details\" WITH KEY = \"viosname\" WHERE servername =~ /$ServerName/ AND time > now() - 24h",
"refresh": 1,
"refresh_on_load": false,
"regex": "",
"skipUrlSync": false,
"sort": 5,
"type": "query",
"useTags": false
}
]
},
"time": {
"from": "now-7d",
"now": false,
"to": "now-30s"
},
"timepicker": {
"nowDelay": "30s",
"refresh_intervals": [
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
],
"time_options": [
"5m",
"15m",
"1h",
"6h",
"12h",
"24h",
"2d",
"7d",
"30d"
]
},
"timezone": "browser",
"title": "HMCi - Power VIO Utilization",
"uid": "DDNEv5vGy",
"version": 2,
"weekStart": ""
}

View File

@ -1,26 +1,25 @@
# HMCi Configuration
# Copy this file into /etc/hmci.toml and customize it to your environment.
###
### General HMCi Settings
###
# How often to query HMCs for data - in seconds
hmci.update = 30
# Rescan HMCs for new systems and partitions - every x updates
hmci.rescan = 120
###
### Define one InfluxDB to save metrics into
### There must be only one and it should be named [influx]
###
# InfluxDB v1.x example
#[influx]
#url = "http://localhost:8086"
#username = "root"
#password = ""
#database = "hmci"
# InfluxDB v2.x example
[influx]
url = "http://localhost:8086"
username = "root"
password = ""
database = "hmci"
org = "myOrg"
token = "rAnd0mT0k3nG3neRaT3dByInF1uxDb=="
bucket = "hmci"
###
@ -31,19 +30,21 @@ database = "hmci"
# HMC to query for data and metrics
[hmc.site1]
url = "https://10.10.10.10:12443"
url = "https://10.10.10.5:12443"
username = "hmci"
password = "hmcihmci"
unsafe = true # Ignore SSL cert. errors
refresh = 30 # How often to query HMC for data - in seconds
discover = 120 # Rescan HMC for new systems and partitions - in minutes
trust = true # Ignore SSL cert. errors (due to default self-signed cert. on HMC)
energy = true # Collect energy metrics on supported systems
# Another HMC example
#[hmc.Prod-HMC]
#url = "https://10.10.10.30:12443"
#[hmc.site2]
#url = "https://10.10.20.5:12443"
#username = "user"
#password = "password"
#unsafe = false # When false, validate SSL/TLS certificate, default is true
#energy = false # When false, do not collect energy metrics, default is true
#trace = "/tmp/hmci-trace" # When present, store JSON metrics files from HMC into this folder
#trace = "/tmp/hmci-trace" # When present, store JSON metrics files from HMC into this folder
#excludeSystems = [ 'notThisSystem' ] # Collect metrics from all systems except those listed here
#includeSystems = [ 'onlyThisSystems' ] # Collect metrics only from the systems listed here
#excludePartitions = [ 'skipThisPartition' ] # Collect metrics from all partitions except those listed here

22
doc/readme-aix.md Normal file
View File

@ -0,0 +1,22 @@
# Instructions for AIX Systems
Ensure you have **correct date/time** and NTPd running to keep it accurate!
Please note that the software versions referenced in this document may have changed and may no longer be available; update them as needed.
- Grafana and InfluxDB can be downloaded from the [Power DevOps](https://www.power-devops.com/) website - look under the *Monitor* section.
- Ensure Java (version 8 or later) is installed and available in your PATH (e.g. in the */etc/environment* file).
## Download and Install HMCi
[Download](https://git.data.coop/nellemann/-/packages/generic/hmci/) the latest version of HMCi packaged for rpm.
```shell
rpm -ivh --ignoreos hmci-1.4.2-1.noarch.rpm
cp /opt/hmci/doc/hmci.toml /etc/
```
Now modify */etc/hmci.toml* and test your setup by running ```/opt/hmci/bin/hmci -d```
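For reference, a foreground test run with an explicit configuration file could look like this (the *-d* and *-c* options are shown in the application's built-in help):
```shell
# Run HMCi in the foreground with debug output and an explicit configuration file
/opt/hmci/bin/hmci -d -c /etc/hmci.toml
```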

62
doc/readme-debian.md Normal file
View File

@ -0,0 +1,62 @@
# Instructions for Debian / Ubuntu Systems
Please note that the software versions referenced in this document may have changed and may no longer be available; update them as needed.
Ensure you have **correct date/time** and NTPd running to keep it accurate!
All commands should be run as root or through sudo.
## Install the Java Runtime from repository
```shell
apt-get install default-jre-headless wget
```
## Download and Install InfluxDB
```shell
wget https://dl.influxdata.com/influxdb/releases/influxdb_1.8.10_amd64.deb
dpkg -i influxdb_1.8.10_amd64.deb
systemctl daemon-reload
systemctl enable influxdb
systemctl start influxdb
```
Run the ```influx``` cli command and create the *hmci* database.
```sql
CREATE DATABASE "hmci" WITH DURATION 365d REPLICATION 1;
```
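Optionally verify that the database was created:
```shell
# "hmci" should appear in the list of databases
influx -execute 'SHOW DATABASES'
```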
## Download and Install Grafana
```shell
apt-get install -y adduser libfontconfig1
wget https://dl.grafana.com/oss/release/grafana_9.1.7_amd64.deb
dpkg -i grafana_9.1.7_amd64.deb
systemctl daemon-reload
systemctl enable grafana-server
systemctl start grafana-server
```
## Download and Install HMCi
[Download](https://git.data.coop/nellemann/-/packages/generic/hmci/) the latest version of HMCi packaged for deb.
```shell
wget https://git.data.coop/api/packages/nellemann/generic/hmci/v1.4.2/hmci_1.4.2-1_all.deb
dpkg -i hmci_1.4.2-1_all.deb
cp /opt/hmci/doc/hmci.toml /etc/
cp /opt/hmci/doc/hmci.service /etc/systemd/system/
systemctl daemon-reload
systemctl enable hmci
```
## Configure HMCi
Now modify **/etc/hmci.toml** (edit the URL and credentials to match your HMCs) and test the setup by running ```/opt/hmci/bin/hmci -d``` in the foreground/terminal and look for any errors.
Press CTRL+C to stop and then start as a background service with ```systemctl start hmci```.
You can see the log/output by running ```journalctl -f -u hmci```.
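As a starting point, a minimal */etc/hmci.toml* for one HMC and a local InfluxDB 1.x could look like the sketch below; the addresses and credentials are placeholders, adjust them to your environment:
```shell
cat <<'EOF' > /etc/hmci.toml
[influx]
url = "http://localhost:8086"
username = "root"
password = ""
database = "hmci"

[hmc.site1]
url = "https://10.10.10.5:12443"
username = "hmci"
password = "hmcihmci"
trust = true   # accept the HMC's default self-signed certificate
EOF
```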

14
doc/readme-firewall.md Normal file
View File

@ -0,0 +1,14 @@
# Firewall Notes
## RedHat, CentOS, Rocky & Alma Linux
And any other Linux distribution using *firewalld*.
All commands should be run as root or through sudo.
### Allow remote access to Grafana on port 3000
```shell
firewall-cmd --zone=public --add-port=3000/tcp --permanent
firewall-cmd --reload
```
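If Grafana or HMCi run on a different host than InfluxDB, you would likely also need to open the InfluxDB port - a sketch along the same lines:
```shell
firewall-cmd --zone=public --add-port=8086/tcp --permanent
firewall-cmd --reload
```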

40
doc/readme-grafana.md Normal file
View File

@ -0,0 +1,40 @@
# Grafana Setup
When installed, Grafana listens on [http://localhost:3000](http://localhost:3000) and you can log in as user *admin* with password *admin*. Once logged in, you are asked to change the default password.
## Datasource
- Configure Grafana to use InfluxDB as a new datasource
- Name the datasource **hmci** to make it obvious what it contains.
- You would typically use *http://localhost:8086* without any credentials.
- For InfluxDB 2.x add a custom header: Authorization = Token myTokenFromInfluxDB
- The name of the database would be *hmci* (or another name you used when creating it)
- **NOTE:** set *Min time interval* to *30s* or *1m* depending on your HMCi *refresh* setting.
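The datasource can also be created through Grafana's HTTP API instead of the UI. A minimal sketch for InfluxDB 1.x, assuming the default *admin:admin* credentials and a local database named *hmci*:
```shell
curl -s -X POST http://admin:admin@localhost:3000/api/datasources \
  -H 'Content-Type: application/json' \
  -d '{"name": "hmci", "type": "influxdb", "access": "proxy", "url": "http://localhost:8086", "database": "hmci"}'
```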
## Dashboards
Import all or some of the example dashboards from [dashboards/*.json](dashboards/) into Grafana as a starting point and get creative making your own cool dashboards - please share anything useful :)
- When importing a dashboard, select the **hmci** datasource you have created.
## Security and Proxy
The easiest way to secure Grafana with https is to put it behind a proxy server such as nginx.
If you want to serve /grafana as shown below, you also need to edit */etc/grafana/grafana.ini* and change the *root_url*:
```
root_url = %(protocol)s://%(domain)s:%(http_port)s/grafana/
```
Nginx snippet:
```nginx
location /grafana/ {
proxy_pass http://localhost:3000/;
proxy_set_header Host $host;
}
```

39
doc/readme-hmc.md Normal file
View File

@ -0,0 +1,39 @@
# IBM Power HMC Preparations
Ensure you have **correct date/time** and NTPd running to keep it accurate!
- Login to your HMC
- Navigate to *Console Settings*
- Go to *Change Date and Time*
- Set correct timezone, if not done already
- Configure one or more NTP servers, if not done already
- Enable the NTP client, if not done already
- Navigate to *Users and Security*
- Create a new read-only/viewer **hmci** user, which will be used to connect to the HMC.
- Click *Manage User Profiles and Access*, edit the newly created *hmci* user and click *User Properties*:
- Set *Session timeout minutes* to **120** (or at least 61 minutes)
- Set *Verify timeout minutes* to **15**
- Set *Idle timeout minutes* to **15**
- Set *Minimum time in days between password changes* to **0**
- **Enable** *Allow remote access via the web*
- Navigate to *HMC Management* and *Console Settings*
- Click *Change Performance Monitoring Settings*:
- Enable *Performance Monitoring Data Collection for Managed Servers*: **All On**
- Set *Performance Data Storage* to **1** day or preferably more
If you do not enable *Performance Monitoring Data Collection for Managed Servers*, you will see errors such as *Unexpected response: 403*.
Use the HMCi debug option (*--debug*) to get more details about what is going on.
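The *hmci* user can likely also be created from the HMC command line - a hedged sketch, as the exact flags and available task roles may vary between HMC versions:
```shell
# Create a read-only (hmcviewer) user for metrics collection
mkhmcusr -u hmci -a hmcviewer -d "HMCi metrics collector" --passwd MySecretPassword
```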
## Configure date/time through CLI
Example showing how you configure related settings through the HMC CLI:
```shell
chhmc -c date -s modify --datetime MMDDhhmm # Set current date/time: MMDDhhmm[[CC]YY][.ss]
chhmc -c date -s modify --timezone Europe/Copenhagen # Configure your timezone
chhmc -c xntp -s enable # Enable the NTP service
chhmc -c xntp -s add -a IP_Addr # Add a remote NTP server
```
Remember to reboot your HMC after changing the timezone.

10
doc/readme-influxdb.md Normal file
View File

@ -0,0 +1,10 @@
# InfluxDB Notes
## Delete data
To delete *all* data before a specific date, run:
```sql
DELETE WHERE time < '2023-01-01'
```
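Instead of deleting data manually, you may prefer to cap how long data is kept with a retention policy - a sketch assuming the default *autogen* policy on the *hmci* database:
```shell
influx -database hmci -execute 'ALTER RETENTION POLICY "autogen" ON "hmci" DURATION 180d'
```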

68
doc/readme-redhat.md Normal file
View File

@ -0,0 +1,68 @@
# Instructions for RedHat / CentOS / AlmaLinux Systems
Please note that the software versions referenced in this document may have changed and may no longer be available; update them as needed.
Ensure you have **correct date/time** and NTPd running to keep it accurate!
All commands should be run as root or through sudo.
## Install the Java Runtime from repository
```shell
dnf install java-11-openjdk-headless wget
# or
yum install java-11-openjdk-headless wget
```
## Download and Install InfluxDB
```shell
wget https://dl.influxdata.com/influxdb/releases/influxdb-1.8.10.x86_64.rpm
rpm -ivh influxdb-1.8.10.x86_64.rpm
systemctl daemon-reload
systemctl enable influxdb
systemctl start influxdb
```
If you are running Linux on Power, you can find ppc64le InfluxDB packages on the [Power DevOps](https://www.power-devops.com/influxdb) site. Remember to pick the 1.8 or 1.9 version.
Run the ```influx``` cli command and create the *hmci* database.
```sql
CREATE DATABASE "hmci" WITH DURATION 365d REPLICATION 1;
```
## Download and Install Grafana
```shell
wget https://dl.grafana.com/oss/release/grafana-9.1.7-1.x86_64.rpm
rpm -ivh grafana-9.1.7-1.x86_64.rpm
systemctl daemon-reload
systemctl enable grafana-server
systemctl start grafana-server
```
If you are running Linux on Power, you can find ppc64le Grafana packages on the [Power DevOps](https://www.power-devops.com/grafana) site.
## Download and Install HMCi
[Download](https://git.data.coop/nellemann/-/packages/generic/hmci/) the latest version of HMCi packaged for rpm.
```shell
wget https://git.data.coop/api/packages/nellemann/generic/hmci/v1.4.4/hmci-1.4.4-1.noarch.rpm
rpm -ivh hmci-1.4.4-1.noarch.rpm
cp /opt/hmci/doc/hmci.toml /etc/
cp /opt/hmci/doc/hmci.service /etc/systemd/system/
systemctl daemon-reload
systemctl enable hmci
```
## Configure HMCi
Now modify **/etc/hmci.toml** (edit the URL and credentials to match your HMCs) and test the setup by running ```/opt/hmci/bin/hmci -d``` in the foreground/terminal and look for any errors.
Press CTRL+C to stop and then start as a background service with ```systemctl start hmci```.
You can see the log/output by running ```journalctl -f -u hmci```.

View File

@ -1,20 +0,0 @@
# HMCi as a System Service
## Systemd
To install as a systemd service, copy the **hmci.service**
file into */etc/systemd/system/* and enable the service:
```shell
cp /opt/hmci/doc/hmci.service /etc/systemd/system/
systemctl daemon-reload
systemctl enable hmci.service
systemctl restart hmci.service
```
To read log output from the service:
```shell
journalctl -f -u hmci.service
```

69
doc/readme-suse.md Normal file
View File

@ -0,0 +1,69 @@
# Instructions for SLES / OpenSUSE Systems
Please note that the software versions referenced in this document may have changed and may no longer be available; update them as needed.
Ensure you have **correct date/time** and NTPd running to keep it accurate!
All commands should be run as root or through sudo.
## Install the Java Runtime from repository
```shell
zypper install java-11-openjdk-headless wget
```
## Download and Install InfluxDB
```shell
wget https://dl.influxdata.com/influxdb/releases/influxdb-1.8.10.x86_64.rpm
rpm -ivh influxdb-1.8.10.x86_64.rpm
systemctl daemon-reload
systemctl enable influxdb
systemctl start influxdb
```
If you are running Linux on Power, you can find ppc64le InfluxDB packages on the [Power DevOps](https://www.power-devops.com/influxdb) site. Remember to pick the 1.8 or 1.9 version.
Run the ```influx``` cli command and create the *hmci* database.
```sql
CREATE DATABASE "hmci" WITH DURATION 365d REPLICATION 1;
```
## Download and Install Grafana
```shell
wget https://dl.grafana.com/oss/release/grafana-9.1.7-1.x86_64.rpm
rpm -ivh --nodeps grafana-9.1.7-1.x86_64.rpm
systemctl daemon-reload
systemctl enable grafana-server
systemctl start grafana-server
```
If you are running Linux on Power, you can find ppc64le Grafana packages on the [Power DevOps](https://www.power-devops.com/grafana) site.
## Download and Install HMCi
[Download](https://git.data.coop/nellemann/-/packages/generic/hmci/) the latest version of HMCi packaged for rpm.
```shell
wget https://git.data.coop/api/packages/nellemann/generic/hmci/v1.4.2/hmci-1.4.2-1.noarch.rpm
rpm -ivh hmci-1.4.2-1.noarch.rpm
cp /opt/hmci/doc/hmci.toml /etc/
cp /opt/hmci/doc/hmci.service /etc/systemd/system/
systemctl daemon-reload
systemctl enable hmci
```
## Configure HMCi
Now modify **/etc/hmci.toml** (edit the URL and credentials to match your HMCs) and test the setup by running ```/opt/hmci/bin/hmci -d``` in the foreground/terminal and look for any errors.
Press CTRL+C to stop and then start as a background service with ```systemctl start hmci```.
You can see the log/output by running ```journalctl -f -u hmci```.

BIN
doc/screenshots/vio-io.png Normal file

(Six more binary image files added - not shown; their paths are not visible in the listing.)

View File

@ -0,0 +1,19 @@
#!/bin/bash
# For InfluxDB v. 1.x
if [ "$#" -ne 1 ]; then
echo "Usage: $0 <lpar>"
exit 1
fi
DB="hmci"
LPAR=$1
for s in $(influx -database ${DB} -execute 'SHOW SERIES' -format column | grep $LPAR); do
n=$(echo $s | cut -f 1 -d,)
influx -database ${DB} -execute "DELETE FROM ${n} WHERE \"lparname\"=\"${LPAR}\" AND time > '1980-01-01';"
influx -database ${DB} -execute "DROP SERIES FROM ${n} WHERE \"lparname\"=\"${LPAR}\";"
done
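Usage would be along these lines, assuming the script is saved as *delete-lpar.sh* (the actual file name is not shown above):
```shell
# Remove all hmci series for the LPAR named "testlpar"
./delete-lpar.sh testlpar
```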

View File

@ -1,4 +1,3 @@
projectId = hmci
projectGroup = biz.nellemann.hmci
projectVersion = 1.3.0
projectJavaVersion = 1.8
projectVersion = 1.4.5

Binary file not shown.

View File

@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

6
gradlew vendored
View File

@ -205,6 +205,12 @@ set -- \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.

14
gradlew.bat vendored
View File

@ -14,7 +14,7 @@
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@ -25,7 +25,7 @@
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
if "%DIRNAME%"=="" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@ -40,7 +40,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
if %ERRORLEVEL% equ 0 goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
@ -75,13 +75,15 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal

View File

@ -15,26 +15,29 @@
*/
package biz.nellemann.hmci;
import picocli.CommandLine;
import picocli.CommandLine.Option;
import picocli.CommandLine.Command;
import org.slf4j.impl.SimpleLogger;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import com.fasterxml.jackson.dataformat.toml.TomlMapper;
import biz.nellemann.hmci.dto.toml.Configuration;
import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;
@Command(name = "hmci",
mixinStandardHelpOptions = true,
versionProvider = biz.nellemann.hmci.VersionProvider.class)
versionProvider = biz.nellemann.hmci.VersionProvider.class,
defaultValueProvider = biz.nellemann.hmci.DefaultProvider.class)
public class Application implements Callable<Integer> {
@Option(names = { "-c", "--conf" }, description = "Configuration file [default: '/etc/hmci.toml'].", defaultValue = "/etc/hmci.toml", paramLabel = "<file>")
@Option(names = { "-c", "--conf" }, description = "Configuration file [default: ${DEFAULT-VALUE}].", paramLabel = "<file>")
private File configurationFile;
@Option(names = { "-d", "--debug" }, description = "Enable debugging [default: 'false'].")
@Option(names = { "-d", "--debug" }, description = "Enable debugging [default: false].")
private boolean[] enableDebug = new boolean[0];
@ -45,9 +48,8 @@ public class Application implements Callable<Integer> {
@Override
public Integer call() throws IOException {
public Integer call() {
Configuration configuration;
InfluxClient influxClient;
List<Thread> threadList = new ArrayList<>();
@ -58,30 +60,39 @@ public class Application implements Callable<Integer> {
switch (enableDebug.length) {
case 1:
System.setProperty(SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "DEBUG");
System.setProperty("org.slf4j.simpleLogger.defaultLogLevel" , "DEBUG");
break;
case 2:
System.setProperty(SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "TRACE");
System.setProperty("org.slf4j.simpleLogger.defaultLogLevel ", "TRACE");
break;
}
try {
configuration = new Configuration(configurationFile.toPath());
influxClient = new InfluxClient(configuration.getInflux());
TomlMapper mapper = new TomlMapper();
Configuration configuration = mapper.readerFor(Configuration.class)
.readValue(configurationFile);
influxClient = new InfluxClient(configuration.influx);
influxClient.login();
for(Configuration.HmcObject configHmc : configuration.getHmc()) {
Thread t = new Thread(new HmcInstance(configHmc, influxClient));
t.setName(configHmc.name);
t.start();
threadList.add(t);
}
configuration.hmc.forEach((key, value) -> {
try {
ManagementConsole managementConsole = new ManagementConsole(value, influxClient);
Thread t = new Thread(managementConsole);
t.setName(key);
t.start();
threadList.add(t);
} catch (Exception e) {
System.err.println(e.getMessage());
}
});
for (Thread thread : threadList) {
thread.join();
}
} catch (InterruptedException | RuntimeException e) {
influxClient.logoff();
} catch (IOException | InterruptedException e) {
System.err.println(e.getMessage());
return 1;
}

View File

@ -1,268 +0,0 @@
/*
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package biz.nellemann.hmci;
import org.tomlj.Toml;
import org.tomlj.TomlParseResult;
import org.tomlj.TomlTable;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
public final class Configuration {
final private Long update;
final private Long rescan;
final private InfluxObject influx;
final private List<HmcObject> hmcList;
Configuration(Path configurationFile) throws IOException {
TomlParseResult result = Toml.parse(configurationFile);
result.errors().forEach(error -> System.err.println(error.toString()));
if(result.contains("hmci.update")) {
update = result.getLong("hmci.update");
} else {
update = 30L;
}
if(result.contains("hmci.rescan")) {
rescan = result.getLong("hmci.rescan");
} else {
rescan = 60L;
}
hmcList = parseConfigurationForHmc(result);
influx = parseConfigurationForInflux(result);
}
private List<HmcObject> parseConfigurationForHmc(TomlParseResult result) {
ArrayList<HmcObject> list = new ArrayList<>();
if(result.contains("hmc") && result.isTable("hmc")) {
TomlTable hmcTable = result.getTable("hmc");
if(hmcTable == null) {
return list;
}
for(String key : hmcTable.keySet()) {
HmcObject c = new HmcObject();
c.name = key;
c.update = update;
c.rescan = rescan;
if(hmcTable.contains(key+".url")) {
c.url = hmcTable.getString(key+".url");
}
if(hmcTable.contains(key+".username")) {
c.username = hmcTable.getString(key+".username");
}
if(hmcTable.contains(key+".password")) {
c.password = hmcTable.getString(key+".password");
}
if(hmcTable.contains(key+".unsafe")) {
c.unsafe = hmcTable.getBoolean(key+".unsafe");
} else {
c.unsafe = false;
}
if(hmcTable.contains(key+".energy")) {
c.energy = hmcTable.getBoolean(key+".energy");
} else {
c.energy = true;
}
if(hmcTable.contains(key+".trace")) {
c.trace = hmcTable.getString(key+".trace");
} else {
c.trace = null;
}
if(hmcTable.contains(key+".excludeSystems")) {
List<Object> tmpList = hmcTable.getArrayOrEmpty(key+".excludeSystems").toList();
c.excludeSystems = tmpList.stream()
.map(object -> Objects.toString(object, null))
.collect(Collectors.toList());
} else {
c.excludeSystems = new ArrayList<>();
}
if(hmcTable.contains(key+".includeSystems")) {
List<Object> tmpList = hmcTable.getArrayOrEmpty(key+".includeSystems").toList();
c.includeSystems = tmpList.stream()
.map(object -> Objects.toString(object, null))
.collect(Collectors.toList());
} else {
c.includeSystems = new ArrayList<>();
}
if(hmcTable.contains(key+".excludePartitions")) {
List<Object> tmpList = hmcTable.getArrayOrEmpty(key+".excludePartitions").toList();
c.excludePartitions = tmpList.stream()
.map(object -> Objects.toString(object, null))
.collect(Collectors.toList());
} else {
c.excludePartitions = new ArrayList<>();
}
if(hmcTable.contains(key+".includePartitions")) {
List<Object> tmpList = hmcTable.getArrayOrEmpty(key+".includePartitions").toList();
c.includePartitions = tmpList.stream()
.map(object -> Objects.toString(object, null))
.collect(Collectors.toList());
} else {
c.includePartitions = new ArrayList<>();
}
list.add(c);
}
}
return list;
}
private InfluxObject parseConfigurationForInflux(TomlParseResult result) {
InfluxObject c = new InfluxObject();
if(result.contains("influx")) {
TomlTable t = result.getTable("influx");
if(t != null && t.contains("url")) {
c.url = t.getString("url");
}
if(t != null && t.contains("username")) {
c.username = t.getString("username");
}
if(t != null && t.contains("password")) {
c.password = t.getString("password");
}
if(t != null && t.contains("database")) {
c.database = t.getString("database");
}
}
return c;
}
public List<HmcObject> getHmc() {
return hmcList;
}
public InfluxObject getInflux() {
return influx;
}
static class InfluxObject {
String url = "http://localhost:8086";
String username = "root";
String password = "";
String database = "hmci";
private boolean validated = false;
InfluxObject() { }
InfluxObject(String url, String username, String password, String database) {
this.url = url;
this.username = username;
this.password = password;
this.database = database;
}
Boolean isValid() {
return validated;
}
// TODO: Implement validation
void validate() {
validated = true;
}
@Override
public String toString() {
return url;
}
}
static class HmcObject {
String name;
String url;
String username;
String password;
Boolean unsafe = false;
Boolean energy = true;
String trace;
List<String> excludeSystems;
List<String> includeSystems;
List<String> excludePartitions;
List<String> includePartitions;
Long update = 30L;
Long rescan = 60L;
private boolean validated = false;
HmcObject() { }
HmcObject(String name, String url, String username, String password, Boolean unsafe, Long update, Long rescan) {
this.url = url;
this.username = username;
this.password = password;
this.unsafe = unsafe;
this.update = update;
this.rescan = rescan;
}
Boolean isValid() {
return validated;
}
// TODO: Implement validation
void validate() {
validated = true;
}
@Override
public String toString() {
return name;
}
}
}

View File

@ -0,0 +1,33 @@
package biz.nellemann.hmci;
import picocli.CommandLine;
public class DefaultProvider implements CommandLine.IDefaultValueProvider {
public String defaultValue(CommandLine.Model.ArgSpec argSpec) throws Exception {
if(argSpec.isOption()) {
switch (argSpec.paramLabel()) {
case "<file>":
return getDefaultConfigFileLocation();
default:
return null;
}
}
return null;
}
private boolean isWindowsOperatingSystem() {
String os = System.getProperty("os.name");
return os.toLowerCase().startsWith("windows");
}
private String getDefaultConfigFileLocation() {
String configFilePath;
if(isWindowsOperatingSystem()) {
configFilePath = System.getProperty("user.home") + "\\hmci.toml";
} else {
configFilePath = "/etc/hmci.toml";
}
return configFilePath;
}
}

View File

@ -1,337 +0,0 @@
/*
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package biz.nellemann.hmci;
import biz.nellemann.hmci.Configuration.HmcObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.time.Duration;
import java.time.Instant;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import static java.lang.Thread.sleep;
class HmcInstance implements Runnable {
private final static Logger log = LoggerFactory.getLogger(HmcInstance.class);
private final String hmcId;
private final Long updateValue;
private final Long rescanValue;
private final Map<String,ManagedSystem> systems = new HashMap<>();
private final Map<String, LogicalPartition> partitions = new HashMap<>();
private final HmcRestClient hmcRestClient;
private final InfluxClient influxClient;
private final AtomicBoolean keepRunning = new AtomicBoolean(true);
private File traceDir;
private Boolean doTrace = false;
private Boolean doEnergy = true;
private List<String> excludeSystems;
private List<String> includeSystems;
private List<String> excludePartitions;
private List<String> includePartitions;
HmcInstance(HmcObject configHmc, InfluxClient influxClient) {
this.hmcId = configHmc.name;
this.updateValue = configHmc.update;
this.rescanValue = configHmc.rescan;
this.doEnergy = configHmc.energy;
this.influxClient = influxClient;
hmcRestClient = new HmcRestClient(configHmc.url, configHmc.username, configHmc.password, configHmc.unsafe);
log.debug("HmcInstance() - id: {}, update: {}, refresh {}", hmcId, updateValue, rescanValue);
if(configHmc.trace != null) {
try {
traceDir = new File(configHmc.trace);
traceDir.mkdirs();
if(traceDir.canWrite()) {
doTrace = true;
} else {
log.warn("HmcInstance() - can't write to trace dir: " + traceDir.toString());
}
} catch (Exception e) {
log.error("HmcInstance() - trace error: " + e.getMessage());
}
}
this.excludeSystems = configHmc.excludeSystems;
this.includeSystems = configHmc.includeSystems;
this.excludePartitions = configHmc.excludePartitions;
this.includePartitions = configHmc.includePartitions;
}
@Override
public String toString() {
return hmcId;
}
@Override
public void run() {
log.trace("run() - " + hmcId);
int executions = 0;
discover();
do {
Instant instantStart = Instant.now();
try {
if (doEnergy) {
getMetricsForEnergy();
}
getMetricsForSystems();
getMetricsForPartitions();
writeMetricsForSystemEnergy();
writeMetricsForManagedSystems();
writeMetricsForLogicalPartitions();
//influxClient.writeBatchPoints();
// Refresh
if (++executions > rescanValue) {
executions = 0;
discover();
}
} catch (Exception e) {
log.error("run() - fatal error: {}", e.getMessage());
keepRunning.set(false);
throw new RuntimeException(e);
}
Instant instantEnd = Instant.now();
long timeSpend = Duration.between(instantStart, instantEnd).toMillis();
log.trace("run() - duration millis: " + timeSpend);
if(timeSpend < (updateValue * 1000)) {
try {
long sleepTime = (updateValue * 1000) - timeSpend;
log.trace("run() - sleeping millis: " + sleepTime);
if(sleepTime > 0) {
//noinspection BusyWait
sleep(sleepTime);
}
} catch (InterruptedException e) {
log.error("run() - sleep interrupted", e);
}
} else {
log.warn("run() - possible slow response from this HMC");
}
} while (keepRunning.get());
// Logout of HMC
try {
hmcRestClient.logoff();
} catch (IOException e) {
log.warn("run() - error logging out of HMC: " + e.getMessage());
}
}
void discover() {
log.info("discover() - Querying HMC for Managed Systems and Logical Partitions");
Map<String, LogicalPartition> tmpPartitions = new HashMap<>();
try {
hmcRestClient.login();
hmcRestClient.getManagedSystems().forEach((systemId, system) -> {
// Add to list of known systems
if(!systems.containsKey(systemId)) {
// Check excludeSystems and includeSystems
if(!excludeSystems.contains(system.name) && includeSystems.isEmpty()) {
systems.put(systemId, system);
log.info("discover() - ManagedSystem: {}", system);
if (doEnergy) {
hmcRestClient.enableEnergyMonitoring(system);
}
} else if(!includeSystems.isEmpty() && includeSystems.contains(system.name)) {
systems.put(systemId, system);
log.info("discover() - ManagedSystem (include): {}", system);
if (doEnergy) {
hmcRestClient.enableEnergyMonitoring(system);
}
} else {
log.debug("discover() - Skipping ManagedSystem: {}", system);
}
}
// Get partitions for this system
try {
tmpPartitions.putAll(hmcRestClient.getLogicalPartitionsForManagedSystem(system));
if(!tmpPartitions.isEmpty()) {
partitions.clear();
//partitions.putAll(tmpPartitions);
tmpPartitions.forEach((lparKey, lpar) -> {
if(!excludePartitions.contains(lpar.name) && includePartitions.isEmpty()) {
partitions.put(lparKey, lpar);
log.info("discover() - LogicalPartition: {}", lpar);
} else if(!includePartitions.isEmpty() && includePartitions.contains(lpar.name)) {
partitions.put(lparKey, lpar);
log.info("discover() - LogicalPartition (include): {}", lpar);
} else {
log.debug("discover() - Skipping LogicalPartition: {}", lpar);
}
});
}
} catch (Exception e) {
log.warn("discover() - getLogicalPartitions error: {}", e.getMessage());
}
});
} catch(Exception e) {
log.warn("discover() - getManagedSystems error: {}", e.getMessage());
}
}
void getMetricsForSystems() {
systems.forEach((systemId, system) -> {
// Get and process metrics for this system
String tmpJsonString = null;
try {
tmpJsonString = hmcRestClient.getPcmDataForManagedSystem(system);
} catch (Exception e) {
log.warn("getMetricsForSystems() - error: {}", e.getMessage());
}
if(tmpJsonString != null && !tmpJsonString.isEmpty()) {
system.processMetrics(tmpJsonString);
if(doTrace) {
writeTraceFile(systemId, tmpJsonString);
}
}
});
}
void getMetricsForPartitions() {
try {
// Get partitions for this system
partitions.forEach((partitionId, partition) -> {
// Get and process metrics for this partition
String tmpJsonString2 = null;
try {
tmpJsonString2 = hmcRestClient.getPcmDataForLogicalPartition(partition);
} catch (Exception e) {
log.warn("getMetricsForPartitions() - getPcmDataForLogicalPartition error: {}", e.getMessage());
}
if(tmpJsonString2 != null && !tmpJsonString2.isEmpty()) {
partition.processMetrics(tmpJsonString2);
if(doTrace) {
writeTraceFile(partitionId, tmpJsonString2);
}
}
});
} catch(Exception e) {
log.warn("getMetricsForPartitions() - error: {}", e.getMessage());
}
}
void getMetricsForEnergy() {
systems.forEach((systemId, system) -> {
// Get and process metrics for this system
String tmpJsonString = null;
try {
tmpJsonString = hmcRestClient.getPcmDataForEnergy(system.energy);
} catch (Exception e) {
log.warn("getMetricsForEnergy() - error: {}", e.getMessage());
}
if(tmpJsonString != null && !tmpJsonString.isEmpty()) {
system.energy.processMetrics(tmpJsonString);
}
});
}
void writeMetricsForManagedSystems() {
try {
systems.forEach((systemId, system) -> influxClient.writeManagedSystem(system));
} catch (NullPointerException npe) {
log.warn("writeMetricsForManagedSystems() - NPE: {}", npe.getMessage(), npe);
}
}
void writeMetricsForLogicalPartitions() {
try {
partitions.forEach((partitionId, partition) -> influxClient.writeLogicalPartition(partition));
} catch (NullPointerException npe) {
log.warn("writeMetricsForLogicalPartitions() - NPE: {}", npe.getMessage(), npe);
}
}
void writeMetricsForSystemEnergy() {
try {
systems.forEach((systemId, system) -> influxClient.writeSystemEnergy(system.energy));
} catch (NullPointerException npe) {
log.warn("writeMetricsForSystemEnergy() - NPE: {}", npe.getMessage(), npe);
}
}
private void writeTraceFile(String id, String json) {
String fileName = String.format("%s-%s.json", id, Instant.now().toString());
try {
log.debug("Writing trace file: " + fileName);
File traceFile = new File(traceDir, fileName);
try (BufferedWriter writer = new BufferedWriter(new FileWriter(traceFile))) {
    writer.write(json);
}
} catch (IOException e) {
log.warn("writeTraceFile() - " + e.getMessage());
}
}
}

View File

@ -1,545 +0,0 @@
/*
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package biz.nellemann.hmci;
import okhttp3.*;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.nodes.Entities;
import org.jsoup.parser.Parser;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.net.ssl.*;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.cert.X509Certificate;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
public class HmcRestClient {
private final static Logger log = LoggerFactory.getLogger(HmcRestClient.class);
private final MediaType MEDIA_TYPE_IBM_XML_LOGIN = MediaType.parse("application/vnd.ibm.powervm.web+xml; type=LogonRequest");
protected Integer responseErrors = 0;
protected String authToken;
private final OkHttpClient client;
// OkHttpClient timeouts
private final static int CONNECT_TIMEOUT = 30;
private final static int WRITE_TIMEOUT = 30;
private final static int READ_TIMEOUT = 30;
private final String baseUrl;
private final String username;
private final String password;
HmcRestClient(String url, String username, String password, Boolean unsafe) {
this.baseUrl = url;
this.username = username;
this.password = password;
if(unsafe) {
this.client = getUnsafeOkHttpClient();
} else {
this.client = getSafeOkHttpClient();
}
}
@Override
public String toString() {
return baseUrl;
}
/**
* Logon to the HMC and get an authentication token for further requests.
*/
synchronized void login() throws Exception {
log.debug("Connecting to HMC - " + baseUrl);
logoff();
StringBuilder payload = new StringBuilder();
payload.append("<?xml version='1.0' encoding='UTF-8' standalone='yes'?>");
payload.append("<LogonRequest xmlns='http://www.ibm.com/xmlns/systems/power/firmware/web/mc/2012_10/' schemaVersion='V1_0'>");
payload.append("<UserID>").append(username).append("</UserID>");
payload.append("<Password>").append(password).append("</Password>");
payload.append("</LogonRequest>");
try {
URL url = new URL(String.format("%s/rest/api/web/Logon", baseUrl));
Request request = new Request.Builder()
.url(url)
.addHeader("Accept", "application/vnd.ibm.powervm.web+xml; type=LogonResponse")
.addHeader("X-Audit-Memento", "hmci")
.put(RequestBody.create(payload.toString(), MEDIA_TYPE_IBM_XML_LOGIN))
.build();
Response response = client.newCall(request).execute();
String responseBody = Objects.requireNonNull(response.body()).string();
if (!response.isSuccessful()) {
log.warn("login() - Unexpected response: {}", response.code());
throw new IOException("Unexpected code: " + response);
}
Document doc = Jsoup.parse(responseBody);
authToken = doc.select("X-API-Session").text();
log.debug("login() - Auth Token: " + authToken);
} catch (MalformedURLException e) {
log.error("login() - URL Error: {}", e.getMessage());
throw e;
} catch (Exception e) {
log.error("login() - Error: {}", e.getMessage());
throw e;
}
}
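// Abridged shape of the LogonResponse parsed above; the token value is
// illustrative:
// <LogonResponse xmlns='http://www.ibm.com/xmlns/systems/power/firmware/web/mc/2012_10/' schemaVersion='V1_0'>
//   <X-API-Session>4f2Gx...session-token...</X-API-Session>
// </LogonResponse>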
/**
* Logoff from the HMC and remove any session
*
*/
synchronized void logoff() throws IOException {
if(authToken == null) {
return;
}
URL absUrl = new URL(String.format("%s/rest/api/web/Logon", baseUrl));
Request request = new Request.Builder()
.url(absUrl)
.addHeader("Content-Type", "application/vnd.ibm.powervm.web+xml; type=LogonRequest")
.addHeader("X-API-Session", authToken)
.delete()
.build();
try {
client.newCall(request).execute();
} catch (IOException e) {
log.warn("logoff() error: {}", e.getMessage());
} finally {
authToken = null;
}
}
/**
* Return Map of ManagedSystems seen by this HMC
*
* @return Map of system-id and ManagedSystem
*/
Map<String, ManagedSystem> getManagedSystems() throws Exception {
URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem", baseUrl));
String responseBody = sendGetRequest(url);
Map<String,ManagedSystem> managedSystemsMap = new HashMap<>();
// Do not try to parse empty response
if(responseBody == null || responseBody.length() <= 1) {
responseErrors++;
return managedSystemsMap;
}
try {
Document doc = Jsoup.parse(responseBody);
Elements managedSystems = doc.select("ManagedSystem|ManagedSystem"); // doc.select("img[src$=.png]");
for(Element el : managedSystems) {
ManagedSystem system = new ManagedSystem(
el.select("Metadata > Atom > AtomID").text(),
el.select("SystemName").text(),
el.select("MachineTypeModelAndSerialNumber > MachineType").text(),
el.select("MachineTypeModelAndSerialNumber > Model").text(),
el.select("MachineTypeModelAndSerialNumber > SerialNumber").text()
);
managedSystemsMap.put(system.id, system);
log.debug("getManagedSystems() - Found system: {}", system);
}
} catch(Exception e) {
log.warn("getManagedSystems() - XML parse error", e);
}
return managedSystemsMap;
}
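// Abridged shape of one feed entry matched by the selectors above; element
// names come from the code, values are illustrative:
// <ManagedSystem:ManagedSystem>
//   <Metadata><Atom><AtomID>b3b8a323-...-0a1b2c</AtomID></Atom></Metadata>
//   <SystemName>Server-9009-42A-SN1234567</SystemName>
//   <MachineTypeModelAndSerialNumber>
//     <MachineType>9009</MachineType>
//     <Model>42A</Model>
//     <SerialNumber>1234567</SerialNumber>
//   </MachineTypeModelAndSerialNumber>
// </ManagedSystem:ManagedSystem>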
/**
* Return Map of LogicalPartitions seen by a ManagedSystem on this HMC
* @param system a valid ManagedSystem
* @return Map of partition-id and LogicalPartition
*/
Map<String, LogicalPartition> getLogicalPartitionsForManagedSystem(ManagedSystem system) throws Exception {
URL url = new URL(String.format("%s/rest/api/uom/ManagedSystem/%s/LogicalPartition", baseUrl, system.id));
String responseBody = sendGetRequest(url);
Map<String, LogicalPartition> partitionMap = new HashMap<>();
// Do not try to parse empty response
if(responseBody == null || responseBody.length() <= 1) {
responseErrors++;
return partitionMap;
}
try {
Document doc = Jsoup.parse(responseBody);
Elements logicalPartitions = doc.select("LogicalPartition|LogicalPartition");
for(Element el : logicalPartitions) {
LogicalPartition logicalPartition = new LogicalPartition(
el.select("PartitionUUID").text(),
el.select("PartitionName").text(),
el.select("PartitionType").text(),
system
);
partitionMap.put(logicalPartition.id, logicalPartition);
log.debug("getLogicalPartitionsForManagedSystem() - Found partition: {}", logicalPartition);
}
} catch(Exception e) {
log.warn("getLogicalPartitionsForManagedSystem() - XML parse error: {}", system, e);
}
return partitionMap;
}
/**
* Parse XML feed to get PCM Data in JSON format
* @param system a valid ManagedSystem
* @return JSON string with PCM data for this ManagedSystem
*/
String getPcmDataForManagedSystem(ManagedSystem system) throws Exception {
log.trace("getPcmDataForManagedSystem() - {}", system.id);
URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/ProcessedMetrics?NoOfSamples=1", baseUrl, system.id));
String responseBody = sendGetRequest(url);
String jsonBody = null;
// Do not try to parse empty response
if(responseBody == null || responseBody.length() <= 1) {
responseErrors++;
log.warn("getPcmDataForManagedSystem() - empty response, skipping: {}", system.name);
return null;
}
try {
Document doc = Jsoup.parse(responseBody);
Element entry = doc.select("feed > entry").first();
Element link = Objects.requireNonNull(entry).select("link[href]").first();
if(Objects.requireNonNull(link).attr("type").equals("application/json")) {
String href = link.attr("href");
log.trace("getPcmDataForManagedSystem() - URL: {}", href);
jsonBody = sendGetRequest(new URL(href));
}
} catch(Exception e) {
log.warn("getPcmDataForManagedSystem() - XML parse error: {}", system, e);
}
return jsonBody;
}
/**
* Parse XML feed to get PCM Data in JSON format
* @param partition a valid LogicalPartition
* @return JSON string with PCM data for this LogicalPartition
*/
String getPcmDataForLogicalPartition(LogicalPartition partition) throws Exception {
log.trace("getPcmDataForLogicalPartition() - {} @ {}", partition.id, partition.system.id);
URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/LogicalPartition/%s/ProcessedMetrics?NoOfSamples=1", baseUrl, partition.system.id, partition.id));
String responseBody = sendGetRequest(url);
String jsonBody = null;
// Do not try to parse empty response
if(responseBody == null || responseBody.length() <= 1) {
responseErrors++;
log.warn("getPcmDataForLogicalPartition() - empty response, skipping: {}", partition.name);
return null;
}
try {
Document doc = Jsoup.parse(responseBody);
Element entry = doc.select("feed > entry").first();
Element link = Objects.requireNonNull(entry).select("link[href]").first();
if(Objects.requireNonNull(link).attr("type").equals("application/json")) {
String href = link.attr("href");
log.trace("getPcmDataForLogicalPartition() - URL: {}", href);
jsonBody = sendGetRequest(new URL(href));
}
} catch(Exception e) {
log.warn("getPcmDataForLogicalPartition() - XML parse error: {}", partition.id, e);
}
return jsonBody;
}
/**
* Parse XML feed to get PCM Data in JSON format.
* Does not work on older HMCs (pre v9) or older Power servers (pre Power 8).
* @param systemEnergy a valid SystemEnergy
* @return JSON string with PCM data for this SystemEnergy
*/
String getPcmDataForEnergy(SystemEnergy systemEnergy) throws Exception {
log.trace("getPcmDataForEnergy() - " + systemEnergy.system.id);
URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/ProcessedMetrics?Type=Energy&NoOfSamples=1", baseUrl, systemEnergy.system.id));
String responseBody = sendGetRequest(url);
String jsonBody = null;
//log.info(responseBody);
// Do not try to parse empty response
if(responseBody == null || responseBody.length() <= 1) {
responseErrors++;
log.trace("getPcmDataForEnergy() - empty response, skipping: {}", systemEnergy);
return null;
}
try {
Document doc = Jsoup.parse(responseBody);
Element entry = doc.select("feed > entry").first();
Element link = Objects.requireNonNull(entry).select("link[href]").first();
if(Objects.requireNonNull(link).attr("type").equals("application/json")) {
String href = link.attr("href");
log.trace("getPcmDataForEnergy() - URL: {}", href);
jsonBody = sendGetRequest(new URL(href));
}
} catch(Exception e) {
log.warn("getPcmDataForEnergy() - XML parse error: {}", systemEnergy, e);
}
return jsonBody;
}
/**
* Set EnergyMonitorEnabled preference to true, if possible.
* @param system a valid ManagedSystem
*/
void enableEnergyMonitoring(ManagedSystem system) {
log.trace("enableEnergyMonitoring() - {}", system);
try {
URL url = new URL(String.format("%s/rest/api/pcm/ManagedSystem/%s/preferences", baseUrl, system.id));
String responseBody = sendGetRequest(url);
// Do not try to parse empty response
if(responseBody == null || responseBody.length() <= 1) {
responseErrors++;
log.warn("enableEnergyMonitoring() - empty response, skipping: {}", system);
return;
}
Document doc = Jsoup.parse(responseBody, "", Parser.xmlParser());
doc.outputSettings().escapeMode(Entities.EscapeMode.xhtml);
doc.outputSettings().prettyPrint(false);
doc.outputSettings().charset("US-ASCII");
Element entry = doc.select("feed > entry").first();
Element link1 = Objects.requireNonNull(entry).select("EnergyMonitoringCapable").first();
Element link2 = entry.select("EnergyMonitorEnabled").first();
if(Objects.requireNonNull(link1).text().equals("true")) {
log.debug("enableEnergyMonitoring() - EnergyMonitoringCapable == true");
if(Objects.requireNonNull(link2).text().equals("false")) {
//log.warn("enableEnergyMonitoring() - EnergyMonitorEnabled == false");
link2.text("true");
Document content = Jsoup.parse(Objects.requireNonNull(doc.select("Content").first()).html(), "", Parser.xmlParser());
content.outputSettings().escapeMode(Entities.EscapeMode.xhtml);
content.outputSettings().prettyPrint(false);
content.outputSettings().charset("UTF-8");
String updateXml = content.outerHtml();
sendPostRequest(url, updateXml);
}
} else {
log.warn("enableEnergyMonitoring() - EnergyMonitoringCapable == false");
}
} catch (Exception e) {
log.debug("enableEnergyMonitoring() - Error: {}", e.getMessage());
}
}
/**
* Send a GET request to the HMC and return the response body
* @param url to send the GET request to
* @return Response body string
*/
private String sendGetRequest(URL url) throws Exception {
log.trace("getResponse() - URL: {}", url.toString());
if(authToken == null) {
return null;
}
Request request = new Request.Builder()
.url(url)
.addHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
.addHeader("X-API-Session", authToken)
.get().build();
Response response = client.newCall(request).execute();
String body = Objects.requireNonNull(response.body()).string();
if (!response.isSuccessful()) {
response.close();
if(response.code() == 401) {
log.warn("getResponse() - 401 - login and retry.");
authToken = null;
login();
return null;
}
log.error("getResponse() - Unexpected response: {}", response.code());
throw new IOException("getResponse() - Unexpected response: " + response.code());
}
return body;
}
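// On 401 the method above re-authenticates and returns null instead of
// retrying, so callers treat a null/empty body as "no data this cycle".
// A minimal retry wrapper, illustrative only (not in the original source):
private String sendGetRequestWithRetry(URL url) throws Exception {
    String body = sendGetRequest(url);
    if(body == null) { // null after a 401 re-login; try once more
        body = sendGetRequest(url);
    }
    return body;
}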
/**
* Send a POST request with a payload (can be null) to the HMC
* @param url to send the POST request to
* @param payload XML payload, or null for an empty body
* @return Response body string
* @throws Exception on connection errors or unexpected response codes
*/
public String sendPostRequest(URL url, String payload) throws Exception {
log.trace("sendPostRequest() - URL: {}", url.toString());
if(authToken == null) {
return null;
}
RequestBody requestBody;
if(payload != null) {
//log.debug("sendPostRequest() - payload: " + payload);
requestBody = RequestBody.create(payload, MediaType.get("application/xml"));
} else {
requestBody = RequestBody.create("", null);
}
Request request = new Request.Builder()
.url(url)
//.addHeader("Content-Type", "application/xml")
.addHeader("content-type", "application/xml")
.addHeader("X-API-Session", authToken)
.post(requestBody).build();
Response response = client.newCall(request).execute();
String body = Objects.requireNonNull(response.body()).string();
if (!response.isSuccessful()) {
response.close();
log.warn(body);
log.error("sendPostRequest() - Unexpected response: {}", response.code());
throw new IOException("sendPostRequest() - Unexpected response: " + response.code());
}
return body;
}
/**
* Provide an unsafe (ignoring SSL problems) OkHttpClient
*
* @return OkHttpClient ignoring SSL/TLS errors
*/
private static OkHttpClient getUnsafeOkHttpClient() {
try {
// Create a trust manager that does not validate certificate chains
final TrustManager[] trustAllCerts = new TrustManager[] {
new X509TrustManager() {
@Override
public void checkClientTrusted(X509Certificate[] chain, String authType) { }
@Override
public void checkServerTrusted(X509Certificate[] chain, String authType) {
}
@Override
public X509Certificate[] getAcceptedIssuers() {
return new X509Certificate[]{};
}
}
};
// Install the all-trusting trust manager
final SSLContext sslContext = SSLContext.getInstance("SSL");
sslContext.init(null, trustAllCerts, new SecureRandom());
// Create a ssl socket factory with our all-trusting manager
final SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();
OkHttpClient.Builder builder = new OkHttpClient.Builder();
builder.sslSocketFactory(sslSocketFactory, (X509TrustManager)trustAllCerts[0]);
builder.hostnameVerifier((hostname, session) -> true);
builder.connectTimeout(CONNECT_TIMEOUT, TimeUnit.SECONDS);
builder.writeTimeout(WRITE_TIMEOUT, TimeUnit.SECONDS);
builder.readTimeout(READ_TIMEOUT, TimeUnit.SECONDS);
return builder.build();
} catch (KeyManagementException | NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
/**
* Get OkHttpClient with our preferred timeout values.
* @return OkHttpClient
*/
private static OkHttpClient getSafeOkHttpClient() {
OkHttpClient.Builder builder = new OkHttpClient.Builder();
builder.connectTimeout(CONNECT_TIMEOUT, TimeUnit.SECONDS);
builder.writeTimeout(WRITE_TIMEOUT, TimeUnit.SECONDS);
builder.readTimeout(READ_TIMEOUT, TimeUnit.SECONDS);
return builder.build();
}
}
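// A hedged usage sketch of the client above; host and credentials are
// illustrative, and the trailing 'true' selects the unsafe
// (certificate-ignoring) OkHttpClient:
// HmcRestClient hmc = new HmcRestClient("https://hmc.example.com:12443", "hscroot", "secret", true);
// hmc.login();
// for(ManagedSystem system : hmc.getManagedSystems().values()) {
//     String json = hmc.getPcmDataForManagedSystem(system); // JSON string, or null
//     if(json != null && !json.isEmpty()) {
//         system.processMetrics(json);
//     }
// }
// hmc.logoff();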

View File

@ -15,73 +15,84 @@
*/
package biz.nellemann.hmci;
import biz.nellemann.hmci.Configuration.InfluxObject;
import org.influxdb.BatchOptions;
import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBException;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import static java.lang.Thread.sleep;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.SocketException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.InfluxDBClientFactory;
import com.influxdb.client.WriteApi;
import com.influxdb.client.WriteOptions;
import com.influxdb.client.domain.WritePrecision;
import com.influxdb.client.write.Point;
import biz.nellemann.hmci.dto.toml.InfluxConfiguration;
import static java.lang.Thread.sleep;
public final class InfluxClient {
private final static Logger log = LoggerFactory.getLogger(InfluxClient.class);
final private String url;
final private String username;
final private String password;
final private String database;
private InfluxDB influxDB;
final private String org; // v2 only
final private String token;
final private String bucket; // Bucket in v2, Database in v1
InfluxClient(InfluxObject config) {
private InfluxDBClient influxDBClient;
private WriteApi writeApi;
InfluxClient(InfluxConfiguration config) {
this.url = config.url;
this.username = config.username;
this.password = config.password;
this.database = config.database;
if(config.org != null) {
this.org = config.org;
} else {
this.org = "hmci"; // In InfluxDB 1.x, there is no concept of organization.
}
if(config.token != null) {
this.token = config.token;
} else {
this.token = config.username + ":" + config.password;
}
if(config.bucket != null) {
this.bucket = config.bucket;
} else {
this.bucket = config.database;
}
}
synchronized void login() throws RuntimeException, InterruptedException {
if(influxDB != null) {
if(influxDBClient != null) {
return;
}
boolean connected = false;
int loginErrors = 0;
do {
try {
log.debug("Connecting to InfluxDB - {}", url);
influxDB = InfluxDBFactory.connect(url, username, password).setDatabase(database);
influxDB.version(); // This ensures that we actually try to connect to the db
influxDBClient = InfluxDBClientFactory.create(url, token.toCharArray(), org, bucket);
influxDBClient.version(); // This ensures that we actually try to connect to the db
Runtime.getRuntime().addShutdownHook(new Thread(influxDBClient::close));
influxDB.enableBatch(
BatchOptions.DEFAULTS
.threadFactory(runnable -> {
Thread thread = new Thread(runnable);
thread.setDaemon(true);
return thread;
})
); // (4)
Runtime.getRuntime().addShutdownHook(new Thread(influxDB::close));
// Todo: Handle events - https://github.com/influxdata/influxdb-client-java/tree/master/client#handle-the-events
writeApi = influxDBClient.makeWriteApi(
WriteOptions.builder()
.batchSize(15_000)
.bufferLimit(500_000)
.flushInterval(5_000)
.build());
connected = true;
} catch(Exception e) {
sleep(15 * 1000);
if(loginErrors++ > 3) {
@ -97,337 +108,33 @@ public final class InfluxClient {
synchronized void logoff() {
if(influxDB != null) {
influxDB.close();
if(influxDBClient != null) {
influxDBClient.close();
}
influxDB = null;
influxDBClient = null;
}
/*
synchronized void writeBatchPoints() throws Exception {
log.trace("writeBatchPoints()");
try {
influxDB.write(batchPoints);
batchPoints = BatchPoints.database(database).precision(TimeUnit.SECONDS).build();
errorCounter = 0;
} catch (InfluxDBException.DatabaseNotFoundException e) {
log.error("writeBatchPoints() - database \"{}\" not found/created: can't write data", database);
if (++errorCounter > 3) {
throw new RuntimeException(e);
}
} catch (org.influxdb.InfluxDBIOException e) {
log.warn("writeBatchPoints() - io exception: {}", e.getMessage());
if(++errorCounter < 3) {
log.warn("writeBatchPoints() - reconnecting to InfluxDB due to io exception.");
logoff();
login();
writeBatchPoints();
} else {
throw new RuntimeException(e);
}
} catch(Exception e) {
log.warn("writeBatchPoints() - general exception: {}", e.getMessage());
if(++errorCounter < 3) {
log.warn("writeBatchPoints() - reconnecting to InfluxDB due to general exception.");
logoff();
login();
writeBatchPoints();
} else {
throw new RuntimeException(e);
}
public void write(List<Measurement> measurements, String name) {
log.debug("write() - measurement: {} {}", name, measurements.size());
if(!measurements.isEmpty()) {
processMeasurementMap(measurements, name).forEach((point) -> {
writeApi.writePoint(point);
});
}
}
*/
/*
Managed System
*/
void writeManagedSystem(ManagedSystem system) {
if(system.metrics == null) {
log.trace("writeManagedSystem() - null metrics, skipping: {}", system.name);
return;
}
Instant timestamp = system.getTimestamp();
if(timestamp == null) {
log.warn("writeManagedSystem() - no timestamp, skipping: {}", system.name);
return;
}
//getSystemDetails(system, timestamp).forEach( it -> batchPoints.point(it) );
getSystemDetails(system, timestamp).forEach( it -> influxDB.write(it));
//getSystemProcessor(system, timestamp).forEach( it -> batchPoints.point(it) );
getSystemProcessor(system, timestamp).forEach( it -> influxDB.write(it) );
//getSystemPhysicalProcessorPool(system, timestamp).forEach( it -> batchPoints.point(it) );
getSystemPhysicalProcessorPool(system, timestamp).forEach( it -> influxDB.write(it) );
//getSystemSharedProcessorPools(system, timestamp).forEach( it -> batchPoints.point(it) );
getSystemSharedProcessorPools(system, timestamp).forEach( it -> influxDB.write(it) );
//getSystemMemory(system, timestamp).forEach( it -> batchPoints.point(it) );
getSystemMemory(system, timestamp).forEach( it -> influxDB.write(it) );
//getSystemViosDetails(system, timestamp).forEach(it -> batchPoints.point(it) );
getSystemViosDetails(system, timestamp).forEach(it -> influxDB.write(it) );
//getSystemViosProcessor(system, timestamp).forEach( it -> batchPoints.point(it) );
getSystemViosProcessor(system, timestamp).forEach( it -> influxDB.write(it) );
//getSystemViosMemory(system, timestamp).forEach( it -> batchPoints.point(it) );
getSystemViosMemory(system, timestamp).forEach( it -> influxDB.write(it) );
//getSystemViosNetworkLpars(system, timestamp).forEach(it -> batchPoints.point(it) );
getSystemViosNetworkLpars(system, timestamp).forEach(it -> influxDB.write(it) );
//getSystemViosNetworkGenericAdapters(system, timestamp).forEach(it -> batchPoints.point(it) );
getSystemViosNetworkGenericAdapters(system, timestamp).forEach(it -> influxDB.write(it) );
//getSystemViosNetworkSharedAdapters(system, timestamp).forEach(it -> batchPoints.point(it) );
getSystemViosNetworkSharedAdapters(system, timestamp).forEach(it -> influxDB.write(it) );
//getSystemViosNetworkVirtualAdapters(system, timestamp).forEach(it -> batchPoints.point(it) );
getSystemViosNetworkVirtualAdapters(system, timestamp).forEach(it -> influxDB.write(it) );
//getSystemViosStorageLpars(system, timestamp).forEach(it -> batchPoints.point(it) );
getSystemViosStorageLpars(system, timestamp).forEach(it -> influxDB.write(it) );
//getSystemViosFiberChannelAdapters(system, timestamp).forEach(it -> batchPoints.point(it) );
getSystemViosFiberChannelAdapters(system, timestamp).forEach(it -> influxDB.write(it) );
//getSystemViosStoragePhysicalAdapters(system, timestamp).forEach(it -> batchPoints.point(it) );
getSystemViosStoragePhysicalAdapters(system, timestamp).forEach(it -> influxDB.write(it) );
//getSystemViosStorageVirtualAdapters(system, timestamp).forEach(it -> batchPoints.point(it) );
getSystemViosStorageVirtualAdapters(system, timestamp).forEach(it -> influxDB.write(it) );
}
// TODO: server_details
private static List<Point> getSystemDetails(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getDetails();
return processMeasurementMap(metrics, timestamp, "server_details");
}
private static List<Point> getSystemProcessor(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getProcessorMetrics();
return processMeasurementMap(metrics, timestamp, "server_processor");
}
private static List<Point> getSystemPhysicalProcessorPool (ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getPhysicalProcessorPool();
return processMeasurementMap(metrics, timestamp, "server_physicalProcessorPool");
}
private static List<Point> getSystemSharedProcessorPools(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getSharedProcessorPools();
return processMeasurementMap(metrics, timestamp, "server_sharedProcessorPool");
}
private static List<Point> getSystemMemory(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getMemoryMetrics();
return processMeasurementMap(metrics, timestamp, "server_memory");
}
private static List<Point> getSystemViosDetails(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getViosDetails();
return processMeasurementMap(metrics, timestamp, "vios_details");
}
private static List<Point> getSystemViosProcessor(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getViosProcessorMetrics();
return processMeasurementMap(metrics, timestamp, "vios_processor");
}
private static List<Point> getSystemViosMemory(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getViosMemoryMetrics();
return processMeasurementMap(metrics, timestamp, "vios_memory");
}
private static List<Point> getSystemViosNetworkLpars(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getViosNetworkLpars();
return processMeasurementMap(metrics, timestamp, "vios_network_lpars");
}
private static List<Point> getSystemViosNetworkVirtualAdapters(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getViosNetworkVirtualAdapters();
return processMeasurementMap(metrics, timestamp, "vios_network_virtual");
}
private static List<Point> getSystemViosNetworkSharedAdapters(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getViosNetworkSharedAdapters();
return processMeasurementMap(metrics, timestamp, "vios_network_shared");
}
private static List<Point> getSystemViosNetworkGenericAdapters(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getViosNetworkGenericAdapters();
return processMeasurementMap(metrics, timestamp, "vios_network_generic");
}
private static List<Point> getSystemViosStorageLpars(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getViosStorageLpars();
return processMeasurementMap(metrics, timestamp, "vios_storage_lpars");
}
private static List<Point> getSystemViosFiberChannelAdapters(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getViosStorageFiberChannelAdapters();
return processMeasurementMap(metrics, timestamp, "vios_storage_FC");
}
private static List<Point> getSystemViosSharedStoragePools(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getViosStorageSharedStoragePools();
return processMeasurementMap(metrics, timestamp, "vios_storage_SSP");
}
private static List<Point> getSystemViosStoragePhysicalAdapters(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getViosStoragePhysicalAdapters();
return processMeasurementMap(metrics, timestamp, "vios_storage_physical");
}
private static List<Point> getSystemViosStorageVirtualAdapters(ManagedSystem system, Instant timestamp) {
List<Measurement> metrics = system.getViosStorageVirtualAdapters();
return processMeasurementMap(metrics, timestamp, "vios_storage_vFC");
}
/*
Logical Partitions
*/
void writeLogicalPartition(LogicalPartition partition) {
if(partition.metrics == null) {
log.warn("writeLogicalPartition() - null metrics, skipping: {}", partition.name);
return;
}
Instant timestamp = partition.getTimestamp();
if(timestamp == null) {
log.warn("writeLogicalPartition() - no timestamp, skipping: {}", partition.name);
return;
}
//getPartitionDetails(partition, timestamp).forEach( it -> batchPoints.point(it));
getPartitionDetails(partition, timestamp).forEach( it -> influxDB.write(it));
//getPartitionMemory(partition, timestamp).forEach( it -> batchPoints.point(it));
getPartitionMemory(partition, timestamp).forEach( it -> influxDB.write(it));
//getPartitionProcessor(partition, timestamp).forEach( it -> batchPoints.point(it));
getPartitionProcessor(partition, timestamp).forEach( it -> influxDB.write(it));
//getPartitionNetworkVirtual(partition, timestamp).forEach(it -> batchPoints.point(it));
getPartitionNetworkVirtual(partition, timestamp).forEach(it -> influxDB.write(it));
//getPartitionStorageVirtualGeneric(partition, timestamp).forEach(it -> batchPoints.point(it));
getPartitionStorageVirtualGeneric(partition, timestamp).forEach(it -> influxDB.write(it));
//getPartitionStorageVirtualFibreChannel(partition, timestamp).forEach(it -> batchPoints.point(it));
getPartitionStorageVirtualFibreChannel(partition, timestamp).forEach(it -> influxDB.write(it));
}
private static List<Point> getPartitionDetails(LogicalPartition partition, Instant timestamp) {
List<Measurement> metrics = partition.getDetails();
return processMeasurementMap(metrics, timestamp, "lpar_details");
}
private static List<Point> getPartitionProcessor(LogicalPartition partition, Instant timestamp) {
List<Measurement> metrics = partition.getProcessorMetrics();
return processMeasurementMap(metrics, timestamp, "lpar_processor");
}
private static List<Point> getPartitionMemory(LogicalPartition partition, Instant timestamp) {
List<Measurement> metrics = partition.getMemoryMetrics();
return processMeasurementMap(metrics, timestamp, "lpar_memory");
}
private static List<Point> getPartitionNetworkVirtual(LogicalPartition partition, Instant timestamp) {
List<Measurement> metrics = partition.getVirtualEthernetAdapterMetrics();
return processMeasurementMap(metrics, timestamp, "lpar_net_virtual"); // Not 'network'
}
// TODO: lpar_net_sriov
private static List<Point> getPartitionStorageVirtualGeneric(LogicalPartition partition, Instant timestamp) {
List<Measurement> metrics = partition.getVirtualGenericAdapterMetrics();
return processMeasurementMap(metrics, timestamp, "lpar_storage_virtual");
}
private static List<Point> getPartitionStorageVirtualFibreChannel(LogicalPartition partition, Instant timestamp) {
List<Measurement> metrics = partition.getVirtualFibreChannelAdapterMetrics();
return processMeasurementMap(metrics, timestamp, "lpar_storage_vFC");
}
/*
System Energy
Not supported on older HMCs (pre v8) or older Power servers (pre Power 8)
*/
void writeSystemEnergy(SystemEnergy systemEnergy) {
if(systemEnergy.metrics == null) {
log.trace("writeSystemEnergy() - null metrics, skipping: {}", systemEnergy.system.name);
return;
}
Instant timestamp = systemEnergy.getTimestamp();
if(timestamp == null) {
log.warn("writeSystemEnergy() - no timestamp, skipping: {}", systemEnergy.system.name);
return;
}
//getSystemEnergyPower(systemEnergy, timestamp).forEach(it -> batchPoints.point(it) );
getSystemEnergyPower(systemEnergy, timestamp).forEach(it -> influxDB.write(it) );
//getSystemEnergyTemperature(systemEnergy, timestamp).forEach(it -> batchPoints.point(it) );
getSystemEnergyTemperature(systemEnergy, timestamp).forEach(it -> influxDB.write(it) );
}
private static List<Point> getSystemEnergyPower(SystemEnergy system, Instant timestamp) {
List<Measurement> metrics = system.getPowerMetrics();
return processMeasurementMap(metrics, timestamp, "server_energy_power");
}
private static List<Point> getSystemEnergyTemperature(SystemEnergy system, Instant timestamp) {
List<Measurement> metrics = system.getThermalMetrics();
return processMeasurementMap(metrics, timestamp, "server_energy_thermal");
}
/*
Shared
*/
private static List<Point> processMeasurementMap(List<Measurement> measurements, Instant timestamp, String measurement) {
private List<Point> processMeasurementMap(List<Measurement> measurements, String name) {
List<Point> listOfPoints = new ArrayList<>();
measurements.forEach( m -> {
Point.Builder builder = Point.measurement(measurement)
.time(timestamp.getEpochSecond(), TimeUnit.SECONDS);
// Iterate fields
m.fields.forEach((fieldName, fieldValue) -> {
log.trace("processMeasurementMap() {} - fieldName: {}, fieldValue: {}", measurement, fieldName, fieldValue);
if(fieldValue instanceof Number) {
Number num = (Number) fieldValue;
builder.addField(fieldName, num);
} else if(fieldValue instanceof Boolean) {
Boolean bol = (Boolean) fieldValue;
builder.addField(fieldName, bol);
} else {
String str = (String) fieldValue;
builder.addField(fieldName, str);
}
});
// Iterate sorted tags
Map<String, String> sortedTags = new TreeMap<String, String>(m.tags);
sortedTags.forEach((tagName, tagValue) -> {
log.trace("processMeasurementMap() {} - tagName: {}, tagValue: {}", measurement, tagName, tagValue);
builder.tag(tagName, tagValue);
});
listOfPoints.add(builder.build());
measurements.forEach( (m) -> {
log.trace("processMeasurementMap() - timestamp: {}, tags: {}, fields: {}", m.timestamp, m.tags, m.fields);
Point point = new Point(name)
.time(m.timestamp.getEpochSecond(), WritePrecision.S)
.addTags(m.tags)
.addFields(m.fields);
listOfPoints.add(point);
});
return listOfPoints;
}
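// Illustrative write of a single measurement through the v2 path above;
// tag and field values are made up:
// Map<String, String> tags = Map.of("servername", "Server-9009-42A", "lparname", "lpar01");
// Map<String, Object> fields = Map.of("utilizedProcUnits", 0.42);
// influxClient.write(List.of(new Measurement(tags, fields)), "lpar_processor");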

View File

@ -15,130 +15,239 @@
*/
package biz.nellemann.hmci;
import biz.nellemann.hmci.dto.xml.Link;
import biz.nellemann.hmci.dto.xml.LogicalPartitionEntry;
import biz.nellemann.hmci.dto.xml.XmlEntry;
import biz.nellemann.hmci.dto.xml.XmlFeed;
import com.fasterxml.jackson.dataformat.xml.XmlMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.*;
class LogicalPartition extends MetaSystem {
class LogicalPartition extends Resource {
private final static Logger log = LoggerFactory.getLogger(LogicalPartition.class);
public final String id;
public final String name;
public final String type;
public final ManagedSystem system;
private final RestClient restClient;
private final InfluxClient influxClient;
private final ManagedSystem managedSystem;
LogicalPartition(String id, String name, String type, ManagedSystem system) {
this.id = id;
this.name = name;
this.type = type;
this.system = system;
protected String id;
protected String name;
protected LogicalPartitionEntry entry;
private String uriPath;
public LogicalPartition(RestClient restClient, InfluxClient influxClient, String href, ManagedSystem managedSystem) throws URISyntaxException {
log.debug("LogicalPartition() - {}", href);
this.restClient = restClient;
this.influxClient = influxClient;
this.managedSystem = managedSystem;
try {
URI uri = new URI(href);
uriPath = uri.getPath();
} catch (URISyntaxException e) {
log.error("LogicalPartition() - {}", e.getMessage());
}
}
@Override
public String toString() {
return String.format("[%s] %s (%s)", id, name, type);
return entry.getName();
}
public void discover() {
try {
String xml = restClient.getRequest(uriPath);
// Do not try to parse empty response
if(xml == null || xml.length() <= 1) {
log.warn("discover() - no data.");
return;
}
XmlMapper xmlMapper = new XmlMapper();
XmlEntry xmlEntry = xmlMapper.readValue(xml, XmlEntry.class);
if(xmlEntry.getContent() == null){
log.warn("discover() - no content.");
return;
}
this.id = xmlEntry.id;
if(xmlEntry.getContent().isLogicalPartition()) {
entry = xmlEntry.getContent().getLogicalPartitionEntry();
this.name = entry.getName();
log.info("discover() - [{}] {} ({})", String.format("%2d", entry.partitionId), entry.getName(), entry.operatingSystemType);
} else {
throw new UnsupportedOperationException("Failed to deserialize LogicalPartition");
}
} catch (Exception e) {
log.error("discover() - error: {}", e.getMessage());
}
}
public void refresh() {
log.debug("refresh() - {}", name);
try {
String xml = restClient.getRequest(String.format("/rest/api/pcm/ManagedSystem/%s/LogicalPartition/%s/ProcessedMetrics?NoOfSamples=%d", managedSystem.id, id, noOfSamples));
// Do not try to parse empty response
if(xml == null || xml.length() <= 1) {
log.warn("refresh() - no data.");
return;
}
XmlMapper xmlMapper = new XmlMapper();
XmlFeed xmlFeed = xmlMapper.readValue(xml, XmlFeed.class);
xmlFeed.entries.forEach((entry) -> {
if(entry.category.term.equals("LogicalPartition")) {
Link link = entry.link;
if (link.getType() != null && Objects.equals(link.getType(), "application/json")) {
try {
URI jsonUri = URI.create(link.getHref());
String json = restClient.getRequest(jsonUri.getPath());
deserialize(json);
} catch (IOException e) {
log.error("refresh() - error 1: {}", e.getMessage());
}
}
}
});
} catch (IOException e) {
log.error("refresh() - error 2: {}", e.getMessage());
}
}
@Override
public void process(int sample) throws NullPointerException {
log.debug("process() - {} - sample: {}", name, sample);
influxClient.write(getDetails(sample),"lpar_details");
influxClient.write(getMemoryMetrics(sample),"lpar_memory");
influxClient.write(getProcessorMetrics(sample),"lpar_processor");
influxClient.write(getSriovLogicalPorts(sample),"lpar_net_sriov");
influxClient.write(getVirtualEthernetAdapterMetrics(sample),"lpar_net_virtual");
influxClient.write(getVirtualGenericAdapterMetrics(sample),"lpar_storage_virtual");
influxClient.write(getVirtualFibreChannelAdapterMetrics(sample),"lpar_storage_vFC");
}
// LPAR Details
List<Measurement> getDetails() {
List<Measurement> getDetails(int sample) throws NullPointerException {
log.debug("getDetails()");
List<Measurement> list = new ArrayList<>();
Map<String, String> tagsMap = new HashMap<>();
tagsMap.put("servername", system.name);
tagsMap.put("lparname", name);
log.trace("getDetails() - tags: {}", tagsMap);
TreeMap<String, Object> fieldsMap = new TreeMap<>();
Map<String, Object> fieldsMap = new HashMap<>();
fieldsMap.put("id", metrics.systemUtil.sample.lparsUtil.id);
fieldsMap.put("type", metrics.systemUtil.sample.lparsUtil.type);
fieldsMap.put("state", metrics.systemUtil.sample.lparsUtil.state);
fieldsMap.put("osType", metrics.systemUtil.sample.lparsUtil.osType);
fieldsMap.put("affinityScore", metrics.systemUtil.sample.lparsUtil.affinityScore);
log.trace("getDetails() - fields: {}", fieldsMap);
tagsMap.put("servername", managedSystem.entry.getName());
tagsMap.put("lparname", entry.getName());
log.trace("getDetails() - tags: " + tagsMap);
fieldsMap.put("id", metric.getSample(sample).lparsUtil.id);
fieldsMap.put("type", metric.getSample(sample).lparsUtil.type);
fieldsMap.put("state", metric.getSample(sample).lparsUtil.state);
fieldsMap.put("osType", metric.getSample(sample).lparsUtil.osType);
fieldsMap.put("affinityScore", metric.getSample(sample).lparsUtil.affinityScore);
log.trace("getDetails() - fields: " + fieldsMap);
list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
list.add(new Measurement(tagsMap, fieldsMap));
return list;
}
// LPAR Memory
List<Measurement> getMemoryMetrics() {
List<Measurement> getMemoryMetrics(int sample) throws NullPointerException {
log.debug("getMemoryMetrics()");
List<Measurement> list = new ArrayList<>();
Map<String, String> tagsMap = new HashMap<>();
tagsMap.put("servername", system.name);
tagsMap.put("lparname", name);
log.trace("getMemoryMetrics() - tags: {}", tagsMap);
TreeMap<String, Object> fieldsMap = new TreeMap<>();
Map<String, Object> fieldsMap = new HashMap<>();
fieldsMap.put("logicalMem", metrics.systemUtil.sample.lparsUtil.memory.logicalMem);
fieldsMap.put("backedPhysicalMem", metrics.systemUtil.sample.lparsUtil.memory.backedPhysicalMem);
log.trace("getMemoryMetrics() - fields: {}", fieldsMap);
tagsMap.put("servername", managedSystem.entry.getName());
tagsMap.put("lparname", entry.getName());
log.trace("getMemoryMetrics() - tags: " + tagsMap);
fieldsMap.put("logicalMem", metric.getSample(sample).lparsUtil.memory.logicalMem);
fieldsMap.put("backedPhysicalMem", metric.getSample(sample).lparsUtil.memory.backedPhysicalMem);
log.trace("getMemoryMetrics() - fields: " + fieldsMap);
list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
list.add(new Measurement(tagsMap, fieldsMap));
return list;
}
// LPAR Processor
List<Measurement> getProcessorMetrics() {
List<Measurement> getProcessorMetrics(int sample) throws NullPointerException {
log.debug("getProcessorMetrics()");
List<Measurement> list = new ArrayList<>();
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("servername", system.name);
tagsMap.put("lparname", name);
log.trace("getProcessorMetrics() - tags: {}", tagsMap);
HashMap<String, Object> fieldsMap = new HashMap<>();
fieldsMap.put("utilizedProcUnits", metrics.systemUtil.sample.lparsUtil.processor.utilizedProcUnits);
fieldsMap.put("entitledProcUnits", metrics.systemUtil.sample.lparsUtil.processor.entitledProcUnits);
fieldsMap.put("donatedProcUnits", metrics.systemUtil.sample.lparsUtil.processor.donatedProcUnits);
fieldsMap.put("idleProcUnits", metrics.systemUtil.sample.lparsUtil.processor.idleProcUnits);
fieldsMap.put("maxProcUnits", metrics.systemUtil.sample.lparsUtil.processor.maxProcUnits);
fieldsMap.put("maxVirtualProcessors", metrics.systemUtil.sample.lparsUtil.processor.maxVirtualProcessors);
fieldsMap.put("currentVirtualProcessors", metrics.systemUtil.sample.lparsUtil.processor.currentVirtualProcessors);
fieldsMap.put("utilizedCappedProcUnits", metrics.systemUtil.sample.lparsUtil.processor.utilizedCappedProcUnits);
fieldsMap.put("utilizedUncappedProcUnits", metrics.systemUtil.sample.lparsUtil.processor.utilizedUncappedProcUnits);
fieldsMap.put("timePerInstructionExecution", metrics.systemUtil.sample.lparsUtil.processor.timeSpentWaitingForDispatch);
fieldsMap.put("timeSpentWaitingForDispatch", metrics.systemUtil.sample.lparsUtil.processor.timePerInstructionExecution);
fieldsMap.put("mode", metrics.systemUtil.sample.lparsUtil.processor.mode);
fieldsMap.put("weight", metrics.systemUtil.sample.lparsUtil.processor.weight);
fieldsMap.put("poolId", metrics.systemUtil.sample.lparsUtil.processor.poolId);
log.trace("getProcessorMetrics() - fields: {}", fieldsMap);
list.add(new Measurement(tagsMap, fieldsMap));
tagsMap.put("servername", managedSystem.entry.getName());
tagsMap.put("lparname", entry.getName());
log.trace("getProcessorMetrics() - tags: " + tagsMap);
fieldsMap.put("utilizedProcUnits", metric.getSample(sample).lparsUtil.processor.utilizedProcUnits);
fieldsMap.put("entitledProcUnits", metric.getSample(sample).lparsUtil.processor.entitledProcUnits);
fieldsMap.put("donatedProcUnits", metric.getSample(sample).lparsUtil.processor.donatedProcUnits);
fieldsMap.put("idleProcUnits", metric.getSample(sample).lparsUtil.processor.idleProcUnits);
fieldsMap.put("maxProcUnits", metric.getSample(sample).lparsUtil.processor.maxProcUnits);
fieldsMap.put("maxVirtualProcessors", metric.getSample(sample).lparsUtil.processor.maxVirtualProcessors);
fieldsMap.put("currentVirtualProcessors", metric.getSample(sample).lparsUtil.processor.currentVirtualProcessors);
fieldsMap.put("utilizedCappedProcUnits", metric.getSample(sample).lparsUtil.processor.utilizedCappedProcUnits);
fieldsMap.put("utilizedUncappedProcUnits", metric.getSample(sample).lparsUtil.processor.utilizedUncappedProcUnits);
fieldsMap.put("timePerInstructionExecution", metric.getSample(sample).lparsUtil.processor.timeSpentWaitingForDispatch);
fieldsMap.put("timeSpentWaitingForDispatch", metric.getSample(sample).lparsUtil.processor.timePerInstructionExecution);
fieldsMap.put("mode", metric.getSample(sample).lparsUtil.processor.mode);
fieldsMap.put("weight", metric.getSample(sample).lparsUtil.processor.weight);
fieldsMap.put("poolId", metric.getSample(sample).lparsUtil.processor.poolId);
log.trace("getProcessorMetrics() - fields: " + fieldsMap);
list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
return list;
}
// LPAR Network - Virtual
List<Measurement> getVirtualEthernetAdapterMetrics() {
List<Measurement> getVirtualEthernetAdapterMetrics(int sample) throws NullPointerException {
log.debug("getVirtualEthernetAdapterMetrics()");
List<Measurement> list = new ArrayList<>();
metrics.systemUtil.sample.lparsUtil.network.virtualEthernetAdapters.forEach( adapter -> {
metric.getSample(sample).lparsUtil.network.virtualEthernetAdapters.forEach(adapter -> {
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("servername", system.name);
tagsMap.put("lparname", name);
HashMap<String, Object> fieldsMap = new HashMap<>();
tagsMap.put("servername", managedSystem.entry.getName());
tagsMap.put("lparname", entry.getName());
tagsMap.put("location", adapter.physicalLocation);
tagsMap.put("viosId", adapter.viosId.toString());
tagsMap.put("vlanId", adapter.vlanId.toString());
tagsMap.put("vswitchId", adapter.vswitchId.toString());
log.trace("getVirtualEthernetAdapterMetrics() - tags: {}", tagsMap);
log.trace("getVirtualEthernetAdapterMetrics() - tags: " + tagsMap);
HashMap<String, Object> fieldsMap = new HashMap<>();
fieldsMap.put("droppedPackets", adapter.droppedPackets);
fieldsMap.put("droppedPhysicalPackets", adapter.droppedPhysicalPackets);
fieldsMap.put("isPortVlanId", adapter.isPortVlanId);
@ -153,9 +262,9 @@ class LogicalPartition extends MetaSystem {
fieldsMap.put("transferredBytes", adapter.transferredBytes);
fieldsMap.put("transferredPhysicalBytes", adapter.transferredPhysicalBytes);
fieldsMap.put("sharedEthernetAdapterId", adapter.sharedEthernetAdapterId);
log.trace("getVirtualEthernetAdapterMetrics() - fields: {}", fieldsMap);
log.trace("getVirtualEthernetAdapterMetrics() - fields: " + fieldsMap);
list.add(new Measurement(tagsMap, fieldsMap));
list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
});
return list;
@ -163,59 +272,93 @@ class LogicalPartition extends MetaSystem {
// LPAR Storage - Virtual Generic
List<Measurement> getVirtualGenericAdapterMetrics() {
List<Measurement> getVirtualGenericAdapterMetrics(int sample) throws NullPointerException {
log.debug("getVirtualGenericAdapterMetrics()");
List<Measurement> list = new ArrayList<>();
metrics.systemUtil.sample.lparsUtil.storage.genericVirtualAdapters.forEach( adapter -> {
metric.getSample(sample).lparsUtil.storage.genericVirtualAdapters.forEach(adapter -> {
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("servername", system.name);
tagsMap.put("lparname", name);
HashMap<String, Object> fieldsMap = new HashMap<>();
tagsMap.put("servername", managedSystem.entry.getName());
tagsMap.put("lparname", entry.getName());
tagsMap.put("viosId", adapter.viosId.toString());
tagsMap.put("location", adapter.physicalLocation);
tagsMap.put("id", adapter.id);
log.trace("getVirtualGenericAdapterMetrics() - tags: {}", tagsMap);
log.trace("getVirtualGenericAdapterMetrics() - tags: " + tagsMap);
HashMap<String, Object> fieldsMap = new HashMap<>();
fieldsMap.put("numOfReads", adapter.numOfReads);
fieldsMap.put("numOfWrites", adapter.numOfWrites);
fieldsMap.put("writeBytes", adapter.writeBytes);
fieldsMap.put("readBytes", adapter.readBytes);
fieldsMap.put("type", adapter.type);
log.trace("getVirtualGenericAdapterMetrics() - fields: {}", fieldsMap);
log.trace("getVirtualGenericAdapterMetrics() - fields: " + fieldsMap);
list.add(new Measurement(tagsMap, fieldsMap));
list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
});
return list;
}
// LPAR Storage - Virtual FC
List<Measurement> getVirtualFibreChannelAdapterMetrics() {
// LPAR Storage - Virtual FC
List<Measurement> getVirtualFibreChannelAdapterMetrics(int sample) throws NullPointerException {
log.debug("getVirtualFibreChannelAdapterMetrics()");
List<Measurement> list = new ArrayList<>();
metrics.systemUtil.sample.lparsUtil.storage.virtualFiberChannelAdapters.forEach( adapter -> {
metric.getSample(sample).lparsUtil.storage.virtualFiberChannelAdapters.forEach(adapter -> {
HashMap<String, String> tagsMap = new HashMap<>();
tagsMap.put("servername", system.name);
tagsMap.put("lparname", name);
HashMap<String, Object> fieldsMap = new HashMap<>();
tagsMap.put("servername", managedSystem.entry.getName());
tagsMap.put("lparname", entry.getName());
tagsMap.put("viosId", adapter.viosId.toString());
tagsMap.put("location", adapter.physicalLocation);
log.trace("getVirtualFibreChannelAdapterMetrics() - tags: {}", tagsMap);
log.trace("getVirtualFibreChannelAdapterMetrics() - tags: " + tagsMap);
HashMap<String, Object> fieldsMap = new HashMap<>();
fieldsMap.put("numOfReads", adapter.numOfReads);
fieldsMap.put("numOfWrites", adapter.numOfWrites);
fieldsMap.put("writeBytes", adapter.writeBytes);
fieldsMap.put("readBytes", adapter.readBytes);
fieldsMap.put("runningSpeed", adapter.runningSpeed);
fieldsMap.put("transmittedBytes", adapter.transmittedBytes);
fieldsMap.put("transferredByte", adapter.transmittedBytes); // TODO: Must be error in dashboard, remove when checked.
//fieldsMap.put("wwpn", adapter.wwpn);
//fieldsMap.put("wwpn2", adapter.wwpn2);
log.trace("getVirtualFibreChannelAdapterMetrics() - fields: {}", fieldsMap);
log.trace("getVirtualFibreChannelAdapterMetrics() - fields: " + fieldsMap);
list.add(new Measurement(tagsMap, fieldsMap));
list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
});
return list;
}
// LPAR Network - SR-IOV Logical Ports
List<Measurement> getSriovLogicalPorts(int sample) throws NullPointerException {
log.debug("getSriovLogicalPorts()");
List<Measurement> list = new ArrayList<>();
metric.getSample(sample).lparsUtil.network.sriovLogicalPorts.forEach(port -> {
HashMap<String, String> tagsMap = new HashMap<>();
HashMap<String, Object> fieldsMap = new HashMap<>();
tagsMap.put("servername", managedSystem.entry.getName());
tagsMap.put("lparname", entry.getName());
tagsMap.put("location", port.physicalLocation);
log.trace("getSriovLogicalPorts() - tags: " + tagsMap);
fieldsMap.put("sentBytes", port.sentBytes);
fieldsMap.put("receivedBytes", port.receivedBytes);
fieldsMap.put("transferredBytes", port.transferredBytes);
fieldsMap.put("sentPackets", port.sentPackets);
fieldsMap.put("receivedPackets", port.receivedPackets);
fieldsMap.put("droppedPackets", port.droppedPackets);
fieldsMap.put("errorIn", port.errorIn);
fieldsMap.put("errorOut", port.errorOut);
log.trace("getSriovLogicalPorts() - fields: " + fieldsMap);
list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
});
return list;

File diff suppressed because it is too large

View File

@ -0,0 +1,201 @@
/*
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package biz.nellemann.hmci;
import java.io.IOException;
import static java.lang.Thread.sleep;
import java.time.Duration;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicBoolean;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.dataformat.xml.XmlMapper;
import biz.nellemann.hmci.dto.toml.HmcConfiguration;
import biz.nellemann.hmci.dto.xml.Link;
import biz.nellemann.hmci.dto.xml.ManagementConsoleEntry;
import biz.nellemann.hmci.dto.xml.XmlFeed;
class ManagementConsole implements Runnable {
private final static Logger log = LoggerFactory.getLogger(ManagementConsole.class);
private final Integer refreshValue;
private final Integer discoverValue;
private final List<ManagedSystem> managedSystems = new ArrayList<>();
private final RestClient restClient;
private final InfluxClient influxClient;
private final AtomicBoolean keepRunning = new AtomicBoolean(true);
protected Integer responseErrors = 0;
private Boolean doEnergy = true;
private final List<String> excludeSystems;
private final List<String> includeSystems;
private final List<String> excludePartitions;
private final List<String> includePartitions;
ManagementConsole(HmcConfiguration configuration, InfluxClient influxClient) {
this.refreshValue = configuration.refresh;
this.discoverValue = configuration.discover;
this.doEnergy = configuration.energy;
this.influxClient = influxClient;
restClient = new RestClient(configuration.url, configuration.username, configuration.password, configuration.trust);
this.excludeSystems = configuration.excludeSystems;
this.includeSystems = configuration.includeSystems;
this.excludePartitions = configuration.excludePartitions;
this.includePartitions = configuration.includePartitions;
}
@Override
public void run() {
log.trace("run()");
Instant lastDiscover = Instant.now();
restClient.login();
discover();
do {
Instant instantStart = Instant.now();
try {
refresh();
if(instantStart.isAfter(lastDiscover.plus(discoverValue, ChronoUnit.MINUTES))) {
lastDiscover = instantStart;
discover();
}
} catch (Exception e) {
log.error("run() - fatal error: {}", e.getMessage());
keepRunning.set(false);
throw new RuntimeException(e);
}
Instant instantEnd = Instant.now();
long timeSpend = Duration.between(instantStart, instantEnd).toMillis();
log.trace("run() - duration millis: " + timeSpend);
if(timeSpend < (refreshValue * 1000)) {
try {
long sleepTime = (refreshValue * 1000) - timeSpend;
log.trace("run() - sleeping millis: " + sleepTime);
if(sleepTime > 0) {
//noinspection BusyWait
sleep(sleepTime);
}
} catch (InterruptedException e) {
log.error("run() - sleep interrupted", e);
}
} else {
log.warn("run() - possible slow response from this HMC");
}
} while (keepRunning.get());
// Logout of HMC
restClient.logoff();
}
public void discover() {
try {
String xml = restClient.getRequest("/rest/api/uom/ManagementConsole");
// Do not try to parse empty response
if(xml == null || xml.length() <= 1) {
responseErrors++;
log.warn("discover() - no data.");
return;
}
XmlMapper xmlMapper = new XmlMapper();
XmlFeed xmlFeed = xmlMapper.readValue(xml, XmlFeed.class);
ManagementConsoleEntry entry;
if(xmlFeed.getEntry() == null){
log.warn("discover() - xmlFeed.entry == null");
return;
}
if(xmlFeed.getEntry().getContent().isManagementConsole()) {
entry = xmlFeed.getEntry().getContent().getManagementConsole();
//log.info("discover() - {}", entry.getName());
} else {
throw new UnsupportedOperationException("Failed to deserialize ManagementConsole");
}
managedSystems.clear();
for (Link link : entry.getAssociatedManagedSystems()) {
ManagedSystem managedSystem = new ManagedSystem(restClient, influxClient, link.getHref());
managedSystem.setExcludePartitions(excludePartitions);
managedSystem.setIncludePartitions(includePartitions);
managedSystem.discover();
// Only continue for powered-on operating systems
if(managedSystem.entry != null && Objects.equals(managedSystem.entry.state, "operating")) {
if(doEnergy) {
managedSystem.getPcmPreferences();
managedSystem.setDoEnergy(doEnergy);
}
// Check exclude / include
if (!excludeSystems.contains(managedSystem.name) && includeSystems.isEmpty()) {
managedSystems.add(managedSystem);
//log.info("discover() - adding !excluded system: {}", managedSystem.name);
} else if (!includeSystems.isEmpty() && includeSystems.contains(managedSystem.name)) {
managedSystems.add(managedSystem);
//log.info("discover() - adding included system: {}", managedSystem.name);
}
}
}
} catch (IOException e) {
log.warn("discover() - error: {}", e.getMessage());
}
}
void refresh() {
log.debug("refresh()");
managedSystems.forEach( (system) -> {
if(system.entry == null){
log.warn("refresh() - no data.");
return;
}
system.refresh();
system.process();
});
}
}
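
A note on the timing logic in run() above: each pass measures how long refresh() took and sleeps only for the remainder of the configured refresh interval, while a separate timer re-runs discover() every discoverValue minutes. A minimal, self-contained sketch of that fixed-cadence pattern (class and variable names here are illustrative, not part of this diff):

import java.time.Duration;
import java.time.Instant;

// Sketch of the fixed-cadence polling used by ManagementConsole.run().
public class CadenceSketch {
    public static void main(String[] args) throws InterruptedException {
        long refreshSeconds = 5; // stand-in for configuration.refresh
        for (int i = 0; i < 3; i++) {
            Instant start = Instant.now();
            Thread.sleep(1200); // stand-in for the real refresh() work
            long spentMillis = Duration.between(start, Instant.now()).toMillis();
            long remaining = (refreshSeconds * 1000) - spentMillis;
            if (remaining > 0) {
                Thread.sleep(remaining); // sleep only the remainder of the interval
            } else {
                System.out.println("slow response, skip sleeping"); // cf. the warn above
            }
            System.out.println("tick " + i);
        }
    }
}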

View File

@ -15,14 +15,23 @@
*/
package biz.nellemann.hmci;
+ import java.time.Instant;
import java.util.Map;
public class Measurement {
+ final Instant timestamp;
final Map<String, String> tags;
final Map<String, Object> fields;
Measurement(Map<String, String> tags, Map<String, Object> fields) {
+ this.timestamp = Instant.now();
this.tags = tags;
this.fields = fields;
}
+ Measurement(Instant timestamp, Map<String, String> tags, Map<String, Object> fields) {
+ this.timestamp = timestamp;
+ this.tags = tags;
+ this.fields = fields;
+ }
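
Measurement is a thin value object pairing an Instant with InfluxDB-style tag and field maps; the added constructor lets callers stamp a point with the HMC sample time instead of the wall clock. A small sketch of how such a point is assembled (the values are invented for illustration):

import java.time.Instant;
import java.util.HashMap;
import java.util.Map;

// Sketch: the tag/field maps a Measurement wraps, mirroring how
// SystemEnergy.getPowerMetrics() builds a point further down.
public class MeasurementSketch {
    public static void main(String[] args) {
        Map<String, String> tags = new HashMap<>();
        tags.put("servername", "p750A"); // hypothetical managed system name
        Map<String, Object> fields = new HashMap<>();
        fields.put("powerReading", 542.0f); // hypothetical watts
        Instant ts = Instant.parse("2023-05-17T18:00:00Z"); // sample timestamp
        System.out.println(ts + " tags=" + tags + " fields=" + fields);
    }
}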

View File

@ -1,133 +0,0 @@
/*
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package biz.nellemann.hmci;
import biz.nellemann.hmci.pcm.PcmData;
import com.serjltt.moshi.adapters.FirstElement;
import com.squareup.moshi.FromJson;
import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.Moshi;
import com.squareup.moshi.ToJson;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.math.BigDecimal;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
abstract class MetaSystem {
private final static Logger log = LoggerFactory.getLogger(MetaSystem.class);
private final JsonAdapter<PcmData> jsonAdapter;
protected PcmData metrics;
MetaSystem() {
try {
Moshi moshi = new Moshi.Builder().add(new NumberAdapter()).add(new BigDecimalAdapter()).add(FirstElement.ADAPTER_FACTORY).build();
jsonAdapter = moshi.adapter(PcmData.class);
} catch(Exception e) {
log.warn("MetaSystem() error", e);
throw new ExceptionInInitializerError(e);
}
}
void processMetrics(String json) {
try {
metrics = jsonAdapter.nullSafe().fromJson(json);
} catch(IOException e) {
log.warn("processMetrics() error", e);
}
//System.out.println(jsonAdapter.toJson(metrics));
}
Instant getTimestamp() {
String timestamp = getStringMetricObject(metrics.systemUtil.sample.sampleInfo.timeStamp);
Instant instant = Instant.now();
try {
log.trace("getTimeStamp() - PMC Timestamp: {}", timestamp);
DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[XXX][X]");
instant = Instant.from(dateTimeFormatter.parse(timestamp));
log.trace("getTimestamp() - Instant: {}", instant.toString());
} catch(DateTimeParseException e) {
log.warn("getTimestamp() - parse error: {}", timestamp);
}
return instant;
}
String getStringMetricObject(Object obj) {
String metric = null;
try {
metric = (String) obj;
} catch (NullPointerException npe) {
log.warn("getStringMetricObject()", npe);
}
return metric;
}
Number getNumberMetricObject(Object obj) {
Number metric = null;
try {
metric = (Number) obj;
} catch (NullPointerException npe) {
log.warn("getNumberMetricObject()", npe);
}
return metric;
}
static class BigDecimalAdapter {
@FromJson
BigDecimal fromJson(String string) {
return new BigDecimal(string);
}
@ToJson
String toJson(BigDecimal value) {
return value.toString();
}
}
static class NumberAdapter {
@FromJson
Number fromJson(String string) {
return Double.parseDouble(string);
}
@ToJson
String toJson(Number value) {
return value.toString();
}
}
}

View File

@ -0,0 +1,136 @@
package biz.nellemann.hmci;
import biz.nellemann.hmci.dto.json.ProcessedMetrics;
import biz.nellemann.hmci.dto.json.SystemUtil;
import biz.nellemann.hmci.dto.json.UtilSample;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.ArrayList;
public abstract class Resource {
private final static Logger log = LoggerFactory.getLogger(Resource.class);
private final ObjectMapper objectMapper = new ObjectMapper();
private final ArrayList<String> sampleHistory = new ArrayList<>();
protected SystemUtil metric;
protected final int MAX_NUMBER_OF_SAMPLES = 60;
protected final int MIN_NUMBER_OF_SAMPLES = 5;
protected int noOfSamples = MAX_NUMBER_OF_SAMPLES;
Resource() {
objectMapper.enable(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS);
objectMapper.enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY);
objectMapper.enable(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT);
}
void deserialize(String json) {
if(json == null || json.length() < 1) {
return;
}
try {
ProcessedMetrics processedMetrics = objectMapper.readValue(json, ProcessedMetrics.class);
metric = processedMetrics.systemUtil;
log.trace("deserialize() - samples: {}", metric.samples.size());
} catch (Exception e) {
log.error("deserialize() - error: {}", e.getMessage());
}
}
Instant getTimestamp() {
Instant instant = Instant.now();
if (metric == null) {
return instant;
}
String timestamp = metric.getSample().sampleInfo.timestamp;
try {
log.trace("getTimeStamp() - PMC Timestamp: {}", timestamp);
DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[XXX][X]");
instant = Instant.from(dateTimeFormatter.parse(timestamp));
log.trace("getTimestamp() - Instant: {}", instant.toString());
} catch(DateTimeParseException e) {
log.warn("getTimestamp() - parse error: {}", timestamp);
}
return instant;
}
Instant getTimestamp(int sampleNumber) {
Instant instant = Instant.now();
if (metric == null) {
return instant;
}
String timestamp = metric.getSample(sampleNumber).sampleInfo.timestamp;
try {
log.trace("getTimeStamp() - PMC Timestamp: {}", timestamp);
DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[XXX][X]");
instant = Instant.from(dateTimeFormatter.parse(timestamp));
log.trace("getTimestamp() - Instant: {}", instant.toString());
} catch(DateTimeParseException e) {
log.warn("getTimestamp() - parse error: {}", timestamp);
}
return instant;
}
public void process() {
if(metric == null) {
return;
}
int processed = 0;
int sampleSize = metric.samples.size();
log.debug("process() - Samples Returned: {}, Samples in History: {}, Fetch Next Counter: {}", sampleSize, sampleHistory.size(), noOfSamples);
for(int i = 0; i<sampleSize; i++) {
UtilSample sample = metric.getSample(i);
String timestamp = sample.getInfo().timestamp;
if(sampleHistory.contains(timestamp)) {
//log.info("process() - Sample \"{}\" already processed", timestamp);
continue; // Already processed
}
try {
process(i);
processed++;
sampleHistory.add(timestamp); // Add to processed history
} catch (NullPointerException e) {
log.warn("process() - error", e);
}
}
// Remove old elements from history
for(int n = noOfSamples; n < sampleHistory.size(); n++) {
//log.info("process() - Removing element no. {} from sampleHistory: {}", n, sampleHistory.get(0));
sampleHistory.remove(0);
}
// Decrease down to minSamples
if(noOfSamples > MIN_NUMBER_OF_SAMPLES) {
noOfSamples = Math.min( (noOfSamples - 1), Math.max( (noOfSamples - processed) + 5, MIN_NUMBER_OF_SAMPLES));
}
}
public abstract void process(int sample) throws NullPointerException;
}
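
The adaptive counter in process() is worth spelling out: noOfSamples starts at MAX_NUMBER_OF_SAMPLES (60) so the first fetch after a restart backfills history, then shrinks by one per poll, bounded below by (noOfSamples - processed) + 5 and by MIN_NUMBER_OF_SAMPLES, once most returned samples are duplicates. A runnable sketch of just that arithmetic, under the assumption of one new sample per poll:

// Sketch: decay of the fetch counter in Resource.process() above.
public class SampleCounterSketch {
    public static void main(String[] args) {
        final int MIN = 5;           // MIN_NUMBER_OF_SAMPLES
        int noOfSamples = 60;        // MAX_NUMBER_OF_SAMPLES
        for (int poll = 1; poll <= 60; poll++) {
            int processed = 1;       // assume one new sample per poll
            if (noOfSamples > MIN) {
                noOfSamples = Math.min(noOfSamples - 1,
                        Math.max((noOfSamples - processed) + 5, MIN));
            }
        }
        System.out.println("settled at " + noOfSamples); // prints: settled at 5
    }
}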

View File

@ -0,0 +1,362 @@
package biz.nellemann.hmci;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.cert.X509Certificate;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.dataformat.xml.XmlMapper;
import biz.nellemann.hmci.dto.xml.LogonResponse;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
public class RestClient {
private final static Logger log = LoggerFactory.getLogger(RestClient.class);
private final MediaType MEDIA_TYPE_IBM_XML_LOGIN = MediaType.parse("application/vnd.ibm.powervm.web+xml; type=LogonRequest");
private final MediaType MEDIA_TYPE_IBM_XML_POST = MediaType.parse("application/xml, application/vnd.ibm.powervm.pcm.dita");
protected OkHttpClient httpClient;
// OkHttpClient timeouts
private final static int CONNECT_TIMEOUT_SEC = 10;
private final static int WRITE_TIMEOUT_SEC = 30;
private final static int READ_TIMEOUT_SEC = 180;
protected String authToken;
protected final String baseUrl;
protected final String username;
protected final String password;
private final static int MAX_MINUTES_BETWEEN_AUTHENTICATION = 60; // TODO: Make configurable and match HMC timeout settings
private Instant lastAuthenticationTimestamp;
public RestClient(String baseUrl, String username, String password, Boolean trustAll) {
this.baseUrl = baseUrl;
this.username = username;
this.password = password;
if (trustAll) {
this.httpClient = getUnsafeOkHttpClient();
} else {
this.httpClient = getSafeOkHttpClient();
}
/*
if(configuration.trace != null) {
try {
File traceDir = new File(configuration.trace);
traceDir.mkdirs();
if(traceDir.canWrite()) {
Boolean doTrace = true;
} else {
log.warn("ManagementConsole() - can't write to trace dir: " + traceDir.toString());
}
} catch (Exception e) {
log.error("ManagementConsole() - trace error: " + e.getMessage());
}
}*/
Thread shutdownHook = new Thread(this::logoff);
Runtime.getRuntime().addShutdownHook(shutdownHook);
}
/**
* Logon to the HMC and get an authentication token for further requests.
*/
public synchronized void login() {
if(authToken != null) {
logoff();
}
log.info("Connecting to HMC - {} @ {}", username, baseUrl);
StringBuilder payload = new StringBuilder();
payload.append("<?xml version='1.0' encoding='UTF-8' standalone='yes'?>");
payload.append("<LogonRequest xmlns='http://www.ibm.com/xmlns/systems/power/firmware/web/mc/2012_10/' schemaVersion='V1_0'>");
payload.append("<UserID>").append(username).append("</UserID>");
payload.append("<Password>").append(password).append("</Password>");
payload.append("</LogonRequest>");
try {
//httpClient.start();
URL url = new URL(String.format("%s/rest/api/web/Logon", baseUrl));
Request request = new Request.Builder()
.url(url)
.addHeader("Accept", "application/vnd.ibm.powervm.web+xml; type=LogonResponse")
.addHeader("X-Audit-Memento", "IBM Power HMC Insights")
.put(RequestBody.create(payload.toString(), MEDIA_TYPE_IBM_XML_LOGIN))
.build();
String responseBody;
try (Response response = httpClient.newCall(request).execute()) {
responseBody = Objects.requireNonNull(response.body()).string();
if (!response.isSuccessful()) {
log.warn("login() - Unexpected response: {}", response.code());
throw new IOException("Unexpected code: " + response);
}
}
XmlMapper xmlMapper = new XmlMapper();
LogonResponse logonResponse = xmlMapper.readValue(responseBody, LogonResponse.class);
authToken = logonResponse.getToken();
lastAuthenticationTimestamp = Instant.now();
log.debug("logon() - auth token: {}", authToken);
} catch (Exception e) {
log.warn("logon() - error: {}", e.getMessage());
lastAuthenticationTimestamp = null;
}
}
/**
* Logoff from the HMC and remove any session
*
*/
synchronized void logoff() {
if(authToken == null) {
return;
}
try {
URL url = new URL(String.format("%s/rest/api/web/Logon", baseUrl));
Request request = new Request.Builder()
.url(url)
.addHeader("Content-Type", "application/vnd.ibm.powervm.web+xml; type=LogonRequest")
.addHeader("X-API-Session", authToken)
.delete()
.build();
try (Response response = httpClient.newCall(request).execute()) { // response ignored; try-with-resources only ensures it is closed
} catch (IOException e) {
log.warn("logoff() error: {}", e.getMessage());
} finally {
authToken = null;
lastAuthenticationTimestamp = null;
}
} catch (MalformedURLException e) {
log.warn("logoff() - error: {}", e.getMessage());
}
}
public String getRequest(String urlPath) throws IOException {
URL absUrl = new URL(String.format("%s%s", baseUrl, urlPath));
return getRequest(absUrl);
}
public String postRequest(String urlPath, String payload) throws IOException {
URL absUrl = new URL(String.format("%s%s", baseUrl, urlPath));
return postRequest(absUrl, payload);
}
/**
* Return a Response from the HMC
* @param url to get Response from
* @return Response body string
* @throws IOException
*/
public synchronized String getRequest(URL url) throws IOException {
log.debug("getRequest() - URL: {}", url.toString());
if (lastAuthenticationTimestamp == null || lastAuthenticationTimestamp.plus(MAX_MINUTES_BETWEEN_AUTHENTICATION, ChronoUnit.MINUTES).isBefore(Instant.now())) {
login();
}
Request request = new Request.Builder()
.url(url)
.addHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
.addHeader("X-API-Session", (authToken == null ? "" : authToken))
.get().build();
String responseBody;
try (Response response = httpClient.newCall(request).execute()) {
responseBody = Objects.requireNonNull(response.body()).string();
if (!response.isSuccessful()) {
// Auth. failure
if(response.code() == 401) {
log.warn("getRequest() - 401 - login and retry.");
// Let's login again and retry
login();
return retryGetRequest(url);
}
log.error("getRequest() - Unexpected response: {}", response.code());
throw new IOException("getRequest() - Unexpected response: " + response.code());
}
}
return responseBody;
}
private String retryGetRequest(URL url) throws IOException {
log.debug("retryGetRequest() - URL: {}", url.toString());
Request request = new Request.Builder()
.url(url)
.addHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8")
.addHeader("X-API-Session", (authToken == null ? "" : authToken))
.get().build();
String responseBody = null;
try (Response responseRetry = httpClient.newCall(request).execute()) {
if(responseRetry.isSuccessful()) {
responseBody = Objects.requireNonNull(responseRetry.body()).string();
}
}
return responseBody;
}
/**
* Send a POST request with a payload (can be null) to the HMC
* @param url
* @param payload
* @return Response body string
* @throws IOException
*/
public synchronized String postRequest(URL url, String payload) throws IOException {
log.debug("sendPostRequest() - URL: {}", url.toString());
if (lastAuthenticationTimestamp == null || lastAuthenticationTimestamp.plus(MAX_MINUTES_BETWEEN_AUTHENTICATION, ChronoUnit.MINUTES).isBefore(Instant.now())) {
login();
}
RequestBody requestBody;
if(payload != null) {
requestBody = RequestBody.create(payload, MEDIA_TYPE_IBM_XML_POST);
} else {
requestBody = RequestBody.create("", null);
}
Request request = new Request.Builder()
.url(url)
.addHeader("content-type", "application/xml")
.addHeader("X-API-Session", (authToken == null ? "" : authToken) )
.post(requestBody).build();
String responseBody;
try (Response response = httpClient.newCall(request).execute()) {
responseBody = Objects.requireNonNull(response.body()).string();
if (!response.isSuccessful()) {
response.close();
//log.warn(responseBody);
log.error("sendPostRequest() - Unexpected response: {}", response.code());
throw new IOException("sendPostRequest() - Unexpected response: " + response.code());
}
}
return responseBody;
}
/**
* Provide an unsafe (ignoring SSL problems) OkHttpClient
*
* @return OkHttpClient ignoring SSL/TLS errors
*/
private static OkHttpClient getUnsafeOkHttpClient() {
try {
// Create a trust manager that does not validate certificate chains
final TrustManager[] trustAllCerts = new TrustManager[] {
new X509TrustManager() {
@Override
public void checkClientTrusted(X509Certificate[] chain, String authType) { }
@Override
public void checkServerTrusted(X509Certificate[] chain, String authType) {
}
@Override
public X509Certificate[] getAcceptedIssuers() {
return new X509Certificate[]{};
}
}
};
// Install the all-trusting trust manager
final SSLContext sslContext = SSLContext.getInstance("SSL");
sslContext.init(null, trustAllCerts, new SecureRandom());
// Create a ssl socket factory with our all-trusting manager
final SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();
OkHttpClient.Builder builder = new OkHttpClient.Builder();
builder.sslSocketFactory(sslSocketFactory, (X509TrustManager)trustAllCerts[0]);
builder.hostnameVerifier((hostname, session) -> true);
builder.connectTimeout(CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS);
builder.writeTimeout(WRITE_TIMEOUT_SEC, TimeUnit.SECONDS);
builder.readTimeout(READ_TIMEOUT_SEC, TimeUnit.SECONDS);
return builder.build();
} catch (KeyManagementException | NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
/**
* Get OkHttpClient with our preferred timeout values.
* @return OkHttpClient
*/
private static OkHttpClient getSafeOkHttpClient() {
OkHttpClient.Builder builder = new OkHttpClient.Builder();
builder.connectTimeout(CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS);
builder.writeTimeout(WRITE_TIMEOUT_SEC, TimeUnit.SECONDS);
builder.readTimeout(READ_TIMEOUT_SEC, TimeUnit.SECONDS);
return builder.build();
}
/*
private void writeTraceFile(String id, String json) {
String fileName = String.format("%s-%s.json", id, Instant.now().toString());
try {
log.debug("Writing trace file: " + fileName);
File traceFile = new File(traceDir, fileName);
BufferedWriter writer = new BufferedWriter(new FileWriter(traceFile));
writer.write(json);
writer.close();
} catch (IOException e) {
log.warn("writeTraceFile() - " + e.getMessage());
}
}
*/
}
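
RestClient above owns the whole session lifecycle: login() obtains an X-API-Session token, getRequest() re-authenticates on a 401 or when the last login is more than 60 minutes old, and a shutdown hook calls logoff() so sessions do not linger on the HMC. A hedged usage sketch (host and credentials are placeholders; it sits in the same package because logoff() is package-private):

package biz.nellemann.hmci;

// Sketch: driving RestClient by hand, outside ManagementConsole.
public class RestClientSketch {
    public static void main(String[] args) throws Exception {
        RestClient client = new RestClient(
                "https://hmc.example.com:12443", // baseUrl (placeholder)
                "hmci",                          // username (placeholder)
                "secret",                        // password (placeholder)
                true);                           // trustAll: accept self-signed certs
        client.login();
        String xml = client.getRequest("/rest/api/uom/ManagementConsole");
        System.out.println(xml == null ? "no data" : "got " + xml.length() + " bytes of XML");
        client.logoff(); // optional; the shutdown hook does this too
    }
}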

View File

@ -1,89 +1,144 @@
/*
* Copyright 2020 Mark Nellemann <mark.nellemann@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package biz.nellemann.hmci;
- import biz.nellemann.hmci.pcm.Temperature;
- import org.slf4j.Logger;
- import org.slf4j.LoggerFactory;
+ import java.io.IOException;
+ import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+ import java.util.Objects;
- class SystemEnergy extends MetaSystem {
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ import com.fasterxml.jackson.dataformat.xml.XmlMapper;
+ import biz.nellemann.hmci.dto.xml.Link;
+ import biz.nellemann.hmci.dto.xml.XmlFeed;
+ class SystemEnergy extends Resource {
private final static Logger log = LoggerFactory.getLogger(SystemEnergy.class);
- public final ManagedSystem system;
+ private final RestClient restClient;
+ private final InfluxClient influxClient;
+ private final ManagedSystem managedSystem;
+ protected String id;
+ protected String name;
- SystemEnergy(ManagedSystem system) {
- this.system = system;
+ public SystemEnergy(RestClient restClient, InfluxClient influxClient, ManagedSystem managedSystem) {
+ log.debug("SystemEnergy()");
+ this.restClient = restClient;
+ this.influxClient = influxClient;
+ this.managedSystem = managedSystem;
}
+ public void refresh() {
+ log.debug("refresh()");
+ try {
+ String xml = restClient.getRequest(String.format("/rest/api/pcm/ManagedSystem/%s/ProcessedMetrics?Type=Energy&NoOfSamples=%d", managedSystem.id, noOfSamples));
+ // Do not try to parse empty response
+ if(xml == null || xml.length() <= 1) {
+ log.debug("refresh() - no data."); // We do not log as 'warn' as many systems do not have this enabled.
+ return;
+ }
+ XmlMapper xmlMapper = new XmlMapper();
+ XmlFeed xmlFeed = xmlMapper.readValue(xml, XmlFeed.class);
+ xmlFeed.entries.forEach((entry) -> {
+ if (entry.category.term.equals("ManagedSystem")) {
+ Link link = entry.link;
+ if (link.getType() != null && Objects.equals(link.getType(), "application/json")) {
+ try {
+ URI jsonUri = URI.create(link.getHref());
+ String json = restClient.getRequest(jsonUri.getPath());
+ deserialize(json);
+ } catch (IOException e) {
+ log.error("refresh() - error 1: {}", e.getMessage());
+ }
+ }
+ }
+ });
+ } catch (IOException e) {
+ log.error("refresh() - error: {} {}", e.getClass(), e.getMessage());
+ }
+ }
@Override
- public String toString() {
- return system.name;
+ public void process(int sample) {
+ if(metric != null) {
+ log.debug("process() - sample: {}", sample);
+ influxClient.write(getPowerMetrics(sample), "server_energy_power");
+ influxClient.write(getThermalMetrics(sample), "server_energy_thermal");
+ }
+ }
- List<Measurement> getPowerMetrics() {
+ List<Measurement> getPowerMetrics(int sample) {
List<Measurement> list = new ArrayList<>();
+ try {
- HashMap<String, String> tagsMap = new HashMap<>();
- Map<String, Object> fieldsMap = new HashMap<>();
+ HashMap<String, String> tagsMap = new HashMap<>();
- tagsMap.put("servername", system.name);
- log.trace("getPowerMetrics() - tags: {}", tagsMap);
+ tagsMap.put("servername", managedSystem.name);
+ log.trace("getPowerMetrics() - tags: {}", tagsMap);
+ Map<String, Object> fieldsMap = new HashMap<>();
- fieldsMap.put("powerReading", metrics.systemUtil.sample.energyUtil.powerUtil.powerReading);
- log.trace("getPowerMetrics() - fields: {}", fieldsMap);
+ fieldsMap.put("powerReading", metric.getSample(sample).energyUtil.powerUtil.powerReading);
+ log.trace("getPowerMetrics() - fields: {}", fieldsMap);
+ list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
+ } catch (Exception e) {
+ log.warn("getPowerMetrics() - error: {}", e.getMessage());
+ }
- list.add(new Measurement(tagsMap, fieldsMap));
return list;
}
- List<Measurement> getThermalMetrics() {
+ List<Measurement> getThermalMetrics(int sample) {
List<Measurement> list = new ArrayList<>();
+ try {
- HashMap<String, String> tagsMap = new HashMap<>();
- Map<String, Object> fieldsMap = new HashMap<>();
+ HashMap<String, String> tagsMap = new HashMap<>();
- tagsMap.put("servername", system.name);
- log.trace("getThermalMetrics() - tags: {}", tagsMap);
+ tagsMap.put("servername", managedSystem.name);
+ log.trace("getThermalMetrics() - tags: {}", tagsMap);
+ Map<String, Object> fieldsMap = new HashMap<>();
+ metric.getSample(sample).energyUtil.thermalUtil.cpuTemperatures.forEach((t) -> {
+ fieldsMap.put("cpuTemperature_" + t.entityInstance, t.temperatureReading);
+ });
- for(Temperature t : metrics.systemUtil.sample.energyUtil.thermalUtil.cpuTemperatures) {
- fieldsMap.put("cpuTemperature_" + t.entityInstance, t.temperatureReading);
+ metric.getSample(sample).energyUtil.thermalUtil.inletTemperatures.forEach((t) -> {
+ fieldsMap.put("inletTemperature_" + t.entityInstance, t.temperatureReading);
+ });
+ /* Disabled, not sure if useful
+ for(Temperature t : metrics.systemUtil.sample.energyUtil.thermalUtil.baseboardTemperatures) {
+ fieldsMap.put("baseboardTemperature_" + t.entityInstance, t.temperatureReading);
+ }*/
+ log.trace("getThermalMetrics() - fields: {}", fieldsMap);
+ list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
+ } catch (Exception e) {
+ log.warn("getThermalMetrics() - error: {}", e.getMessage());
+ }
- for(Temperature t : metrics.systemUtil.sample.energyUtil.thermalUtil.inletTemperatures) {
- fieldsMap.put("inletTemperature_" + t.entityInstance, t.temperatureReading);
- }
- /* Disabled, not sure if useful
- for(Temperature t : metrics.systemUtil.sample.energyUtil.thermalUtil.baseboardTemperatures) {
- fieldsMap.put("baseboardTemperature_" + t.entityInstance, t.temperatureReading);
- }*/
- log.trace("getThermalMetrics() - fields: {}", fieldsMap);
- list.add(new Measurement(tagsMap, fieldsMap));
return list;
}
}
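
For the thermal side, getThermalMetrics() flattens each temperature list into one field map keyed by entity instance, so a sample yields fields like cpuTemperature_0 and inletTemperature_1 in the server_energy_thermal measurement. A self-contained sketch of that flattening (readings are invented):

import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Sketch: flattening per-sensor readings into InfluxDB fields,
// as getThermalMetrics() does above.
public class ThermalFlattenSketch {
    record Temp(String entityInstance, double reading) { }
    public static void main(String[] args) {
        List<Temp> cpuTemps = List.of(new Temp("0", 54.0), new Temp("1", 57.5));
        Map<String, Object> fields = new HashMap<>();
        cpuTemps.forEach(t -> fields.put("cpuTemperature_" + t.entityInstance(), t.reading()));
        System.out.println(fields); // e.g. {cpuTemperature_0=54.0, cpuTemperature_1=57.5}
    }
}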

View File

@ -15,12 +15,12 @@
*/
package biz.nellemann.hmci;
- import picocli.CommandLine;
import java.io.IOException;
import java.util.jar.Attributes;
import java.util.jar.Manifest;
+ import picocli.CommandLine;
class VersionProvider implements CommandLine.IVersionProvider {
@Override

View File

@ -0,0 +1,69 @@
package biz.nellemann.hmci;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.dataformat.xml.XmlMapper;
import biz.nellemann.hmci.dto.xml.VirtualIOServerEntry;
import biz.nellemann.hmci.dto.xml.XmlEntry;
public class VirtualIOServer {
private final static Logger log = LoggerFactory.getLogger(VirtualIOServer.class);
private final RestClient restClient;
private final ManagedSystem managedSystem;
protected String id;
private String uriPath;
protected VirtualIOServerEntry entry;
public VirtualIOServer(RestClient restClient, String href, ManagedSystem system) {
log.debug("VirtualIOServer() - {}", href);
this.restClient = restClient;
this.managedSystem = system;
try {
URI uri = new URI(href);
uriPath = uri.getPath();
//refresh();
} catch (URISyntaxException e) {
log.error("VirtualIOServer() - {}", e.getMessage());
}
}
public void discover() {
try {
String xml = restClient.getRequest(uriPath);
// Do not try to parse empty response
if(xml == null || xml.length() <= 1) {
log.warn("discover() - no data.");
return;
}
XmlMapper xmlMapper = new XmlMapper();
XmlEntry xmlEntry = xmlMapper.readValue(xml, XmlEntry.class);
if(xmlEntry.getContent() == null){
log.warn("discover() - no content.");
return;
}
if(xmlEntry.getContent().isVirtualIOServer()) {
entry = xmlEntry.getContent().getVirtualIOServerEntry();
log.debug("discover() - {}", entry.getName());
} else {
throw new UnsupportedOperationException("Failed to deserialize VirtualIOServer");
}
} catch (IOException e) {
log.error("discover() - error: {}", e.getMessage());
}
}
}

View File

@ -0,0 +1,11 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class EnergyUtil {
public PowerUtil powerUtil = new PowerUtil();
public ThermalUtil thermalUtil = new ThermalUtil();
}

View File

@ -0,0 +1,25 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
* Storage adapter
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public final class FiberChannelAdapter {
public String id;
public String wwpn;
public String physicalLocation;
public int numOfPorts;
public double numOfReads;
public double numOfWrites;
public double readBytes;
public double writeBytes;
public double runningSpeed;
public double transmittedBytes;
}

View File

@ -0,0 +1,19 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class GenericAdapter {
public String id;
public String type = "";
public String physicalLocation = "";
public double receivedPackets = 0.0;
public double sentPackets = 0.0;
public double droppedPackets = 0.0;
public double sentBytes = 0.0;
public double receivedBytes = 0.0;
public double transferredBytes = 0.0;
}

View File

@ -0,0 +1,18 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class GenericPhysicalAdapters {
public String id;
public String type = "";
public String physicalLocation;
public double numOfReads;
public double numOfWrites;
public double readBytes;
public double writeBytes;
public double transmittedBytes;
}

View File

@ -0,0 +1,23 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
* Storage adapter
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public final class GenericVirtualAdapter {
public String id = "";
public String type = "";
public Integer viosId = 0;
public String physicalLocation = "";
public Double numOfReads = 0.0;
public Double numOfWrites = 0.0;
public Double readBytes = 0.0;
public Double writeBytes = 0.0;
public Double transmittedBytes = 0.0;
}

View File

@ -0,0 +1,12 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class LparMemory {
public Double logicalMem;
public Double utilizedMem = 0.0;
public Double backedPhysicalMem = 0.0;
}

View File

@ -0,0 +1,24 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class LparProcessor {
public Integer poolId = 0;
public Integer weight = 0;
public String mode = "";
public Double maxVirtualProcessors = 0.0;
public Double currentVirtualProcessors = 0.0;
public Double maxProcUnits = 0.0;
public Double entitledProcUnits = 0.0;
public Double utilizedProcUnits = 0.0;
public Double utilizedCappedProcUnits = 0.0;
public Double utilizedUncappedProcUnits = 0.0;
public Double idleProcUnits = 0.0;
public Double donatedProcUnits = 0.0;
public Double timeSpentWaitingForDispatch = 0.0;
public Double timePerInstructionExecution = 0.0;
}

View File

@ -1,5 +1,8 @@
- package biz.nellemann.hmci.pcm;
+ package biz.nellemann.hmci.dto.json;
+ import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+ @JsonIgnoreProperties(ignoreUnknown = true)
public final class LparUtil {
public Integer id = 0;
@ -8,7 +11,7 @@ public final class LparUtil {
public String state = "";
public String type = "";
public String osType = "";
- public Number affinityScore = 0.0f;
+ public Float affinityScore = 0.0f;
public final LparMemory memory = new LparMemory();
public final LparProcessor processor = new LparProcessor();

View File

@ -0,0 +1,19 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import java.util.ArrayList;
import java.util.List;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class Network {
public List<String> clientLpars = new ArrayList<>();
public List<GenericAdapter> genericAdapters = new ArrayList<>();
public List<SharedAdapter> sharedAdapters = new ArrayList<>();
public List<VirtualEthernetAdapter> virtualEthernetAdapters = new ArrayList<>();
public List<SRIOVAdapter> sriovAdapters = new ArrayList<>();
public List<SRIOVLogicalPort> sriovLogicalPorts = new ArrayList<>();
}

View File

@ -0,0 +1,15 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class PhysicalProcessorPool {
public double assignedProcUnits = 0.0;
public double utilizedProcUnits = 0.0;
public double availableProcUnits = 0.0;
public double configuredProcUnits = 0.0;
public double borrowedProcUnits = 0.0;
}

View File

@ -0,0 +1,10 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class PowerUtil {
public float powerReading = 0.0F;
}

View File

@ -0,0 +1,10 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties(ignoreUnknown = true)
public class ProcessedMetrics {
public SystemUtil systemUtil;
}

View File

@ -0,0 +1,14 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import java.util.List;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class SRIOVAdapter {
public String drcIndex = "";
public List<SRIOVPhysicalPort> physicalPorts;
}

View File

@ -0,0 +1,24 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties(ignoreUnknown = true)
public class SRIOVLogicalPort {
public String drcIndex;
public String physicalLocation;
public String physicalDrcIndex;
public Number physicalPortId;
public String clientPartitionUUID;
public String vnicDeviceMode;
public String configurationType;
public Number receivedPackets;
public Number sentPackets;
public Number droppedPackets;
public Number sentBytes;
public Number receivedBytes;
public Number errorIn;
public Number errorOut;
public Number transferredBytes;
}

View File

@ -0,0 +1,23 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class SRIOVPhysicalPort {
public String id;
public String physicalLocation = ""; // "U78CA.001.CSS0CXA-P1-C2-C1-T1-S2"
public String physicalDrcIndex = "";
public Number physicalPortId = 0;
public String vnicDeviceMode = ""; // "NonVNIC"
public String configurationType = ""; // "Ethernet"
public Number receivedPackets = 0.0;
public Number sentPackets = 0.0;
public Number droppedPackets = 0.0;
public Number sentBytes = 0.0;
public Number receivedBytes = 0.0;
public Number errorIn = 0.0;
public Number errorOut = 0.0;
public Number transferredBytes = 0.0;
}

View File

@ -0,0 +1,31 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class SampleInfo {
@JsonProperty("timeStamp")
public String timestamp;
public String getTimeStamp() {
return timestamp;
}
public Integer status;
@JsonProperty("errorInfo")
public List<ErrorInfo> errors;
static class ErrorInfo {
public String errId;
public String errMsg;
public String uuid;
public String reportedBy;
public Integer occurrenceCount;
}
}

View File

@ -0,0 +1,14 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class ServerMemory {
public double totalMem = 0.0;
public double availableMem = 0.0;
public double configurableMem = 0.0;
public double assignedMemToLpars = 0.0;
public double virtualPersistentMem = 0.0;
}

View File

@ -0,0 +1,13 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class ServerProcessor {
public Double totalProcUnits = 0.0;
public Double utilizedProcUnits = 0.0;
public Double availableProcUnits = 0.0;
public Double configurableProcUnits = 0.0;
}

View File

@ -0,0 +1,18 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import java.util.ArrayList;
import java.util.List;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class ServerUtil {
public final ServerProcessor processor = new ServerProcessor();
public final ServerMemory memory = new ServerMemory();
public PhysicalProcessorPool physicalProcessorPool = new PhysicalProcessorPool();
public List<SharedProcessorPool> sharedProcessorPool = new ArrayList<>();
public Network network = new Network();
}

View File

@ -0,0 +1,28 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import java.util.List;
/**
* Network adapter
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public final class SharedAdapter {
public String id;
public String type;
public String physicalLocation;
public double receivedPackets;
public double sentPackets;
public double droppedPackets;
public double sentBytes;
public double receivedBytes;
public double transferredBytes;
public List<String> bridgedAdapters;
}

View File

@ -0,0 +1,18 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class SharedProcessorPool {
public int id;
public String name;
public double assignedProcUnits = 0.0;
public double utilizedProcUnits = 0.0;
public double availableProcUnits = 0.0;
public double configuredProcUnits = 0.0;
public double borrowedProcUnits = 0.0;
}

View File

@ -0,0 +1,18 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import java.util.ArrayList;
import java.util.List;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class Storage {
public List<String> clientLpars = new ArrayList<>();
public List<GenericPhysicalAdapters> genericPhysicalAdapters = new ArrayList<>();
public List<GenericVirtualAdapter> genericVirtualAdapters = new ArrayList<>();
public List<FiberChannelAdapter> fiberChannelAdapters = new ArrayList<>();
public List<VirtualFiberChannelAdapter> virtualFiberChannelAdapters = new ArrayList<>();
}

View File

@ -0,0 +1,15 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonUnwrapped;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class SystemFirmware {
@JsonUnwrapped
public Double utilizedProcUnits;// = 0.0;
public Double assignedMem = 0.0;
}

View File

@ -0,0 +1,30 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonUnwrapped;
import java.util.List;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class SystemUtil {
@JsonProperty("utilInfo")
public UtilInfo utilInfo;
public UtilInfo getUtilInfo() {
return utilInfo;
}
@JsonUnwrapped
@JsonProperty("utilSamples")
public List<UtilSample> samples;
public UtilSample getSample(int n) {
return samples.size() > n ? samples.get(n) : new UtilSample();
}
public UtilSample getSample() {
return samples.size() > 0 ? samples.get(0) : new UtilSample();
}
}

View File

@ -1,13 +1,12 @@
- package biz.nellemann.hmci.pcm;
+ package biz.nellemann.hmci.dto.json;
- import com.serjltt.moshi.adapters.FirstElement;
+ import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+ @JsonIgnoreProperties(ignoreUnknown = true)
public final class Temperature {
public String entityId = "";
public String entityInstance = "";
- @FirstElement
public Number temperatureReading = 0.0;
}

View File

@ -0,0 +1,15 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import java.util.ArrayList;
import java.util.List;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class ThermalUtil {
public List<Temperature> inletTemperatures = new ArrayList<>();
public List<Temperature> cpuTemperatures = new ArrayList<>();
public List<Temperature> baseboardTemperatures = new ArrayList<>();
}

View File

@ -1,7 +1,8 @@
- package biz.nellemann.hmci.pcm;
+ package biz.nellemann.hmci.dto.json;
- import com.serjltt.moshi.adapters.FirstElement;
+ import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+ @JsonIgnoreProperties(ignoreUnknown = true)
public final class UtilInfo {
public String version = "";
@ -13,7 +14,4 @@ public final class UtilInfo {
public String name = "";
public String uuid = "";
- @FirstElement
- public String metricArrayOrder = "";
}

View File

@ -0,0 +1,31 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.ArrayList;
import java.util.List;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class UtilSample {
public String sampleType = "";
@JsonProperty("sampleInfo")
public SampleInfo sampleInfo = new SampleInfo();
public SampleInfo getInfo() {
return sampleInfo;
}
@JsonProperty("systemFirmwareUtil")
public SystemFirmware systemFirmwareUtil = new SystemFirmware();
public ServerUtil serverUtil = new ServerUtil();
public EnergyUtil energyUtil = new EnergyUtil();
public List<ViosUtil> viosUtil = new ArrayList<>();
public LparUtil lparsUtil = new LparUtil();
}

View File

@ -0,0 +1,10 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonIgnoreProperties(ignoreUnknown = true)
public final class ViosMemory {
public double assignedMem;
public double utilizedMem;
public double virtualPersistentMem;
}

View File

@ -1,9 +1,12 @@
- package biz.nellemann.hmci.pcm;
+ package biz.nellemann.hmci.dto.json;
+ import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+ @JsonIgnoreProperties(ignoreUnknown = true)
public final class ViosUtil {
public String id = "";
public String uuid = "";
public int id;
public String uuid;
public String name = "";
public String state = "";
public Integer affinityScore = 0;

View File

@ -0,0 +1,33 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
* Network adapter SEA
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public final class VirtualEthernetAdapter {
public String physicalLocation = "";
public Integer vlanId = 0;
public Integer vswitchId = 0;
public Boolean isPortVlanId = false;
public Integer viosId = 0;
public String sharedEthernetAdapterId = "";
public Double receivedPackets = 0.0;
public Double sentPackets = 0.0;
public Double droppedPackets = 0.0;
public Double sentBytes = 0.0;
public Double receivedBytes = 0.0;
public Double receivedPhysicalPackets = 0.0;
public Double sentPhysicalPackets = 0.0;
public Double droppedPhysicalPackets = 0.0;
public Double sentPhysicalBytes = 0.0;
public Double receivedPhysicalBytes = 0.0;
public Double transferredBytes = 0.0;
public Double transferredPhysicalBytes = 0.0;
}

View File

@ -0,0 +1,27 @@
package biz.nellemann.hmci.dto.json;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
* Storage adapter - NPIV ?
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public final class VirtualFiberChannelAdapter {
public String id = "";
public String wwpn = "";
public String wwpn2 = "";
public String physicalLocation = "";
public String physicalPortWWPN = "";
public Integer viosId = 0;
public Double numOfReads = 0.0;
public Double numOfWrites = 0.0;
public Double readBytes = 0.0;
public Double writeBytes = 0.0;
public Double runningSpeed = 0.0;
public Double transmittedBytes = 0.0;
}

View File

@ -0,0 +1,12 @@
package biz.nellemann.hmci.dto.toml;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import java.util.Map;
@JsonIgnoreProperties(ignoreUnknown = true)
public class Configuration {
public InfluxConfiguration influx;
public Map<String, HmcConfiguration> hmc;
}

View File

@ -0,0 +1,28 @@
package biz.nellemann.hmci.dto.toml;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import java.util.ArrayList;
import java.util.List;
@JsonIgnoreProperties(ignoreUnknown = true)
public class HmcConfiguration {
public String url;
public String name;
public String username;
public String password;
public Integer refresh = 30;
public Integer discover = 120;
public String trace;
public Boolean energy = true;
public Boolean trust = true;
public List<String> excludeSystems = new ArrayList<>();
public List<String> includeSystems = new ArrayList<>();
public List<String> excludePartitions = new ArrayList<>();
public List<String> includePartitions = new ArrayList<>();
}

View File

@ -0,0 +1,21 @@
package biz.nellemann.hmci.dto.toml;
public class InfluxConfiguration {
public String url;
public String org;
public String token;
public String bucket;
public String username;
public String password;
public String database;
/*public InfluxConfiguration(String url, String username, String password, String database) {
this.url = url;
this.username = username;
this.password = password;
this.database = database;
}*/
}
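
Together these three DTOs describe the tool's TOML configuration: one [influx] table plus a [hmc.<name>] table per console, with refresh/discover intervals and include/exclude lists defaulting as above. A sketch of deserializing such a file, assuming Jackson's jackson-dataformat-toml module is on the classpath (the values shown are placeholders):

import com.fasterxml.jackson.dataformat.toml.TomlMapper;
import biz.nellemann.hmci.dto.toml.Configuration;

// Sketch: reading the configuration with Jackson's TOML mapper.
public class ConfigSketch {
    public static void main(String[] args) throws Exception {
        String toml = String.join("\n",
                "[influx]",
                "url = \"http://localhost:8086\"",
                "[hmc.site1]",
                "url = \"https://10.0.0.1:12443\"",
                "username = \"hmci\"",
                "password = \"secret\"",
                "refresh = 30");
        Configuration cfg = new TomlMapper().readValue(toml, Configuration.class);
        System.out.println(cfg.hmc.get("site1").url + " every " + cfg.hmc.get("site1").refresh + "s");
    }
}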

View File

@ -0,0 +1,16 @@
package biz.nellemann.hmci.dto.xml;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.Serializable;
@JsonIgnoreProperties({ "Atom", "ksv", "kxe", "kb", "schemaVersion", "" })
public class IFixDetail implements Serializable {
private static final long serialVersionUID = 1L;
@JsonProperty("IFix")
public String iFix;
}

View File

@ -0,0 +1,35 @@
package biz.nellemann.hmci.dto.xml;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
import java.io.Serializable;
@JsonIgnoreProperties(ignoreUnknown = true)
public class Link implements Serializable {
private static final long serialVersionUID = 1L;
@JacksonXmlProperty(isAttribute = true)
public String rel;
public String getRel() {
return rel;
}
@JacksonXmlProperty(isAttribute = true)
public String type;
public String getType() {
return type;
}
@JacksonXmlProperty(isAttribute = true)
public String href;
public String getHref() {
return href;
}
}

View File

@ -0,0 +1,55 @@
package biz.nellemann.hmci.dto.xml;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.Serializable;
/*
@JsonIgnoreProperties({
"ksv", "kxe", "kb", "schemaVersion", "Metadata", "AllowPerformanceDataCollection",
"AssociatedPartitionProfile", "AvailabilityPriority", "CurrentProcessorCompatibilityMode", "CurrentProfileSync",
"IsBootable", "IsConnectionMonitoringEnabled", "IsOperationInProgress", "IsRedundantErrorPathReportingEnabled",
"IsTimeReferencePartition", "IsVirtualServiceAttentionLEDOn", "IsVirtualTrustedPlatformModuleEnabled",
"KeylockPosition", "LogicalSerialNumber", "OperatingSystemVersion", "PartitionCapabilities", "PartitionID",
"PartitionIOConfiguration", "PartitionMemoryConfiguration", "PartitionProcessorConfiguration", "PartitionProfiles",
"PendingProcessorCompatibilityMode", "ProcessorPool", "ProgressPartitionDataRemaining", "ProgressPartitionDataTotal",
"ProgressState", "ResourceMonitoringControlState", "ResourceMonitoringIPAddress", "AssociatedManagedSystem",
"ClientNetworkAdapters", "HostEthernetAdapterLogicalPorts", "MACAddressPrefix", "IsServicePartition",
"PowerVMManagementCapable", "ReferenceCode", "AssignAllResources", "HardwareAcceleratorQoS", "LastActivatedProfile",
"HasPhysicalIO", "AllowPerformanceDataCollection", "PendingSecureBoot", "CurrentSecureBoot", "BootMode",
"PowerOnWithHypervisor", "Description", "MigrationStorageViosDataStatus", "MigrationStorageViosDataTimestamp",
"RemoteRestartCapable", "SimplifiedRemoteRestartCapable", "HasDedicatedProcessorsForMigration", "SuspendCapable",
"MigrationDisable", "MigrationState", "RemoteRestartState", "VirtualFibreChannelClientAdapters",
"VirtualSCSIClientAdapters", "BootListInformation"
})
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class LogicalPartitionEntry implements Serializable, ResourceEntry {
private static final long serialVersionUID = 1L;
@JsonProperty("PartitionID")
public Number partitionId;
@JsonProperty("PartitionName")
public String partitionName;
@JsonProperty("PartitionState")
public String partitionState;
@JsonProperty("PartitionType")
public String partitionType;
@JsonProperty("PartitionUUID")
public String partitionUUID;
@JsonProperty("OperatingSystemType")
public String operatingSystemType;
@Override
public String getName() {
return partitionName.trim();
}
}

View File

@ -0,0 +1,21 @@
package biz.nellemann.hmci.dto.xml;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.Serializable;
@JsonIgnoreProperties({ "schemaVersion", "Metadata" })
public class LogonResponse implements Serializable {
private static final long serialVersionUID = 1L;
@JsonProperty("X-API-Session")
private String token;
public String getToken() {
return token;
}
}

View File

@ -0,0 +1,46 @@
package biz.nellemann.hmci.dto.xml;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
import java.io.Serializable;
@JsonIgnoreProperties({ "kb", "kxe", "Metadata" })
public class MachineTypeModelAndSerialNumber implements Serializable {
private static final long serialVersionUID = 1L;
@JacksonXmlProperty(isAttribute = true)
private final String schemaVersion = "V1_0";
@JsonProperty("MachineType")
public String machineType;
public String getMachineType() {
return machineType;
}
@JsonProperty("Model")
public String model;
public String getModel() {
return model;
}
@JsonProperty("SerialNumber")
public String serialNumber;
public String getSerialNumber() {
return serialNumber;
}
public String getTypeAndModel() {
return machineType+"-"+model;
}
public String getTypeAndModelAndSerialNumber() {
return machineType+"-"+model+"-"+serialNumber;
}
}

View File

@ -0,0 +1,94 @@
package biz.nellemann.hmci.dto.xml;
import com.fasterxml.jackson.annotation.JsonAlias;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/*
@JsonIgnoreProperties({
"schemaVersion", "Metadata", "AssociatedIPLConfiguration", "AssociatedSystemCapabilities",
"AssociatedSystemIOConfiguration", "AssociatedSystemMemoryConfiguration", "AssociatedSystemProcessorConfiguration",
"AssociatedSystemSecurity", "DetailedState", "ManufacturingDefaultConfigurationEnabled", "MaximumPartitions",
"MaximumPowerControlPartitions", "MaximumRemoteRestartPartitions", "MaximumSharedProcessorCapablePartitionID",
"MaximumSuspendablePartitions", "MaximumBackingDevicesPerVNIC", "PhysicalSystemAttentionLEDState",
"PrimaryIPAddress", "ServiceProcessorFailoverEnabled", "ServiceProcessorFailoverReason", "ServiceProcessorFailoverState",
"ServiceProcessorVersion", "VirtualSystemAttentionLEDState", "SystemMigrationInformation", "ReferenceCode",
"MergedReferenceCode", "EnergyManagementConfiguration", "IsPowerVMManagementMaster", "IsClassicHMCManagement",
"IsPowerVMManagementWithoutMaster", "IsManagementPartitionPowerVMManagementMaster", "IsHMCPowerVMManagementMaster",
"IsNotPowerVMManagementMaster", "IsPowerVMManagementNormalMaster", "IsPowerVMManagementPersistentMaster",
"IsPowerVMManagementTemporaryMaster", "IsPowerVMManagementPartitionEnabled", "SupportedHardwareAcceleratorTypes",
"CurrentStealableProcUnits", "CurrentStealableMemory", "Description", "SystemLocation", "SystemType",
"ProcessorThrottling", "AssociatedPersistentMemoryConfiguration"
})*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class ManagedSystemEntry implements Serializable, ResourceEntry {
private static final long serialVersionUID = 1L;
@JsonProperty("State")
public String state;
@JsonProperty("Hostname")
public String hostname;
//@JsonAlias("ActivatedLevel")
@JsonProperty("ActivatedLevel")
public Integer activatedLevel;
public Integer getActivatedLevel() {
return activatedLevel;
}
@JsonAlias("ActivatedServicePackNameAndLevel")
public String activatedServicePackNameAndLevel;
public String getActivatedServicePackNameAndLevel() {
return activatedServicePackNameAndLevel;
}
@JsonAlias("SystemName")
public String systemName = "";
public String getSystemName() {
return systemName.trim();
}
@Override
public String getName() {
return systemName.trim();
}
@JsonProperty("SystemTime")
public Long systemTime;
@JsonProperty("SystemFirmware")
public String systemFirmware;
@JsonAlias("AssociatedLogicalPartitions")
public List<Link> associatedLogicalPartitions;
public List<Link> getAssociatedLogicalPartitions() {
return associatedLogicalPartitions != null ? associatedLogicalPartitions : new ArrayList<>();
}
@JsonAlias("AssociatedVirtualIOServers")
public List<Link> associatedVirtualIOServers;
public List<Link> getAssociatedVirtualIOServers() {
return associatedVirtualIOServers != null ? associatedVirtualIOServers : new ArrayList<>();
}
@JsonAlias("MachineTypeModelAndSerialNumber")
public MachineTypeModelAndSerialNumber machineTypeModelAndSerialNumber;
public MachineTypeModelAndSerialNumber getMachineTypeModelAndSerialNumber() {
return machineTypeModelAndSerialNumber;
}
}

View File

@ -0,0 +1,57 @@
package biz.nellemann.hmci.dto.xml;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement;
@JsonIgnoreProperties(ignoreUnknown = true)
@JacksonXmlRootElement(localName = "ManagedSystemPcmPreference:ManagedSystemPcmPreference")
public class ManagedSystemPcmPreference {
@JacksonXmlProperty(isAttribute = true)
private final String schemaVersion = "V1_0";
@JacksonXmlProperty(isAttribute = true, localName = "xmlns")
private final String xmlns = "http://www.ibm.com/xmlns/systems/power/firmware/pcm/mc/2012_10/";
@JacksonXmlProperty(isAttribute = true, localName = "xmlns:ManagedSystemPcmPreference")
private final String ns1 = "http://www.ibm.com/xmlns/systems/power/firmware/pcm/mc/2012_10/";
@JacksonXmlProperty(isAttribute = true, localName = "xmlns:ns2")
private final String ns2 = "http://www.w3.org/XML/1998/namespace/k2";
@JsonProperty("Metadata")
public Metadata metadata;
@JsonProperty("SystemName")
public String systemName;
@JsonProperty("MachineTypeModelSerialNumber")
public MachineTypeModelAndSerialNumber machineTypeModelSerialNumber;
@JsonProperty("EnergyMonitoringCapable")
public Boolean energyMonitoringCapable = false;
@JsonProperty("LongTermMonitorEnabled")
public Boolean longTermMonitorEnabled;
@JsonProperty("AggregationEnabled")
public Boolean aggregationEnabled;
@JsonProperty("ShortTermMonitorEnabled")
public Boolean shortTermMonitorEnabled;
// ksv ksv="V1_1_0"
//@JacksonXmlProperty(isAttribute = true)
//@JsonProperty("ComputeLTMEnabled")
//public Boolean computeLTMEnabled;
@JsonProperty("EnergyMonitorEnabled")
public Boolean energyMonitorEnabled = false;
@JsonProperty("AssociatedManagedSystem")
public Link associatedManagedSystem;
}

View File

@ -0,0 +1,99 @@
package biz.nellemann.hmci.dto.xml;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/*
@JsonIgnoreProperties({
"schemaVersion", "Metadata", "NetworkInterfaces", "Driver", "LicenseID", "LicenseFirstYear", "UVMID",
"TemplateObjectModelVersion", "UserObjectModelVersion", "WebObjectModelVersion", "PublicSSHKeyValue",
"MinimumKeyStoreSize", "MinimumKeyStoreSize"
})*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class ManagementConsoleEntry implements Serializable, ResourceEntry {
private static final long serialVersionUID = 1L;
@JsonProperty("MachineTypeModelAndSerialNumber")
private MachineTypeModelAndSerialNumber machineTypeModelAndSerialNumber;
public MachineTypeModelAndSerialNumber getMachineTypeModelAndSerialNumber() {
return machineTypeModelAndSerialNumber;
}
@JsonProperty("ManagedSystems")
protected List<Link> associatedManagedSystems;
public List<Link> getAssociatedManagedSystems() {
// TODO: Security - Return new array, so receiver cannot modify ours.
return new ArrayList<>(associatedManagedSystems);
}
@JsonProperty("ManagementConsoleName")
public String managementConsoleName;
@Override
public String getName() {
return managementConsoleName.replace("\n", "").trim();
}
@JsonProperty("VersionInfo")
public VersionInfo versionInfo;
@JsonProperty("BIOS")
protected String bios;
@JsonProperty("BaseVersion")
protected String baseVersion;
public String getBaseVersion() {
return baseVersion;
}
@JsonProperty("IFixDetails")
public IFixDetails iFixDetails;
@JsonIgnoreProperties({ "ksv", "kxe", "kb", "schemaVersion", "Metadata" })
static class IFixDetails {
@JsonProperty("IFixDetail")
public List<IFixDetail> iFixDetailList;
}
@JsonProperty("ProcConfiguration")
public ProcConfiguration procConfiguration;
@JsonIgnoreProperties({ "ksv", "kxe", "kb", "schemaVersion", "Metadata", "Atom" })
static class ProcConfiguration {
@JsonProperty("NumberOfProcessors")
public Integer numberOfProcessors;
@JsonProperty("ModelName")
public String modelName;
@JsonProperty("Architecture")
public String architecture;
}
@JsonProperty("MemConfiguration")
public MemConfiguration memConfiguration;
@JsonIgnoreProperties({ "ksv", "kxe", "kb", "schemaVersion", "Metadata", "Atom" })
static class MemConfiguration {
@JsonProperty("TotalMemory")
public Integer totalMemory;
@JsonProperty("TotalSwapMemory")
public Integer totalSwapMemory;
}
}

View File

@ -0,0 +1,21 @@
package biz.nellemann.hmci.dto.xml;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
@JsonIgnoreProperties(ignoreUnknown = true)
public class Metadata {
@JsonProperty("Atom")
public Atom atom;
@JsonIgnoreProperties(ignoreUnknown = true)
public class Atom {
@JsonProperty("AtomID")
public String atomID;
@JsonProperty("AtomCreated")
public String atomCreated;
}
}

View File

@ -0,0 +1,6 @@
package biz.nellemann.hmci.dto.xml;
public interface ResourceEntry {
String getName();
}

View File

@ -0,0 +1,32 @@
package biz.nellemann.hmci.dto.xml;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.Serializable;
@JsonIgnoreProperties({ "kxe", "kb", "schemaVersion", "Metadata" })
public class VersionInfo implements Serializable {
private static final long serialVersionUID = 1L;
@JsonProperty("BuildLevel")
public String buildLevel;
@JsonProperty("Maintenance")
protected String maintenance;
@JsonProperty("Minor")
protected String minor;
@JsonProperty("Release")
protected String release;
@JsonProperty("ServicePackName")
public String servicePackName;
@JsonProperty("Version")
protected String version;
}

View File

@ -0,0 +1,26 @@
package biz.nellemann.hmci.dto.xml;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.Serializable;
@JsonIgnoreProperties(ignoreUnknown = true)
public class VirtualIOServerEntry implements Serializable, ResourceEntry {
private static final long serialVersionUID = 1L;
@JsonProperty("PartitionName")
private String partitionName;
public String getPartitionName() {
return partitionName;
}
@Override
public String getName() {
return partitionName;
}
}

Some files were not shown because too many files have changed in this diff.