Fixes to RPM and DEB build.

Mark Nellemann 2020-08-13 22:01:22 +02:00
parent de32d95d3f
commit 80138b8d57
8 changed files with 51 additions and 68 deletions

README.md

@@ -2,16 +2,24 @@
Small utility to fetch metrics from one or more HMC's and push those to an InfluxDB time-series database.
+## Known Problems
+- When running on Windows, the data is collected and written to InfluxDB, but in Grafana there is no data.
## Usage Instructions
### Create Configuration
-Create a configuration file with setup for HMC's and InfluxDB.
+Modify the **/opt/hmci/conf/hmci.groovy** configuration file to suit your environment.
### Run HMCi Tool
-Requires Java 8+ runtime.
+Requires Java 8+ runtime
+    /opt/hmci/bin/hmci
## Development Information
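
The "Create Configuration" step above refers to /opt/hmci/conf/hmci.groovy. Judging from the keys read by App.groovy in this commit (hmci.refresh, hmci.rescan, an influx block with url/username/password/database, and one hmc block per HMC with url/username/password/unsafe), a minimal configuration might look like the sketch below; host names, credentials and the site1 label are placeholders, not values from the repository.

```groovy
/*
    Configuration for HMCi
*/
hmci.refresh = 60
hmci.rescan = 15

// InfluxDB to save metrics - placeholder values, adjust to your environment
influx {
    url = "http://localhost:8086"
    username = "root"
    password = ""
    database = "hmci"
}

// One block per HMC to monitor; 'site1' is just an example name
hmc {
    site1 {
        url = "https://10.0.0.1:12443"
        username = "hscroot"
        password = "secret"
        unsafe = true   // passed to HmcClient; presumably relaxes SSL certificate checking
    }
}
```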

bitbucket-pipelines.yml

@@ -18,5 +18,5 @@ pipelines:
    script:
      - ./gradlew clean build shadowJar startShadowScripts buildDep buildRpm
      - for file in ${BITBUCKET_CLONE_DIR}/build/libs/*-all.jar ; do curl -X POST --user "${BB_AUTH_STRING}" "https://api.bitbucket.org/2.0/repositories/${BITBUCKET_REPO_OWNER}/${BITBUCKET_REPO_SLUG}/downloads" --form files=@"${file}" ; done
-      - for file in ${BITBUCKET_CLONE_DIR}/build/distributions/*_all.deb ; do curl -X POST --user "${BB_AUTH_STRING}" "https://api.bitbucket.org/2.0/repositories/${BITBUCKET_REPO_OWNER}/${BITBUCKET_REPO_SLUG}/downloads" --form files=@"${file}" ; done
-      - for file in ${BITBUCKET_CLONE_DIR}/build/distributions/*.noarch.rpm ; do curl -X POST --user "${BB_AUTH_STRING}" "https://api.bitbucket.org/2.0/repositories/${BITBUCKET_REPO_OWNER}/${BITBUCKET_REPO_SLUG}/downloads" --form files=@"${file}" ; done
+      - for file in ${BITBUCKET_CLONE_DIR}/build/distributions/*.deb ; do curl -X POST --user "${BB_AUTH_STRING}" "https://api.bitbucket.org/2.0/repositories/${BITBUCKET_REPO_OWNER}/${BITBUCKET_REPO_SLUG}/downloads" --form files=@"${file}" ; done
+      - for file in ${BITBUCKET_CLONE_DIR}/build/distributions/*.rpm ; do curl -X POST --user "${BB_AUTH_STRING}" "https://api.bitbucket.org/2.0/repositories/${BITBUCKET_REPO_OWNER}/${BITBUCKET_REPO_SLUG}/downloads" --form files=@"${file}" ; done

build.gradle

@@ -1,11 +1,3 @@
-/*
- * This file was generated by the Gradle 'init' task.
- *
- * This generated file contains a sample Groovy project to get you started.
- * For more details take a look at the Groovy Quickstart chapter in the Gradle
- * User Manual available at https://docs.gradle.org/6.5.1/userguide/tutorial_groovy_projects.html
- */
plugins {
    // Apply the groovy plugin to add support for Groovy
    id 'groovy'
@@ -18,20 +10,16 @@ plugins {
}
repositories {
-    // Use jcenter for resolving dependencies.
-    // You can declare any Maven/Ivy/file repository here.
    jcenter()
}
dependencies {
-    // Use the latest Groovy version for building this library
    implementation 'org.codehaus.groovy:groovy-all:3.0.5'
    implementation 'com.squareup.okhttp3:okhttp:4.8.0'
    implementation 'org.influxdb:influxdb-java:2.19'
    implementation 'org.slf4j:slf4j-api:1.7.+'
    runtimeOnly 'ch.qos.logback:logback-classic:1.+'
-    // Use the awesome Spock testing and specification framework
    testImplementation('org.spockframework:spock-core:2.0-M3-groovy-3.0')
    testImplementation("org.slf4j:slf4j-simple:1.7.+")
    testImplementation('com.squareup.okhttp3:mockwebserver:4.8.0')
@@ -40,7 +28,6 @@ dependencies {
}
application {
-    // Define the main class for the application.
    mainClassName = 'biz.nellemann.hmci.App'
}
@@ -52,6 +39,9 @@ apply plugin: 'nebula.ospackage'
ospackage {
    packageName = 'hmci'
    release = '1'
+    os = LINUX
+    user = 'root'
+    packager = "Mark Nellemann <mark.nellemann@gmail.com>"
    into '/opt/hmci'
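
The three added ospackage fields are what the commit message refers to. As a rough annotated sketch (the comments are explanatory, not from the repository): nebula.ospackage applies this block to its buildDeb and buildRpm tasks, whose .deb/.rpm output lands under build/distributions, the directory the Bitbucket pipeline globs above upload from.

```groovy
// Shared settings applied by the nebula.ospackage plugin to both
// the buildDeb and buildRpm packaging tasks.
ospackage {
    packageName = 'hmci'
    release     = '1'
    os          = LINUX    // tag the RPM as a Linux package
    user        = 'root'   // owner of the files installed by the package
    packager    = "Mark Nellemann <mark.nellemann@gmail.com>"  // packager metadata
    into '/opt/hmci'       // installation prefix on the target system
}
```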

hmci.groovy (configuration file)

@@ -2,7 +2,7 @@
    Configuration for HMCi
*/
-hmci.refresh = 30
+hmci.refresh = 60
hmci.rescan = 15
// InfluxDB to save metrics

systemd service file (removed by this commit)

@@ -1,13 +0,0 @@
-[Unit]
-Description=HMC Insights Daemon
-
-[Install]
-WantedBy=multi-user.target
-
-[Service]
-User=nobody
-Group=nogroup
-TimeoutSec=20
-Restart=always
-WorkingDirectory=/opt/hmci
-ExecStart=/usr/bin/java -jar lib/hmci-all.jar

App.groovy

@@ -1,6 +1,5 @@
package biz.nellemann.hmci
import groovy.cli.picocli.CliBuilder
import groovy.cli.picocli.OptionAccessor
import groovy.util.logging.Slf4j
@@ -8,28 +7,31 @@ import groovy.util.logging.Slf4j
@Slf4j
class App implements Runnable {
-    HmcClient hmc
-    InfluxClient influx
    final ConfigObject configuration
    final Integer refreshEverySec
    final Integer rescanHmcEvery
-    Map<String, HmcClient> discoveredHmc = new HashMap<>()
+    InfluxClient influxClient
+    Map<String, HmcClient> hmcClients = new HashMap<>()
    Map<String,ManagedSystem> systems = new HashMap<String, ManagedSystem>()
    Map<String, LogicalPartition> partitions = new HashMap<String, LogicalPartition>()
    App(ConfigObject configuration) {
-        log.debug configuration.toString()
        this.configuration = configuration
+        log.debug configuration.toString()
        refreshEverySec = (Integer)configuration.get('hmci.refresh') ?: 60
        rescanHmcEvery = (Integer)configuration.get('hmci.rescan') ?: 15
+        String influxUrl = configuration.get('influx')['url']
+        String influxUsername = configuration.get('influx')['username']
+        String influxPassword = configuration.get('influx')['password']
+        String influxDatabase = configuration.get('influx')['database']
        try {
-            influx = new InfluxClient((String) configuration.get('influx')['url'], (String) configuration.get('influx')['username'], (String) configuration.get('influx')['password'], (String) configuration.get('influx')['database'])
-            influx.login()
+            influxClient = new InfluxClient(influxUrl, influxUsername, influxPassword, influxDatabase)
+            influxClient.login()
        } catch(Exception e) {
            System.exit(1)
        }
@@ -44,15 +46,21 @@ class App implements Runnable {
    void discover() {
        configuration.get('hmc').each { Object key, Object hmc ->
-            if(!discoveredHmc?.containsKey(key)) {
+            if(!hmcClients?.containsKey(key)) {
                log.info("Adding HMC: " + hmc.toString())
-                HmcClient hmcClient = new HmcClient(key as String, hmc['url'] as String, hmc['username'] as String, hmc['password'] as String, hmc['unsafe'] as Boolean)
-                discoveredHmc.put(key as String, hmcClient)
+                String hmcKey = key
+                String hmcUrl = hmc['url']
+                String hmcUsername = hmc['username']
+                String hmcPassword = hmc['password']
+                Boolean hmcUnsafe = hmc['unsafe']
+                HmcClient hmcClient = new HmcClient(hmcKey, hmcUrl, hmcUsername, hmcPassword, hmcUnsafe)
+                hmcClients.put(hmcKey, hmcClient)
            }
        }
-        discoveredHmc.each {id, hmcClient ->
+        hmcClients.each { hmcId, hmcClient ->
+            log.info("Loggin in to HMC " + hmcId)
            try {
                hmcClient.login()
                hmcClient.getManagedSystems().each { systemId, system ->
@@ -68,8 +76,8 @@ class App implements Runnable {
                    }
                }
            } catch(Exception e) {
-                log.error("discover() - " + id + " error: " + e.message)
-                discoveredHmc.remove(id)
+                log.error("discover() - " + hmcId + " error: " + e.message)
+                hmcClients.remove(hmcId)
            }
        }
@@ -83,7 +91,7 @@ class App implements Runnable {
        systems.each {systemId, system ->
-            HmcClient hmcClient = discoveredHmc.get(system.hmcId)
+            HmcClient hmcClient = hmcClients.get(system.hmcId)
            // Get and process metrics for this system
            String tmpJsonString = hmcClient.getPcmDataForManagedSystem(system)
@@ -108,7 +116,7 @@ class App implements Runnable {
        // Get LPAR's for this system
        partitions.each { partitionId, partition ->
-            HmcClient hmcClient = discoveredHmc.get(partition.system.hmcId)
+            HmcClient hmcClient = hmcClients.get(partition.system.hmcId)
            // Get and process metrics for this partition
            String tmpJsonString2 = hmcClient.getPcmDataForLogicalPartition(partition)
@@ -127,14 +135,14 @@ class App implements Runnable {
    void writeMetricsForManagedSystems() {
        systems.each {systemId, system ->
-            influx.writeManagedSystem(system)
+            influxClient.writeManagedSystem(system)
        }
    }
    void writeMetricsForLogicalPartitions() {
        partitions.each {partitionId, partition ->
-            influx.writeLogicalPartition(partition)
+            influxClient.writeLogicalPartition(partition)
        }
    }
@@ -158,11 +166,7 @@ class App implements Runnable {
            System.exit(1)
        }
-        // Read in 'config.groovy' for the development environment.
        configuration = new ConfigSlurper("development").parse(configurationFile.toURI().toURL());
-        // Flatten configuration for easy access keys with dotted notation.
-        //configuration = conf.flatten();
    }
    new App(configuration)
@@ -173,7 +177,7 @@ class App implements Runnable {
    @Override
    void run() {
-        log.info("In RUN ")
+        log.info("run()")
        boolean keepRunning = true
        int executions = 0
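
The ConfigSlurper call kept above is what turns the Groovy configuration file into the ConfigObject that the constructor and discover() read. A minimal standalone illustration, assuming a file shaped like the hmci.groovy sketch earlier (the path below is a placeholder):

```groovy
// Parse a Groovy config file and walk the same structures App.groovy uses.
def configuration = new ConfigSlurper("development")
        .parse(new File("/opt/hmci/conf/hmci.groovy").toURI().toURL())

println configuration.hmci.refresh           // dotted assignments become nested ConfigObjects
println configuration.get('influx')['url']   // blocks behave like maps, as read in the constructor

configuration.get('hmc').each { name, hmc -> // one entry per configured HMC block
    println "${name} -> ${hmc['url']}"
}
```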

HmcClient.groovy

@@ -7,7 +7,6 @@ import okhttp3.OkHttpClient
import okhttp3.Request
import okhttp3.RequestBody
import okhttp3.Response
-import org.influxdb.InfluxDBFactory
import javax.net.ssl.HostnameVerifier
import javax.net.ssl.SSLContext
@@ -30,11 +29,9 @@ class HmcClient {
    private final String password
    private final Boolean unsafe
-    //protected Map<String,ManagedSystem> managedSystems = new HashMap<String, ManagedSystem>()
    protected String authToken
    private final OkHttpClient client
    HmcClient(String hmcId, String baseUrl, String username, String password, Boolean unsafe = false) {
        this.hmcId = hmcId
        this.baseUrl = baseUrl

InfluxClient.groovy

@@ -1,18 +1,15 @@
package biz.nellemann.hmci
import groovy.util.logging.Slf4j
-import org.influxdb.BatchOptions
-import org.influxdb.InfluxDB
-import org.influxdb.InfluxDBFactory
import org.influxdb.dto.BatchPoints
-import org.influxdb.dto.Point
-import org.influxdb.dto.Query
import java.time.Instant
import java.util.concurrent.TimeUnit
+import org.influxdb.InfluxDB
+import org.influxdb.BatchOptions
+import org.influxdb.InfluxDBFactory
+import org.influxdb.dto.QueryResult
+import org.influxdb.dto.Query
+import org.influxdb.dto.Point
@Slf4j
class InfluxClient {
@@ -53,13 +50,13 @@ class InfluxClient {
        influxDB.query(new Query("CREATE DATABASE " + database));
        influxDB.setDatabase(database);
-        // ... and a retention policy, if necessary.
        /*
+        // ... and a retention policy, if necessary.
        String retentionPolicyName = "HMCI_ONE_YEAR";
        influxDB.query(new Query("CREATE RETENTION POLICY " + retentionPolicyName
            + " ON " + database + " DURATION 365d REPLICATION 1 DEFAULT"));
-        influxDB.setRetentionPolicy(retentionPolicyName);*/
+        influxDB.setRetentionPolicy(retentionPolicyName);
+        */
        // Enable batch writes to get better performance.
        influxDB.enableBatch(BatchOptions.DEFAULTS);
    }
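
With the influxdb-java classes imported above, the write path behind writeManagedSystem() and writeLogicalPartition() presumably comes down to building Point objects and handing them to the batched client. A minimal sketch; the connection values, measurement, tag and field names are illustrative, not taken from the repository:

```groovy
import java.time.Instant
import java.util.concurrent.TimeUnit
import org.influxdb.BatchOptions
import org.influxdb.InfluxDB
import org.influxdb.InfluxDBFactory
import org.influxdb.dto.Point

// Connect and enable batching, mirroring the setup code in the diff above.
InfluxDB influxDB = InfluxDBFactory.connect("http://localhost:8086", "root", "")
influxDB.setDatabase("hmci")
influxDB.enableBatch(BatchOptions.DEFAULTS)

// Build one data point; with batching enabled, write() only queues it
// and the client flushes on its batch interval.
Point point = Point.measurement("managed_system")                    // illustrative measurement
        .time(Instant.now().toEpochMilli(), TimeUnit.MILLISECONDS)
        .tag("system", "S922-example")                                // illustrative tag
        .addField("utilizedProcUnits", 3.14d)                         // illustrative field
        .build()

influxDB.write(point)
influxDB.close()   // flushes any pending batched points
```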