Fetch multiple samples.

commit a0cfff18ef
parent 7786f5182f
@@ -2,6 +2,9 @@

 All notable changes to this project will be documented in this file.

+## [1.4.1] - 2011-12-xx
+
+- Retrieve multiple PCM samples and keep track of processed samples.

 ## [1.4.0] - 2011-12-01

 - Rewrite of toml+xml+json de-serialization code (uses jackson now).
 - Changes to configuration file format - please look at [doc/hmci.toml](doc/hmci.toml) as example.

@@ -24,6 +27,7 @@ All notable changes to this project will be documented in this file.

 ## [1.2.7] - 2022-02-24
 - Options to include/exclude Managed Systems and/or Logical Partitions.

+[1.4.1]: https://bitbucket.org/mnellemann/hmci/branches/compare/v1.4.1%0Dv1.4.0
 [1.4.0]: https://bitbucket.org/mnellemann/hmci/branches/compare/v1.4.0%0Dv1.3.3
 [1.3.3]: https://bitbucket.org/mnellemann/hmci/branches/compare/v1.3.3%0Dv1.3.0
 [1.3.0]: https://bitbucket.org/mnellemann/hmci/branches/compare/v1.3.0%0Dv1.2.8
TODO.md (new file)
@@ -0,0 +1,8 @@
+# TODO
+
+In *ManagementConsole run()* - should we try to sleep up until the closest 30-second interval to get the most fresh data?
+Or should we get more data samples and keep track of which we have already processed? And then sleep for shorter times.
+
+Set how many samples to ask for and process.
+Loop samples.
+Keep track of sample status and whether they are processed.
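Below is a rough, hypothetical sketch (not part of this commit) of the sample-loop idea described in the TODO. It assumes a resource whose refresh() caches the requested number of samples and whose process(int sample) writes a single sample, as introduced for ManagedSystem and LogicalPartition later in this diff; the method name, loop shape and sleep handling are assumptions, not the project's actual run() implementation.

```java
// Hypothetical sketch only - the poll() name and the behaviour of refresh() are assumptions.
void poll(ManagedSystem system, int noOfSamples, long sleepMillis) throws InterruptedException {
    while (!Thread.currentThread().isInterrupted()) {
        system.refresh();                     // fetches ProcessedMetrics with NoOfSamples=noOfSamples
        for (int sample = 0; sample < noOfSamples; sample++) {
            system.process(sample);           // write each cached sample with its own timestamp
        }
        // Keeping track of which samples were already processed (e.g. by their timestamps)
        // would allow shorter sleeps without writing duplicate points - not shown here.
        Thread.sleep(sleepMillis);
    }
}
```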
@@ -2,7 +2,7 @@
   "__inputs": [
     {
       "name": "DS_HMCI",
-      "label": "hmci",
+      "label": "Database",
       "description": "",
       "type": "datasource",
       "pluginId": "influxdb",

@@ -2,7 +2,7 @@
   "__inputs": [
     {
       "name": "DS_INFLUXDB",
-      "label": "InfluxDB",
+      "label": "Database",
       "description": "",
       "type": "datasource",
       "pluginId": "influxdb",

@@ -2,7 +2,7 @@
   "__inputs": [
     {
       "name": "DS_HMCI",
-      "label": "hmci",
+      "label": "Database",
       "description": "",
       "type": "datasource",
       "pluginId": "influxdb",

@@ -2,7 +2,7 @@
   "__inputs": [
     {
       "name": "DS_HMCI",
-      "label": "hmci",
+      "label": "Database",
       "description": "",
       "type": "datasource",
       "pluginId": "influxdb",

@@ -2,7 +2,7 @@
   "__inputs": [
     {
       "name": "DS_INFLUXDB",
-      "label": "InfluxDB",
+      "label": "Database",
       "description": "",
       "type": "datasource",
       "pluginId": "influxdb",

@@ -2,7 +2,7 @@
   "__inputs": [
     {
       "name": "DS_HMCI",
-      "label": "hmci",
+      "label": "Database",
       "description": "",
       "type": "datasource",
       "pluginId": "influxdb",

@@ -2,7 +2,7 @@
   "__inputs": [
     {
       "name": "DS_INFLUXDB",
-      "label": "InfluxDB",
+      "label": "Database",
       "description": "",
       "type": "datasource",
       "pluginId": "influxdb",
@@ -1,3 +1,3 @@
 projectId = hmci
 projectGroup = biz.nellemann.hmci
-projectVersion = 1.4.0
+projectVersion = 1.4.1
@@ -97,54 +97,45 @@ public final class InfluxClient {
     }


-    public void write(List<Measurement> measurements, Instant timestamp, String measurement) {
-        log.debug("write() - measurement: {} {}", measurement, measurements.size());
-        processMeasurementMap(measurements, timestamp, measurement).forEach( (point) -> { influxDB.write(point); });
+    public void write(List<Measurement> measurements, Instant timestamp, String name) {
+        log.debug("write() - measurement: {} {}", name, measurements.size());
+        processMeasurementMap(measurements, timestamp, name).forEach( (point) -> { influxDB.write(point); });
     }


-    private List<Point> processMeasurementMap(List<Measurement> measurements, Instant timestamp, String measurement) {
+    public void write(List<Measurement> measurements, String name) {
+        log.debug("write() - measurement: {} {}", name, measurements.size());
+        processMeasurementMap(measurements, name).forEach( (point) -> { influxDB.write(point); });
+    }
+
+
+    private List<Point> processMeasurementMap(List<Measurement> measurements, Instant timestamp, String name) {
         List<Point> listOfPoints = new ArrayList<>();
         measurements.forEach( (m) -> {

-            Point.Builder builder = Point.measurement(measurement)
+            Point.Builder builder = Point.measurement(name)
                 .time(timestamp.toEpochMilli(), TimeUnit.MILLISECONDS)
                 .tag(m.tags)
                 .fields(m.fields);

-            /*
-            // Iterate fields
-            m.fields.forEach((fieldName, fieldValue) -> {
-
-                log.info("processMeasurementMap() {} - fieldName: {}, fieldValue: {}", measurement, fieldName, fieldValue);
-                if(fieldValue instanceof Number) {
-                    Number num = (Number) fieldValue;
-                    builder.addField(fieldName, num);
-                } else if(fieldValue instanceof Boolean) {
-                    Boolean bol = (Boolean) fieldValue;
-                    builder.addField(fieldName, bol);
-                } else {
-                    String str = (String) fieldValue;
-                    builder.addField(fieldName, str);
-                }
-            });
-
-            // Iterate sorted tags
-            Map<String, String> sortedTags = new TreeMap<>(m.tags);
-            sortedTags.forEach((tagName, tagValue) -> {
-                log.info("processMeasurementMap() {} - tagName: {}, tagValue: {}", measurement, tagName, tagValue);
-                builder.tag(tagName, tagValue);
-            });
-            */
-            /*
-            if(m.fields.size() > 0 && m.tags.size() > 0) {
-                listOfPoints.add(builderbuilder.build());
-            }*/
-
             listOfPoints.add(builder.build());
         });

         return listOfPoints;
     }


+    private List<Point> processMeasurementMap(List<Measurement> measurements, String name) {
+        List<Point> listOfPoints = new ArrayList<>();
+        measurements.forEach( (m) -> {
+            log.trace("processMeasurementMap() - timestamp: {}, tags: {}, fields: {}", m.timestamp, m.tags, m.fields);
+            Point.Builder builder = Point.measurement(name)
+                .time(m.timestamp.toEpochMilli(), TimeUnit.MILLISECONDS)
+                .tag(m.tags)
+                .fields(m.fields);
+            listOfPoints.add(builder.build());
+        });
+        return listOfPoints;
+    }
+
 }
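The new two-argument processMeasurementMap() above relies on each Measurement carrying its own timestamp (m.timestamp). The dto change itself is not included in this excerpt; the following is a minimal sketch of the assumed shape, inferred only from how the class is used above and in the resources below.

```java
import java.time.Instant;
import java.util.Map;

// Assumed shape of the Measurement DTO after this commit - the actual class is not shown in this diff.
public class Measurement {
    final Instant timestamp;              // new: timestamp of the PCM sample this measurement came from
    final Map<String, String> tags;       // consumed by Point.Builder.tag(m.tags)
    final Map<String, Object> fields;     // consumed by Point.Builder.fields(m.fields)

    public Measurement(Instant timestamp, Map<String, String> tags, Map<String, Object> fields) {
        this.timestamp = timestamp;
        this.tags = tags;
        this.fields = fields;
    }
}
```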
@@ -36,8 +36,10 @@ class LogicalPartition extends Resource {
     private final static Logger log = LoggerFactory.getLogger(LogicalPartition.class);

     private final RestClient restClient;
+    private final InfluxClient influxClient;
     private final ManagedSystem managedSystem;


     protected String id;
     protected String name;
     protected LogicalPartitionEntry entry;

@@ -45,9 +47,10 @@ class LogicalPartition extends Resource {
     private String uriPath;


-    public LogicalPartition(RestClient restClient, String href, ManagedSystem managedSystem) throws URISyntaxException {
+    public LogicalPartition(RestClient restClient, InfluxClient influxClient, String href, ManagedSystem managedSystem) throws URISyntaxException {
         log.debug("LogicalPartition() - {}", href);
         this.restClient = restClient;
+        this.influxClient = influxClient;
         this.managedSystem = managedSystem;
         try {
             URI uri = new URI(href);

@@ -99,9 +102,9 @@ class LogicalPartition extends Resource {

     public void refresh() {

-        log.debug("refresh()");
+        log.debug("refresh() - {}", name);
         try {
-            String xml = restClient.getRequest(String.format("/rest/api/pcm/ManagedSystem/%s/LogicalPartition/%s/ProcessedMetrics?NoOfSamples=1", managedSystem.id, id));
+            String xml = restClient.getRequest(String.format("/rest/api/pcm/ManagedSystem/%s/LogicalPartition/%s/ProcessedMetrics?NoOfSamples=%d", managedSystem.id, id, currentNumberOfSamples));

             // Do not try to parse empty response
             if(xml == null || xml.length() <= 1) {

@@ -134,9 +137,22 @@ class LogicalPartition extends Resource {
     }


+    @Override
+    public void process(int sample) {
+        log.debug("process() - {} - sample: {}", name, sample);
+
+        influxClient.write(getDetails(sample),"lpar_details");
+        influxClient.write(getMemoryMetrics(sample),"lpar_memory");
+        influxClient.write(getProcessorMetrics(sample),"lpar_processor");
+        influxClient.write(getSriovLogicalPorts(sample),"lpar_net_sriov");
+        influxClient.write(getVirtualEthernetAdapterMetrics(sample),"lpar_net_virtual");
+        influxClient.write(getVirtualGenericAdapterMetrics(sample),"lpar_storage_virtual");
+        influxClient.write(getVirtualFibreChannelAdapterMetrics(sample),"lpar_storage_vFC");
+    }
+
+
     // LPAR Details
-    List<Measurement> getDetails() {
+    List<Measurement> getDetails(int sample) {

         List<Measurement> list = new ArrayList<>();

@@ -148,14 +164,14 @@ class LogicalPartition extends Resource {
             tagsMap.put("lparname", entry.getName());
             log.trace("getDetails() - tags: " + tagsMap);

-            fieldsMap.put("id", metric.getSample().lparsUtil.id);
-            fieldsMap.put("type", metric.getSample().lparsUtil.type);
-            fieldsMap.put("state", metric.getSample().lparsUtil.state);
-            fieldsMap.put("osType", metric.getSample().lparsUtil.osType);
-            fieldsMap.put("affinityScore", metric.getSample().lparsUtil.affinityScore);
+            fieldsMap.put("id", metric.getSample(sample).lparsUtil.id);
+            fieldsMap.put("type", metric.getSample(sample).lparsUtil.type);
+            fieldsMap.put("state", metric.getSample(sample).lparsUtil.state);
+            fieldsMap.put("osType", metric.getSample(sample).lparsUtil.osType);
+            fieldsMap.put("affinityScore", metric.getSample(sample).lparsUtil.affinityScore);
             log.trace("getDetails() - fields: " + fieldsMap);

-            list.add(new Measurement(tagsMap, fieldsMap));
+            list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
         } catch (Exception e) {
             log.warn("getDetails() - error: {}", e.getMessage());
         }

@@ -165,7 +181,7 @@ class LogicalPartition extends Resource {


     // LPAR Memory
-    List<Measurement> getMemoryMetrics() {
+    List<Measurement> getMemoryMetrics(int sample) {

         List<Measurement> list = new ArrayList<>();

@@ -177,11 +193,11 @@ class LogicalPartition extends Resource {
             tagsMap.put("lparname", entry.getName());
             log.trace("getMemoryMetrics() - tags: " + tagsMap);

-            fieldsMap.put("logicalMem", metric.getSample().lparsUtil.memory.logicalMem);
-            fieldsMap.put("backedPhysicalMem", metric.getSample().lparsUtil.memory.backedPhysicalMem);
+            fieldsMap.put("logicalMem", metric.getSample(sample).lparsUtil.memory.logicalMem);
+            fieldsMap.put("backedPhysicalMem", metric.getSample(sample).lparsUtil.memory.backedPhysicalMem);
             log.trace("getMemoryMetrics() - fields: " + fieldsMap);

-            list.add(new Measurement(tagsMap, fieldsMap));
+            list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
         } catch (Exception e) {
             log.warn("getMemoryMetrics() - error: {}", e.getMessage());
         }

@@ -190,7 +206,7 @@ class LogicalPartition extends Resource {


     // LPAR Processor
-    List<Measurement> getProcessorMetrics() {
+    List<Measurement> getProcessorMetrics(int sample) {

         List<Measurement> list = new ArrayList<>();

@@ -202,23 +218,23 @@ class LogicalPartition extends Resource {
             tagsMap.put("lparname", entry.getName());
             log.trace("getProcessorMetrics() - tags: " + tagsMap);

-            fieldsMap.put("utilizedProcUnits", metric.getSample().lparsUtil.processor.utilizedProcUnits);
-            fieldsMap.put("entitledProcUnits", metric.getSample().lparsUtil.processor.entitledProcUnits);
-            fieldsMap.put("donatedProcUnits", metric.getSample().lparsUtil.processor.donatedProcUnits);
-            fieldsMap.put("idleProcUnits", metric.getSample().lparsUtil.processor.idleProcUnits);
-            fieldsMap.put("maxProcUnits", metric.getSample().lparsUtil.processor.maxProcUnits);
-            fieldsMap.put("maxVirtualProcessors", metric.getSample().lparsUtil.processor.maxVirtualProcessors);
-            fieldsMap.put("currentVirtualProcessors", metric.getSample().lparsUtil.processor.currentVirtualProcessors);
-            fieldsMap.put("utilizedCappedProcUnits", metric.getSample().lparsUtil.processor.utilizedCappedProcUnits);
-            fieldsMap.put("utilizedUncappedProcUnits", metric.getSample().lparsUtil.processor.utilizedUncappedProcUnits);
-            fieldsMap.put("timePerInstructionExecution", metric.getSample().lparsUtil.processor.timeSpentWaitingForDispatch);
-            fieldsMap.put("timeSpentWaitingForDispatch", metric.getSample().lparsUtil.processor.timePerInstructionExecution);
-            fieldsMap.put("mode", metric.getSample().lparsUtil.processor.mode);
-            fieldsMap.put("weight", metric.getSample().lparsUtil.processor.weight);
-            fieldsMap.put("poolId", metric.getSample().lparsUtil.processor.poolId);
+            fieldsMap.put("utilizedProcUnits", metric.getSample(sample).lparsUtil.processor.utilizedProcUnits);
+            fieldsMap.put("entitledProcUnits", metric.getSample(sample).lparsUtil.processor.entitledProcUnits);
+            fieldsMap.put("donatedProcUnits", metric.getSample(sample).lparsUtil.processor.donatedProcUnits);
+            fieldsMap.put("idleProcUnits", metric.getSample(sample).lparsUtil.processor.idleProcUnits);
+            fieldsMap.put("maxProcUnits", metric.getSample(sample).lparsUtil.processor.maxProcUnits);
+            fieldsMap.put("maxVirtualProcessors", metric.getSample(sample).lparsUtil.processor.maxVirtualProcessors);
+            fieldsMap.put("currentVirtualProcessors", metric.getSample(sample).lparsUtil.processor.currentVirtualProcessors);
+            fieldsMap.put("utilizedCappedProcUnits", metric.getSample(sample).lparsUtil.processor.utilizedCappedProcUnits);
+            fieldsMap.put("utilizedUncappedProcUnits", metric.getSample(sample).lparsUtil.processor.utilizedUncappedProcUnits);
+            fieldsMap.put("timePerInstructionExecution", metric.getSample(sample).lparsUtil.processor.timeSpentWaitingForDispatch);
+            fieldsMap.put("timeSpentWaitingForDispatch", metric.getSample(sample).lparsUtil.processor.timePerInstructionExecution);
+            fieldsMap.put("mode", metric.getSample(sample).lparsUtil.processor.mode);
+            fieldsMap.put("weight", metric.getSample(sample).lparsUtil.processor.weight);
+            fieldsMap.put("poolId", metric.getSample(sample).lparsUtil.processor.poolId);
             log.trace("getProcessorMetrics() - fields: " + fieldsMap);

-            list.add(new Measurement(tagsMap, fieldsMap));
+            list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
         } catch (Exception e) {
             log.warn("getProcessorMetrics() - error: {}", e.getMessage());
         }
@@ -228,12 +244,12 @@ class LogicalPartition extends Resource {


     // LPAR Network - Virtual
-    List<Measurement> getVirtualEthernetAdapterMetrics() {
+    List<Measurement> getVirtualEthernetAdapterMetrics(int sample) {

         List<Measurement> list = new ArrayList<>();

         try {
-            metric.getSample().lparsUtil.network.virtualEthernetAdapters.forEach(adapter -> {
+            metric.getSample(sample).lparsUtil.network.virtualEthernetAdapters.forEach(adapter -> {

                 HashMap<String, String> tagsMap = new HashMap<>();
                 HashMap<String, Object> fieldsMap = new HashMap<>();

@@ -262,7 +278,7 @@ class LogicalPartition extends Resource {
                 fieldsMap.put("sharedEthernetAdapterId", adapter.sharedEthernetAdapterId);
                 log.trace("getVirtualEthernetAdapterMetrics() - fields: " + fieldsMap);

-                list.add(new Measurement(tagsMap, fieldsMap));
+                list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
             });
         } catch (Exception e) {
             log.warn("getVirtualEthernetAdapterMetrics() - error: {}", e.getMessage());

@@ -273,12 +289,12 @@ class LogicalPartition extends Resource {


     // LPAR Storage - Virtual Generic
-    List<Measurement> getVirtualGenericAdapterMetrics() {
+    List<Measurement> getVirtualGenericAdapterMetrics(int sample) {

         List<Measurement> list = new ArrayList<>();

         try {
-            metric.getSample().lparsUtil.storage.genericVirtualAdapters.forEach(adapter -> {
+            metric.getSample(sample).lparsUtil.storage.genericVirtualAdapters.forEach(adapter -> {

                 HashMap<String, String> tagsMap = new HashMap<>();
                 HashMap<String, Object> fieldsMap = new HashMap<>();

@@ -297,7 +313,7 @@ class LogicalPartition extends Resource {
                 fieldsMap.put("type", adapter.type);
                 log.trace("getVirtualGenericAdapterMetrics() - fields: " + fieldsMap);

-                list.add(new Measurement(tagsMap, fieldsMap));
+                list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
             });
         } catch (Exception e) {
             log.warn("getVirtualGenericAdapterMetrics() - error: {}", e.getMessage());

@@ -308,12 +324,12 @@ class LogicalPartition extends Resource {
     }

     // LPAR Storage - Virtual FC
-    List<Measurement> getVirtualFibreChannelAdapterMetrics() {
+    List<Measurement> getVirtualFibreChannelAdapterMetrics(int sample) {

         List<Measurement> list = new ArrayList<>();

         try {
-            metric.getSample().lparsUtil.storage.virtualFiberChannelAdapters.forEach(adapter -> {
+            metric.getSample(sample).lparsUtil.storage.virtualFiberChannelAdapters.forEach(adapter -> {

                 HashMap<String, String> tagsMap = new HashMap<>();
                 HashMap<String, Object> fieldsMap = new HashMap<>();
|
@ -330,10 +346,9 @@ class LogicalPartition extends Resource {
|
||||||
fieldsMap.put("readBytes", adapter.readBytes);
|
fieldsMap.put("readBytes", adapter.readBytes);
|
||||||
fieldsMap.put("runningSpeed", adapter.runningSpeed);
|
fieldsMap.put("runningSpeed", adapter.runningSpeed);
|
||||||
fieldsMap.put("transmittedBytes", adapter.transmittedBytes);
|
fieldsMap.put("transmittedBytes", adapter.transmittedBytes);
|
||||||
fieldsMap.put("transferredByte", adapter.transmittedBytes); // TODO: Must be error in dashboard, remove when checked.
|
|
||||||
log.trace("getVirtualFibreChannelAdapterMetrics() - fields: " + fieldsMap);
|
log.trace("getVirtualFibreChannelAdapterMetrics() - fields: " + fieldsMap);
|
||||||
|
|
||||||
list.add(new Measurement(tagsMap, fieldsMap));
|
list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
|
||||||
});
|
});
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
log.warn("getVirtualFibreChannelAdapterMetrics() - error: {}", e.getMessage());
|
log.warn("getVirtualFibreChannelAdapterMetrics() - error: {}", e.getMessage());
|
||||||
|
@ -344,12 +359,12 @@ class LogicalPartition extends Resource {
|
||||||
|
|
||||||
|
|
||||||
// LPAR Network - SR-IOV Logical Ports
|
// LPAR Network - SR-IOV Logical Ports
|
||||||
List<Measurement> getSriovLogicalPorts() {
|
List<Measurement> getSriovLogicalPorts(int sample) {
|
||||||
|
|
||||||
List<Measurement> list = new ArrayList<>();
|
List<Measurement> list = new ArrayList<>();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
metric.getSample().lparsUtil.network.sriovLogicalPorts.forEach(port -> {
|
metric.getSample(sample).lparsUtil.network.sriovLogicalPorts.forEach(port -> {
|
||||||
|
|
||||||
HashMap<String, String> tagsMap = new HashMap<>();
|
HashMap<String, String> tagsMap = new HashMap<>();
|
||||||
HashMap<String, Object> fieldsMap = new HashMap<>();
|
HashMap<String, Object> fieldsMap = new HashMap<>();
|
||||||
|
@ -370,7 +385,7 @@ class LogicalPartition extends Resource {
|
||||||
fieldsMap.put("errorOut", port.errorOut);
|
fieldsMap.put("errorOut", port.errorOut);
|
||||||
log.trace("getSriovLogicalPorts() - fields: " + fieldsMap);
|
log.trace("getSriovLogicalPorts() - fields: " + fieldsMap);
|
||||||
|
|
||||||
list.add(new Measurement(tagsMap, fieldsMap));
|
list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
|
||||||
});
|
});
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
log.warn("getSriovLogicalPorts() - error: {}", e.getMessage());
|
log.warn("getSriovLogicalPorts() - error: {}", e.getMessage());
|
||||||
|
@ -378,4 +393,6 @@ class LogicalPartition extends Resource {
|
||||||
|
|
||||||
return list;
|
return list;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
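For illustration only: the refresh() change above means the ProcessedMetrics request now asks the HMC for several samples in one call. With placeholder IDs and an assumed sample count of 6 (none of these values appear in the commit), the formatted path would look like this:

```java
// Illustration only - "SYS_ID", "LPAR_ID" and the count 6 are made-up placeholder values.
String path = String.format(
        "/rest/api/pcm/ManagedSystem/%s/LogicalPartition/%s/ProcessedMetrics?NoOfSamples=%d",
        "SYS_ID", "LPAR_ID", 6);
// path == "/rest/api/pcm/ManagedSystem/SYS_ID/LogicalPartition/LPAR_ID/ProcessedMetrics?NoOfSamples=6"
```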
@@ -17,8 +17,6 @@ package biz.nellemann.hmci;

 import biz.nellemann.hmci.dto.xml.*;
 import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.SerializationFeature;
 import com.fasterxml.jackson.dataformat.xml.XmlMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -39,6 +37,8 @@ class ManagedSystem extends Resource {
     private List<String> includePartitions = new ArrayList<>();

     private final RestClient restClient;
+    private final InfluxClient influxClient;
+

     protected ManagedSystemEntry entry;

@@ -52,9 +52,10 @@ class ManagedSystem extends Resource {
     public String id;


-    public ManagedSystem(RestClient restClient, String href) {
+    public ManagedSystem(RestClient restClient, InfluxClient influxClient, String href) {
         log.debug("ManagedSystem() - {}", href);
         this.restClient = restClient;
+        this.influxClient = influxClient;
         try {
             URI uri = new URI(href);
             uriPath = uri.getPath();

@@ -89,6 +90,7 @@ class ManagedSystem extends Resource {
             setPcmPreference();
         }

+        systemEnergy = new SystemEnergy(restClient, influxClient, this);
     }

@@ -122,7 +124,7 @@ class ManagedSystem extends Resource {

         logicalPartitions.clear();
         for (Link link : this.entry.getAssociatedLogicalPartitions()) {
-            LogicalPartition logicalPartition = new LogicalPartition(restClient, link.getHref(), this);
+            LogicalPartition logicalPartition = new LogicalPartition(restClient, influxClient, link.getHref(), this);
             logicalPartition.discover();
             if(Objects.equals(logicalPartition.entry.partitionState, "running")) {
                 // Check exclude / include

@@ -152,9 +154,9 @@ class ManagedSystem extends Resource {

     public void refresh() {

-        log.debug("refresh()");
+        log.debug("refresh() - {}", name);
         try {
-            String xml = restClient.getRequest(String.format("/rest/api/pcm/ManagedSystem/%s/ProcessedMetrics?NoOfSamples=1", id));
+            String xml = restClient.getRequest(String.format("/rest/api/pcm/ManagedSystem/%s/ProcessedMetrics?NoOfSamples=%d", id, currentNumberOfSamples));

             // Do not try to parse empty response
             if(xml == null || xml.length() <= 1) {

@@ -180,6 +182,12 @@ class ManagedSystem extends Resource {
                 }
             });

+            if(systemEnergy != null) {
+                systemEnergy.refresh();
+            }
+
+            logicalPartitions.forEach(LogicalPartition::refresh);
+
         } catch (JsonParseException e) {
             log.warn("refresh() - parse error for: {}", name);
             metric = null;
@@ -190,6 +198,38 @@ class ManagedSystem extends Resource {

     }


+    @Override
+    public void process(int sample) {
+
+        log.debug("process() - {} - sample: {}", name, sample);
+
+        influxClient.write(getDetails(sample),"server_details");
+        influxClient.write(getMemoryMetrics(sample),"server_memory");
+        influxClient.write(getProcessorMetrics(sample), "server_processor");
+        influxClient.write(getPhysicalProcessorPool(sample),"server_physicalProcessorPool");
+        influxClient.write(getSharedProcessorPools(sample),"server_sharedProcessorPool");
+        if(systemEnergy != null) {
+            systemEnergy.process();
+        }
+
+        influxClient.write(getVioDetails(sample),"vios_details");
+        influxClient.write(getVioProcessorMetrics(sample),"vios_processor");
+        influxClient.write(getVioMemoryMetrics(sample),"vios_memory");
+        influxClient.write(getVioNetworkLpars(sample),"vios_network_lpars");
+        influxClient.write(getVioNetworkVirtualAdapters(sample),"vios_network_virtual");
+        influxClient.write(getVioNetworkSharedAdapters(sample),"vios_network_shared");
+        influxClient.write(getVioNetworkGenericAdapters(sample),"vios_network_generic");
+        influxClient.write(getVioStorageLpars(sample),"vios_storage_lpars");
+        influxClient.write(getVioStorageFiberChannelAdapters(sample),"vios_storage_FC");
+        influxClient.write(getVioStorageVirtualAdapters(sample),"vios_storage_vFC");
+        influxClient.write(getVioStoragePhysicalAdapters(sample),"vios_storage_physical");
+        // Missing: vios_storage_SSP
+
+        logicalPartitions.forEach(Resource::process);
+    }
+
+
     public void setPcmPreference() {
         log.info("setPcmPreference()");

@@ -250,10 +290,9 @@ class ManagedSystem extends Resource {


     // System details
-    List<Measurement> getDetails() {
+    List<Measurement> getDetails(int sample) {

         List<Measurement> list = new ArrayList<>();

         try {
             Map<String, String> tagsMap = new TreeMap<>();
             Map<String, Object> fieldsMap = new TreeMap<>();

@@ -271,11 +310,12 @@ class ManagedSystem extends Resource {
             fieldsMap.put("frequency", metric.getUtilInfo().frequency);
             fieldsMap.put("nextract", "HMCi");
             fieldsMap.put("name", entry.getName());
-            fieldsMap.put("utilizedProcUnits", metric.getSample().systemFirmwareUtil.utilizedProcUnits);
-            fieldsMap.put("assignedMem", metric.getSample().systemFirmwareUtil.assignedMem);
+            fieldsMap.put("utilizedProcUnits", metric.getSample(sample).systemFirmwareUtil.utilizedProcUnits);
+            fieldsMap.put("assignedMem", metric.getSample(sample).systemFirmwareUtil.assignedMem);
             log.trace("getDetails() - fields: " + fieldsMap);

-            list.add(new Measurement(tagsMap, fieldsMap));
+
+            list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
         } catch (Exception e) {
             log.warn("getDetails() - error: {}", e.getMessage());
         }

@@ -285,10 +325,9 @@ class ManagedSystem extends Resource {


     // System Memory
-    List<Measurement> getMemoryMetrics() {
+    List<Measurement> getMemoryMetrics(int sample) {

         List<Measurement> list = new ArrayList<>();

         try {
             HashMap<String, String> tagsMap = new HashMap<>();
             Map<String, Object> fieldsMap = new HashMap<>();

@@ -296,14 +335,14 @@ class ManagedSystem extends Resource {
             tagsMap.put("servername", entry.getName());
             log.trace("getMemoryMetrics() - tags: " + tagsMap);

-            fieldsMap.put("totalMem", metric.getSample().serverUtil.memory.totalMem);
-            fieldsMap.put("availableMem", metric.getSample().serverUtil.memory.availableMem);
-            fieldsMap.put("configurableMem", metric.getSample().serverUtil.memory.configurableMem);
-            fieldsMap.put("assignedMemToLpars", metric.getSample().serverUtil.memory.assignedMemToLpars);
-            fieldsMap.put("virtualPersistentMem", metric.getSample().serverUtil.memory.virtualPersistentMem);
+            fieldsMap.put("totalMem", metric.getSample(sample).serverUtil.memory.totalMem);
+            fieldsMap.put("availableMem", metric.getSample(sample).serverUtil.memory.availableMem);
+            fieldsMap.put("configurableMem", metric.getSample(sample).serverUtil.memory.configurableMem);
+            fieldsMap.put("assignedMemToLpars", metric.getSample(sample).serverUtil.memory.assignedMemToLpars);
+            fieldsMap.put("virtualPersistentMem", metric.getSample(sample).serverUtil.memory.virtualPersistentMem);
             log.trace("getMemoryMetrics() - fields: " + fieldsMap);

-            list.add(new Measurement(tagsMap, fieldsMap));
+            list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
         } catch (Exception e) {
             log.warn("getMemoryMetrics() - error: {}", e.getMessage());
         }
@@ -313,10 +352,9 @@ class ManagedSystem extends Resource {


     // System Processor
-    List<Measurement> getProcessorMetrics() {
+    List<Measurement> getProcessorMetrics(int sample) {

         List<Measurement> list = new ArrayList<>();

         try {
             HashMap<String, String> tagsMap = new HashMap<>();
             HashMap<String, Object> fieldsMap = new HashMap<>();

@@ -324,13 +362,13 @@ class ManagedSystem extends Resource {
             tagsMap.put("servername", entry.getName());
             log.trace("getProcessorMetrics() - tags: " + tagsMap);

-            fieldsMap.put("totalProcUnits", metric.getSample().serverUtil.processor.totalProcUnits);
-            fieldsMap.put("utilizedProcUnits", metric.getSample().serverUtil.processor.utilizedProcUnits);
-            fieldsMap.put("availableProcUnits", metric.getSample().serverUtil.processor.availableProcUnits);
-            fieldsMap.put("configurableProcUnits", metric.getSample().serverUtil.processor.configurableProcUnits);
+            fieldsMap.put("totalProcUnits", metric.getSample(sample).serverUtil.processor.totalProcUnits);
+            fieldsMap.put("utilizedProcUnits", metric.getSample(sample).serverUtil.processor.utilizedProcUnits);
+            fieldsMap.put("availableProcUnits", metric.getSample(sample).serverUtil.processor.availableProcUnits);
+            fieldsMap.put("configurableProcUnits", metric.getSample(sample).serverUtil.processor.configurableProcUnits);
             log.trace("getProcessorMetrics() - fields: " + fieldsMap);

-            list.add(new Measurement(tagsMap, fieldsMap));
+            list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
         } catch (Exception e) {
             log.warn("getProcessorMetrics() - error: {}", e.getMessage());
         }

@@ -339,12 +377,11 @@ class ManagedSystem extends Resource {
         }

     // Sytem Shared ProcessorPools
-    List<Measurement> getSharedProcessorPools() {
+    List<Measurement> getSharedProcessorPools(int sample) {

         List<Measurement> list = new ArrayList<>();
         try {
-            metric.getSample().serverUtil.sharedProcessorPool.forEach(sharedProcessorPool -> {
+            metric.getSample(sample).serverUtil.sharedProcessorPool.forEach(sharedProcessorPool -> {
                 HashMap<String, String> tagsMap = new HashMap<>();
                 HashMap<String, Object> fieldsMap = new HashMap<>();

@@ -360,7 +397,7 @@ class ManagedSystem extends Resource {
                 fieldsMap.put("configuredProcUnits", sharedProcessorPool.configuredProcUnits);
                 log.trace("getSharedProcessorPools() - fields: " + fieldsMap);

-                list.add(new Measurement(tagsMap, fieldsMap));
+                list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
             });
         } catch (Exception e) {
             log.warn("getSharedProcessorPools() - error: {}", e.getMessage());

@@ -371,10 +408,9 @@ class ManagedSystem extends Resource {
     }

     // System Physical ProcessorPool
-    List<Measurement> getPhysicalProcessorPool() {
+    List<Measurement> getPhysicalProcessorPool(int sample) {

         List<Measurement> list = new ArrayList<>();

         try {
             HashMap<String, String> tagsMap = new HashMap<>();
             HashMap<String, Object> fieldsMap = new HashMap<>();

@@ -382,14 +418,14 @@ class ManagedSystem extends Resource {
             tagsMap.put("servername", entry.getName());
             log.trace("getPhysicalProcessorPool() - tags: " + tagsMap);

-            fieldsMap.put("assignedProcUnits", metric.getSample().serverUtil.physicalProcessorPool.assignedProcUnits);
-            fieldsMap.put("availableProcUnits", metric.getSample().serverUtil.physicalProcessorPool.availableProcUnits);
-            fieldsMap.put("utilizedProcUnits", metric.getSample().serverUtil.physicalProcessorPool.utilizedProcUnits);
-            fieldsMap.put("configuredProcUnits", metric.getSample().serverUtil.physicalProcessorPool.configuredProcUnits);
-            fieldsMap.put("borrowedProcUnits", metric.getSample().serverUtil.physicalProcessorPool.borrowedProcUnits);
+            fieldsMap.put("assignedProcUnits", metric.getSample(sample).serverUtil.physicalProcessorPool.assignedProcUnits);
+            fieldsMap.put("availableProcUnits", metric.getSample(sample).serverUtil.physicalProcessorPool.availableProcUnits);
+            fieldsMap.put("utilizedProcUnits", metric.getSample(sample).serverUtil.physicalProcessorPool.utilizedProcUnits);
+            fieldsMap.put("configuredProcUnits", metric.getSample(sample).serverUtil.physicalProcessorPool.configuredProcUnits);
+            fieldsMap.put("borrowedProcUnits", metric.getSample(sample).serverUtil.physicalProcessorPool.borrowedProcUnits);
             log.trace("getPhysicalProcessorPool() - fields: " + fieldsMap);

-            list.add(new Measurement(tagsMap, fieldsMap));
+            list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
         } catch (Exception e) {
             log.warn("getPhysicalProcessorPool() - error: {}", e.getMessage());
         }
@@ -404,12 +440,11 @@ class ManagedSystem extends Resource {


     // VIO Details
-    List<Measurement> getVioDetails() {
+    List<Measurement> getVioDetails(int sample) {

         List<Measurement> list = new ArrayList<>();

         try {
-            metric.getSample().viosUtil.forEach(vio -> {
+            metric.getSample(sample).viosUtil.forEach(vio -> {

                 HashMap<String, String> tagsMap = new HashMap<>();
                 HashMap<String, Object> fieldsMap = new HashMap<>();

@@ -424,7 +459,7 @@ class ManagedSystem extends Resource {
                 fieldsMap.put("affinityScore", vio.affinityScore);
                 log.trace("getVioDetails() - fields: " + fieldsMap);

-                list.add(new Measurement(tagsMap, fieldsMap));
+                list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
             });
         } catch (Exception e) {
             log.warn("getVioDetails() - error: {}", e.getMessage());

@@ -435,11 +470,11 @@ class ManagedSystem extends Resource {


     // VIO Memory
-    List<Measurement> getVioMemoryMetrics() {
+    List<Measurement> getVioMemoryMetrics(int sample) {

         List<Measurement> list = new ArrayList<>();
         try {
-            metric.getSample().viosUtil.forEach(vio -> {
+            metric.getSample(sample).viosUtil.forEach(vio -> {

                 HashMap<String, String> tagsMap = new HashMap<>();
                 HashMap<String, Object> fieldsMap = new HashMap<>();

@@ -456,7 +491,7 @@ class ManagedSystem extends Resource {
                 fieldsMap.put("utilizedPct", usedMemPct.floatValue());
                 log.trace("getVioMemoryMetrics() - fields: " + fieldsMap);

-                list.add(new Measurement(tagsMap, fieldsMap));
+                list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
             });
         } catch (Exception e) {
             log.warn("getVioMemoryMetrics() - error: {}", e.getMessage());

@@ -467,11 +502,11 @@ class ManagedSystem extends Resource {


     // VIO Processor
-    List<Measurement> getVioProcessorMetrics() {
+    List<Measurement> getVioProcessorMetrics(int sample) {

         List<Measurement> list = new ArrayList<>();
         try {
-            metric.getSample().viosUtil.forEach(vio -> {
+            metric.getSample(sample).viosUtil.forEach(vio -> {

                 HashMap<String, String> tagsMap = new HashMap<>();
                 HashMap<String, Object> fieldsMap = new HashMap<>();

@@ -495,7 +530,7 @@ class ManagedSystem extends Resource {
                 fieldsMap.put("mode", vio.processor.mode);
                 log.trace("getVioProcessorMetrics() - fields: " + fieldsMap);

-                list.add(new Measurement(tagsMap, fieldsMap));
+                list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
             });
         } catch (Exception e) {
             log.warn("getVioProcessorMetrics() - error: {}", e.getMessage());

@@ -506,11 +541,11 @@ class ManagedSystem extends Resource {


     // VIOs - Network
-    List<Measurement> getVioNetworkLpars() {
+    List<Measurement> getVioNetworkLpars(int sample) {

         List<Measurement> list = new ArrayList<>();
         try {
-            metric.getSample().viosUtil.forEach(vio -> {
+            metric.getSample(sample).viosUtil.forEach(vio -> {

                 HashMap<String, String> tagsMap = new HashMap<>();
                 HashMap<String, Object> fieldsMap = new HashMap<>();
@@ -522,7 +557,7 @@ class ManagedSystem extends Resource {
                 fieldsMap.put("clientlpars", vio.network.clientLpars.size());
                 log.trace("getVioNetworkLpars() - fields: " + fieldsMap);

-                list.add(new Measurement(tagsMap, fieldsMap));
+                list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
             });

         } catch (Exception e) {

@@ -534,11 +569,11 @@ class ManagedSystem extends Resource {


     // VIO Network - Shared
-    List<Measurement> getVioNetworkSharedAdapters() {
+    List<Measurement> getVioNetworkSharedAdapters(int sample) {

         List<Measurement> list = new ArrayList<>();
         try {
-            metric.getSample().viosUtil.forEach(vio -> {
+            metric.getSample(sample).viosUtil.forEach(vio -> {
                 vio.network.sharedAdapters.forEach(adapter -> {
                     HashMap<String, String> tagsMap = new HashMap<>();
                     HashMap<String, Object> fieldsMap = new HashMap<>();

@@ -559,7 +594,7 @@ class ManagedSystem extends Resource {
                     fieldsMap.put("transferredBytes", adapter.transferredBytes);
                     log.trace("getVioNetworkSharedAdapters() - fields: " + fieldsMap);

-                    list.add(new Measurement(tagsMap, fieldsMap));
+                    list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
                 });
             });
         } catch (Exception e) {

@@ -571,12 +606,11 @@ class ManagedSystem extends Resource {


     // VIO Network - Virtual
-    List<Measurement> getVioNetworkVirtualAdapters() {
+    List<Measurement> getVioNetworkVirtualAdapters(int sample) {

         List<Measurement> list = new ArrayList<>();

         try {
-            metric.getSample().viosUtil.forEach( vio -> {
+            metric.getSample(sample).viosUtil.forEach( vio -> {
                 vio.network.virtualEthernetAdapters.forEach( adapter -> {

                     HashMap<String, String> tagsMap = new HashMap<>();

@@ -604,7 +638,7 @@ class ManagedSystem extends Resource {
                     fieldsMap.put("transferredPhysicalBytes", adapter.transferredPhysicalBytes);
                     log.trace("getVioNetworkVirtualAdapters() - fields: " + fieldsMap);

-                    list.add(new Measurement(tagsMap, fieldsMap));
+                    list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
                 });
             });
         } catch (Exception e) {

@@ -616,11 +650,11 @@ class ManagedSystem extends Resource {


     // VIO Network - Generic
-    List<Measurement> getVioNetworkGenericAdapters() {
+    List<Measurement> getVioNetworkGenericAdapters(int sample) {

         List<Measurement> list = new ArrayList<>();
         try {
-            metric.getSample().viosUtil.forEach( vio -> {
+            metric.getSample(sample).viosUtil.forEach( vio -> {
                 vio.network.genericAdapters.forEach( adapter -> {

                     HashMap<String, String> tagsMap = new HashMap<>();

@@ -640,7 +674,7 @@ class ManagedSystem extends Resource {
                     fieldsMap.put("transferredBytes", adapter.transferredBytes);
                     log.trace("getVioNetworkGenericAdapters() - fields: " + fieldsMap);

-                    list.add(new Measurement(tagsMap, fieldsMap));
+                    list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
                 });
             });
         } catch (Exception e) {
@@ -651,11 +685,11 @@ class ManagedSystem extends Resource {
     }

     // VIOs - Storage
-    List<Measurement> getVioStorageLpars() {
+    List<Measurement> getVioStorageLpars(int sample) {

         List<Measurement> list = new ArrayList<>();
         try {
-            metric.getSample().viosUtil.forEach(vio -> {
+            metric.getSample(sample).viosUtil.forEach(vio -> {

                 HashMap<String, String> tagsMap = new HashMap<>();
                 HashMap<String, Object> fieldsMap = new HashMap<>();

@@ -667,7 +701,7 @@ class ManagedSystem extends Resource {
                 fieldsMap.put("clientlpars", vio.storage.clientLpars.size());
                 log.trace("getVioStorageLpars() - fields: " + fieldsMap);

-                list.add(new Measurement(tagsMap, fieldsMap));
+                list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
             });
         } catch (Exception e) {
             log.warn("getVioStorageLpars() - error: {}", e.getMessage());

@@ -677,11 +711,11 @@ class ManagedSystem extends Resource {
     }

     // VIO Storage FC
-    List<Measurement> getVioStorageFiberChannelAdapters() {
+    List<Measurement> getVioStorageFiberChannelAdapters(int sample) {

         List<Measurement> list = new ArrayList<>();
         try {
-            metric.getSample().viosUtil.forEach( vio -> {
+            metric.getSample(sample).viosUtil.forEach( vio -> {
                 log.trace("getVioStorageFiberChannelAdapters() - VIO: " + vio.name);

                 vio.storage.fiberChannelAdapters.forEach( adapter -> {

@@ -702,7 +736,7 @@ class ManagedSystem extends Resource {
                     fieldsMap.put("transmittedBytes", adapter.transmittedBytes);
                     log.trace("getVioStorageFiberChannelAdapters() - fields: " + fieldsMap);

-                    list.add(new Measurement(tagsMap, fieldsMap));
+                    list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
                 });

             });

@@ -716,11 +750,11 @@ class ManagedSystem extends Resource {


     // VIO Storage - Physical
-    List<Measurement> getVioStoragePhysicalAdapters() {
+    List<Measurement> getVioStoragePhysicalAdapters(int sample) {

         List<Measurement> list = new ArrayList<>();
         try {
-            metric.getSample().viosUtil.forEach( vio -> {
+            metric.getSample(sample).viosUtil.forEach( vio -> {
                 log.trace("getVioStoragePhysicalAdapters() - VIO: " + vio.name);

                 vio.storage.genericPhysicalAdapters.forEach( adapter -> {

@@ -742,7 +776,7 @@ class ManagedSystem extends Resource {
                     fieldsMap.put("type", adapter.type);
                     log.trace("getVioStoragePhysicalAdapters() - fields: " + fieldsMap);

-                    list.add(new Measurement(tagsMap, fieldsMap));
+                    list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
                 });
             });
         } catch (Exception e) {

@@ -754,11 +788,11 @@ class ManagedSystem extends Resource {


     // VIO Storage - Virtual
-    List<Measurement> getVioStorageVirtualAdapters() {
+    List<Measurement> getVioStorageVirtualAdapters(int sample) {

         List<Measurement> list = new ArrayList<>();
         try {
-            metric.getSample().viosUtil.forEach( (vio) -> {
+            metric.getSample(sample).viosUtil.forEach( (vio) -> {
                 vio.storage.genericVirtualAdapters.forEach( (adapter) -> {
                     HashMap<String, String> tagsMap = new HashMap<>();
                     HashMap<String, Object> fieldsMap = new HashMap<>();
@ -777,7 +811,7 @@ class ManagedSystem extends Resource {
|
||||||
fieldsMap.put("type", adapter.type);
|
fieldsMap.put("type", adapter.type);
|
||||||
log.debug("getVioStorageVirtualAdapters() - fields: " + fieldsMap);
|
log.debug("getVioStorageVirtualAdapters() - fields: " + fieldsMap);
|
||||||
|
|
||||||
list.add(new Measurement(tagsMap, fieldsMap));
|
list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
|
@ -790,10 +824,10 @@ class ManagedSystem extends Resource {
|
||||||
|
|
||||||
/*
|
/*
|
||||||
// VIO Storage SSP TODO
|
// VIO Storage SSP TODO
|
||||||
List<Measurement> getViosStorageSharedStoragePools() {
|
List<Measurement> getViosStorageSharedStoragePools(int sample) {
|
||||||
|
|
||||||
List<Measurement> list = new ArrayList<>();
|
List<Measurement> list = new ArrayList<>();
|
||||||
metrics.systemUtil.sample.viosUtil.forEach( vios -> {
|
metrics.systemUtil.getSample(sample).viosUtil.forEach( vios -> {
|
||||||
|
|
||||||
vios.storage.fiberChannelAdapters.forEach( adapter -> {
|
vios.storage.fiberChannelAdapters.forEach( adapter -> {
|
||||||
|
|
||||||
|
@ -813,7 +847,7 @@ class ManagedSystem extends Resource {
|
||||||
fieldsMap.put("physicalLocation", adapter.physicalLocation);
|
fieldsMap.put("physicalLocation", adapter.physicalLocation);
|
||||||
log.trace("getViosStorageSharedStoragePools() - fields: " + fieldsMap.toString());
|
log.trace("getViosStorageSharedStoragePools() - fields: " + fieldsMap.toString());
|
||||||
|
|
||||||
list.add(new Measurement(tagsMap, fieldsMap));
|
list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
|
||||||
});
|
});
|
||||||
|
|
||||||
log.trace("getViosStorageSharedStoragePools() - VIOS: " + vios.name);
|
log.trace("getViosStorageSharedStoragePools() - VIOS: " + vios.name);
|
||||||
|
|
|
@@ -61,19 +61,6 @@ class ManagementConsole implements Runnable {
        this.influxClient = influxClient;
        restClient = new RestClient(configuration.url, configuration.username, configuration.password, configuration.trust);

-        if(configuration.trace != null) {
-            try {
-                File traceDir = new File(configuration.trace);
-                traceDir.mkdirs();
-                if(traceDir.canWrite()) {
-                    Boolean doTrace = true;
-                } else {
-                    log.warn("ManagementConsole() - can't write to trace dir: " + traceDir.toString());
-                }
-            } catch (Exception e) {
-                log.error("ManagementConsole() - trace error: " + e.getMessage());
-            }
-        }
        this.excludeSystems = configuration.excludeSystems;
        this.includeSystems = configuration.includeSystems;
        this.excludePartitions = configuration.excludePartitions;

@@ -160,7 +147,7 @@ class ManagementConsole implements Runnable {

        managedSystems.clear();
        for (Link link : entry.getAssociatedManagedSystems()) {
-            ManagedSystem managedSystem = new ManagedSystem(restClient, link.getHref());
+            ManagedSystem managedSystem = new ManagedSystem(restClient, influxClient, link.getHref());
            managedSystem.setExcludePartitions(excludePartitions);
            managedSystem.setIncludePartitions(includePartitions);
            managedSystem.discover();

@@ -202,63 +189,10 @@ class ManagementConsole implements Runnable {
            }

            system.refresh();
-            influxClient.write(system.getDetails(), system.getTimestamp(),"server_details");
-            influxClient.write(system.getMemoryMetrics(), system.getTimestamp(),"server_memory");
-            influxClient.write(system.getProcessorMetrics(), system.getTimestamp(),"server_processor");
-            influxClient.write(system.getPhysicalProcessorPool(), system.getTimestamp(),"server_physicalProcessorPool");
-            influxClient.write(system.getSharedProcessorPools(), system.getTimestamp(),"server_sharedProcessorPool");
-
-            if(system.systemEnergy != null) {
-                system.systemEnergy.refresh();
-                if(system.systemEnergy.metric != null) {
-                    influxClient.write(system.systemEnergy.getPowerMetrics(), system.getTimestamp(), "server_energy_power");
-                    influxClient.write(system.systemEnergy.getThermalMetrics(), system.getTimestamp(), "server_energy_thermal");
-                }
-            }
-
-            influxClient.write(system.getVioDetails(), system.getTimestamp(),"vios_details");
-            influxClient.write(system.getVioProcessorMetrics(), system.getTimestamp(),"vios_processor");
-            influxClient.write(system.getVioMemoryMetrics(), system.getTimestamp(),"vios_memory");
-            influxClient.write(system.getVioNetworkLpars(), system.getTimestamp(),"vios_network_lpars");
-            influxClient.write(system.getVioNetworkVirtualAdapters(), system.getTimestamp(),"vios_network_virtual");
-            influxClient.write(system.getVioNetworkSharedAdapters(), system.getTimestamp(),"vios_network_shared");
-            influxClient.write(system.getVioNetworkGenericAdapters(), system.getTimestamp(),"vios_network_generic");
-            influxClient.write(system.getVioStorageLpars(), system.getTimestamp(),"vios_storage_lpars");
-            influxClient.write(system.getVioStorageFiberChannelAdapters(), system.getTimestamp(),"vios_storage_FC");
-            influxClient.write(system.getVioStorageVirtualAdapters(), system.getTimestamp(),"vios_storage_vFC");
-            influxClient.write(system.getVioStoragePhysicalAdapters(), system.getTimestamp(),"vios_storage_physical");
-            // Missing: vios_storage_SSP
-
-            system.logicalPartitions.forEach( (partition) -> {
-                partition.refresh();
-                influxClient.write(partition.getDetails(), partition.getTimestamp(),"lpar_details");
-                influxClient.write(partition.getMemoryMetrics(), partition.getTimestamp(),"lpar_memory");
-                influxClient.write(partition.getProcessorMetrics(), partition.getTimestamp(),"lpar_processor");
-                influxClient.write(partition.getSriovLogicalPorts(), partition.getTimestamp(),"lpar_net_sriov");
-                influxClient.write(partition.getVirtualEthernetAdapterMetrics(), partition.getTimestamp(),"lpar_net_virtual");
-                influxClient.write(partition.getVirtualGenericAdapterMetrics(), partition.getTimestamp(),"lpar_storage_virtual");
-                influxClient.write(partition.getVirtualFibreChannelAdapterMetrics(), partition.getTimestamp(),"lpar_storage_vFC");
-            });
-
+            system.process();
        });

    }


-    /*
-    private void writeTraceFile(String id, String json) {
-
-        String fileName = String.format("%s-%s.json", id, Instant.now().toString());
-        try {
-            log.debug("Writing trace file: " + fileName);
-            File traceFile = new File(traceDir, fileName);
-            BufferedWriter writer = new BufferedWriter(new FileWriter(traceFile));
-            writer.write(json);
-            writer.close();
-        } catch (IOException e) {
-            log.warn("writeTraceFile() - " + e.getMessage());
-        }
-    }
-    */

}
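Note: the interesting part of this hunk is that run() no longer enumerates measurement names. The InfluxClient is handed to each ManagedSystem, the per-metric write calls move down into the resources themselves (SystemEnergy further down shows the pattern), and every Measurement now carries its own timestamp. Per managed system the loop body reduces to roughly the following sketch; the surrounding iteration and error handling are unchanged and omitted here.

    system.refresh();   // fetch ProcessedMetrics (now several samples per request) and deserialize
    system.process();   // Resource.process() walks any samples not seen before and writes them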
@@ -15,14 +15,23 @@
 */
package biz.nellemann.hmci;

+import java.time.Instant;
import java.util.Map;


public class Measurement {

+    final Instant timestamp;
    final Map<String, String> tags;
    final Map<String, Object> fields;


    Measurement(Map<String, String> tags, Map<String, Object> fields) {
+        this.timestamp = Instant.now();
+        this.tags = tags;
+        this.fields = fields;
+    }
+
+    Measurement(Instant timestamp, Map<String, String> tags, Map<String, Object> fields) {
+        this.timestamp = timestamp;
        this.tags = tags;
        this.fields = fields;
    }
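The second constructor is what lets every data point carry the PCM sample's own timestamp instead of the collection time; the original constructor keeps the old behaviour by stamping the measurement with Instant.now(). A minimal usage sketch follows; the tag and field values are placeholders, and getTimestamp(sample) is the Resource helper introduced further down, so this only illustrates the two call shapes.

    // Sketch only - placeholder tags/fields, not values from a real HMC payload.
    Map<String, String> tags = new HashMap<>();
    tags.put("servername", "my-server");
    Map<String, Object> fields = new HashMap<>();
    fields.put("powerReading", 1234.0);

    Measurement stampedNow = new Measurement(tags, fields);                              // timestamp = Instant.now()
    Measurement stampedFromSample = new Measurement(getTimestamp(sample), tags, fields); // timestamp from the PCM sample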
@@ -2,6 +2,7 @@ package biz.nellemann.hmci;

import biz.nellemann.hmci.dto.json.ProcessedMetrics;
import biz.nellemann.hmci.dto.json.SystemUtil;
+import biz.nellemann.hmci.dto.json.UtilSample;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;

@@ -10,14 +11,20 @@ import org.slf4j.LoggerFactory;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
+import java.util.ArrayList;

-public class Resource {
+public abstract class Resource {

    private final static Logger log = LoggerFactory.getLogger(Resource.class);

    private final ObjectMapper objectMapper = new ObjectMapper();
+    private final ArrayList<String> sampleHistory = new ArrayList<>();

    protected SystemUtil metric;
+    protected final int maxNumberOfSamples = 120;
+    protected final int minNumberOfSamples = 5;
+    protected int currentNumberOfSamples = 15;



    Resource() {

@@ -35,6 +42,7 @@ public class Resource {
        try {
            ProcessedMetrics processedMetrics = objectMapper.readValue(json, ProcessedMetrics.class);
            metric = processedMetrics.systemUtil;
+            log.trace("deserialize() - samples: {}", metric.samples.size());
        } catch (Exception e) {
            log.error("deserialize() - error: {}", e.getMessage());
        }

@@ -61,4 +69,66 @@ public class Resource {
        return instant;
    }


+    Instant getTimestamp(int sampleNumber) {
+        Instant instant = Instant.now();
+
+        if (metric == null) {
+            return instant;
+        }
+
+        String timestamp = metric.getSample(sampleNumber).sampleInfo.timestamp;
+        try {
+            log.trace("getTimeStamp() - PMC Timestamp: {}", timestamp);
+            DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[XXX][X]");
+            instant = Instant.from(dateTimeFormatter.parse(timestamp));
+            log.trace("getTimestamp() - Instant: {}", instant.toString());
+        } catch(DateTimeParseException e) {
+            log.warn("getTimestamp() - parse error: {}", timestamp);
+        }
+
+        return instant;
+    }
+
+
+    public void process() {
+
+        if(metric == null) {
+            return;
+        }
+
+        int samples = metric.samples.size();
+        //log.info("process() - Samples to process: {}, Samples in History: {}, Current Counter: {}", samples, sampleHistory.size(), currentNumberOfSamples);
+        for(int i = 0; i<samples; i++) {
+            UtilSample sample = metric.getSample(i);
+            String timestamp = sample.getInfo().timestamp;
+
+            if(sampleHistory.contains(timestamp)) {
+                //log.info("process() - Sample \"{}\" already processed", timestamp);
+                continue; // Already processed
+            }
+
+            // Process
+            //log.info("process() - Sample: {}", timestamp);
+            process(i);
+
+            // Add to end of history
+            sampleHistory.add(timestamp);
+        }
+
+        // Remove old elements from history
+        for(int n = currentNumberOfSamples; n < sampleHistory.size(); n++) {
+            //log.info("process() - Removing element no. {} from sampleHistory: {}", n, sampleHistory.get(0));
+            sampleHistory.remove(0);
+        }
+
+        // Slowly decrease until we reach minSamples
+        if(currentNumberOfSamples > minNumberOfSamples) {
+            currentNumberOfSamples--;
+        }
+
+    }
+
+    public abstract void process(int sample);
+
}
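This is the heart of the multi-sample change: deserialize() may now return many utilSamples per request, process() walks them in the order returned, skips any whose timestamp is already in sampleHistory, hands each new one to the subclass via process(int), then trims the history and slowly lowers currentNumberOfSamples (15 at start, decreasing towards 5), which SystemEnergy's refresh(), for instance, passes on as NoOfSamples. Below is a stripped-down sketch of just that bookkeeping, with the HMC types replaced by plain timestamp strings; the class and variable names here are illustrative, not part of the code above.

    import java.util.ArrayList;
    import java.util.List;

    // Illustrative sketch of the de-duplication idea used by Resource.process().
    class SampleWindowSketch {
        private final List<String> history = new ArrayList<>();
        private int window = 15;                 // mirrors currentNumberOfSamples
        private static final int MIN_WINDOW = 5; // mirrors minNumberOfSamples

        void process(List<String> sampleTimestamps) {
            for (String ts : sampleTimestamps) {
                if (history.contains(ts)) {
                    continue;                    // sample already seen in an earlier poll
                }
                // ... build measurements for this sample and write them ...
                history.add(ts);
            }
            while (history.size() > window) {
                history.remove(0);               // forget the oldest remembered timestamps
            }
            if (window > MIN_WINDOW) {
                window--;                        // ask for fewer samples once caught up
            }
        }
    }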
@@ -29,9 +29,9 @@ public class RestClient {
    protected OkHttpClient httpClient;

    // OkHttpClient timeouts
-    private final static int CONNECT_TIMEOUT = 30;
-    private final static int WRITE_TIMEOUT = 30;
-    private final static int READ_TIMEOUT = 180;
+    private final static int CONNECT_TIMEOUT_SEC = 10;
+    private final static int WRITE_TIMEOUT_SEC = 30;
+    private final static int READ_TIMEOUT_SEC = 180;

    protected String authToken;
    protected final String baseUrl;

@@ -48,6 +48,21 @@ public class RestClient {
        } else {
            this.httpClient = getSafeOkHttpClient();
        }

+        /*
+        if(configuration.trace != null) {
+            try {
+                File traceDir = new File(configuration.trace);
+                traceDir.mkdirs();
+                if(traceDir.canWrite()) {
+                    Boolean doTrace = true;
+                } else {
+                    log.warn("ManagementConsole() - can't write to trace dir: " + traceDir.toString());
+                }
+            } catch (Exception e) {
+                log.error("ManagementConsole() - trace error: " + e.getMessage());
+            }
+        }*/
    }


@@ -150,7 +165,7 @@ public class RestClient {
     */
    public synchronized String getRequest(URL url) throws IOException {

-        log.trace("getRequest() - URL: {}", url.toString());
+        log.debug("getRequest() - URL: {}", url.toString());

        Request request = new Request.Builder()
            .url(url)

@@ -196,7 +211,7 @@ public class RestClient {
        String responseBody = null;
        try (Response responseRetry = httpClient.newCall(request).execute()) {
            if(responseRetry.isSuccessful()) {
-                responseBody = responseRetry.body().string();
+                responseBody = Objects.requireNonNull(responseRetry.body()).string();
            }
        }
        return responseBody;

@@ -205,10 +220,6 @@ public class RestClient {

    /**
     * Send a POST request with a payload (can be null) to the HMC
-     * @param url
-     * @param payload
-     * @return
-     * @throws IOException
     */
    public synchronized String postRequest(URL url, String payload) throws IOException {

@@ -276,9 +287,9 @@ public class RestClient {
            OkHttpClient.Builder builder = new OkHttpClient.Builder();
            builder.sslSocketFactory(sslSocketFactory, (X509TrustManager)trustAllCerts[0]);
            builder.hostnameVerifier((hostname, session) -> true);
-            builder.connectTimeout(CONNECT_TIMEOUT, TimeUnit.SECONDS);
-            builder.writeTimeout(WRITE_TIMEOUT, TimeUnit.SECONDS);
-            builder.readTimeout(READ_TIMEOUT, TimeUnit.SECONDS);
+            builder.connectTimeout(CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS);
+            builder.writeTimeout(WRITE_TIMEOUT_SEC, TimeUnit.SECONDS);
+            builder.readTimeout(READ_TIMEOUT_SEC, TimeUnit.SECONDS);

            return builder.build();
        } catch (KeyManagementException | NoSuchAlgorithmException e) {

@@ -293,11 +304,28 @@ public class RestClient {
     */
    private static OkHttpClient getSafeOkHttpClient() {
        OkHttpClient.Builder builder = new OkHttpClient.Builder();
-        builder.connectTimeout(CONNECT_TIMEOUT, TimeUnit.SECONDS);
-        builder.writeTimeout(WRITE_TIMEOUT, TimeUnit.SECONDS);
-        builder.readTimeout(READ_TIMEOUT, TimeUnit.SECONDS);
+        builder.connectTimeout(CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS);
+        builder.writeTimeout(WRITE_TIMEOUT_SEC, TimeUnit.SECONDS);
+        builder.readTimeout(READ_TIMEOUT_SEC, TimeUnit.SECONDS);
        return builder.build();
    }



+    /*
+    private void writeTraceFile(String id, String json) {
+
+        String fileName = String.format("%s-%s.json", id, Instant.now().toString());
+        try {
+            log.debug("Writing trace file: " + fileName);
+            File traceFile = new File(traceDir, fileName);
+            BufferedWriter writer = new BufferedWriter(new FileWriter(traceFile));
+            writer.write(json);
+            writer.close();
+        } catch (IOException e) {
+            log.warn("writeTraceFile() - " + e.getMessage());
+        }
+    }
+    */

}
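Two small hardening tweaks in this file are worth calling out: the timeout constants now carry their unit in the name (and the connect timeout drops from 30 to 10 seconds), and the retry path wraps responseRetry.body() in Objects.requireNonNull() so a missing body fails fast instead of being dereferenced blindly (OkHttp declares Response.body() as nullable). The requireNonNull call is roughly shorthand for the sketch below; it assumes okhttp3.ResponseBody and that java.util.Objects is imported in this class, which is not visible in these hunks.

    // Equivalent, more explicit form of the requireNonNull call above (sketch).
    ResponseBody body = responseRetry.body();   // may be null according to OkHttp's API
    if (body == null) {
        throw new NullPointerException();
    }
    responseBody = body.string();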
@@ -15,15 +15,17 @@ class SystemEnergy extends Resource {
    private final static Logger log = LoggerFactory.getLogger(SystemEnergy.class);

    private final RestClient restClient;
+    private final InfluxClient influxClient;
    private final ManagedSystem managedSystem;

    protected String id;
    protected String name;


-    public SystemEnergy(RestClient restClient, ManagedSystem managedSystem) {
+    public SystemEnergy(RestClient restClient, InfluxClient influxClient, ManagedSystem managedSystem) {
        log.debug("SystemEnergy()");
        this.restClient = restClient;
+        this.influxClient = influxClient;
        this.managedSystem = managedSystem;
    }

@@ -32,7 +34,7 @@ class SystemEnergy extends Resource {

        log.debug("refresh()");
        try {
-            String xml = restClient.getRequest(String.format("/rest/api/pcm/ManagedSystem/%s/ProcessedMetrics?Type=Energy&NoOfSamples=1", managedSystem.id));
+            String xml = restClient.getRequest(String.format("/rest/api/pcm/ManagedSystem/%s/ProcessedMetrics?Type=Energy&NoOfSamples=%d", managedSystem.id, currentNumberOfSamples));

            // Do not try to parse empty response
            if(xml == null || xml.length() <= 1) {

@@ -66,8 +68,17 @@ class SystemEnergy extends Resource {



+    @Override
+    public void process(int sample) {
+        if(metric != null) {
+            log.debug("process() - sample: {}", sample);
+            influxClient.write(getPowerMetrics(sample), "server_energy_power");
+            influxClient.write(getThermalMetrics(sample), "server_energy_thermal");
+        }
+    }
+
-    List<Measurement> getPowerMetrics() {
+    List<Measurement> getPowerMetrics(int sample) {

        List<Measurement> list = new ArrayList<>();
        try {

@@ -77,10 +88,10 @@ class SystemEnergy extends Resource {
            tagsMap.put("servername", managedSystem.name);
            log.trace("getPowerMetrics() - tags: {}", tagsMap);

-            fieldsMap.put("powerReading", metric.getSample().energyUtil.powerUtil.powerReading);
+            fieldsMap.put("powerReading", metric.getSample(sample).energyUtil.powerUtil.powerReading);
            log.trace("getPowerMetrics() - fields: {}", fieldsMap);

-            list.add(new Measurement(tagsMap, fieldsMap));
+            list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));
        } catch (Exception e) {
            log.warn("getPowerMetrics() - error: {}", e.getMessage());
        }

@@ -89,7 +100,7 @@ class SystemEnergy extends Resource {
    }


-    List<Measurement> getThermalMetrics() {
+    List<Measurement> getThermalMetrics(int sample) {

        List<Measurement> list = new ArrayList<>();
        try {

@@ -99,11 +110,11 @@ class SystemEnergy extends Resource {
            tagsMap.put("servername", managedSystem.name);
            log.trace("getThermalMetrics() - tags: {}", tagsMap);

-            metric.getSample().energyUtil.thermalUtil.cpuTemperatures.forEach((t) -> {
+            metric.getSample(sample).energyUtil.thermalUtil.cpuTemperatures.forEach((t) -> {
                fieldsMap.put("cpuTemperature_" + t.entityInstance, t.temperatureReading);
            });

-            metric.getSample().energyUtil.thermalUtil.inletTemperatures.forEach((t) -> {
+            metric.getSample(sample).energyUtil.thermalUtil.inletTemperatures.forEach((t) -> {
                fieldsMap.put("inletTemperature_" + t.entityInstance, t.temperatureReading);
            });

@@ -114,7 +125,7 @@ class SystemEnergy extends Resource {
            log.trace("getThermalMetrics() - fields: {}", fieldsMap);


-            list.add(new Measurement(tagsMap, fieldsMap));
+            list.add(new Measurement(getTimestamp(sample), tagsMap, fieldsMap));

        } catch (Exception e) {
            log.warn("getThermalMetrics() - error: {}", e.getMessage());
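SystemEnergy now owns its InfluxDB writes: the inherited process() walks the fetched samples and calls the process(int sample) override above, which writes one server_energy_power and one server_energy_thermal measurement per new sample. Since currentNumberOfSamples starts at 15, an early energy request would look roughly like the sketch below; the managed-system id is taken from the test fixtures and is purely illustrative.

    String url = String.format(
            "/rest/api/pcm/ManagedSystem/%s/ProcessedMetrics?Type=Energy&NoOfSamples=%d",
            "b597e4da-2aab-3f52-8616-341d62153559", 15);
    // -> /rest/api/pcm/ManagedSystem/b597e4da-2aab-3f52-8616-341d62153559/ProcessedMetrics?Type=Energy&NoOfSamples=15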
@@ -8,12 +8,12 @@ import java.util.List;
public final class SampleInfo {

    @JsonProperty("timeStamp")
-    public String timestamp ;
+    public String timestamp;
    public String getTimeStamp() {
        return timestamp;
    }

-    public Integer status ;
+    public Integer status;

    @JsonProperty("errorInfo")
    public List<ErrorInfo> errors;
@@ -17,6 +17,10 @@ public final class SystemUtil {
    @JsonProperty("utilSamples")
    public List<UtilSample> samples;

+    public UtilSample getSample(int n) {
+        return samples.size() > n ? samples.get(n) : new UtilSample();
+    }
+
    public UtilSample getSample() {
        return samples.size() > 0 ? samples.get(0) : new UtilSample();
    }
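getSample(int n) deliberately mirrors the existing no-argument getSample(): an index past the end of utilSamples yields an empty UtilSample rather than an IndexOutOfBoundsException, so callers such as getTimestamp(sample) degrade gracefully when the HMC returns fewer samples than requested. A small behaviour sketch, assuming a deserialized SystemUtil with two samples:

    UtilSample first = systemUtil.getSample(0);  // the same sample the old getSample() returned
    UtilSample sixth = systemUtil.getSample(5);  // out of range -> empty UtilSample, no exception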
@@ -13,7 +13,7 @@ public class HmcConfiguration {
    public String username;
    public String password;

-    public Integer refresh = 30;
+    public Integer refresh = 60;
    public Integer discover = 120;

    public String trace;
@@ -19,6 +19,9 @@ class LogicalPartitionTest extends Specification {
    @Shared
    private RestClient serviceClient

+    @Shared
+    private InfluxClient influxClient
+
    @Shared
    private ManagedSystem managedSystem

@@ -39,7 +42,7 @@ class LogicalPartitionTest extends Specification {
        MockResponses.prepareClientResponseForLogicalPartition(mockServer)
        serviceClient.login()

-        managedSystem = new ManagedSystem(serviceClient, String.format("%s/rest/api/uom/ManagementConsole/2c6b6620-e3e3-3294-aaf5-38e546ff672b/ManagedSystem/b597e4da-2aab-3f52-8616-341d62153559", serviceClient.baseUrl));
+        managedSystem = new ManagedSystem(serviceClient, influxClient, String.format("%s/rest/api/uom/ManagementConsole/2c6b6620-e3e3-3294-aaf5-38e546ff672b/ManagedSystem/b597e4da-2aab-3f52-8616-341d62153559", serviceClient.baseUrl));
        managedSystem.discover()

        logicalPartition = managedSystem.logicalPartitions.first()

@@ -77,6 +80,7 @@ class LogicalPartitionTest extends Specification {

        then:
        logicalPartition.metric != null
+        logicalPartition.metric.samples.size() == 6;
    }


@@ -85,9 +89,9 @@ class LogicalPartitionTest extends Specification {
        logicalPartition.deserialize(metricsFile.getText('UTF-8'))

        then:
-        logicalPartition.metric.getSample().lparsUtil.memory.logicalMem == 8192.000
-        logicalPartition.metric.getSample().lparsUtil.processor.utilizedProcUnits == 0.001
-        logicalPartition.metric.getSample().lparsUtil.network.virtualEthernetAdapters.first().receivedBytes == 276.467
+        logicalPartition.metric.getSample().lparsUtil.memory.logicalMem == 16384.000
+        logicalPartition.metric.getSample().lparsUtil.processor.utilizedProcUnits == 0.00793
+        logicalPartition.metric.getSample().lparsUtil.network.virtualEthernetAdapters.first().receivedBytes == 54.0
    }


@@ -95,15 +99,13 @@ class LogicalPartitionTest extends Specification {

        when:
        logicalPartition.deserialize(metricsFile.getText('UTF-8'))
-        List<Measurement> listOfMeasurements = logicalPartition.getDetails()
+        List<Measurement> listOfMeasurements = logicalPartition.getDetails(0)

        then:
        listOfMeasurements.size() == 1
        listOfMeasurements.first().fields['affinityScore'] == 100.0
-        listOfMeasurements.first().fields['osType'] == 'Linux'
-        listOfMeasurements.first().fields['type'] == 'AIX/Linux'
-        listOfMeasurements.first().tags['lparname'] == 'rhel8-ocp-helper'
+        listOfMeasurements.first().fields['osType'] == 'IBM i'
+        listOfMeasurements.first().fields['type'] == 'IBMi'

    }


@@ -111,11 +113,11 @@ class LogicalPartitionTest extends Specification {

        when:
        logicalPartition.deserialize(metricsFile.getText('UTF-8'))
-        List<Measurement> listOfMeasurements = logicalPartition.getMemoryMetrics()
+        List<Measurement> listOfMeasurements = logicalPartition.getMemoryMetrics(0)

        then:
        listOfMeasurements.size() == 1
-        listOfMeasurements.first().fields['logicalMem'] == 8192.000
+        listOfMeasurements.first().fields['logicalMem'] == 16384.0
        listOfMeasurements.first().tags['lparname'] == 'rhel8-ocp-helper'

    }

@@ -125,11 +127,11 @@ class LogicalPartitionTest extends Specification {

        when:
        logicalPartition.deserialize(metricsFile.getText('UTF-8'))
-        List<Measurement> listOfMeasurements = logicalPartition.getProcessorMetrics()
+        List<Measurement> listOfMeasurements = logicalPartition.getProcessorMetrics(0)

        then:
        listOfMeasurements.size() == 1
-        listOfMeasurements.first().fields['utilizedProcUnits'] == 0.001
+        listOfMeasurements.first().fields['utilizedProcUnits'] == 0.00793
        listOfMeasurements.first().tags['lparname'] == 'rhel8-ocp-helper'

    }

@@ -139,12 +141,12 @@ class LogicalPartitionTest extends Specification {

        when:
        logicalPartition.deserialize(metricsFile.getText('UTF-8'))
-        List<Measurement> listOfMeasurements = logicalPartition.getVirtualEthernetAdapterMetrics()
+        List<Measurement> listOfMeasurements = logicalPartition.getVirtualEthernetAdapterMetrics(0)

        then:
        listOfMeasurements.size() == 1
-        listOfMeasurements.first().fields['receivedBytes'] == 276.467
-        listOfMeasurements.first().tags['location'] == 'U9009.42A.21F64EV-V13-C32'
+        listOfMeasurements.first().fields['receivedBytes'] == 54.0
+        listOfMeasurements.first().tags['location'] == 'U9009.42A.21F64EV-V11-C7'
    }


@@ -152,25 +154,13 @@ class LogicalPartitionTest extends Specification {

        when:
        logicalPartition.deserialize(metricsFile.getText('UTF-8'))
-        List<Measurement> listOfMeasurements = logicalPartition.getVirtualFibreChannelAdapterMetrics()
+        List<Measurement> listOfMeasurements = logicalPartition.getVirtualFibreChannelAdapterMetrics(0)

        then:
-        listOfMeasurements.size() == 4
-        listOfMeasurements.first().fields['writeBytes'] == 6690.133
+        listOfMeasurements.size() == 2
+        listOfMeasurements.first().fields['writeBytes'] == 4454.4
        listOfMeasurements.first().tags['viosId'] == '1'

    }


-    void "test getVirtualGenericAdapterMetrics"() {
-
-        when:
-        logicalPartition.deserialize(metricsFile.getText('UTF-8'))
-        List<Measurement> listOfMeasurements = logicalPartition.getVirtualGenericAdapterMetrics()
-
-        then:
-        listOfMeasurements.size() == 1
-        listOfMeasurements.first().fields['readBytes'] == 0.0
-    }
-
}
@@ -17,6 +17,9 @@ class ManagedSystemTest extends Specification {
    @Shared
    private RestClient serviceClient

+    @Shared
+    private InfluxClient influxClient
+
    @Shared
    private ManagedSystem managedSystem

@@ -33,7 +36,7 @@ class ManagedSystemTest extends Specification {
        MockResponses.prepareClientResponseForVirtualIOServer(mockServer)
        MockResponses.prepareClientResponseForLogicalPartition(mockServer)
        serviceClient.login()
-        managedSystem = new ManagedSystem(serviceClient, String.format("%s/rest/api/uom/ManagementConsole/2c6b6620-e3e3-3294-aaf5-38e546ff672b/ManagedSystem/b597e4da-2aab-3f52-8616-341d62153559", serviceClient.baseUrl));
+        managedSystem = new ManagedSystem(serviceClient, influxClient, String.format("%s/rest/api/uom/ManagementConsole/2c6b6620-e3e3-3294-aaf5-38e546ff672b/ManagedSystem/b597e4da-2aab-3f52-8616-341d62153559", serviceClient.baseUrl));
        managedSystem.discover()
        metricsFile = new File(getClass().getResource('/2-managed-system-perf-data2.json').toURI())
    }

@@ -55,7 +58,7 @@ class ManagedSystemTest extends Specification {

        when:
        managedSystem.deserialize(metricsFile.getText('UTF-8'))
-        List<Measurement> listOfMeasurements = managedSystem.getDetails()
+        List<Measurement> listOfMeasurements = managedSystem.getDetails(0)

        then:
        listOfMeasurements.size() == 1

@@ -68,7 +71,7 @@ class ManagedSystemTest extends Specification {

        when:
        managedSystem.deserialize(metricsFile.getText('UTF-8'))
-        List<Measurement> listOfMeasurements = managedSystem.getMemoryMetrics()
+        List<Measurement> listOfMeasurements = managedSystem.getMemoryMetrics(0)

        then:
        listOfMeasurements.size() == 1

@@ -79,7 +82,7 @@ class ManagedSystemTest extends Specification {

        when:
        managedSystem.deserialize(metricsFile.getText('UTF-8'))
-        List<Measurement> listOfMeasurements = managedSystem.getProcessorMetrics()
+        List<Measurement> listOfMeasurements = managedSystem.getProcessorMetrics(0)

        then:
        listOfMeasurements.size() == 1

@@ -90,7 +93,7 @@ class ManagedSystemTest extends Specification {

        when:
        managedSystem.deserialize(metricsFile.getText('UTF-8'))
-        List<Measurement> listOfMeasurements = managedSystem.getSharedProcessorPools()
+        List<Measurement> listOfMeasurements = managedSystem.getSharedProcessorPools(0)

        then:
        listOfMeasurements.size() == 4

@@ -100,7 +103,7 @@ class ManagedSystemTest extends Specification {
    void "test getPhysicalProcessorPool"() {
        when:
        managedSystem.deserialize(metricsFile.getText('UTF-8'))
-        List<Measurement> listOfMeasurements = managedSystem.getPhysicalProcessorPool()
+        List<Measurement> listOfMeasurements = managedSystem.getPhysicalProcessorPool(0)

        then:
        listOfMeasurements.size() == 1
File diff suppressed because it is too large
File diff suppressed because it is too large