Cleanup.
parent 6ce3e0252d
commit 1a77edfe81
@@ -1,6 +1,6 @@
-version=0.0.11
+version=0.0.12
 pf4jVersion=3.6.0
 slf4jVersion=1.7.32
-camelVersion=3.11.1
+camelVersion=3.11.2
 picocliVersion=4.6.1
 oshiVersion=5.8.2
@@ -77,7 +77,7 @@ public class AixProcessorExtension implements MetricExtension {
         HashMap<String, String> tagsMap = null;
         HashMap<String, Object> fieldsMap = null;

-        try (InputStream buf = PluginHelper.executeCommand("lparstat 5 1")) {
+        try (InputStream buf = PluginHelper.executeCommand("lparstat 3 1")) {
             AixProcessorStat processorStat = processCommandOutput(buf);
             tagsMap = processorStat.getTags();
             fieldsMap = processorStat.getFields();
@@ -93,5 +93,4 @@ public class AixProcessorExtension implements MetricExtension {
         return new AixProcessorStat(input);
     }

-
 }
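Note: lparstat takes an interval in seconds followed by a report count, so the change above switches from one 5-second sample to one 3-second sample per collection cycle. A rough sketch of consuming that output, assuming only what the hunk shows (PluginHelper.executeCommand returning the command's stdout as an InputStream); the class name and the dump itself are illustrative, not part of the plugin:

    import java.io.BufferedReader;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;

    class LparstatSample {
        // Illustrative only: print the raw report that AixProcessorExtension parses.
        static void dump() throws Exception {
            // "lparstat 3 1" = sample over a 3-second interval, emit 1 report
            try (InputStream buf = PluginHelper.executeCommand("lparstat 3 1");
                 BufferedReader reader = new BufferedReader(
                         new InputStreamReader(buf, StandardCharsets.UTF_8))) {
                reader.lines().forEach(System.out::println);
            }
        }
    }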
@@ -17,13 +17,13 @@ public class AixProcessorStat {
     private static final Logger log = LoggerFactory.getLogger(AixProcessorStat.class);

     // System configuration: type=Shared mode=Uncapped smt=8 lcpu=8 mem=4096MB psize=19 ent=0.50
-    private final Pattern patternAixShared = Pattern.compile("^System configuration: type=(\\S+) mode=(\\S+) smt=(\\d+) lcpu=(\\d+) mem=(\\d+)MB psize=(\\d+) ent=(\\d+\\.?\\d*)");
+    private static final Pattern patternAixShared = Pattern.compile("^System configuration: type=(\\S+) mode=(\\S+) smt=(\\d+) lcpu=(\\d+) mem=(\\d+)MB psize=(\\d+) ent=(\\d+\\.?\\d*)");

     // System configuration: type=Dedicated mode=Donating smt=8 lcpu=16 mem=4096MB
-    private final Pattern patternAixDedicated = Pattern.compile("^System configuration: type=(\\S+) mode=(\\S+) smt=(\\d+) lcpu=(\\d+) mem=(\\d+)MB");
+    private static final Pattern patternAixDedicated = Pattern.compile("^System configuration: type=(\\S+) mode=(\\S+) smt=(\\d+) lcpu=(\\d+) mem=(\\d+)MB");

     // type=Shared mode=Uncapped smt=8 lcpu=4 mem=4101120 kB cpus=24 ent=4.00
-    private final Pattern patternLinux = Pattern.compile("^type=(\\S+) mode=(\\S+) smt=(\\d+) lcpu=(\\d+) mem=(\\d+) kB cpus=(\\d+) ent=(\\d+\\.?\\d*)");
+    private static final Pattern patternLinux = Pattern.compile("^type=(\\S+) mode=(\\S+) smt=(\\d+) lcpu=(\\d+) mem=(\\d+) kB cpus=(\\d+) ent=(\\d+\\.?\\d*)");


     private String type;
@@ -144,18 +144,18 @@ public class AixProcessorStat {
     }

     public HashMap<String, Object> getFields() {
-        HashMap<String, Object> fields = new HashMap<>();
-        fields.put("lcpu", lcpu);
-        fields.put("ent", ent);
-        fields.put("user", user);
-        fields.put("sys", sys);
-        fields.put("idle", idle);
-        fields.put("wait", wait);
-        fields.put("physc", physc);
-        fields.put("entc", entc);
-        fields.put("lbusy", lbusy);
-        fields.put("mode", mode);
-        fields.put("type", type);
-        return fields;
+        return new HashMap<String, Object>() {{
+            put("lcpu", lcpu);
+            put("ent", ent);
+            put("user", user);
+            put("sys", sys);
+            put("idle", idle);
+            put("wait", wait);
+            put("physc", physc);
+            put("entc", entc);
+            put("lbusy", lbusy);
+            put("mode", mode);
+            put("type", type);
+        }};
     }
 }
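Note: the rewritten getFields() above (and the same change in LinuxNetstatParser and LinuxNetworkSockStat further down) uses double-brace initialization: an anonymous HashMap subclass whose instance initializer block performs the put() calls. A minimal standalone sketch of the pattern, with illustrative field names rather than the plugin's:

    import java.util.HashMap;
    import java.util.Map;

    class DoubleBraceDemo {
        private final long lcpu = 8;      // stand-ins for the parsed lparstat values
        private final double ent = 0.5;

        Map<String, Object> getFields() {
            // Outer {} = anonymous subclass of HashMap, inner {} = instance initializer block.
            return new HashMap<String, Object>() {{
                put("lcpu", lcpu);
                put("ent", ent);
            }};
        }

        public static void main(String[] args) {
            System.out.println(new DoubleBraceDemo().getFields());  // e.g. {ent=0.5, lcpu=8}
        }
    }

The result is equivalent to the removed put() sequence; the trade-off is an extra anonymous class per call site that also holds a reference to the enclosing instance.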
@@ -72,7 +72,7 @@ public class BaseDiskExtension implements MetricExtension {
         for(HWDiskStore store : diskStores) {

             String name = store.getName();
-            if (name.matches("hdisk[0-9]+") || name.matches("/dev/x?[sv]d[a-z]") || name.matches("/dev/nvme[0-9]n[0-9]")) {
+            if (name.matches("h?disk[0-9]+") || name.matches("/dev/x?[sv]d[a-z]") || name.matches("/dev/nvme[0-9]n[0-9]")) {

                 HashMap<String, String> tagsMap = new HashMap<String, String>() {{
                     put("name", name);
@@ -91,7 +91,6 @@ public class BaseDiskExtension implements MetricExtension {
         }

         return new MetricResult(name, measurementList);
-
     }

 }
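Note: the loosened h?disk[0-9]+ pattern accepts disk names with or without the leading "h"; the other two alternatives are unchanged. A quick plain-java.util.regex check of what the filter now admits (no plugin code involved):

    class DiskNameFilterCheck {
        static boolean accepted(String name) {
            return name.matches("h?disk[0-9]+")
                    || name.matches("/dev/x?[sv]d[a-z]")
                    || name.matches("/dev/nvme[0-9]n[0-9]");
        }

        public static void main(String[] args) {
            System.out.println(accepted("hdisk0"));       // true
            System.out.println(accepted("disk3"));        // true, newly matched by h?disk[0-9]+
            System.out.println(accepted("/dev/sda"));     // true
            System.out.println(accepted("/dev/xvdb"));    // true
            System.out.println(accepted("/dev/nvme0n1")); // true
            System.out.println(accepted("loop0"));        // false
        }
    }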
@@ -93,7 +93,7 @@ public class BaseProcessExtension implements MetricExtension {
             if(!includeList.contains(name)) {
                 continue;
             }
-            log.debug("pid: " + p.getProcessID() + ", name: " + name + ", virt: " + p.getVirtualSize() + " rss: " + p.getResidentSetSize());
+            log.info("pid: " + p.getProcessID() + ", name: " + name + ", virt: " + p.getVirtualSize() + " rss: " + p.getResidentSetSize());

             HashMap<String, String> tagsMap = new HashMap<String, String>() {{
                 put("pid", String.valueOf(p.getProcessID()));
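Note: the logger here is SLF4J (slf4jVersion=1.7.32 above), so the same message could also be written with parameterized placeholders, which skips the string formatting whenever the level is disabled. This is only a hedged alternative, not what the commit does:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class ProcessLogDemo {
        private static final Logger log = LoggerFactory.getLogger(ProcessLogDemo.class);

        public static void main(String[] args) {
            int pid = 1234;                     // stand-ins for the OSProcess values
            String name = "java";
            long virt = 2_147_483_648L, rss = 512_000_000L;
            // Placeholder form: the message is only assembled when INFO is enabled.
            log.info("pid: {}, name: {}, virt: {} rss: {}", pid, name, virt, rss);
        }
    }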
@@ -74,8 +74,8 @@ public class LinuxNetstatExtension implements MetricExtension {
     @Override
     public MetricResult getMetrics() throws Exception {

-        HashMap<String, String> tagsMap = null;
-        HashMap<String, Object> fieldsMap = null;
+        HashMap<String, String> tagsMap;
+        HashMap<String, Object> fieldsMap;

         try (InputStream inputStream = PluginHelper.executeCommand("netstat -s")) {
             LinuxNetstatParser parser = processCommandOutput(inputStream);
@@ -83,7 +83,6 @@ public class LinuxNetstatExtension implements MetricExtension {
             fieldsMap = parser.getFields();
         }

-        log.debug(fieldsMap.toString());
         return new MetricResult(name, new Measurement(tagsMap, fieldsMap));
     }

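Note: dropping the "= null" initializers in getMetrics() compiles because of definite assignment: with no catch clause on the try-with-resources, the only way execution reaches the code after the block is via the path that assigned both maps, and any exception simply propagates out of the method. A small self-contained sketch of the same shape, with a ByteArrayInputStream standing in for the command output:

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.util.HashMap;

    class DefiniteAssignmentDemo {
        static HashMap<String, Object> read() throws Exception {
            HashMap<String, Object> fieldsMap;                   // no "= null" needed
            try (InputStream in = new ByteArrayInputStream(new byte[]{42})) {
                fieldsMap = new HashMap<>();
                fieldsMap.put("firstByte", in.read());
            }
            return fieldsMap;   // compiles: definitely assigned on every path reaching here
        }

        public static void main(String[] args) throws Exception {
            System.out.println(read());                          // {firstByte=42}
        }
    }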
@@ -144,20 +144,17 @@ public class LinuxNetstatParser {
     }

     public HashMap<String, Object> getFields() {
-        HashMap<String, Object> fields = new HashMap<>();
-        fields.put("ip_forwarded", ipForwarded);
-        fields.put("ip_received", ipTotalPacketsReceived);
-        fields.put("ip_dropped", ipOutgoingPacketsDropped);
-        fields.put("ip_discarded", ipIncomingPacketsDiscarded);
-
-        fields.put("tcp_connections", tcpConnectionsEstablished);
-        fields.put("tcp_pkts_recv", tcpSegmentsReceived);
-        fields.put("tcp_pkts_sent", tcpSegmentsSent);
-
-        fields.put("udp_pkts_recv", udpPacketsReceived);
-        fields.put("udp_pkts_sent", udpPacketsSent);
-
-        return fields;
+        return new HashMap<String, Object>() {{
+            put("ip_forwarded", ipForwarded);
+            put("ip_received", ipTotalPacketsReceived);
+            put("ip_dropped", ipOutgoingPacketsDropped);
+            put("ip_discarded", ipIncomingPacketsDiscarded);
+            put("tcp_connections", tcpConnectionsEstablished);
+            put("tcp_pkts_recv", tcpSegmentsReceived);
+            put("tcp_pkts_sent", tcpSegmentsSent);
+            put("udp_pkts_recv", udpPacketsReceived);
+            put("udp_pkts_sent", udpPacketsSent);
+        }};
     }

     private Long getFirstLong(String line) {
@@ -81,16 +81,16 @@ public class LinuxNetworkSockStat {


     public HashMap<String, Object> getFields() {
-        HashMap<String, Object> fields = new HashMap<>();
-        fields.put("sockets", sockets);
-        fields.put("tcp_inuse", tcp_inuse);
-        fields.put("tcp_alloc", tcp_alloc);
-        fields.put("tcp_orphan", tcp_orphan);
-        fields.put("tcp_mem", tcp_mem);
-        fields.put("tcp_tw", tcp_tw);
-        fields.put("udp_inuse", udp_inuse);
-        fields.put("udp_mem", udp_mem);
-        return fields;
+        return new HashMap<String, Object>() {{
+            put("sockets", sockets);
+            put("tcp_inuse", tcp_inuse);
+            put("tcp_alloc", tcp_alloc);
+            put("tcp_orphan", tcp_orphan);
+            put("tcp_mem", tcp_mem);
+            put("tcp_tw", tcp_tw);
+            put("udp_inuse", udp_inuse);
+            put("udp_mem", udp_mem);
+        }};
     }

 }
@@ -72,9 +72,7 @@ public class LinuxSockstatExtension implements MetricExtension {
         HashMap<String, String> tagsMap = sockStat.getTags();
         HashMap<String, Object> fieldsMap = sockStat.getFields();

-        log.debug(fieldsMap.toString());
         return new MetricResult(name, new Measurement(tagsMap, fieldsMap));
-
     }

     protected LinuxNetworkSockStat processSockOutput(List<String> inputLines) {
@@ -47,7 +47,7 @@ public class ServerRouteBuilder extends RouteBuilder {
             .process(new MetricResultToPointProcessor(dbname))
             .toF("influxdb://ref.myInfluxConnection?batch=true") //&retentionPolicy=autogen
             .doCatch(Exception.class)
-                .log("Error storing metric to InfluxDB: ${exception}")
+                .log(LoggingLevel.WARN, "Error: ${exception}")
             .end();

     }
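Note: in the route above, the one-argument .log(String) DSL logs at INFO by default, so adding LoggingLevel.WARN both shortens the message and raises its level. A minimal, self-contained Camel route with the same doCatch/log shape; the endpoints and the simulated failure are placeholders, not the plugin's actual InfluxDB route:

    import org.apache.camel.LoggingLevel;
    import org.apache.camel.builder.RouteBuilder;
    import org.apache.camel.impl.DefaultCamelContext;

    public class WarnOnFailureRoute extends RouteBuilder {

        @Override
        public void configure() {
            from("timer:metrics?repeatCount=1")        // placeholder trigger
                .doTry()
                    .to("direct:store")                // placeholder for the InfluxDB producer
                .doCatch(Exception.class)
                    // Same idea as the commit: log the failure at WARN instead of the default INFO.
                    .log(LoggingLevel.WARN, "Error: ${exception}")
                .end();

            from("direct:store")
                .throwException(new IllegalStateException("simulated storage failure"));
        }

        public static void main(String[] args) throws Exception {
            DefaultCamelContext context = new DefaultCamelContext();
            context.addRoutes(new WarnOnFailureRoute());
            context.start();
            Thread.sleep(3000);                        // let the timer route run once
            context.stop();
        }
    }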