Merged in network (pull request #1)

Network
Mark Nellemann 2021-05-28 07:15:55 +00:00
commit f07f399b94
27 changed files with 517 additions and 49 deletions

View file

@@ -15,6 +15,7 @@ subprojects {
dependencies {
testImplementation 'org.spockframework:spock-core:2.0-groovy-3.0'
testImplementation "org.slf4j:slf4j-api:${slf4jVersion}"
testImplementation "org.slf4j:slf4j-simple:${slf4jVersion}"
implementation "org.slf4j:slf4j-api:${slf4jVersion}"
implementation "org.slf4j:slf4j-simple:${slf4jVersion}"

View file

@@ -44,6 +44,26 @@ tasks.named('test') {
useJUnitPlatform()
}
jar {
manifest {
attributes(
'Created-By' : "Gradle ${gradle.gradleVersion}",
'Build-OS' : "${System.properties['os.name']} ${System.properties['os.arch']} ${System.properties['os.version']}",
'Build-Jdk' : "${System.properties['java.version']} (${System.properties['java.vendor']} ${System.properties['java.vm.version']})",
'Build-User' : System.properties['user.name'],
'Build-Version' : versioning.info.tag ?: (versioning.info.branch + "-" + versioning.info.build),
'Build-Revision' : versioning.info.commit,
'Build-Timestamp': new Date().format("yyyy-MM-dd'T'HH:mm:ss.SSSZ").toString(),
)
}
}
shadowJar {
archiveBaseName.set(projectName)
archiveClassifier.set('')
archiveVersion.set('')
mergeServiceFiles() // Tell plugin to merge duplicate service files
}
apply plugin: 'nebula.ospackage'
ospackage {
@@ -86,24 +106,3 @@ task buildRpmAix(type: Rpm) {
packageName = "${projectName}-AIX"
os = Os.AIX
}
jar {
manifest {
attributes(
'Created-By' : "Gradle ${gradle.gradleVersion}",
'Build-OS' : "${System.properties['os.name']} ${System.properties['os.arch']} ${System.properties['os.version']}",
'Build-Jdk' : "${System.properties['java.version']} (${System.properties['java.vendor']} ${System.properties['java.vm.version']})",
'Build-User' : System.properties['user.name'],
'Build-Version' : versioning.info.tag ?: (versioning.info.branch + "-" + versioning.info.build),
'Build-Revision' : versioning.info.commit,
'Build-Timestamp': new Date().format("yyyy-MM-dd'T'HH:mm:ss.SSSZ").toString(),
)
}
}
shadowJar {
archiveBaseName.set(projectName)
archiveClassifier.set('')
archiveVersion.set('')
mergeServiceFiles() // Tell plugin to merge duplicate service files
}

View file

@@ -56,6 +56,8 @@ public class ClientRouteBuilder extends RouteBuilder {
.stop()
.otherwise()
.to("seda:metrics");
} else {
log.info(">>> Skipping extension: " + ext.getDescription());
}
}

View file

@@ -15,13 +15,14 @@ subprojects {
testImplementation project(':shared')
implementation project(':shared')
implementation "org.slf4j:slf4j-api:${slf4jVersion}"
implementation(group: 'org.pf4j', name: 'pf4j', version: "${pf4jVersion}") {
exclude(group: "org.slf4j")
}
annotationProcessor(group: 'org.pf4j', name: 'pf4j', version: "${pf4jVersion}")
implementation "org.slf4j:slf4j-api:${slf4jVersion}"
}
/*
jar {
manifest {
attributes(
@@ -32,14 +33,43 @@ subprojects {
'Plugin-Description': "${pluginDescription}"
)
}
}*/
task uberJar(type: Jar) {
from sourceSets.main.output
dependsOn configurations.runtimeClasspath
from {
configurations.runtimeClasspath.findAll { it.name.endsWith('jar') }.collect {
zipTree(it).matching {
exclude 'org/pf4j/**'
exclude 'org/slf4j/**'
exclude 'sysmon/shared/**'
exclude 'META-INF/AL2.0'
exclude 'META-INF/LGPL2.1'
exclude 'META-INF/LICENSE'
}
}
}
manifest {
attributes(
'Plugin-Id' : "${pluginId}",
'Plugin-Class' : "${pluginClass}",
'Plugin-Version' : "${pluginVersion}",
'Plugin-Provider' : "${pluginProvider}",
'Plugin-Description': "${pluginDescription}"
)
}
}
task copyJar(type: Copy, dependsOn:[jar]) {
from jar // here it automatically reads jar file produced from jar task
task copyJar(type: Copy, dependsOn: ['uberJar']) {
from jar
into "../output/"
}
tasks.build.dependsOn {
uberJar
copyJar
}

View file

@@ -0,0 +1,6 @@
dependencies {
implementation(group: 'com.github.oshi', name: 'oshi-core', version: "5.7.3") {
exclude(group: "org.slf4j")
}
}

View file

@@ -3,6 +3,8 @@ package sysmon.plugins.os_aix;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.hardware.HardwareAbstractionLayer;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
@@ -11,11 +13,15 @@ import sysmon.shared.PluginHelper;
import java.util.List;
import java.util.Map;
@Extension
public class AixDiskExtension implements MetricExtension {
private static final Logger log = LoggerFactory.getLogger(AixDiskExtension.class);
private final SystemInfo systemInfo;
private final HardwareAbstractionLayer hardwareAbstractionLayer;
@Override
public boolean isSupported() {
@@ -32,6 +38,14 @@ public class AixDiskExtension implements MetricExtension {
return true;
}
public AixDiskExtension() {
systemInfo = new SystemInfo();
hardwareAbstractionLayer = systemInfo.getHardware();
log.warn(systemInfo.getOperatingSystem().toString());
}
@Override
public String getName() {
return "aix-disk";
@@ -50,6 +64,15 @@ public class AixDiskExtension implements MetricExtension {
@Override
public MetricResult getMetrics() {
long writeBytes = hardwareAbstractionLayer.getDiskStores().get(0).getWriteBytes();
log.warn(String.format("Disk 0 - Write Bytes: %d", writeBytes));
long readBytes = hardwareAbstractionLayer.getDiskStores().get(0).getReadBytes();
log.warn(String.format("Disk 0 - Read Bytes: %d", readBytes));
long memAvailable = hardwareAbstractionLayer.getMemory().getAvailable();
log.warn(String.format("Memory - Available: %d", memAvailable));
List<String> iostat = PluginHelper.executeCommand("iostat -d 1 1");
AixDiskStat diskStat = processCommandOutput(iostat);

View file

@@ -0,0 +1,7 @@
# IBM i Plugin
## Processor Extension
## Memory Extension
## Disk Extension

View file

@@ -0,0 +1,7 @@
plugins {
}
dependencies {
// https://mvnrepository.com/artifact/net.sf.jt400/jt400
implementation group: 'net.sf.jt400', name: 'jt400', version: '10.6'
}

View file

@@ -0,0 +1,6 @@
pluginId=sysmon-ibmi
pluginClass=sysmon.plugins.os_ibmi.IbmIPlugin
pluginVersion=0.0.1
pluginProvider=System Monitor
pluginDependencies=
pluginDescription=Collects IBM-i OS metrics.

View file

@@ -0,0 +1,18 @@
package sysmon.plugins.os_ibmi;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.pf4j.Plugin;
import org.pf4j.PluginWrapper;
public class IbmIPlugin extends Plugin {
private static final Logger log = LoggerFactory.getLogger(IbmIPlugin.class);
public IbmIPlugin(PluginWrapper wrapper) {
super(wrapper);
}
}
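
The IBM i plugin is only a pf4j stub so far; its README lists Processor, Memory and Disk extensions as planned work. For illustration, a processor extension built on the jt400 dependency might look roughly like the sketch below. The class name, the local-system connection and the single CPU field are assumptions, not part of this commit.

package sysmon.plugins.os_ibmi;

import com.ibm.as400.access.AS400;
import com.ibm.as400.access.SystemStatus;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;

import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch, not part of this commit.
@Extension
public class IbmIProcessorExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(IbmIProcessorExtension.class);

    @Override
    public boolean isSupported() {
        // Assumption: the IBM i JVM reports os.name as "OS/400"; verify on a real system.
        return System.getProperty("os.name").toLowerCase().contains("os/400");
    }

    @Override
    public String getName() {
        return "ibmi-processor";
    }

    @Override
    public String getProvides() {
        return "processor";
    }

    @Override
    public String getDescription() {
        return "IBM i Processor Metrics";
    }

    @Override
    public MetricResult getMetrics() {
        Map<String, String> tags = new HashMap<>();
        Map<String, Object> fields = new HashMap<>();
        try {
            // The no-arg AS400 constructor targets the local system with the current user
            // when running on IBM i; remote hosts would need configurable credentials.
            AS400 system = new AS400();
            SystemStatus status = new SystemStatus(system);
            fields.put("cpu_used_pct", status.getPercentProcessingUnitUsed());
            system.disconnectAllServices();
        } catch (Exception e) {
            log.error(e.getMessage());
        }
        return new MetricResult("processor", new Measurement(tags, fields));
    }
}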

View file

@@ -0,0 +1,2 @@
plugins {
}

View file

@@ -49,28 +49,28 @@ public class LinuxDiskProcLine {
== =====================================
*/
private Long readsCompleted = 0L; // successfully
//private Long readsMerged = 0L;
private Long sectorsRead = 0L; // 512 bytes per sector
private Long timeSpentReading = 0L; // ms
private Long writesCompleted = 0L; // successfully
//private Long writesMerged = 0L;
private Long sectorsWritten = 0L; // 512 bytes per sector
private Long timeSpentWriting = 0L; // ms
//private Long ioInProgress = 0L;
private Long timeSpentOnIo = 0L; // ms
//private Long timeSpentOnIoWeighted = 0L;
private long readsCompleted; // successfully
//private long readsMerged;
private long sectorsRead; // 512 bytes per sector
private long timeSpentReading; // ms
private long writesCompleted; // successfully
//private long writesMerged;
private long sectorsWritten; // 512 bytes per sector
private long timeSpentWriting; // ms
//private long ioInProgress;
private long timeSpentOnIo; // ms
//private long timeSpentOnIoWeighted;
//private Long discardsCompleted = 0L; // successfully
//private Long discardsMerged = 0L;
//private Long sectorsDiscarded = 0L; // 512 bytes per sector
//private Long timeSpentDiscarding = 0L; // ms
//private long discardsCompleted; // successfully
//private long discardsMerged;
//private long sectorsDiscarded; // 512 bytes per sector
//private long timeSpentDiscarding; // ms
//private Long flushRequestsCompleted = 0L;
//private Long timeSpentFlushing = 0L; // ms
//private long flushRequestsCompleted;
//private long timeSpentFlushing; // ms
LinuxDiskProcLine(List<String> procLines) {
public LinuxDiskProcLine(List<String> procLines) {
for(String procLine : procLines) {

View file

@@ -13,7 +13,7 @@ public class LinuxMemoryStat {
Mem: 16069172 5896832 4597860 639780 5574480 9192992
Swap: 3985404 0 3985404
*/
private final Pattern pattern = Pattern.compile("^Mem:\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)");
private static final Pattern pattern = Pattern.compile("^Mem:\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)");
private long total;
private long used;

View file

@@ -0,0 +1,105 @@
package sysmon.plugins.os_linux;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class LinuxNetworkDevStat {
private static final Logger log = LoggerFactory.getLogger(LinuxNetworkDevStat.class);
private static final Pattern pattern1 = Pattern.compile("^\\s+([a-z]{2,}[0-9]+):.*");
private static final Pattern pattern2 = Pattern.compile("^\\s+([a-z]{2,}[0-9]+):\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+(\\d+)");
private long rxBytes;
private long rxPackets;
private long rxErrs;
private long txBytes;
private long txPackets;
private long txErrs;
/*
Inter-| Receive | Transmit
face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed
env2: 657010764 483686 0 0 0 0 0 0 55416850 431020 0 0 0 0 0 0
env3: 6900272 41836 0 0 0 0 0 0 7667444 41849 0 0 0 0 0 0
lo: 3098805 14393 0 0 0 0 0 0 3098805 14393 0 0 0 0 0 0
*/
public LinuxNetworkDevStat(List<String> procLines) {
Matcher matcher1;
Matcher matcher2;
for(String procLine : procLines) {
matcher1 = pattern1.matcher(procLine);
if(matcher1.matches()) {
if(matcher1.group(1).equals("lo")) {
continue;
}
matcher2 = pattern2.matcher(procLine);
if(matcher2.matches() && matcher2.groupCount() == 17) {
rxBytes += Long.parseLong(matcher2.group(2));
rxPackets += Long.parseLong(matcher2.group(3));
rxErrs += Long.parseLong(matcher2.group(4));
txBytes += Long.parseLong(matcher2.group(10));
txPackets += Long.parseLong(matcher2.group(11));
txErrs += Long.parseLong(matcher2.group(12));
}
}
}
}
public long getRxBytes() {
return rxBytes;
}
public long getRxPackets() {
return rxPackets;
}
public long getRxErrs() {
return rxErrs;
}
public long getTxBytes() {
return txBytes;
}
public long getTxPackets() {
return txPackets;
}
public long getTxErrs() {
return txErrs;
}
public Map<String, String> getTags() {
return new HashMap<>();
}
public Map<String, Object> getFields() {
Map<String, Object> fields = new HashMap<>();
fields.put("rxBytes", rxBytes);
fields.put("rxPackets", rxPackets);
fields.put("txBytes", txBytes);
fields.put("txPackets", txPackets);
return fields;
}
}

View file

@@ -0,0 +1,67 @@
package sysmon.plugins.os_linux;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import sysmon.shared.PluginHelper;
import java.util.List;
import java.util.Map;
@Extension
public class LinuxNetworkExtension implements MetricExtension {
private static final Logger log = LoggerFactory.getLogger(LinuxNetworkExtension.class);
@Override
public boolean isSupported() {
if(!System.getProperty("os.name").toLowerCase().contains("linux")) {
log.warn("Requires Linux.");
return false;
}
return true;
}
@Override
public String getName() {
return "linux-network";
}
@Override
public String getProvides() {
return "network";
}
@Override
public String getDescription() {
return "Linux Network Metrics";
}
@Override
public MetricResult getMetrics() {
LinuxNetworkSockStat sockStat = processSockOutput(PluginHelper.readFile("/proc/net/sockstat"));
LinuxNetworkDevStat devStat = processDevOutput(PluginHelper.readFile("/proc/net/dev"));
Map<String, String> tagsMap = sockStat.getTags();
Map<String, Object> fieldsMap = sockStat.getFields();
fieldsMap.putAll(devStat.getFields());
return new MetricResult("network", new Measurement(tagsMap, fieldsMap));
}
protected LinuxNetworkSockStat processSockOutput(List<String> inputLines) {
return new LinuxNetworkSockStat(inputLines);
}
protected LinuxNetworkDevStat processDevOutput(List<String> inputLines) {
return new LinuxNetworkDevStat(inputLines);
}
}

View file

@@ -0,0 +1,97 @@
package sysmon.plugins.os_linux;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class LinuxNetworkSockStat {
private static final Logger log = LoggerFactory.getLogger(LinuxNetworkSockStat.class);
private static final Pattern pattern1 = Pattern.compile("^sockets: used (\\d+)");
private static final Pattern pattern2 = Pattern.compile("^TCP: inuse (\\d+) orphan (\\d+) tw (\\d+) alloc (\\d+) mem (\\d+)");
private static final Pattern pattern3 = Pattern.compile("^UDP: inuse (\\d+) mem (\\d+)");
private long sockets;
private long tcp_inuse;
private long tcp_orphan;
private long tcp_tw;
private long tcp_alloc;
private long tcp_mem;
private long udp_inuse;
private long udp_mem;
/*
sockets: used 1238
TCP: inuse 52 orphan 0 tw 18 alloc 55 mem 7
UDP: inuse 11 mem 10
UDPLITE: inuse 0
RAW: inuse 0
FRAG: inuse 0 memory 0
*/
LinuxNetworkSockStat(List<String> lines) {
Matcher matcher;
for(String line : lines) {
String proto = line.substring(0, line.indexOf(':'));
switch (proto) {
case "sockets":
matcher = pattern1.matcher(line);
if (matcher.matches() && matcher.groupCount() == 1) {
sockets = Long.parseLong(matcher.group(1));
}
break;
case "TCP":
matcher = pattern2.matcher(line);
if (matcher.matches() && matcher.groupCount() == 5) {
tcp_inuse = Long.parseLong(matcher.group(1));
tcp_orphan = Long.parseLong(matcher.group(2));
tcp_tw = Long.parseLong(matcher.group(3));
tcp_alloc = Long.parseLong(matcher.group(4));
tcp_mem = Long.parseLong(matcher.group(5));
}
break;
case "UDP":
matcher = pattern3.matcher(line);
if (matcher.matches() && matcher.groupCount() == 2) {
udp_inuse = Long.parseLong(matcher.group(1));
udp_mem = Long.parseLong(matcher.group(2));
}
break;
}
}
}
public Map<String, String> getTags() {
return new HashMap<>();
}
public Map<String, Object> getFields() {
Map<String, Object> fields = new HashMap<>();
fields.put("sockets", sockets);
fields.put("tcp_inuse", tcp_inuse);
fields.put("tcp_alloc", tcp_alloc);
fields.put("tcp_orphan", tcp_orphan);
fields.put("tcp_mem", tcp_mem);
fields.put("tcp_tw", tcp_tw);
fields.put("udp_inuse", udp_inuse);
fields.put("udp_mem", udp_mem);
return fields;
}
}

View file

@@ -8,7 +8,7 @@ class LinuxDiskTest extends Specification {
void "test proc file processing"() {
setup:
def testFile = new File(getClass().getResource('/diskstats1.txt').toURI())
def testFile = new File(getClass().getResource('/proc_diskstats1.txt').toURI())
List<String> lines = testFile.readLines("UTF-8")
when:
@@ -23,8 +23,8 @@ class LinuxDiskTest extends Specification {
void "test disk utilization"() {
setup:
def testFile1 = new File(getClass().getResource('/diskstats1.txt').toURI())
def testFile2 = new File(getClass().getResource('/diskstats2.txt').toURI())
def testFile1 = new File(getClass().getResource('/proc_diskstats1.txt').toURI())
def testFile2 = new File(getClass().getResource('/proc_diskstats2.txt').toURI())
LinuxDiskExtension extension = new LinuxDiskExtension()
LinuxDiskProcLine procLine1 = extension.processFileOutput(testFile1.readLines())
LinuxDiskProcLine procLine2 = extension.processFileOutput(testFile2.readLines())

View file

@@ -0,0 +1,70 @@
import spock.lang.Specification
import sysmon.plugins.os_linux.LinuxNetworkDevStat
import sysmon.plugins.os_linux.LinuxNetworkExtension
import sysmon.plugins.os_linux.LinuxNetworkSockStat
class LinuxNetworkTest extends Specification {
void "test /proc/net/sockstat parsing"() {
setup:
def testFile = new File(getClass().getResource('/proc_net_sockstat.txt').toURI())
List<String> lines = testFile.readLines("UTF-8")
when:
LinuxNetworkExtension extension = new LinuxNetworkExtension()
LinuxNetworkSockStat stats = extension.processSockOutput(lines)
then:
stats.getFields().get("sockets") == 1238L
stats.getFields().get("tcp_inuse") == 52L
stats.getFields().get("tcp_orphan") == 0L
stats.getFields().get("tcp_alloc") == 55L
stats.getFields().get("tcp_mem") == 7L
stats.getFields().get("tcp_tw") == 18L
stats.getFields().get("udp_inuse") == 11L
stats.getFields().get("udp_mem") == 10L
}
void "test /proc/net/dev parsing"() {
setup:
def testFile = new File(getClass().getResource('/proc_net_dev1.txt').toURI())
List<String> lines = testFile.readLines("UTF-8")
when:
LinuxNetworkExtension extension = new LinuxNetworkExtension()
LinuxNetworkDevStat procLine = extension.processDevOutput(lines)
then:
procLine.getRxBytes() == 663911036L
procLine.getRxPackets() == 525522L
procLine.getRxErrs() == 0L
procLine.getTxBytes() == 63084294L
procLine.getTxPackets() == 472869L
procLine.getTxErrs() == 0L
}
/*
void "test dev utilization"() {
setup:
def testFile1 = new File(getClass().getResource('/proc_net_dev1.txt').toURI())
def testFile2 = new File(getClass().getResource('/proc_net_dev2.txt').toURI())
LinuxNetworkExtension extension = new LinuxNetworkExtension()
LinuxNetworkDevStat procLine1 = extension.processDevOutput(testFile1.readLines())
LinuxNetworkDevStat procLine2 = extension.processDevOutput(testFile2.readLines())
when:
LinuxNetworkDevStat networkDevStat = new LinuxNetworkDevStat(procLine1, procLine2)
then:
networkDevStat.getFields().get("rxPackets") == 223L
networkDevStat.getFields().get("rxBytes") == 31501L
networkDevStat.getFields().get("txBytes") == 46460L
networkDevStat.getFields().get("txPackets") == 341L
}*/
}
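
The commented-out "dev utilization" case above calls a two-argument LinuxNetworkDevStat constructor that this commit does not add. A minimal sketch of the delta calculation it seems to anticipate, using only the getters the class already exposes (the helper class itself is hypothetical):

package sysmon.plugins.os_linux;

import java.util.HashMap;
import java.util.Map;

// Hypothetical helper, not part of this commit: field deltas between two
// /proc/net/dev snapshots taken at the start and end of a sampling interval.
public class LinuxNetworkDevDelta {

    public static Map<String, Object> between(LinuxNetworkDevStat first, LinuxNetworkDevStat second) {
        Map<String, Object> fields = new HashMap<>();
        fields.put("rxBytes", second.getRxBytes() - first.getRxBytes());
        fields.put("rxPackets", second.getRxPackets() - first.getRxPackets());
        fields.put("txBytes", second.getTxBytes() - first.getTxBytes());
        fields.put("txPackets", second.getTxPackets() - first.getTxPackets());
        return fields;
    }
}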

View file

@@ -8,7 +8,7 @@ class LinuxProcessorTest extends Specification {
void "test proc file processing"() {
setup:
def testFile = new File(getClass().getResource('/proc1.txt').toURI())
def testFile = new File(getClass().getResource('/proc_stats1.txt').toURI())
List<String> lines = testFile.readLines("UTF-8")
when:
@@ -27,8 +27,8 @@ class LinuxProcessorTest extends Specification {
void "test processor utilization"() {
setup:
def testFile1 = new File(getClass().getResource('/proc1.txt').toURI())
def testFile2 = new File(getClass().getResource('/proc2.txt').toURI())
def testFile1 = new File(getClass().getResource('/proc_stats1.txt').toURI())
def testFile2 = new File(getClass().getResource('/proc_stats2.txt').toURI())
LinuxProcessorProcLine processorProcLine1 = new LinuxProcessorProcLine(testFile1.readLines().get(0))
LinuxProcessorProcLine processorProcLine2 = new LinuxProcessorProcLine(testFile2.readLines().get(0))

View file

@@ -0,0 +1,5 @@
Inter-| Receive | Transmit
face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed
env2: 657010764 483686 0 0 0 0 0 0 55416850 431020 0 0 0 0 0 0
env3: 6900272 41836 0 0 0 0 0 0 7667444 41849 0 0 0 0 0 0
lo: 3098805 14393 0 0 0 0 0 0 3098805 14393 0 0 0 0 0 0

View file

@@ -0,0 +1,5 @@
Inter-| Receive | Transmit
face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed
env2: 657034787 483864 0 0 0 0 0 0 55454936 431316 0 0 0 0 0 0
env3: 6907750 41881 0 0 0 0 0 0 7675818 41894 0 0 0 0 0 0
lo: 3098805 14393 0 0 0 0 0 0 3098805 14393 0 0 0 0 0 0

View file

@@ -0,0 +1,6 @@
sockets: used 1238
TCP: inuse 52 orphan 0 tw 18 alloc 55 mem 7
UDP: inuse 11 mem 10
UDPLITE: inuse 0
RAW: inuse 0
FRAG: inuse 0 memory 0

View file

@@ -7,6 +7,7 @@ import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
@@ -68,4 +69,15 @@ public class PluginHelper {
.anyMatch(path -> Files.exists(path.resolve(cmd)));
}
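// Reads an entire text file (for example a /proc pseudo-file) into a list of lines;
// returns an empty list if the file cannot be read.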
public static List<String> readFile(String filename) {
List<String> allLines = new ArrayList<>();
try {
allLines = Files.readAllLines(Paths.get(filename), StandardCharsets.UTF_8);
} catch (IOException e) {
log.error(e.getMessage());
}
return allLines;
}
}