Compare commits
No commits in common. "main" and "v1.0.21" have entirely different histories.
26 .drone.yml
@ -1,26 +0,0 @@
---
kind: pipeline
name: default
type: docker

steps:
  - name: test
    image: eclipse-temurin:8-jdk
    commands:
      - ./gradlew test
  - name: build
    image: eclipse-temurin:8-jdk
    environment:
      AUTH_TOKEN: # Gitea access token ENV variable
        from_secret: auth # Name of DroneCI secret exposed above
    commands:
      - ./gradlew build packages shared:publishLibraryPublicationToGiteaRepository
      - for file in server/build/distributions/*.deb ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
      - for file in server/build/distributions/*.rpm ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
      - for file in client/build/distributions/*.deb ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
      - for file in client/build/distributions/*.rpm ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
      - for file in plugins/build/distributions/*.deb ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
      - for file in plugins/build/distributions/*.rpm ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
    when:
      event:
        - tag
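For readers unfamiliar with Drone's variable substitution, each upload loop above boils down to one authenticated upload per file against Gitea's generic package registry. A hedged sketch with the owner/repository taken from the git.data.coop URLs in this compare, and a purely hypothetical file name and token:

```shell
# Hypothetical values for illustration only
AUTH_TOKEN=xxxxxxxx
FILE=server/build/distributions/sysmon-server_1.0.21-1_all.deb
curl --user "nellemann:${AUTH_TOKEN}" \
     --upload-file "${FILE}" \
     "https://git.data.coop/api/packages/nellemann/generic/sysmon/v1.0.21/$(basename "${FILE}")"
```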
@ -6,6 +6,3 @@ insert_final_newline = true
trim_trailing_whitespace = true
indent_style = space
indent_size = 4

[*.yml]
indent_size = 2

32 CHANGELOG.md
@ -2,31 +2,6 @@

All notable changes to this project will be documented in this file.

## [1.1.2] - 2023-02-06
- Lowercase client hostnames

## [1.1.1] - 2023-01-22
- Simplify plugin naming
- Initial support for executing (groovy) scripts
- Fixed bug when no config file was found
- Update the default [dashboards](doc/dashboards/)

## [1.1.0] - 2022-12-17
- Lower influx time precision from milliseconds to seconds
  - requires you to update server and clients to this version.
- Update *oshi* dependency (for AIX improvements).

## [1.0.24] - 2022-11-16
- Fix incorrect use of OSHI getDiskStores()
- Update dashboards

## [1.0.23] - 2022-11-07
- Update dashboards.
- Lower default interval for most plugins.
- Simplify metrics-results to influx points code.
- Remove logging of skipped disk devices (eg. cd0).

## [1.0.21] - 2022-10-30
- Update dashboard
- Add IP connections

@ -55,14 +30,9 @@ All notable changes to this project will be documented in this file.
### Changed
- Updated 3rd party dependencies.

<!--
[1.1.0]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.1.0%0Dv0.1.24
[1.0.24]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.24%0Dv0.1.23
[1.0.23]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.23%0Dv0.1.21
[1.0.21]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.21%0Dv0.1.18
[1.0.19]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.21%0Dv0.1.18
[1.0.18]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.18%0Dv0.1.13
[0.1.13]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.13%0Dv0.1.11
[0.1.11]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.11%0Dv0.1.10
[0.1.10]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.10%0Dv0.1.9
[0.1.9]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.9%0Dv0.1.8
-->

50 README.md
@ -1,3 +1,49 @@
# Repository moved
# System Monitor

Please visit [github.com/mnellemann/sysmon](https://github.com/mnellemann/sysmon)
Open source system monitoring solution with support for plugins.

![Sysmon Icon](doc/sysmon.png)

This software is free to use and is licensed under the [Apache 2.0 License](LICENSE).

- Example dashboards are provided in the [doc/dashboards/](doc/dashboards/) folder, which can be imported into your Grafana installation.
- Screenshots are available in the [downloads](https://bitbucket.org/mnellemann/sysmon/downloads/) section.


## Components

This software consists of a server and a client component.

### Server

The server component receives aggregated metrics from *clients* and saves these into InfluxDB.

- More information and documentation on the [sysmon-server](server/README.md).

### Client & Plugins

The client runs on all or some of your hosts and collects metrics, which are then sent to the central sysmon-server component. Plugins are loaded by the client at startup and should also be installed.

- More information and documentation on the [sysmon-client](client/README.md).
- More information and documentation on the [sysmon-plugins](plugins/README.md).


## Known problems

### Correct timezone and clock

- Ensure you have **correct timezone and date/time** and NTPd (or similar) running to keep it accurate!

### Naming collision

You can't have hosts with the same name, as these cannot be distinguished when metrics are written to InfluxDB (which uses the hostname as key).
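If two machines would otherwise report the same name, the client can be told explicitly which hostname to report; the `-n hostname` option is referenced in the init-script and systemd-unit comments further down in this compare. A minimal sketch of such an invocation, with a placeholder host name:

```shell
# Placeholder host name; -s points at the sysmon-server URL as in the bundled service files
/opt/sysmon/client/bin/client -s http://10.20.30.40:9925/metrics -n db01.example.org
```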

### Renaming hosts

If you rename a host, the metrics in InfluxDB will still be available by the old hostname, and new metrics will be written with the new hostname. There is no easy way to migrate the old data, but you can delete it easily:

```text
USE sysmon;
DELETE WHERE hostname = 'unknown';
```
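The two statements above are InfluxQL and can also be run non-interactively. A minimal sketch, assuming the InfluxDB 1.x `influx` shell and a placeholder hostname:

```shell
# 'old-hostname' is a placeholder for the name the host used before it was renamed
influx -database sysmon -execute "DELETE WHERE \"hostname\" = 'old-hostname'"
```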
10 build.gradle
@ -27,14 +27,11 @@ subprojects {
    mavenCentral()
}

java {
    sourceCompatibility = JavaVersion.VERSION_1_8
    targetCompatibility = JavaVersion.VERSION_1_8
    sourceCompatibility = 1.8
    targetCompatibility = 1.8
}

}

tasks.register("packages") {
tasks.create("packages") {
    group "build"

    dependsOn ":client:buildDeb"

@ -45,5 +42,4 @@ tasks.register("packages") {

    dependsOn ":plugins:buildDeb"
    dependsOn ":plugins:buildRpm"
    dependsOn ":plugins:buildZip"
}

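For reference, the aggregate `packages` task wired up above can then be invoked from the repository root; a usage sketch (the distribution paths are the ones the Drone pipeline earlier in this compare uploads from):

```shell
./gradlew clean packages
ls server/build/distributions/ client/build/distributions/ plugins/build/distributions/
```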
@ -3,9 +3,9 @@ import org.redline_rpm.header.Os
|
|||
plugins {
|
||||
id 'application'
|
||||
|
||||
id "net.nemerosa.versioning" version "2.15.1"
|
||||
id "com.github.johnrengelman.shadow" version "7.1.2"
|
||||
id "com.netflix.nebula.ospackage" version "11.3.0"
|
||||
id "net.nemerosa.versioning" version "2.15.1"
|
||||
id "nebula.ospackage" version "9.1.1"
|
||||
}
|
||||
|
||||
dependencies {
|
||||
|
@ -23,9 +23,6 @@ dependencies {
|
|||
exclude(group: "org.slf4j")
|
||||
}
|
||||
|
||||
//implementation "org.apache.groovy:groovy-all:${groovyVersion}" // From version 4.+
|
||||
implementation "org.codehaus.groovy:groovy:${groovyVersion}"
|
||||
|
||||
implementation group: 'org.apache.camel', name: 'camel-core', version: camelVersion
|
||||
implementation group: 'org.apache.camel', name: 'camel-main', version: camelVersion
|
||||
implementation group: 'org.apache.camel', name: 'camel-http', version: camelVersion
|
||||
|
@ -41,7 +38,7 @@ def projectName = "sysmon-client"
|
|||
application {
|
||||
// Define the main class for the application.
|
||||
mainClass.set('sysmon.client.Application')
|
||||
applicationDefaultJvmArgs = [ "-Xms64m", "-Xmx64m", "-XX:+ExitOnOutOfMemoryError", "-XX:+AlwaysPreTouch" ]
|
||||
applicationDefaultJvmArgs = [ "-server", "-Xms64m", "-Xmx64m", "-XX:+ExitOnOutOfMemoryError", "-XX:+AlwaysPreTouch" ]
|
||||
}
|
||||
|
||||
run {
|
||||
|
@ -76,6 +73,7 @@ shadowJar {
|
|||
mergeServiceFiles() // Tell plugin to merge duplicate service files
|
||||
}
|
||||
|
||||
apply plugin: 'nebula.ospackage'
|
||||
ospackage {
|
||||
packageName = projectName
|
||||
release = '1'
|
||||
|
|
|
@ -8,7 +8,7 @@ We require Java 8, which should already be installed on AIX, or is available to
The RPM packages are *"noarch"* Java bytecode, so we can use the **--ignoreos** option to install:

```shell
rpm -ivh --ignoreos sysmon-client-*.rpm sysmon-plugins-*.rpm
rpm -i --ignoreos sysmon-client.rpm sysmon-plugins.rpm
```

### Run automatically at boot

@ -18,9 +18,11 @@ Use *yum* if *dnf* is not available.

## Installation

[Download](https://git.data.coop/nellemann/-/packages/generic/sysmon/) the latest client and plugins rpm files and install:
[Download](https://bitbucket.org/mnellemann/sysmon/downloads/) the latest client and plugins rpm files and install:

```shell
wget https://bitbucket.org/mnellemann/sysmon/downloads/sysmon-client-1.0.16-1.noarch.rpm
wget https://bitbucket.org/mnellemann/sysmon/downloads/sysmon-plugins-1.0.16-1.noarch.rpm
rpm -ivh sysmon-client-*.noarch.rpm sysmon-plugins-*.noarch.rpm
cp /opt/sysmon/client/doc/sysmon-client.service /etc/systemd/system/
systemctl daemon-reload
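# Assumed follow-up, not shown in this truncated hunk: enable and start the unit copied above
# systemctl enable --now sysmon-client.service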
|
@ -1,19 +0,0 @@

class ExampleScript implements MetricScript {

    MetricResult getMetrics() {
        Map<String,String> tags = new TreeMap<>();
        Map<String,Object> fields = new TreeMap<>();

        tags.put("type", "temp");
        fields.put("sensor1", 23.2);
        fields.put("sensor2", 25.8);

        Measurement measurement = new Measurement(tags, fields);
        return new MetricResult("script_sensors", measurement);
    }

}

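The removed example above shows the MetricScript contract used by the Groovy script support mentioned in the 1.1.1 changelog entry. A hedged sketch of deploying such a script, using the `scripts` path from the sysmon-client.toml shown later in this compare (the file name is illustrative):

```shell
cp ExampleScript.groovy /opt/sysmon/scripts/
```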
|
@ -1,3 +0,0 @@
|
|||
# Example Scripts
|
||||
|
||||
TODO.
|
|
@ -6,7 +6,6 @@ Description=Sysmon Client
|
|||
#Group=nobody
|
||||
TimeoutSec=20
|
||||
Restart=on-failure
|
||||
# BELOW: Specify sysmon-server URL, add '-n hostname' if needed
|
||||
ExecStart=/opt/sysmon/client/bin/client -s http://10.20.30.40:9925/metrics
|
||||
|
||||
[Install]
|
||||
|
|
|
@ -11,20 +11,17 @@
|
|||
|
||||
dir="/opt/sysmon/client"
|
||||
cmd="/opt/sysmon/client/bin/client"
|
||||
args="-s http://10.20.30.40:9925/metrics" # <- HERE: Specify sysmon-server URL, add '-n hostname' if needed
|
||||
args="-s http://10.20.30.40:9925/metrics" # Specify sysmon-server URL here
|
||||
user=""
|
||||
|
||||
name="sysmon-client"
|
||||
name=`basename $0`
|
||||
pid_file="/var/run/$name.pid"
|
||||
stdout_log="/var/log/$name.log"
|
||||
stderr_log="/var/log/$name.err"
|
||||
|
||||
# Java 8+ runtime required - Uncomment and export JAVA_HOME if needed
|
||||
# Uncomment if required
|
||||
#JAVA_HOME=/usr/java8_64
|
||||
#JAVA_HOME=/opt/ibm-semeru-open-XX-jre
|
||||
#JAVA_HOME=/opt/ibm-semeru-open-XX-jdk
|
||||
#JAVA_HOME=/opt/ibm/ibm-semeru-certified-XX-jre
|
||||
#JAVA_HOME=/opt/ibm/ibm-semeru-certified-XX-jdk
|
||||
#JAVA_HOME=/opt/ibm/ibm-semeru-certified-11-jre
|
||||
#export JAVA_HOME
|
||||
|
||||
get_pid() {
|
||||
|
|
|
@ -1,29 +1,23 @@
|
|||
###
|
||||
### Sysmon Client
|
||||
###
|
||||
### Example configuration with some default values.
|
||||
### Example configuration with default values.
|
||||
###
|
||||
|
||||
# Local path for Groovy scripts
|
||||
scripts = "/opt/sysmon/scripts"
|
||||
|
||||
[extension.base_info]
|
||||
enabled = true
|
||||
interval = '60m'
|
||||
|
||||
[extension.base_disk]
|
||||
enabled = true
|
||||
interval = '10s'
|
||||
|
||||
[extension.base_filesystem]
|
||||
enabled = true
|
||||
interval = '10s'
|
||||
exclude_type = [ "tmpfs", "ahafs" ]
|
||||
exclude_mount = [ "/boot/efi" ]
|
||||
|
||||
[extension.base_process]
|
||||
enabled = true
|
||||
interval = '5m'
|
||||
include = [
|
||||
"java", "node", "httpd", "mongod", "mysqld",
|
||||
"postgres", "influxd", "haproxy", "beam.smp",
|
||||
|
|
|
@ -85,7 +85,7 @@ public class Application implements Callable<Integer> {
|
|||
try {
|
||||
configuration.parse(configurationFile.toPath());
|
||||
} catch (Exception e) {
|
||||
System.err.println("Could not parse configuration file: " + e.getMessage());
|
||||
System.err.println(e.getMessage());
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -14,39 +14,32 @@ import sysmon.shared.ComboResult;
|
|||
import sysmon.shared.MetricExtension;
|
||||
import sysmon.shared.MetricResult;
|
||||
|
||||
import javax.script.*;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
public class ClientRouteBuilder extends RouteBuilder {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(ClientRouteBuilder.class);
|
||||
|
||||
private final Set<String> scriptFiles = new HashSet<>();
|
||||
|
||||
|
||||
@Override
|
||||
public void configure() {
|
||||
|
||||
Registry registry = getContext().getRegistry();
|
||||
Configuration configuration = (Configuration) registry.lookupByName("configuration");
|
||||
|
||||
Path[] pluginPaths = { Paths.get(registry.lookupByNameAndType("pluginPath", String.class)) };
|
||||
PluginManager pluginManager = new JarPluginManager(pluginPaths);
|
||||
Path[] pluginpaths = { Paths.get(registry.lookupByNameAndType("pluginPath", String.class)) };
|
||||
PluginManager pluginManager = new JarPluginManager(pluginpaths);
|
||||
pluginManager.loadPlugins();
|
||||
pluginManager.startPlugins();
|
||||
|
||||
List<String> providers = new ArrayList<>();
|
||||
List<MetricExtension> metricExtensions = pluginManager.getExtensions(MetricExtension.class);
|
||||
for (MetricExtension ext : metricExtensions) {
|
||||
|
||||
final String name = ext.getName();
|
||||
final String provides = ext.getProvides();
|
||||
|
||||
// Load configuration if available
|
||||
if(configuration.isForExtension(name)) {
|
||||
|
@ -55,14 +48,37 @@ public class ClientRouteBuilder extends RouteBuilder {
|
|||
}
|
||||
|
||||
if(ext.isSupported() && ext.isEnabled()) {
|
||||
addExtensionRoute(ext);
|
||||
|
||||
// Check that another extension has not already been loaded - TODO: Is this required ?
|
||||
if(providers.contains(provides)) {
|
||||
log.warn("Skipping extension (already provided): " + ext.getName());
|
||||
continue;
|
||||
}
|
||||
|
||||
log.info("Enabling extension: " + ext.getDescription());
|
||||
providers.add(provides);
|
||||
|
||||
// Setup Camel route for this extension
|
||||
// a unique timer name gives the timer its own thread, otherwise it's a shared thread for other timers with the same name.
|
||||
String timerName = ext.isThreaded() ? ext.getProvides() : "default";
|
||||
String timerInterval = (ext.getInterval() != null) ? ext.getInterval() : "30s";
|
||||
from("timer:"+timerName+"?fixedRate=true&period="+timerInterval)
|
||||
.bean(ext, "getMetrics")
|
||||
.outputType(MetricResult.class)
|
||||
.process(new MetricEnrichProcessor(registry))
|
||||
.choice().when(exchangeProperty("skip").isEqualTo(true))
|
||||
.log(LoggingLevel.WARN,"Skipping empty measurement.")
|
||||
.stop()
|
||||
.otherwise()
|
||||
.log("${body}")
|
||||
.to("seda:metrics?discardWhenFull=true");
|
||||
} else {
|
||||
log.info("Skipping extension (not supported or disabled): " + ext.getDescription());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
from("seda:metrics?purgeWhenStopping=true")
|
||||
.routeId("aggregation")
|
||||
.aggregate(constant(true), AggregationStrategies.beanAllowNull(ComboAppender.class, "append"))
|
||||
.completionTimeout(5000L)
|
||||
.doTry()
|
||||
|
@ -73,7 +89,6 @@ public class ClientRouteBuilder extends RouteBuilder {
|
|||
.end();
|
||||
|
||||
from("seda:outbound?purgeWhenStopping=true")
|
||||
.routeId("outbound")
|
||||
.setHeader(Exchange.HTTP_METHOD, constant("POST"))
|
||||
.doTry()
|
||||
.marshal(new JacksonDataFormat(ComboResult.class))
|
||||
|
@ -83,99 +98,7 @@ public class ClientRouteBuilder extends RouteBuilder {
|
|||
.log(LoggingLevel.WARN,"Error: ${exception.message}.")
|
||||
.end();
|
||||
|
||||
// Find all local scripts
|
||||
String scriptsPath = configuration.getScriptPath();
|
||||
if(scriptsPath != null && Files.isDirectory(Paths.get(scriptsPath))) {
|
||||
try {
|
||||
scriptFiles.addAll(listFilesByExtension(scriptsPath, "groovy"));
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
// Enable the local scripts
|
||||
for (String scriptFile : scriptFiles) {
|
||||
try {
|
||||
ScriptWrapper scriptWrapper = new ScriptWrapper(scriptsPath, scriptFile);
|
||||
addScriptRoute(scriptWrapper);
|
||||
} catch(Exception e) {
|
||||
log.error("configure() - script error: {}", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
void addScriptRoute(ScriptWrapper script) {
|
||||
Registry registry = getContext().getRegistry();
|
||||
|
||||
from("timer:scripts?fixedRate=true&period=30s")
|
||||
.routeId(script.toString())
|
||||
.bean(script, "run")
|
||||
.outputType(MetricResult.class)
|
||||
.process(new MetricEnrichProcessor(registry))
|
||||
.choice().when(exchangeProperty("skip").isEqualTo(true))
|
||||
.log(LoggingLevel.WARN, "Skipping empty measurement.")
|
||||
.stop()
|
||||
.otherwise()
|
||||
.log("${body}")
|
||||
.to("seda:metrics?discardWhenFull=true");
|
||||
}
|
||||
|
||||
|
||||
void addExtensionRoute(MetricExtension ext) {
|
||||
|
||||
Registry registry = getContext().getRegistry();
|
||||
|
||||
// Setup Camel route for this extension
|
||||
// a unique timer name gives the timer its own thread, otherwise it's a shared thread for other timers with the same name.
|
||||
String timerName = ext.isThreaded() ? ext.getName() : "default";
|
||||
String timerInterval = (ext.getInterval() != null) ? ext.getInterval() : "30s";
|
||||
from("timer:" + timerName + "?fixedRate=true&period=" + timerInterval)
|
||||
.routeId(ext.getName())
|
||||
.bean(ext, "getMetrics")
|
||||
.outputType(MetricResult.class)
|
||||
.process(new MetricEnrichProcessor(registry))
|
||||
.choice().when(exchangeProperty("skip").isEqualTo(true))
|
||||
.log(LoggingLevel.WARN, "Skipping empty measurement.")
|
||||
.stop()
|
||||
.otherwise()
|
||||
.log("${body}")
|
||||
.to("seda:metrics?discardWhenFull=true");
|
||||
}
|
||||
|
||||
|
||||
List<String> findScripts(String location) {
|
||||
log.info("Looking for scripts in: {}", location);
|
||||
List<String> scripts = new ArrayList<>();
|
||||
ScriptEngineManager manager = new ScriptEngineManager();
|
||||
List<ScriptEngineFactory> factoryList = manager.getEngineFactories();
|
||||
for (ScriptEngineFactory factory : factoryList) {
|
||||
log.info("findScripts() - Supporting: {}", factory.getLanguageName());
|
||||
for(String ex : factory.getExtensions()) {
|
||||
log.info("findScripts() - Extension: {}", ex);
|
||||
try {
|
||||
scripts.addAll(listFilesByExtension(location, ex));
|
||||
log.warn(scripts.toString());
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
return scripts;
|
||||
}
|
||||
|
||||
|
||||
Set<String> listFilesByExtension(String dir, String ext) throws IOException {
|
||||
try (Stream<Path> stream = Files.list(Paths.get(dir))) {
|
||||
return stream
|
||||
.filter(file -> !Files.isDirectory(file))
|
||||
.map(Path::getFileName)
|
||||
.map(Path::toString)
|
||||
.filter(s -> s.endsWith(ext))
|
||||
.collect(Collectors.toSet());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -65,14 +65,4 @@ public final class Configuration {
|
|||
return map;
|
||||
}
|
||||
|
||||
|
||||
String getScriptPath() {
|
||||
if(result == null) {
|
||||
log.debug("No configuration file loaded ...");
|
||||
return null;
|
||||
}
|
||||
return result.getString("scripts");
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -1,47 +0,0 @@
|
|||
package sysmon.client;
|
||||
|
||||
import groovy.lang.GroovyClassLoader;
|
||||
import groovy.lang.GroovyObject;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import sysmon.shared.MetricResult;
|
||||
import sysmon.shared.MetricScript;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
|
||||
public class ScriptWrapper {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(ScriptWrapper.class);
|
||||
|
||||
private final static GroovyClassLoader loader = new GroovyClassLoader();
|
||||
|
||||
private GroovyObject script;
|
||||
private final String name;
|
||||
|
||||
public ScriptWrapper(String scriptPath, String scriptFile) {
|
||||
name = scriptFile;
|
||||
try {
|
||||
Class<?> scriptClass = loader.parseClass(new File(scriptPath, scriptFile));
|
||||
script = (GroovyObject) scriptClass.getDeclaredConstructor().newInstance();
|
||||
} catch (IOException |InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) {
|
||||
log.error("ScriptWrapper() - error: {}", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
MetricResult run() {
|
||||
MetricResult result = null;
|
||||
if (script != null && script instanceof MetricScript) {
|
||||
result = (MetricResult) script.invokeMethod("getMetrics", null);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return name;
|
||||
}
|
||||
|
||||
}
|
|
@ -15,7 +15,21 @@
|
|||
## limitations under the License.
|
||||
## ---------------------------------------------------------------------------
|
||||
|
||||
# to configure camel main
|
||||
# here you can configure options on camel main (see MainConfigurationProperties class)
|
||||
camel.main.name = sysmon-client
|
||||
camel.main.jmxEnabled = false
|
||||
|
||||
# enable tracing
|
||||
#camel.main.tracing = true
|
||||
|
||||
# bean introspection to log reflection based configuration
|
||||
#camel.main.beanIntrospectionExtendedStatistics=true
|
||||
#camel.main.beanIntrospectionLoggingLevel=INFO
|
||||
|
||||
# run in lightweight mode to be tiny as possible
|
||||
camel.main.lightweight = true
|
||||
camel.component.seda.queue-size = 100
|
||||
# and eager load classes
|
||||
#camel.main.eager-classloading = true
|
||||
|
||||
# limit the seda queue size
|
||||
camel.component.seda.queue-size=10
|
|
@ -2,7 +2,7 @@
|
|||
"__inputs": [
|
||||
{
|
||||
"name": "DS_SYSMON",
|
||||
"label": "Database",
|
||||
"label": "sysmon",
|
||||
"description": "",
|
||||
"type": "datasource",
|
||||
"pluginId": "influxdb",
|
||||
|
@ -35,12 +35,6 @@
|
|||
"name": "Stat",
|
||||
"version": ""
|
||||
},
|
||||
{
|
||||
"type": "panel",
|
||||
"id": "table",
|
||||
"name": "Table",
|
||||
"version": ""
|
||||
},
|
||||
{
|
||||
"type": "panel",
|
||||
"id": "text",
|
||||
|
@ -76,7 +70,7 @@
|
|||
}
|
||||
]
|
||||
},
|
||||
"description": "https://git.data.coop/nellemann/sysmon/ - Metrics from sysmon agent.",
|
||||
"description": "https://bitbucket.org/mnellemann/sysmon/ - Metrics from sysmon agent.",
|
||||
"editable": true,
|
||||
"fiscalYearStartMonth": 0,
|
||||
"graphTooltip": 0,
|
||||
|
@ -97,7 +91,7 @@
|
|||
},
|
||||
"id": 28,
|
||||
"options": {
|
||||
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information visit: [git.data.coop/nellemann/sysmon](https://git.data.coop/nellemann/sysmon)\n ",
|
||||
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information: [bitbucket.org/mnellemann/sysmon](https://bitbucket.org/mnellemann/sysmon)\n ",
|
||||
"mode": "markdown"
|
||||
},
|
||||
"pluginVersion": "9.1.6",
|
||||
|
@ -121,10 +115,8 @@
|
|||
"description": "",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {
|
||||
"align": "center",
|
||||
"displayMode": "auto",
|
||||
"inspect": false
|
||||
"color": {
|
||||
"mode": "palette-classic"
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
|
@ -136,26 +128,30 @@
|
|||
}
|
||||
]
|
||||
},
|
||||
"unit": "string"
|
||||
"unit": "text"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 3,
|
||||
"h": 6,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 3
|
||||
},
|
||||
"id": 35,
|
||||
"options": {
|
||||
"footer": {
|
||||
"fields": "",
|
||||
"reducer": [
|
||||
"sum"
|
||||
"colorMode": "none",
|
||||
"graphMode": "none",
|
||||
"justifyMode": "center",
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": [
|
||||
"lastNotNull"
|
||||
],
|
||||
"show": false
|
||||
"fields": "/.*/",
|
||||
"values": false
|
||||
},
|
||||
"showHeader": true
|
||||
"textMode": "value_and_name"
|
||||
},
|
||||
"pluginVersion": "9.1.6",
|
||||
"targets": [
|
||||
|
@ -188,7 +184,7 @@
|
|||
"measurement": "base_info",
|
||||
"orderByTime": "DESC",
|
||||
"policy": "default",
|
||||
"query": "SELECT last(\"os_manufacturer\") AS \"manufacturer\", last(\"os_family\") AS \"family\", last(\"os_version\") AS \"version\", last(\"os_codename\") AS \"codename\", last(\"os_build\") AS \"build\", last(\"boot_time\") * 1000 AS \"boottime\" FROM \"base_info\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY \"hostname\" fill(previous) ORDER BY time DESC LIMIT 1",
|
||||
"query": "SELECT last(\"os_manufacturer\") AS \"manufacturer\", last(\"os_family\") AS \"family\", last(\"os_version\") AS \"version\", last(\"os_codename\") AS \"codename\", last(\"os_build\") AS \"build\", last(\"boot_time\") * 1000 AS \"boottime\" FROM \"base_info\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval), \"hostname\" fill(null) ORDER BY time DESC LIMIT 1000",
|
||||
"rawQuery": true,
|
||||
"refId": "A",
|
||||
"resultFormat": "table",
|
||||
|
@ -354,8 +350,7 @@
|
|||
}
|
||||
}
|
||||
],
|
||||
"transparent": true,
|
||||
"type": "table"
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
|
@ -420,7 +415,7 @@
|
|||
"h": 5,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 6
|
||||
"y": 9
|
||||
},
|
||||
"id": 2,
|
||||
"options": {
|
||||
|
@ -565,7 +560,7 @@
|
|||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "power_processor.mode"
|
||||
"options": "aix_processor.mode"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
|
@ -585,7 +580,7 @@
|
|||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "power_processor.type"
|
||||
"options": "aix_processor.type"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
|
@ -605,7 +600,7 @@
|
|||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "power_processor.ent"
|
||||
"options": "aix_processor.ent"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
|
@ -621,7 +616,7 @@
|
|||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "power_processor.lcpu"
|
||||
"options": "aix_processor.lcpu"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
|
@ -637,7 +632,7 @@
|
|||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "power_processor.smt"
|
||||
"options": "aix_processor.smt"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
|
@ -652,7 +647,7 @@
|
|||
"h": 5,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 6
|
||||
"y": 9
|
||||
},
|
||||
"id": 16,
|
||||
"options": {
|
||||
|
@ -691,11 +686,9 @@
|
|||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "power_processor",
|
||||
"measurement": "aix_processor",
|
||||
"orderByTime": "ASC",
|
||||
"policy": "default",
|
||||
"query": "SELECT last(\"mode\") AS \"mode\", last(\"type\") AS \"type\", last(\"ent\") AS \"ent\", last(\"lcpu\") AS \"lcpu\", last(\"smt\") AS \"smt\" FROM \"power_processor\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval) fill(previous) SLIMIT 1",
|
||||
"rawQuery": true,
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
|
@ -807,11 +800,11 @@
|
|||
"options": {
|
||||
"include": {
|
||||
"names": [
|
||||
"power_processor.mode",
|
||||
"power_processor.type",
|
||||
"power_processor.ent",
|
||||
"power_processor.lcpu",
|
||||
"power_processor.smt"
|
||||
"aix_processor.mode",
|
||||
"aix_processor.type",
|
||||
"aix_processor.ent",
|
||||
"aix_processor.lcpu",
|
||||
"aix_processor.smt"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
@ -883,7 +876,7 @@
|
|||
"h": 6,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 11
|
||||
"y": 14
|
||||
},
|
||||
"id": 29,
|
||||
"options": {
|
||||
|
@ -915,7 +908,7 @@
|
|||
},
|
||||
{
|
||||
"params": [
|
||||
"null"
|
||||
"none"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
|
@ -939,7 +932,7 @@
|
|||
},
|
||||
{
|
||||
"params": [
|
||||
5
|
||||
10
|
||||
],
|
||||
"type": "moving_average"
|
||||
},
|
||||
|
@ -963,7 +956,7 @@
|
|||
},
|
||||
{
|
||||
"params": [
|
||||
5
|
||||
10
|
||||
],
|
||||
"type": "moving_average"
|
||||
},
|
||||
|
@ -987,7 +980,7 @@
|
|||
},
|
||||
{
|
||||
"params": [
|
||||
5
|
||||
10
|
||||
],
|
||||
"type": "moving_average"
|
||||
},
|
||||
|
@ -1011,7 +1004,7 @@
|
|||
},
|
||||
{
|
||||
"params": [
|
||||
5
|
||||
10
|
||||
],
|
||||
"type": "moving_average"
|
||||
},
|
||||
|
@ -1035,7 +1028,7 @@
|
|||
},
|
||||
{
|
||||
"params": [
|
||||
5
|
||||
10
|
||||
],
|
||||
"type": "moving_average"
|
||||
},
|
||||
|
@ -1059,7 +1052,7 @@
|
|||
},
|
||||
{
|
||||
"params": [
|
||||
5
|
||||
10
|
||||
],
|
||||
"type": "moving_average"
|
||||
},
|
||||
|
@ -1144,7 +1137,7 @@
|
|||
"h": 6,
|
||||
"w": 4,
|
||||
"x": 12,
|
||||
"y": 11
|
||||
"y": 14
|
||||
},
|
||||
"id": 19,
|
||||
"options": {
|
||||
|
@ -1176,7 +1169,7 @@
|
|||
},
|
||||
{
|
||||
"params": [
|
||||
"null"
|
||||
"none"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
|
@ -1196,11 +1189,13 @@
|
|||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "last"
|
||||
"type": "mean"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "non_negative_difference"
|
||||
"params": [
|
||||
"1s"
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
|
@ -1218,11 +1213,13 @@
|
|||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "last"
|
||||
"type": "mean"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "non_negative_difference"
|
||||
"params": [
|
||||
"10s"
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
|
@ -1273,7 +1270,7 @@
|
|||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "power_processor.entc"
|
||||
"options": "aix_processor.entc"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
|
@ -1289,7 +1286,7 @@
|
|||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "power_processor.physc"
|
||||
"options": "aix_processor.physc"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
|
@ -1301,7 +1298,7 @@
|
|||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "power_processor.lbusy"
|
||||
"options": "aix_processor.lbusy"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
|
@ -1320,7 +1317,7 @@
|
|||
"h": 6,
|
||||
"w": 8,
|
||||
"x": 16,
|
||||
"y": 11
|
||||
"y": 14
|
||||
},
|
||||
"id": 17,
|
||||
"options": {
|
||||
|
@ -1359,7 +1356,7 @@
|
|||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "power_processor",
|
||||
"measurement": "aix_processor",
|
||||
"orderByTime": "ASC",
|
||||
"policy": "default",
|
||||
"refId": "A",
|
||||
|
@ -1464,7 +1461,7 @@
|
|||
"type": "linear"
|
||||
},
|
||||
"showPoints": "never",
|
||||
"spanNulls": 3600000,
|
||||
"spanNulls": true,
|
||||
"stacking": {
|
||||
"group": "A",
|
||||
"mode": "normal"
|
||||
|
@ -1495,7 +1492,7 @@
|
|||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 17
|
||||
"y": 20
|
||||
},
|
||||
"id": 10,
|
||||
"options": {
|
||||
|
@ -1563,13 +1560,13 @@
|
|||
},
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
"1s"
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
5
|
||||
10
|
||||
],
|
||||
"type": "moving_average"
|
||||
},
|
||||
|
@ -1655,7 +1652,7 @@
|
|||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 17
|
||||
"y": 20
|
||||
},
|
||||
"id": 30,
|
||||
"options": {
|
||||
|
@ -1723,13 +1720,13 @@
|
|||
},
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
"1s"
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
5
|
||||
10
|
||||
],
|
||||
"type": "moving_average"
|
||||
},
|
||||
|
@ -1816,7 +1813,7 @@
|
|||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 26
|
||||
"y": 29
|
||||
},
|
||||
"id": 18,
|
||||
"options": {
|
||||
|
@ -1878,15 +1875,15 @@
|
|||
},
|
||||
{
|
||||
"params": [
|
||||
5
|
||||
"1s"
|
||||
],
|
||||
"type": "moving_average"
|
||||
"type": "non_negative_derivative"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
10
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
"type": "moving_average"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
|
@ -1971,7 +1968,7 @@
|
|||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 26
|
||||
"y": 29
|
||||
},
|
||||
"id": 31,
|
||||
"options": {
|
||||
|
@ -2033,13 +2030,7 @@
|
|||
},
|
||||
{
|
||||
"params": [
|
||||
5
|
||||
],
|
||||
"type": "moving_average"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
"1s"
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
},
|
||||
|
@ -2191,7 +2182,7 @@
|
|||
"h": 5,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 35
|
||||
"y": 38
|
||||
},
|
||||
"id": 22,
|
||||
"options": {
|
||||
|
@ -2245,13 +2236,7 @@
|
|||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "last"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
"type": "mean"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
|
@ -2269,13 +2254,11 @@
|
|||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "last"
|
||||
"type": "mean"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
"params": [],
|
||||
"type": "non_negative_difference"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
|
@ -2293,13 +2276,11 @@
|
|||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "last"
|
||||
"type": "mean"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
"params": [],
|
||||
"type": "non_negative_difference"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
|
@ -2317,13 +2298,11 @@
|
|||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "last"
|
||||
"type": "mean"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
"params": [],
|
||||
"type": "non_negative_difference"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
|
@ -2341,13 +2320,11 @@
|
|||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "last"
|
||||
"type": "mean"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
"params": [],
|
||||
"type": "non_negative_difference"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
|
@ -2365,13 +2342,11 @@
|
|||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "last"
|
||||
"type": "mean"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
"params": [],
|
||||
"type": "non_negative_difference"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
|
@ -2389,13 +2364,11 @@
|
|||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "last"
|
||||
"type": "mean"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
"params": [],
|
||||
"type": "non_negative_difference"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
|
@ -2413,13 +2386,11 @@
|
|||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "last"
|
||||
"type": "mean"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
"params": [],
|
||||
"type": "non_negative_difference"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
|
@ -2503,7 +2474,7 @@
|
|||
"h": 10,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 40
|
||||
"y": 43
|
||||
},
|
||||
"id": 24,
|
||||
"options": {
|
||||
|
@ -2572,16 +2543,14 @@
|
|||
"type": "mean"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
5
|
||||
],
|
||||
"type": "moving_average"
|
||||
"params": [],
|
||||
"type": "non_negative_difference"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
10
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
"type": "moving_average"
|
||||
}
|
||||
]
|
||||
],
|
||||
|
@ -2658,7 +2627,7 @@
|
|||
"h": 10,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 40
|
||||
"y": 43
|
||||
},
|
||||
"id": 25,
|
||||
"options": {
|
||||
|
@ -2724,7 +2693,7 @@
|
|||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
"type": "last"
|
||||
}
|
||||
]
|
||||
],
|
||||
|
@ -2803,7 +2772,7 @@
|
|||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 50
|
||||
"y": 53
|
||||
},
|
||||
"id": 8,
|
||||
"options": {
|
||||
|
@ -3001,7 +2970,7 @@
|
|||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 50
|
||||
"y": 53
|
||||
},
|
||||
"id": 26,
|
||||
"options": {
|
||||
|
@ -3138,13 +3107,12 @@
|
|||
]
|
||||
},
|
||||
"time": {
|
||||
"from": "now-24h",
|
||||
"to": "now-10s"
|
||||
"from": "now-2d",
|
||||
"to": "now-30s"
|
||||
},
|
||||
"timepicker": {
|
||||
"nowDelay": "10s",
|
||||
"nowDelay": "30s",
|
||||
"refresh_intervals": [
|
||||
"10s",
|
||||
"30s",
|
||||
"1m",
|
||||
"5m",
|
||||
|
@ -3158,6 +3126,6 @@
|
|||
"timezone": "",
|
||||
"title": "Sysmon - Host Overview",
|
||||
"uid": "QkVPjseMt",
|
||||
"version": 23,
|
||||
"version": 9,
|
||||
"weekStart": ""
|
||||
}
|
|
@ -2,7 +2,7 @@
|
|||
"__inputs": [
|
||||
{
|
||||
"name": "DS_SYSMON",
|
||||
"label": "Database",
|
||||
"label": "sysmon",
|
||||
"description": "",
|
||||
"type": "datasource",
|
||||
"pluginId": "influxdb",
|
||||
|
@ -15,7 +15,7 @@
|
|||
"type": "grafana",
|
||||
"id": "grafana",
|
||||
"name": "Grafana",
|
||||
"version": "9.1.6"
|
||||
"version": "9.1.3"
|
||||
},
|
||||
{
|
||||
"type": "datasource",
|
||||
|
@ -58,7 +58,7 @@
|
|||
}
|
||||
]
|
||||
},
|
||||
"description": "https://git.data.coop/nellemann/sysmon/ - Metrics from sysmon agent.",
|
||||
"description": "https://bitbucket.org/mnellemann/sysmon/ - Metrics from sysmon agent.",
|
||||
"editable": true,
|
||||
"fiscalYearStartMonth": 0,
|
||||
"graphTooltip": 0,
|
||||
|
@ -79,10 +79,10 @@
|
|||
},
|
||||
"id": 28,
|
||||
"options": {
|
||||
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information visit: [git.data.coop/nellemann/sysmon](https://git.data.coop/nellemann/sysmon)\n ",
|
||||
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information: [bitbucket.org/mnellemann/sysmon](https://bitbucket.org/mnellemann/sysmon)\n ",
|
||||
"mode": "markdown"
|
||||
},
|
||||
"pluginVersion": "9.1.6",
|
||||
"pluginVersion": "9.1.3",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": {
|
||||
|
@ -126,8 +126,8 @@
|
|||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"showPoints": "auto",
|
||||
"spanNulls": 3600000,
|
||||
"showPoints": "never",
|
||||
"spanNulls": false,
|
||||
"stacking": {
|
||||
"group": "A",
|
||||
"mode": "none"
|
||||
|
@ -155,7 +155,7 @@
|
|||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 13,
|
||||
"h": 14,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 3
|
||||
|
@ -218,12 +218,6 @@
|
|||
"params": [],
|
||||
"type": "mean"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
10
|
||||
],
|
||||
"type": "moving_average"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"5min"
|
||||
|
@ -276,7 +270,7 @@
|
|||
"type": "linear"
|
||||
},
|
||||
"showPoints": "auto",
|
||||
"spanNulls": 3600000,
|
||||
"spanNulls": false,
|
||||
"stacking": {
|
||||
"group": "A",
|
||||
"mode": "none"
|
||||
|
@ -305,7 +299,7 @@
|
|||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 13,
|
||||
"h": 14,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 3
|
||||
|
@ -351,7 +345,7 @@
|
|||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "power_processor",
|
||||
"measurement": "aix_processor",
|
||||
"orderByTime": "ASC",
|
||||
"policy": "default",
|
||||
"refId": "A",
|
||||
|
@ -367,12 +361,6 @@
|
|||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
10
|
||||
],
|
||||
"type": "moving_average"
|
||||
}
|
||||
]
|
||||
],
|
||||
|
@ -420,7 +408,7 @@
|
|||
"type": "linear"
|
||||
},
|
||||
"showPoints": "auto",
|
||||
"spanNulls": 3600000,
|
||||
"spanNulls": false,
|
||||
"stacking": {
|
||||
"group": "A",
|
||||
"mode": "none"
|
||||
|
@ -449,10 +437,10 @@
|
|||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 14,
|
||||
"h": 15,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 16
|
||||
"y": 17
|
||||
},
|
||||
"id": 30,
|
||||
"options": {
|
||||
|
@ -495,7 +483,7 @@
|
|||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "power_processor",
|
||||
"measurement": "aix_processor",
|
||||
"orderByTime": "ASC",
|
||||
"policy": "default",
|
||||
"refId": "A",
|
||||
|
@ -511,12 +499,6 @@
|
|||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
10
|
||||
],
|
||||
"type": "moving_average"
|
||||
}
|
||||
]
|
||||
],
|
||||
|
@ -565,7 +547,7 @@
|
|||
"type": "linear"
|
||||
},
|
||||
"showPoints": "auto",
|
||||
"spanNulls": 3600000,
|
||||
"spanNulls": false,
|
||||
"stacking": {
|
||||
"group": "A",
|
||||
"mode": "none"
|
||||
|
@ -594,10 +576,10 @@
|
|||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 14,
|
||||
"h": 15,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 16
|
||||
"y": 17
|
||||
},
|
||||
"id": 31,
|
||||
"options": {
|
||||
|
@ -640,7 +622,7 @@
|
|||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "power_processor",
|
||||
"measurement": "aix_processor",
|
||||
"orderByTime": "ASC",
|
||||
"policy": "default",
|
||||
"refId": "A",
|
||||
|
@ -656,12 +638,6 @@
|
|||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
10
|
||||
],
|
||||
"type": "moving_average"
|
||||
}
|
||||
]
|
||||
],
|
||||
|
@ -693,14 +669,14 @@
|
|||
"type": "influxdb",
|
||||
"uid": "${DS_SYSMON}"
|
||||
},
|
||||
"definition": "SHOW TAG VALUES FROM \"power_processor\" WITH KEY = \"hostname\" WHERE time > now() - 60m",
|
||||
"definition": "SHOW TAG VALUES FROM \"base_memory\" WITH KEY = \"hostname\" WHERE time > now() - 60m",
|
||||
"hide": 0,
|
||||
"includeAll": true,
|
||||
"label": "Host",
|
||||
"multi": true,
|
||||
"name": "hostname",
|
||||
"options": [],
|
||||
"query": "SHOW TAG VALUES FROM \"power_processor\" WITH KEY = \"hostname\" WHERE time > now() - 60m",
|
||||
"query": "SHOW TAG VALUES FROM \"base_memory\" WITH KEY = \"hostname\" WHERE time > now() - 60m",
|
||||
"refresh": 2,
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
|
@ -714,12 +690,11 @@
|
|||
},
|
||||
"time": {
|
||||
"from": "now-7d",
|
||||
"to": "now-10s"
|
||||
"to": "now-30s"
|
||||
},
|
||||
"timepicker": {
|
||||
"nowDelay": "10s",
|
||||
"nowDelay": "30s",
|
||||
"refresh_intervals": [
|
||||
"10s",
|
||||
"30s",
|
||||
"1m",
|
||||
"5m",
|
||||
|
@ -731,7 +706,7 @@
|
|||
]
|
||||
},
|
||||
"timezone": "",
|
||||
"title": "Sysmon - IBM Power",
|
||||
"title": "Sysmon - Power Performance",
|
||||
"uid": "3zPCIbN4z",
|
||||
"version": 7,
|
||||
"weekStart": ""
|
|
@ -2,7 +2,7 @@
|
|||
"__inputs": [
|
||||
{
|
||||
"name": "DS_SYSMON",
|
||||
"label": "Database",
|
||||
"label": "sysmon",
|
||||
"description": "",
|
||||
"type": "datasource",
|
||||
"pluginId": "influxdb",
|
||||
|
@ -70,7 +70,7 @@
|
|||
}
|
||||
]
|
||||
},
|
||||
"description": "https://git.data.coop/nellemann/sysmon/ - Metrics from within host / guest / partition.",
|
||||
"description": "https://bitbucket.org/mnellemann/sysmon/ - Metrics from within host / guest / partition.",
|
||||
"editable": true,
|
||||
"fiscalYearStartMonth": 0,
|
||||
"graphTooltip": 0,
|
||||
|
@ -91,7 +91,7 @@
|
|||
},
|
||||
"id": 30,
|
||||
"options": {
|
||||
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information visit: [git.data.coop/nellemann/sysmon](https://git.data.coop/nellemann/sysmon)\n ",
|
||||
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information: [bitbucket.org/mnellemann/sysmon](https://bitbucket.org/mnellemann/sysmon)\n ",
|
||||
"mode": "markdown"
|
||||
},
|
||||
"pluginVersion": "9.1.6",
|
||||
|
@ -697,7 +697,7 @@
|
|||
},
|
||||
{
|
||||
"params": [
|
||||
"none"
|
||||
"null"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
|
@ -722,10 +722,8 @@
|
|||
"type": "sum"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
"params": [],
|
||||
"type": "non_negative_difference"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
|
@ -746,10 +744,8 @@
|
|||
"type": "sum"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "non_negative_derivative"
|
||||
"params": [],
|
||||
"type": "non_negative_difference"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
|
@ -965,9 +961,7 @@
|
|||
"refresh": "1m",
|
||||
"schemaVersion": 37,
|
||||
"style": "dark",
|
||||
"tags": [
|
||||
"sysmon"
|
||||
],
|
||||
"tags": [],
|
||||
"templating": {
|
||||
"list": [
|
||||
{
|
||||
|
@ -1055,6 +1049,6 @@
|
|||
"timezone": "",
|
||||
"title": "Sysmon - Process Explorer",
|
||||
"uid": "Vjut5mS7k",
|
||||
"version": 5,
|
||||
"version": 3,
|
||||
"weekStart": ""
|
||||
}
|
Binary file not shown.
Before Width: | Height: | Size: 265 KiB |
Binary file not shown.
Before Width: | Height: | Size: 128 KiB |
|
@ -1,9 +1,9 @@
version = 1.1.4
pf4jVersion = 3.9.0
slf4jVersion = 2.0.9
camelVersion = 3.14.9
groovyVersion = 3.0.18
picocliVersion = 4.7.5
oshiVersion = 6.4.7
version = 1.0.21
pf4jVersion = 3.7.0
slf4jVersion = 2.0.3
camelVersion = 3.14.5
groovyVersion = 3.0.13
picocliVersion = 4.6.3
oshiVersion = 6.3.1
spockVersion = 2.3-groovy-3.0
tomljVersion = 1.1.0

BIN gradle/wrapper/gradle-wrapper.jar vendored
Binary file not shown.
3 gradle/wrapper/gradle-wrapper.properties vendored
|
@ -1,6 +1,5 @@
|
|||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.1-bin.zip
|
||||
networkTimeout=10000
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-bin.zip
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
||||
|
|
18 gradlew vendored
|
@ -55,7 +55,7 @@
|
|||
# Darwin, MinGW, and NonStop.
|
||||
#
|
||||
# (3) This script is generated from the Groovy template
|
||||
# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
|
||||
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
|
||||
# within the Gradle project.
|
||||
#
|
||||
# You can find Gradle at https://github.com/gradle/gradle/.
|
||||
|
@ -80,11 +80,11 @@ do
|
|||
esac
|
||||
done
|
||||
|
||||
# This is normally unused
|
||||
# shellcheck disable=SC2034
|
||||
APP_BASE_NAME=${0##*/}
|
||||
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
|
||||
|
||||
APP_NAME="Gradle"
|
||||
APP_BASE_NAME=${0##*/}
|
||||
|
||||
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
|
||||
|
||||
|
@ -143,16 +143,12 @@ fi
|
|||
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
|
||||
case $MAX_FD in #(
|
||||
max*)
|
||||
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
|
||||
# shellcheck disable=SC3045
|
||||
MAX_FD=$( ulimit -H -n ) ||
|
||||
warn "Could not query maximum file descriptor limit"
|
||||
esac
|
||||
case $MAX_FD in #(
|
||||
'' | soft) :;; #(
|
||||
*)
|
||||
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
|
||||
# shellcheck disable=SC3045
|
||||
ulimit -n "$MAX_FD" ||
|
||||
warn "Could not set maximum file descriptor limit to $MAX_FD"
|
||||
esac
|
||||
|
@ -209,12 +205,6 @@ set -- \
|
|||
org.gradle.wrapper.GradleWrapperMain \
|
||||
"$@"
|
||||
|
||||
# Stop when "xargs" is not available.
|
||||
if ! command -v xargs >/dev/null 2>&1
|
||||
then
|
||||
die "xargs is not available"
|
||||
fi
|
||||
|
||||
# Use "xargs" to parse quoted args.
|
||||
#
|
||||
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
|
||||
|
|
11 gradlew.bat vendored
|
@ -26,7 +26,6 @@ if "%OS%"=="Windows_NT" setlocal
|
|||
|
||||
set DIRNAME=%~dp0
|
||||
if "%DIRNAME%" == "" set DIRNAME=.
|
||||
@rem This is normally unused
|
||||
set APP_BASE_NAME=%~n0
|
||||
set APP_HOME=%DIRNAME%
|
||||
|
||||
|
@ -41,7 +40,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome
|
|||
|
||||
set JAVA_EXE=java.exe
|
||||
%JAVA_EXE% -version >NUL 2>&1
|
||||
if %ERRORLEVEL% equ 0 goto execute
|
||||
if "%ERRORLEVEL%" == "0" goto execute
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
|
@ -76,15 +75,13 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
|
|||
|
||||
:end
|
||||
@rem End local scope for the variables with windows NT shell
|
||||
if %ERRORLEVEL% equ 0 goto mainEnd
|
||||
if "%ERRORLEVEL%"=="0" goto mainEnd
|
||||
|
||||
:fail
|
||||
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
|
||||
rem the _cmd.exe /c_ return code!
|
||||
set EXIT_CODE=%ERRORLEVEL%
|
||||
if %EXIT_CODE% equ 0 set EXIT_CODE=1
|
||||
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
|
||||
exit /b %EXIT_CODE%
|
||||
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
|
||||
exit /b 1
|
||||
|
||||
:mainEnd
|
||||
if "%OS%"=="Windows_NT" endlocal
|
||||
|
|
|
@ -2,5 +2,6 @@

Collection of standard sysmon plugins for use with the client.

- [base](base/README.md) - Base OS metrics (uses [oshi](https://github.com/oshi/oshi))
- [power](power/README.md) - IBM Power specific metrics
- [base](os-base/README.md) - Base OS metrics (uses [oshi](https://github.com/oshi/oshi))
- [aix](os-aix/README.md) - AIX (and IBM Power) specific metrics
- [linux](os-linux/README.md) - Linux specific metrics
|
|
@ -1,7 +1,7 @@
|
|||
import org.redline_rpm.header.Os
|
||||
|
||||
plugins {
|
||||
id "com.netflix.nebula.ospackage" version "11.3.0"
|
||||
id "nebula.ospackage" version "9.1.1"
|
||||
}
|
||||
|
||||
|
||||
|
@ -82,6 +82,7 @@ tasks.clean.dependsOn(tasks.customCleanUp)
|
|||
def projectName = "sysmon-plugins"
|
||||
|
||||
|
||||
apply plugin: 'nebula.ospackage'
|
||||
ospackage {
|
||||
packageName = projectName
|
||||
release = '1'
|
||||
|
@ -110,12 +111,3 @@ task buildRpmAix(type: Rpm) {
|
|||
packageName = "${projectName}-AIX"
|
||||
os = Os.AIX
|
||||
}
|
||||
|
||||
task buildZip(type: Zip) {
|
||||
subprojects.each {
|
||||
dependsOn("${it.name}:copyJar")
|
||||
}
|
||||
from "output"
|
||||
setArchivesBaseName(projectName as String)
|
||||
setArchiveVersion(project.property("version") as String)
|
||||
}
|
||||
|
|
|
@ -1,39 +0,0 @@
|
|||
{
|
||||
"k10temp-pci-00c3":{
|
||||
"Adapter": "PCI adapter",
|
||||
"Tctl":{
|
||||
"temp1_input": 56.250
|
||||
}
|
||||
},
|
||||
"nvme-pci-0400":{
|
||||
"Adapter": "PCI adapter",
|
||||
"Composite":{
|
||||
"temp1_input": 35.850,
|
||||
"temp1_max": 74.850,
|
||||
"temp1_min": -20.150,
|
||||
"temp1_crit": 79.850,
|
||||
"temp1_alarm": 0.000
|
||||
}
|
||||
},
|
||||
"iwlwifi_1-virtual-0":{
|
||||
"Adapter": "Virtual device",
|
||||
"temp1":{
|
||||
"temp1_input": 37.000
|
||||
}
|
||||
},
|
||||
"amdgpu-pci-0500":{
|
||||
"Adapter": "PCI adapter",
|
||||
"vddgfx":{
|
||||
"in0_input": 0.681
|
||||
},
|
||||
"vddnb":{
|
||||
"in1_input": 0.712
|
||||
},
|
||||
"edge":{
|
||||
"temp1_input": 37.000
|
||||
},
|
||||
"PPT":{
|
||||
"power1_average": 0.000
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,39 +0,0 @@
|
|||
{
|
||||
"k10temp-pci-00c3":{
|
||||
"Adapter": "PCI adapter",
|
||||
"Tctl":{
|
||||
"temp1_input": 53.875
|
||||
}
|
||||
},
|
||||
"nvme-pci-0400":{
|
||||
"Adapter": "PCI adapter",
|
||||
"Composite":{
|
||||
"temp1_input": 36.850,
|
||||
"temp1_max": 74.850,
|
||||
"temp1_min": -20.150,
|
||||
"temp1_crit": 79.850,
|
||||
"temp1_alarm": 0.000
|
||||
}
|
||||
},
|
||||
"iwlwifi_1-virtual-0":{
|
||||
"Adapter": "Virtual device",
|
||||
"temp1":{
|
||||
"temp1_input": 41.000
|
||||
}
|
||||
},
|
||||
"amdgpu-pci-0500":{
|
||||
"Adapter": "PCI adapter",
|
||||
"vddgfx":{
|
||||
"in0_input": 1.281
|
||||
},
|
||||
"vddnb":{
|
||||
"in1_input": 0.712
|
||||
},
|
||||
"edge":{
|
||||
"temp1_input": 42.000
|
||||
},
|
||||
"PPT":{
|
||||
"power1_average": 0.000
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
# IBM Power Plugin
|
||||
# AIX Plugin
|
||||
|
||||
## Power LPAR Processor Extension
|
||||
## LPAR Processor Extension
|
||||
|
||||
The processor extension works for both AIX and Linux on the Power ppc64/ppc64le architecture.
|
||||
|
6 plugins/os-aix/gradle.properties Normal file
|
@ -0,0 +1,6 @@
|
|||
pluginId=sysmon-aix
|
||||
pluginClass=sysmon.plugins.os_aix.AixPlugin
|
||||
pluginVersion=0.0.1
|
||||
pluginProvider=System Monitor
|
||||
pluginDependencies=
|
||||
pluginDescription=Collects AIX OS metrics.
|
|
@ -0,0 +1,109 @@
|
|||
package sysmon.plugins.os_aix;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import sysmon.shared.Measurement;
|
||||
import sysmon.shared.MetricExtension;
|
||||
import sysmon.shared.MetricResult;
|
||||
import sysmon.shared.PluginHelper;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
// Disabled
|
||||
//@Extension
|
||||
public class AixNetstatExtension implements MetricExtension {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(AixNetstatExtension.class);
|
||||
|
||||
// Extension details
|
||||
private final String name = "aix_network_netstat";
|
||||
private final String provides = "network_netstat";
|
||||
private final String description = "AIX Netstat Metrics";
|
||||
|
||||
// Configuration / Options
|
||||
private boolean enabled = true;
|
||||
private boolean threaded = false;
|
||||
|
||||
|
||||
@Override
|
||||
public boolean isEnabled() {
|
||||
return enabled;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isThreaded() {
|
||||
return threaded;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isSupported() {
|
||||
|
||||
if(!System.getProperty("os.name").toLowerCase().contains("aix")) {
|
||||
log.warn("Requires AIX.");
|
||||
return false;
|
||||
}
|
||||
|
||||
if(PluginHelper.notExecutable("netstat")) {
|
||||
log.warn("Requires the 'netstat' command.");
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getInterval() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProvides() {
|
||||
return provides;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setConfiguration(Map<String, Object> map) {
|
||||
if (map.containsKey("enabled")) {
|
||||
enabled = (boolean) map.get("enabled");
|
||||
}
|
||||
if(map.containsKey("threaded")) {
|
||||
threaded = (boolean) map.get("threaded");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetricResult getMetrics() throws Exception {
|
||||
|
||||
HashMap<String, String> tagsMap;
|
||||
HashMap<String, Object> fieldsMap;
|
||||
|
||||
try (InputStream buf = PluginHelper.executeCommand("netstat -s -f inet")) {
|
||||
AixNetstatParser parser = processCommandOutput(buf);
|
||||
tagsMap = parser.getTags();
|
||||
fieldsMap = parser.getFields();
|
||||
}
|
||||
|
||||
log.debug(fieldsMap.toString());
|
||||
return new MetricResult(name, new Measurement(tagsMap, fieldsMap));
|
||||
}
|
||||
|
||||
|
||||
protected AixNetstatParser processCommandOutput(InputStream input) throws IOException {
|
||||
return new AixNetstatParser(input);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,155 @@
|
|||
package sysmon.plugins.os_aix;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.util.HashMap;
|
||||
|
||||
public class AixNetstatParser {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(AixNetstatParser.class);
|
||||
|
||||
private long ipTotalPacketsReceived;
|
||||
private long ipForwarded;
|
||||
|
||||
private long tcpConnectionsEstablished;
|
||||
private long tcpPacketsReceved;
|
||||
private long tcpPacketsSent;
|
||||
|
||||
private long udpPacketsReceived;
|
||||
private long udpPacketsSent;
|
||||
|
||||
|
||||
public AixNetstatParser(InputStream inputStream) throws IOException {
|
||||
|
||||
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
|
||||
while (reader.ready()) {
|
||||
String line = reader.readLine();
|
||||
log.debug("AixNetstatParser() - Line: " + line);
|
||||
|
||||
if(line.startsWith("tcp:")) {
|
||||
parseTcp(reader);
|
||||
}
|
||||
|
||||
if(line.startsWith("udp:")) {
|
||||
parseUdp(reader);
|
||||
}
|
||||
|
||||
if(line.startsWith("ip:")) {
|
||||
parseIp(reader);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
inputStream.close();
|
||||
}
|
||||
|
||||
|
||||
protected void parseIp(BufferedReader reader) throws IOException {
|
||||
|
||||
while (reader.ready()) {
|
||||
reader.mark(64);
|
||||
String line = reader.readLine();
|
||||
|
||||
if(!line.startsWith(" ")) {
|
||||
reader.reset();
|
||||
return;
|
||||
}
|
||||
|
||||
line = line.trim();
|
||||
|
||||
if(line.matches("(\\d+) total packets received")) {
|
||||
ipTotalPacketsReceived = getFirstLong(line);
|
||||
}
|
||||
|
||||
if(line.matches("(\\d+) packets forwarded")) {
|
||||
ipForwarded = getFirstLong(line);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
protected void parseTcp(BufferedReader reader) throws IOException {
|
||||
|
||||
while (reader.ready()) {
|
||||
reader.mark(64);
|
||||
String line = reader.readLine();
|
||||
|
||||
if(!line.startsWith(" ")) {
|
||||
reader.reset();
|
||||
return;
|
||||
}
|
||||
|
||||
line = line.trim();
|
||||
|
||||
if(line.matches("(\\d+) connections established \\(including accepts\\)")) {
|
||||
tcpConnectionsEstablished = getFirstLong(line);
|
||||
}
|
||||
|
||||
if(line.matches("(\\d+) packets received")) {
|
||||
tcpPacketsReceved = getFirstLong(line);
|
||||
}
|
||||
|
||||
if(line.matches("(\\d+) packets sent")) {
|
||||
tcpPacketsSent = getFirstLong(line);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
protected void parseUdp(BufferedReader reader) throws IOException {
|
||||
|
||||
while (reader.ready()) {
|
||||
reader.mark(64);
|
||||
String line = reader.readLine();
|
||||
|
||||
if(!line.startsWith(" ")) {
|
||||
reader.reset();
|
||||
return;
|
||||
}
|
||||
|
||||
line = line.trim();
|
||||
|
||||
if(line.matches("(\\d+) datagrams received")) {
|
||||
udpPacketsReceived = getFirstLong(line);
|
||||
}
|
||||
|
||||
if(line.matches("(\\d+) datagrams output")) {
|
||||
udpPacketsSent = getFirstLong(line);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
public HashMap<String, String> getTags() {
|
||||
return new HashMap<>();
|
||||
}
|
||||
|
||||
public HashMap<String, Object> getFields() {
|
||||
HashMap<String, Object> fields = new HashMap<>();
|
||||
fields.put("ip_forwarded", ipForwarded);
|
||||
fields.put("ip_received", ipTotalPacketsReceived);
|
||||
|
||||
fields.put("tcp_connections", tcpConnectionsEstablished);
|
||||
fields.put("tcp_pkts_recv", tcpPacketsReceved);
|
||||
fields.put("tcp_pkts_sent", tcpPacketsSent);
|
||||
|
||||
fields.put("udp_pkts_recv", udpPacketsReceived);
|
||||
fields.put("udp_pkts_sent", udpPacketsSent);
|
||||
|
||||
return fields;
|
||||
}
|
||||
|
||||
private Long getFirstLong(String line) {
|
||||
return Long.parseLong(line.substring(0, line.indexOf(" ")));
|
||||
}
|
||||
|
||||
}
|
|
@@ -0,0 +1,18 @@
package sysmon.plugins.os_aix;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.pf4j.Plugin;
import org.pf4j.PluginWrapper;

public class AixPlugin extends Plugin {

    private static final Logger log = LoggerFactory.getLogger(AixPlugin.class);

    public AixPlugin(PluginWrapper wrapper) {
        super(wrapper);
    }

}
|
@ -1,4 +1,4 @@
|
|||
package sysmon.plugins.power;
|
||||
package sysmon.plugins.os_aix;
|
||||
|
||||
import org.pf4j.Extension;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -10,22 +10,23 @@ import sysmon.shared.PluginHelper;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
|
||||
@Extension
|
||||
public class PowerProcessorExtension implements MetricExtension {
|
||||
public class AixProcessorExtension implements MetricExtension {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(PowerProcessorExtension.class);
|
||||
private static final Logger log = LoggerFactory.getLogger(AixProcessorExtension.class);
|
||||
|
||||
// Extension details
|
||||
private final String name = "power_processor";
|
||||
private final String description = "IBM Power Processor Metrics";
|
||||
private final String name = "aix_processor";
|
||||
private final String provides = "lpar_processor";
|
||||
private final String description = "AIX Processor Metrics";
|
||||
|
||||
// Configuration / Options
|
||||
private boolean enabled = true;
|
||||
private boolean threaded = true;
|
||||
private String interval = "10s";
|
||||
|
||||
|
||||
@Override
|
||||
public boolean isEnabled() {
|
||||
|
@ -61,7 +62,12 @@ public class PowerProcessorExtension implements MetricExtension {
|
|||
|
||||
@Override
|
||||
public String getInterval() {
|
||||
return interval;
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProvides() {
|
||||
return provides;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -77,19 +83,16 @@ public class PowerProcessorExtension implements MetricExtension {
|
|||
if(map.containsKey("threaded")) {
|
||||
threaded = (boolean) map.get("threaded");
|
||||
}
|
||||
if(map.containsKey("interval")) {
|
||||
interval = (String) map.get("interval");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetricResult getMetrics() throws Exception {
|
||||
|
||||
TreeMap<String, String> tagsMap = null;
|
||||
TreeMap<String, Object> fieldsMap = null;
|
||||
HashMap<String, String> tagsMap = null;
|
||||
HashMap<String, Object> fieldsMap = null;
|
||||
|
||||
try (InputStream buf = PluginHelper.executeCommand("lparstat 3 1")) {
|
||||
PowerProcessorStat processorStat = processCommandOutput(buf);
|
||||
AixProcessorStat processorStat = processCommandOutput(buf);
|
||||
tagsMap = processorStat.getTags();
|
||||
fieldsMap = processorStat.getFields();
|
||||
} catch (IOException e) {
|
||||
|
@ -101,8 +104,8 @@ public class PowerProcessorExtension implements MetricExtension {
|
|||
}
|
||||
|
||||
|
||||
protected PowerProcessorStat processCommandOutput(InputStream input) throws IOException {
|
||||
return new PowerProcessorStat(input);
|
||||
protected AixProcessorStat processCommandOutput(InputStream input) throws IOException {
|
||||
return new AixProcessorStat(input);
|
||||
}
|
||||
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
package sysmon.plugins.power;
|
||||
package sysmon.plugins.os_aix;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
@ -8,14 +8,14 @@ import java.io.IOException;
|
|||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Objects;
|
||||
import java.util.TreeMap;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
public class PowerProcessorStat {
|
||||
public class AixProcessorStat {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(PowerProcessorStat.class);
|
||||
private static final Logger log = LoggerFactory.getLogger(AixProcessorStat.class);
|
||||
|
||||
// System configuration: type=Shared mode=Uncapped smt=8 lcpu=8 mem=4096MB psize=19 ent=0.50
|
||||
private static final Pattern patternAixShared = Pattern.compile("^System configuration: type=(\\S+) mode=(\\S+) smt=(\\d+) lcpu=(\\d+) mem=(\\d+)MB psize=(\\d+) ent=(\\d+\\.?\\d*)");
|
||||
|
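The shared-LPAR pattern above is easiest to sanity-check against the sample `lparstat` header quoted in the comment. A small hypothetical standalone check (not part of this compare):

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Hypothetical check of the shared-LPAR lparstat header pattern.
class LparstatPatternCheck {
    public static void main(String[] args) {
        Pattern patternAixShared = Pattern.compile(
                "^System configuration: type=(\\S+) mode=(\\S+) smt=(\\d+) lcpu=(\\d+) mem=(\\d+)MB psize=(\\d+) ent=(\\d+\\.?\\d*)");
        String line = "System configuration: type=Shared mode=Uncapped smt=8 lcpu=8 mem=4096MB psize=19 ent=0.50";
        Matcher matcher = patternAixShared.matcher(line);
        if (matcher.find()) {
            // Groups: 1=type, 2=mode, 3=smt, 4=lcpu, 5=mem, 6=psize, 7=ent
            System.out.println("lcpu=" + matcher.group(4) + " ent=" + matcher.group(7));
        }
    }
}
```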
@ -46,7 +46,7 @@ public class PowerProcessorStat {
|
|||
private final float lbusy; // Indicates the percentage of logical processor(s) utilization that occurred while executing at the user and system level.
|
||||
|
||||
|
||||
public PowerProcessorStat(InputStream inputStream) throws IOException {
|
||||
public AixProcessorStat(InputStream inputStream) throws IOException {
|
||||
|
||||
String lastLine = null;
|
||||
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
|
||||
|
@ -154,12 +154,12 @@ public class PowerProcessorStat {
|
|||
return 100 - idle;
|
||||
}
|
||||
|
||||
public TreeMap<String, String> getTags() {
|
||||
return new TreeMap<>();
|
||||
public HashMap<String, String> getTags() {
|
||||
return new HashMap<>();
|
||||
}
|
||||
|
||||
public TreeMap<String, Object> getFields() {
|
||||
return new TreeMap<String, Object>() {{
|
||||
public HashMap<String, Object> getFields() {
|
||||
return new HashMap<String, Object>() {{
|
||||
put("lcpu", lcpu);
|
||||
put("ent", ent);
|
||||
put("user", user);
|
plugins/os-aix/src/test/groovy/AixNetstatTest.groovy
@@ -0,0 +1,25 @@
import spock.lang.Specification
import sysmon.plugins.os_aix.AixNetstatParser

class AixNetstatTest extends Specification {

    void "test netstat parsing"() {

        setup:
        InputStream inputStream = getClass().getResourceAsStream('/netstat-aix.txt')

        when:
        AixNetstatParser parser = new AixNetstatParser(inputStream)

        then:
        parser.getFields().size() > 0
        parser.getFields().get('ip_received') == 76229L
        parser.getFields().get('ip_forwarded') == 24L
        parser.getFields().get('tcp_connections') == 85L
        parser.getFields().get('tcp_pkts_sent') == 31274L
        parser.getFields().get('tcp_pkts_recv') == 39830L
        parser.getFields().get('udp_pkts_sent') == 26332L
        parser.getFields().get('udp_pkts_recv') == 34559L
    }

}
|
@ -1,8 +1,8 @@
|
|||
import sysmon.plugins.power.PowerProcessorExtension
|
||||
import sysmon.plugins.power.PowerProcessorStat
|
||||
import sysmon.plugins.os_aix.AixProcessorExtension
|
||||
import sysmon.plugins.os_aix.AixProcessorStat
|
||||
import spock.lang.Specification
|
||||
|
||||
class PowerProcessorTest extends Specification {
|
||||
class AixProcessorTest extends Specification {
|
||||
|
||||
void "test AIX lparstat shared output processing"() {
|
||||
|
||||
|
@ -10,8 +10,8 @@ class PowerProcessorTest extends Specification {
|
|||
InputStream inputStream = getClass().getResourceAsStream('/lparstat-aix-shared.txt')
|
||||
|
||||
when:
|
||||
PowerProcessorExtension extension = new PowerProcessorExtension()
|
||||
PowerProcessorStat stats = extension.processCommandOutput(inputStream)
|
||||
AixProcessorExtension extension = new AixProcessorExtension()
|
||||
AixProcessorStat stats = extension.processCommandOutput(inputStream)
|
||||
|
||||
then:
|
||||
stats.getUser() == 83.7f
|
||||
|
@ -30,8 +30,8 @@ class PowerProcessorTest extends Specification {
|
|||
InputStream inputStream = getClass().getResourceAsStream('/lparstat-aix-dedicated-donating.txt')
|
||||
|
||||
when:
|
||||
PowerProcessorExtension extension = new PowerProcessorExtension()
|
||||
PowerProcessorStat stats = extension.processCommandOutput(inputStream)
|
||||
AixProcessorExtension extension = new AixProcessorExtension()
|
||||
AixProcessorStat stats = extension.processCommandOutput(inputStream)
|
||||
|
||||
then:
|
||||
stats.getUser() == 0.1f
|
||||
|
@ -51,8 +51,8 @@ class PowerProcessorTest extends Specification {
|
|||
InputStream inputStream = getClass().getResourceAsStream('/lparstat-aix-dedicated-capped.txt')
|
||||
|
||||
when:
|
||||
PowerProcessorExtension extension = new PowerProcessorExtension()
|
||||
PowerProcessorStat stats = extension.processCommandOutput(inputStream)
|
||||
AixProcessorExtension extension = new AixProcessorExtension()
|
||||
AixProcessorStat stats = extension.processCommandOutput(inputStream)
|
||||
|
||||
then:
|
||||
stats.getUser() == 0.0f
|
||||
|
@ -71,8 +71,8 @@ class PowerProcessorTest extends Specification {
|
|||
InputStream inputStream = getClass().getResourceAsStream('/lparstat-linux.txt')
|
||||
|
||||
when:
|
||||
PowerProcessorExtension extension = new PowerProcessorExtension()
|
||||
PowerProcessorStat stats = extension.processCommandOutput(inputStream)
|
||||
AixProcessorExtension extension = new AixProcessorExtension()
|
||||
AixProcessorStat stats = extension.processCommandOutput(inputStream)
|
||||
|
||||
then:
|
||||
stats.getUser() == 0.03f
|
157
plugins/os-aix/src/test/resources/netstat-aix.txt
Normal file
157
plugins/os-aix/src/test/resources/netstat-aix.txt
Normal file
|
@ -0,0 +1,157 @@
|
|||
icmp:
|
||||
12 calls to icmp_error
|
||||
0 errors not generated because old message was icmp
|
||||
Output histogram:
|
||||
destination unreachable: 12
|
||||
0 messages with bad code fields
|
||||
0 messages < minimum length
|
||||
0 bad checksums
|
||||
0 messages with bad length
|
||||
Input histogram:
|
||||
destination unreachable: 3
|
||||
0 message responses generated
|
||||
igmp:
|
||||
0 messages received
|
||||
0 messages received with too few bytes
|
||||
0 messages received with bad checksum
|
||||
0 membership queries received
|
||||
0 membership queries received with invalid field(s)
|
||||
0 membership reports received
|
||||
0 membership reports received with invalid field(s)
|
||||
0 membership reports received for groups to which we belong
|
||||
2 membership reports sent
|
||||
tcp:
|
||||
31274 packets sent
|
||||
27328 data packets (82928168 bytes)
|
||||
86 data packets (108992 bytes) retransmitted
|
||||
2938 ack-only packets (2698 delayed)
|
||||
0 URG only packets
|
||||
0 window probe packets
|
||||
784 window update packets
|
||||
138 control packets
|
||||
3812 large sends
|
||||
74913716 bytes sent using largesend
|
||||
64069 bytes is the biggest largesend
|
||||
39830 packets received
|
||||
22701 acks (for 82928732 bytes)
|
||||
112 duplicate acks
|
||||
0 acks for unsent data
|
||||
15579 packets (5876585 bytes) received in-sequence
|
||||
62 completely duplicate packets (320 bytes)
|
||||
57 old duplicate packets
|
||||
0 packets with some dup. data (0 bytes duped)
|
||||
75 out-of-order packets (6408 bytes)
|
||||
0 packets (0 bytes) of data after window
|
||||
0 window probes
|
||||
1723 window update packets
|
||||
0 packets received after close
|
||||
0 packets with bad hardware assisted checksum
|
||||
0 discarded for bad checksums
|
||||
0 discarded for bad header offset fields
|
||||
0 discarded because packet too short
|
||||
1 discarded by listeners
|
||||
0 discarded due to listener's queue full
|
||||
3207 ack packet headers correctly predicted
|
||||
15050 data packet headers correctly predicted
|
||||
63 connection requests
|
||||
23 connection accepts
|
||||
85 connections established (including accepts)
|
||||
114 connections closed (including 0 drops)
|
||||
0 connections with ECN capability
|
||||
0 times responded to ECN
|
||||
0 embryonic connections dropped
|
||||
20314 segments updated rtt (of 16791 attempts)
|
||||
0 segments with congestion window reduced bit set
|
||||
0 segments with congestion experienced bit set
|
||||
0 resends due to path MTU discovery
|
||||
2 path MTU discovery terminations due to retransmits
|
||||
25 retransmit timeouts
|
||||
0 connections dropped by rexmit timeout
|
||||
4 fast retransmits
|
||||
1 when congestion window less than 4 segments
|
||||
28 newreno retransmits
|
||||
4 times avoided false fast retransmits
|
||||
0 persist timeouts
|
||||
0 connections dropped due to persist timeout
|
||||
0 keepalive timeouts
|
||||
0 keepalive probes sent
|
||||
0 connections dropped by keepalive
|
||||
0 times SACK blocks array is extended
|
||||
0 times SACK holes array is extended
|
||||
0 packets dropped due to memory allocation failure
|
||||
0 connections in timewait reused
|
||||
0 delayed ACKs for SYN
|
||||
0 delayed ACKs for FIN
|
||||
0 send_and_disconnects
|
||||
0 spliced connections
|
||||
0 spliced connections closed
|
||||
0 spliced connections reset
|
||||
0 spliced connections timeout
|
||||
0 spliced connections persist timeout
|
||||
0 spliced connections keepalive timeout
|
||||
0 TCP checksum offload disabled during retransmit
|
||||
0 Connections dropped due to bad ACKs
|
||||
0 Connections dropped due to duplicate SYN packets
|
||||
0 fastpath loopback connections
|
||||
0 fastpath loopback sent packets (0 bytes)
|
||||
0 fastpath loopback received packets (0 bytes)
|
||||
0 fake SYN segments dropped
|
||||
0 fake RST segments dropped
|
||||
0 data injection segments dropped
|
||||
0 TCPTR maximum connections dropped
|
||||
0 TCPTR connections dropped for no memory
|
||||
0 TCPTR maximum per host connections dropped
|
||||
0 connections dropped due to max assembly queue depth
|
||||
udp:
|
||||
34559 datagrams received
|
||||
0 incomplete headers
|
||||
0 bad data length fields
|
||||
0 bad checksums
|
||||
1849 dropped due to no socket
|
||||
8218 broadcast/multicast datagrams dropped due to no socket
|
||||
0 socket buffer overflows
|
||||
24492 delivered
|
||||
26332 datagrams output
|
||||
ip:
|
||||
76229 total packets received
|
||||
0 bad header checksums
|
||||
0 with size smaller than minimum
|
||||
0 with data size < data length
|
||||
0 with header length < data size
|
||||
0 with data length < header length
|
||||
0 with bad options
|
||||
0 with incorrect version number
|
||||
0 fragments received
|
||||
0 fragments dropped (dup or out of space)
|
||||
0 fragments dropped after timeout
|
||||
0 packets reassembled ok
|
||||
72552 packets for this host
|
||||
3 packets for unknown/unsupported protocol
|
||||
24 packets forwarded
|
||||
0 packets not forwardable
|
||||
0 redirects sent
|
||||
55784 packets sent from this host
|
||||
0 packets sent with fabricated ip header
|
||||
0 output packets dropped due to no bufs, etc.
|
||||
0 output packets discarded due to no route
|
||||
0 output datagrams fragmented
|
||||
0 fragments created
|
||||
0 datagrams that can't be fragmented
|
||||
0 IP Multicast packets dropped due to no receiver
|
||||
0 successful path MTU discovery cycles
|
||||
0 path MTU rediscovery cycles attempted
|
||||
0 path MTU discovery no-response estimates
|
||||
0 path MTU discovery response timeouts
|
||||
0 path MTU discovery decreases detected
|
||||
0 path MTU discovery packets sent
|
||||
0 path MTU discovery memory allocation failures
|
||||
0 ipintrq overflows
|
||||
0 with illegal source
|
||||
0 packets processed by threads
|
||||
0 packets dropped by threads
|
||||
0 packets dropped due to the full socket receive buffer
|
||||
0 dead gateway detection packets sent
|
||||
0 dead gateway detection packet allocation failures
|
||||
0 dead gateway detection gateway allocation failures
|
||||
0 incoming packets dropped due to MLS filters
|
||||
0 packets not sent due to MLS filters
|
|
@@ -1,6 +1,6 @@
 # Base Plugin
 
-The base plugins uses the [oshi](https://github.com/oshi/oshi) library to get its metrics.
+The base plugin uses the [oshi](https://github.com/oshi/oshi) library to get its metrics.
 
 ## Processor Extension
 
@@ -51,7 +51,6 @@ Metrics reported are:
 ```toml
 [extension.base_filesystem]
 enabled = true
-interval = "10s"
 exclude_type = [ "tmpfs", "ahafs" ]
 exclude_mount = [ "/boot/efi" ]
 ```
@@ -80,6 +79,5 @@ The **include** option lets you specify what processes to report for.
 ```toml
 [extension.base_process]
 enabled = true # true or false
-interval = "10s"
 include = [ "java", "influxd", "grafana-server" ]
 ```
|
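The README above only names oshi as the data source. As an illustrative sketch (not code from this repository), the base extensions obtain their raw values through oshi's `SystemInfo` / `HardwareAbstractionLayer`; the calls below are the ones visible in the extension sources later in this compare:

```java
import oshi.SystemInfo;
import oshi.hardware.HardwareAbstractionLayer;

// Minimal standalone sketch of the oshi calls used by the base extensions.
public class OshiSketch {
    public static void main(String[] args) {
        SystemInfo systemInfo = new SystemInfo();
        HardwareAbstractionLayer hal = systemInfo.getHardware();

        // 1, 5 and 15 minute load averages (as in the base_load extension)
        double[] loadAvg = hal.getProcessor().getSystemLoadAverage(3);

        // Memory totals in bytes (as in the base_memory extension)
        long total = hal.getMemory().getTotal();
        long available = hal.getMemory().getAvailable();

        System.out.printf("load1=%.2f total=%d available=%d%n", loadAvg[0], total, available);
    }
}
```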
|
@@ -1,5 +1,5 @@
 pluginId=sysmon-base
-pluginClass=sysmon.plugins.base.BasePlugin
+pluginClass=sysmon.plugins.os_base.BasePlugin
 pluginDependencies=
 pluginDescription=Base OS metrics where supported.
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
package sysmon.plugins.base;
|
||||
package sysmon.plugins.os_base;
|
||||
|
||||
import org.pf4j.Extension;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -9,7 +9,10 @@ import sysmon.shared.Measurement;
|
|||
import sysmon.shared.MetricExtension;
|
||||
import sysmon.shared.MetricResult;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@Extension
|
||||
public class BaseDiskExtension implements MetricExtension {
|
||||
|
@ -18,16 +21,14 @@ public class BaseDiskExtension implements MetricExtension {
|
|||
|
||||
// Extension details
|
||||
private final String name = "base_disk";
|
||||
private final String provides = "disk";
|
||||
private final String description = "Base Disk Metrics";
|
||||
|
||||
// Configuration / Options
|
||||
private boolean enabled = true;
|
||||
private boolean threaded = false;
|
||||
private String interval = "10s";
|
||||
|
||||
private HardwareAbstractionLayer hardwareAbstractionLayer;
|
||||
private List<HWDiskStore> diskStores;
|
||||
private int refreshCounter = 0;
|
||||
|
||||
|
||||
@Override
|
||||
|
@ -53,7 +54,12 @@ public class BaseDiskExtension implements MetricExtension {
|
|||
|
||||
@Override
|
||||
public String getInterval() {
|
||||
return interval;
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProvides() {
|
||||
return provides;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -69,37 +75,24 @@ public class BaseDiskExtension implements MetricExtension {
|
|||
if(map.containsKey("threaded")) {
|
||||
threaded = (boolean) map.get("threaded");
|
||||
}
|
||||
if(map.containsKey("interval")) {
|
||||
interval = (String) map.get("interval");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetricResult getMetrics() {
|
||||
|
||||
ArrayList<Measurement> measurementList = new ArrayList<>();
|
||||
if(diskStores == null || refreshCounter++ > 360) {
|
||||
log.debug("getMetrics() - refreshing list of disk stores");
|
||||
diskStores = hardwareAbstractionLayer.getDiskStores();
|
||||
refreshCounter = 0;
|
||||
}
|
||||
List<HWDiskStore> diskStores = hardwareAbstractionLayer.getDiskStores();
|
||||
|
||||
for(HWDiskStore store : diskStores) {
|
||||
|
||||
store.updateAttributes();
|
||||
String name = store.getName();
|
||||
if (name.matches("h?disk[0-9]+") ||
|
||||
//name.matches("/dev/dm-[0-9]+") ||
|
||||
name.matches("/dev/x?[sv]d[a-z]") ||
|
||||
name.matches("/dev/nvme[0-9]n[0-9]") ||
|
||||
name.startsWith("\\\\.\\PHYSICALDRIVE")
|
||||
) {
|
||||
if (name.matches("h?disk[0-9]+") || name.matches("/dev/x?[sv]d[a-z]") || name.matches("/dev/nvme[0-9]n[0-9]") || name.startsWith("\\\\.\\PHYSICALDRIVE")) {
|
||||
|
||||
TreeMap<String, String> tagsMap = new TreeMap<String, String>() {{
|
||||
HashMap<String, String> tagsMap = new HashMap<String, String>() {{
|
||||
put("name", name);
|
||||
}};
|
||||
|
||||
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
|
||||
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
|
||||
put("read", store.getReadBytes());
|
||||
put("write", store.getWriteBytes());
|
||||
put("iotime", store.getTransferTime());
|
|
@ -1,15 +1,19 @@
|
|||
package sysmon.plugins.base;
|
||||
package sysmon.plugins.os_base;
|
||||
|
||||
import org.pf4j.Extension;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import oshi.SystemInfo;
|
||||
import oshi.hardware.HardwareAbstractionLayer;
|
||||
import oshi.software.os.OSFileStore;
|
||||
import sysmon.shared.Measurement;
|
||||
import sysmon.shared.MetricExtension;
|
||||
import sysmon.shared.MetricResult;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@Extension
|
||||
public class BaseFilesystemExtension implements MetricExtension {
|
||||
|
@ -18,12 +22,12 @@ public class BaseFilesystemExtension implements MetricExtension {
|
|||
|
||||
// Extension details
|
||||
private final String name = "base_filesystem";
|
||||
private final String provides = "filesystem";
|
||||
private final String description = "Base Filesystem Metrics";
|
||||
|
||||
// Configuration / Options
|
||||
private boolean enabled = true;
|
||||
private boolean threaded = false;
|
||||
private String interval = "10s";
|
||||
private List<?> excludeType = new ArrayList<String>() {{
|
||||
add("tmpfs");
|
||||
add("ahafs");
|
||||
|
@ -32,9 +36,8 @@ public class BaseFilesystemExtension implements MetricExtension {
|
|||
add("/boot/efi");
|
||||
}};
|
||||
|
||||
private HardwareAbstractionLayer hardwareAbstractionLayer;
|
||||
private SystemInfo systemInfo;
|
||||
private List<OSFileStore> fileStores;
|
||||
private int refreshCounter = 0;
|
||||
|
||||
|
||||
@Override
|
||||
|
@ -61,7 +64,12 @@ public class BaseFilesystemExtension implements MetricExtension {
|
|||
|
||||
@Override
|
||||
public String getInterval() {
|
||||
return interval;
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProvides() {
|
||||
return provides;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -79,10 +87,6 @@ public class BaseFilesystemExtension implements MetricExtension {
|
|||
threaded = (boolean) map.get("threaded");
|
||||
}
|
||||
|
||||
if(map.containsKey("interval")) {
|
||||
interval = (String) map.get("interval");
|
||||
}
|
||||
|
||||
if(map.containsKey("exclude_type")) {
|
||||
excludeType = (List<?>) map.get("exclude_type");
|
||||
}
|
||||
|
@ -97,10 +101,7 @@ public class BaseFilesystemExtension implements MetricExtension {
|
|||
|
||||
ArrayList<String> alreadyProcessed = new ArrayList<>();
|
||||
ArrayList<Measurement> measurementList = new ArrayList<>();
|
||||
|
||||
if(fileStores == null || refreshCounter++ > 360) {
|
||||
fileStores = systemInfo.getOperatingSystem().getFileSystem().getFileStores(true);
|
||||
}
|
||||
List<OSFileStore> fileStores = systemInfo.getOperatingSystem().getFileSystem().getFileStores(true);
|
||||
|
||||
for(OSFileStore store : fileStores) {
|
||||
|
||||
|
@ -122,17 +123,15 @@ public class BaseFilesystemExtension implements MetricExtension {
|
|||
log.debug("Skipping name: " + name);
|
||||
continue;
|
||||
}
|
||||
|
||||
alreadyProcessed.add(name);
|
||||
store.updateAttributes();
|
||||
|
||||
TreeMap<String, String> tagsMap = new TreeMap<String, String>() {{
|
||||
HashMap<String, String> tagsMap = new HashMap<String, String>() {{
|
||||
put("name", name);
|
||||
put("type", type);
|
||||
put("mount", mount);
|
||||
}};
|
||||
|
||||
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
|
||||
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
|
||||
put("free_bytes", store.getFreeSpace());
|
||||
put("total_bytes", store.getTotalSpace());
|
||||
put("free_inodes", store.getFreeInodes());
|
|
@ -1,4 +1,4 @@
|
|||
package sysmon.plugins.base;
|
||||
package sysmon.plugins.os_base;
|
||||
|
||||
import org.pf4j.Extension;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -10,7 +10,6 @@ import sysmon.shared.MetricResult;
|
|||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
|
||||
@Extension
|
||||
public class BaseInfoExtension implements MetricExtension {
|
||||
|
@ -19,6 +18,7 @@ public class BaseInfoExtension implements MetricExtension {
|
|||
|
||||
// Extension details
|
||||
private final String name = "base_info";
|
||||
private final String provides = "info";
|
||||
private final String description = "Base System Information";
|
||||
|
||||
// Configuration / Options
|
||||
|
@ -51,6 +51,11 @@ public class BaseInfoExtension implements MetricExtension {
|
|||
return name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProvides() {
|
||||
return provides;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getInterval() { return interval; }
|
||||
|
||||
|
@ -75,7 +80,7 @@ public class BaseInfoExtension implements MetricExtension {
|
|||
@Override
|
||||
public MetricResult getMetrics() {
|
||||
|
||||
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
|
||||
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
|
||||
put("os_manufacturer", systemInfo.getOperatingSystem().getManufacturer()); // GNU/Linux / IBM
|
||||
put("os_family", systemInfo.getOperatingSystem().getFamily()); // Freedesktop.org / AIX
|
||||
put("os_codename", systemInfo.getOperatingSystem().getVersionInfo().getCodeName()); // Flatpak runtime / ppc64
|
||||
|
@ -84,6 +89,7 @@ public class BaseInfoExtension implements MetricExtension {
|
|||
put("boot_time", systemInfo.getOperatingSystem().getSystemBootTime());
|
||||
}};
|
||||
|
||||
log.info(fieldsMap.toString());
|
||||
return new MetricResult(name, new Measurement(tags, fieldsMap));
|
||||
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
package sysmon.plugins.base;
|
||||
package sysmon.plugins.os_base;
|
||||
|
||||
import org.pf4j.Extension;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -8,8 +8,8 @@ import sysmon.shared.Measurement;
|
|||
import sysmon.shared.MetricExtension;
|
||||
import sysmon.shared.MetricResult;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
|
||||
@Extension
|
||||
public class BaseLoadExtension implements MetricExtension {
|
||||
|
@ -18,12 +18,12 @@ public class BaseLoadExtension implements MetricExtension {
|
|||
|
||||
// Extension details
|
||||
private final String name = "base_load";
|
||||
private final String provides = "load";
|
||||
private final String description = "Base Load Average Metrics";
|
||||
|
||||
// Configuration / Options
|
||||
private boolean enabled = true;
|
||||
private boolean threaded = false;
|
||||
private String interval = "10s";
|
||||
|
||||
private HardwareAbstractionLayer hardwareAbstractionLayer;
|
||||
|
||||
|
@ -50,7 +50,12 @@ public class BaseLoadExtension implements MetricExtension {
|
|||
|
||||
@Override
|
||||
public String getInterval() {
|
||||
return interval;
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProvides() {
|
||||
return provides;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -66,23 +71,20 @@ public class BaseLoadExtension implements MetricExtension {
|
|||
if(map.containsKey("threaded")) {
|
||||
threaded = (boolean) map.get("threaded");
|
||||
}
|
||||
if(map.containsKey("interval")) {
|
||||
interval = (String) map.get("interval");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetricResult getMetrics() {
|
||||
|
||||
double[] loadAvg = hardwareAbstractionLayer.getProcessor().getSystemLoadAverage(3);
|
||||
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
|
||||
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
|
||||
put("1min", loadAvg[0]);
|
||||
put("5min", loadAvg[1]);
|
||||
put("15min", loadAvg[2]);
|
||||
}};
|
||||
|
||||
log.debug(fieldsMap.toString());
|
||||
return new MetricResult(name, new Measurement(new TreeMap<>(), fieldsMap));
|
||||
return new MetricResult(name, new Measurement(new HashMap<String, String>(), fieldsMap));
|
||||
}
|
||||
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
package sysmon.plugins.base;
|
||||
package sysmon.plugins.os_base;
|
||||
|
||||
import org.pf4j.Extension;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -8,8 +8,8 @@ import sysmon.shared.Measurement;
|
|||
import sysmon.shared.MetricExtension;
|
||||
import sysmon.shared.MetricResult;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
|
||||
@Extension
|
||||
public class BaseMemoryExtension implements MetricExtension {
|
||||
|
@ -18,12 +18,12 @@ public class BaseMemoryExtension implements MetricExtension {
|
|||
|
||||
// Extension details
|
||||
private final String name = "base_memory";
|
||||
private final String provides = "memory";
|
||||
private final String description = "Base Memory Metrics";
|
||||
|
||||
// Configuration / Options
|
||||
private boolean enabled = true;
|
||||
private boolean threaded = false;
|
||||
private String interval = "10s";
|
||||
|
||||
private HardwareAbstractionLayer hardwareAbstractionLayer;
|
||||
|
||||
|
@ -51,7 +51,12 @@ public class BaseMemoryExtension implements MetricExtension {
|
|||
|
||||
@Override
|
||||
public String getInterval() {
|
||||
return interval;
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProvides() {
|
||||
return provides;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -67,16 +72,13 @@ public class BaseMemoryExtension implements MetricExtension {
|
|||
if(map.containsKey("threaded")) {
|
||||
threaded = (boolean) map.get("threaded");
|
||||
}
|
||||
if(map.containsKey("interval")) {
|
||||
interval = (String) map.get("interval");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetricResult getMetrics() {
|
||||
|
||||
TreeMap<String, String> tagsMap = new TreeMap<>();
|
||||
TreeMap<String, Object> fieldsMap = new TreeMap<>();
|
||||
HashMap<String, String> tagsMap = new HashMap<>();
|
||||
HashMap<String, Object> fieldsMap = new HashMap<>();
|
||||
|
||||
long total = hardwareAbstractionLayer.getMemory().getTotal();
|
||||
long available = hardwareAbstractionLayer.getMemory().getAvailable();
|
|
@ -1,4 +1,4 @@
|
|||
package sysmon.plugins.base;
|
||||
package sysmon.plugins.os_base;
|
||||
|
||||
import org.pf4j.Extension;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -8,8 +8,8 @@ import sysmon.shared.Measurement;
|
|||
import sysmon.shared.MetricExtension;
|
||||
import sysmon.shared.MetricResult;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
|
||||
@Extension
|
||||
public class BaseNetstatExtension implements MetricExtension {
|
||||
|
@ -18,12 +18,12 @@ public class BaseNetstatExtension implements MetricExtension {
|
|||
|
||||
// Extension details
|
||||
private final String name = "base_netstat";
|
||||
private final String provides = "netstat";
|
||||
private final String description = "Base Netstat Metrics";
|
||||
|
||||
// Configuration / Options
|
||||
private boolean enabled = true;
|
||||
private boolean threaded = false;
|
||||
private String interval = "10s";
|
||||
|
||||
private SystemInfo systemInfo;
|
||||
|
||||
|
@ -51,7 +51,12 @@ public class BaseNetstatExtension implements MetricExtension {
|
|||
|
||||
@Override
|
||||
public String getInterval() {
|
||||
return interval;
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProvides() {
|
||||
return provides;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -67,15 +72,12 @@ public class BaseNetstatExtension implements MetricExtension {
|
|||
if (map.containsKey("threaded")) {
|
||||
threaded = (boolean) map.get("threaded");
|
||||
}
|
||||
if(map.containsKey("interval")) {
|
||||
interval = (String) map.get("interval");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetricResult getMetrics() {
|
||||
|
||||
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
|
||||
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
|
||||
|
||||
put("ip_conn_total", systemInfo.getOperatingSystem().getInternetProtocolStats().getConnections().size());
|
||||
|
||||
|
@ -101,7 +103,7 @@ public class BaseNetstatExtension implements MetricExtension {
|
|||
|
||||
}};
|
||||
|
||||
return new MetricResult(name, new Measurement(new TreeMap<>(), fieldsMap));
|
||||
return new MetricResult(name, new Measurement(new HashMap<>(), fieldsMap));
|
||||
}
|
||||
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
package sysmon.plugins.base;
|
||||
package sysmon.plugins.os_base;
|
||||
|
||||
import org.pf4j.Extension;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -9,7 +9,10 @@ import sysmon.shared.Measurement;
|
|||
import sysmon.shared.MetricExtension;
|
||||
import sysmon.shared.MetricResult;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@Extension
|
||||
public class BaseNetworkExtension implements MetricExtension {
|
||||
|
@ -18,16 +21,14 @@ public class BaseNetworkExtension implements MetricExtension {
|
|||
|
||||
// Extension details
|
||||
private final String name = "base_network";
|
||||
private final String provides = "network";
|
||||
private final String description = "Base Network Metrics";
|
||||
|
||||
// Configuration / Options
|
||||
private boolean enabled = true;
|
||||
private boolean threaded = false;
|
||||
private String interval = "10s";
|
||||
|
||||
private HardwareAbstractionLayer hardwareAbstractionLayer;
|
||||
private List<NetworkIF> interfaces;
|
||||
private int refreshCounter = 0;
|
||||
|
||||
|
||||
@Override
|
||||
|
@ -53,7 +54,12 @@ public class BaseNetworkExtension implements MetricExtension {
|
|||
|
||||
@Override
|
||||
public String getInterval() {
|
||||
return interval;
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProvides() {
|
||||
return provides;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -69,28 +75,21 @@ public class BaseNetworkExtension implements MetricExtension {
|
|||
if(map.containsKey("threaded")) {
|
||||
threaded = (boolean) map.get("threaded");
|
||||
}
|
||||
if(map.containsKey("interval")) {
|
||||
interval = (String) map.get("interval");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetricResult getMetrics() {
|
||||
|
||||
ArrayList<Measurement> measurementList = new ArrayList<>();
|
||||
if(interfaces == null || refreshCounter++ > 360) {
|
||||
log.debug("getMetrics() - refreshing list of network interfaces");
|
||||
interfaces = hardwareAbstractionLayer.getNetworkIFs();
|
||||
refreshCounter = 0;
|
||||
}
|
||||
|
||||
List<NetworkIF> interfaces = hardwareAbstractionLayer.getNetworkIFs();
|
||||
for(NetworkIF netif : interfaces) {
|
||||
|
||||
TreeMap<String, String> tagsMap = new TreeMap<String, String>() {{
|
||||
HashMap<String, String> tagsMap = new HashMap<String, String>() {{
|
||||
put("name", netif.getName());
|
||||
}};
|
||||
|
||||
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
|
||||
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
|
||||
put("rx_pkts", netif.getPacketsRecv());
|
||||
put("tx_pkts", netif.getPacketsSent());
|
||||
put("rx_bytes", netif.getBytesRecv());
|
|
@ -1,8 +1,9 @@
|
|||
package sysmon.plugins.base;
|
||||
package sysmon.plugins.os_base;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.pf4j.Plugin;
|
||||
import org.pf4j.PluginWrapper;
|
||||
import oshi.SystemInfo;
|
||||
import oshi.hardware.HardwareAbstractionLayer;
|
||||
|
||||
|
@ -14,6 +15,9 @@ public class BasePlugin extends Plugin {
|
|||
private static SystemInfo systemInfo;
|
||||
private static HardwareAbstractionLayer hardwareAbstractionLayer;
|
||||
|
||||
public BasePlugin(PluginWrapper wrapper) {
|
||||
super(wrapper);
|
||||
}
|
||||
|
||||
public static HardwareAbstractionLayer getHardwareAbstractionLayer() {
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
package sysmon.plugins.base;
|
||||
package sysmon.plugins.os_base;
|
||||
|
||||
import org.pf4j.Extension;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -18,13 +18,12 @@ public class BaseProcessExtension implements MetricExtension {
|
|||
|
||||
// Extension details
|
||||
private final String name = "base_process";
|
||||
private final String provides = "process";
|
||||
private final String description = "Base Process Metrics";
|
||||
|
||||
// Configuration / Options
|
||||
private boolean enabled = true;
|
||||
private boolean threaded = false;
|
||||
private String interval = "60s";
|
||||
|
||||
private List<?> includeList = new ArrayList<Object>() {{
|
||||
add("java");
|
||||
add("node");
|
||||
|
@ -38,13 +37,10 @@ public class BaseProcessExtension implements MetricExtension {
|
|||
add("corosync");
|
||||
add("rsyslogd");
|
||||
add("postgres");
|
||||
add("mariadbd");
|
||||
add("memcached");
|
||||
add("db2sysc");
|
||||
add("dsmserv");
|
||||
add("mmfsd");
|
||||
add("systemd");
|
||||
add("nginx");
|
||||
}};
|
||||
|
||||
private final long minUptimeInSeconds = 600;
|
||||
|
@ -74,7 +70,12 @@ public class BaseProcessExtension implements MetricExtension {
|
|||
|
||||
@Override
|
||||
public String getInterval() {
|
||||
return interval;
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProvides() {
|
||||
return provides;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -90,9 +91,6 @@ public class BaseProcessExtension implements MetricExtension {
|
|||
if(map.containsKey("threaded")) {
|
||||
threaded = (boolean) map.get("threaded");
|
||||
}
|
||||
if(map.containsKey("interval")) {
|
||||
interval = (String) map.get("interval");
|
||||
}
|
||||
if(map.containsKey("include")) {
|
||||
includeList = (List<?>) map.get("include");
|
||||
}
|
||||
|
@ -123,12 +121,12 @@ public class BaseProcessExtension implements MetricExtension {
|
|||
}
|
||||
log.debug("pid: " + p.getProcessID() + ", name: " + p.getName() + ", virt: " + p.getVirtualSize() + " rss: " + p.getResidentSetSize());
|
||||
|
||||
TreeMap<String, String> tagsMap = new TreeMap<String, String>() {{
|
||||
HashMap<String, String> tagsMap = new HashMap<String, String>() {{
|
||||
put("pid", String.valueOf(p.getProcessID()));
|
||||
put("name", p.getName());
|
||||
}};
|
||||
|
||||
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
|
||||
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
|
||||
put("mem_rss", p.getResidentSetSize());
|
||||
put("mem_vsz", p.getVirtualSize());
|
||||
put("kernel_time", p.getKernelTime());
|
|
@ -1,4 +1,4 @@
|
|||
package sysmon.plugins.base;
|
||||
package sysmon.plugins.os_base;
|
||||
|
||||
import org.pf4j.Extension;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -8,10 +8,9 @@ import oshi.hardware.HardwareAbstractionLayer;
|
|||
import sysmon.shared.Measurement;
|
||||
import sysmon.shared.MetricExtension;
|
||||
import sysmon.shared.MetricResult;
|
||||
import sysmon.shared.PluginHelper;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
|
||||
@Extension
|
||||
public class BaseProcessorExtension implements MetricExtension {
|
||||
|
@ -20,12 +19,12 @@ public class BaseProcessorExtension implements MetricExtension {
|
|||
|
||||
// Extension details
|
||||
private final String name = "base_processor";
|
||||
private final String provides = "processor";
|
||||
private final String description = "Base Processor Metrics";
|
||||
|
||||
// Configuration / Options
|
||||
private boolean enabled = true;
|
||||
private boolean threaded = false;
|
||||
private String interval = "10s";
|
||||
|
||||
private HardwareAbstractionLayer hardwareAbstractionLayer;
|
||||
private long[] oldTicks;
|
||||
|
@ -53,7 +52,12 @@ public class BaseProcessorExtension implements MetricExtension {
|
|||
|
||||
@Override
|
||||
public String getInterval() {
|
||||
return interval;
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProvides() {
|
||||
return provides;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -69,16 +73,13 @@ public class BaseProcessorExtension implements MetricExtension {
|
|||
if(map.containsKey("threaded")) {
|
||||
threaded = (boolean) map.get("threaded");
|
||||
}
|
||||
if(map.containsKey("interval")) {
|
||||
interval = (String) map.get("interval");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetricResult getMetrics() {
|
||||
|
||||
TreeMap<String, String> tagsMap = new TreeMap<>();
|
||||
TreeMap<String, Object> fieldsMap = new TreeMap<>();
|
||||
HashMap<String, String> tagsMap = new HashMap<>();
|
||||
HashMap<String, Object> fieldsMap = new HashMap<>();
|
||||
|
||||
long[] ticks = hardwareAbstractionLayer.getProcessor().getSystemCpuLoadTicks();
|
||||
if(oldTicks == null || oldTicks.length != ticks.length) {
|
||||
|
@ -99,15 +100,15 @@ public class BaseProcessorExtension implements MetricExtension {
|
|||
long nonBusy = idle + iowait;
|
||||
long total = busy + nonBusy;
|
||||
|
||||
fieldsMap.put("system", PluginHelper.round(((double) system / (double) total) * 100, 2));
|
||||
fieldsMap.put("user", PluginHelper.round(((double) user / (double) total) * 100, 2));
|
||||
fieldsMap.put("nice", PluginHelper.round(((double) nice / (double) total) * 100, 2));
|
||||
fieldsMap.put("iowait", PluginHelper.round(((double) iowait / (double) total) * 100, 2));
|
||||
fieldsMap.put("steal", PluginHelper.round(((double) steal / (double) total) * 100, 2));
|
||||
fieldsMap.put("irq", PluginHelper.round(((double) irq / (double) total) * 100, 2));
|
||||
fieldsMap.put("softirq", PluginHelper.round(((double) softirq / (double) total) * 100, 2));
|
||||
fieldsMap.put("idle", PluginHelper.round(((double) idle / (double) total) * 100, 2));
|
||||
fieldsMap.put("busy", PluginHelper.round(((double) busy / (double) total) * 100, 2));
|
||||
fieldsMap.put("system", ((float) system / (float) total) * 100);
|
||||
fieldsMap.put("user", ((float) user / (float) total) * 100);
|
||||
fieldsMap.put("nice", ((float) nice / (float) total) * 100);
|
||||
fieldsMap.put("iowait", ((float) iowait / (float) total) * 100);
|
||||
fieldsMap.put("steal", ((float) steal / (float) total) * 100);
|
||||
fieldsMap.put("irq", ((float) irq / (float) total) * 100);
|
||||
fieldsMap.put("softirq", ((float) softirq / (float) total) * 100);
|
||||
fieldsMap.put("idle", ((float) idle / (float) total) * 100);
|
||||
fieldsMap.put("busy", ((float) busy / (float) total) * 100);
|
||||
|
||||
oldTicks = ticks;
|
||||
log.debug(fieldsMap.toString());
|
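The hunk above swaps the `PluginHelper.round(...)` double percentages used on main for plain float ratios in v1.0.21. A tiny hypothetical illustration of the same tick-delta arithmetic with made-up tick counts (the `busy` sum is assumed from the surrounding code, which is not fully shown here):

```java
// Made-up tick deltas; only the arithmetic mirrors the hunk above.
public class TickMathSketch {
    public static void main(String[] args) {
        long user = 120, nice = 5, system = 60, irq = 2, softirq = 3, steal = 0;
        long idle = 800, iowait = 10;

        long busy = user + nice + system + irq + softirq + steal; // assumed definition
        long nonBusy = idle + iowait;
        long total = busy + nonBusy;                              // 1000 in this example

        // main-branch style: percentage rounded to two decimals (round() emulated here)
        double busyRounded = Math.round(((double) busy / (double) total) * 100 * 100.0) / 100.0;
        // v1.0.21 style: raw float percentage
        float busyFloat = ((float) busy / (float) total) * 100;

        System.out.println(busyRounded + " vs " + busyFloat); // 19.0 vs 19.0
    }
}
```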
plugins/os-ibmi/README.md
@@ -0,0 +1,4 @@
# IBM i Plugin

This is just for testing purposes.

plugins/os-ibmi/build.gradle
@@ -0,0 +1,7 @@
plugins {
}

dependencies {
    // https://sourceforge.net/projects/jt400/ and http://jt400.sourceforge.net/
    implementation group: 'net.sf.jt400', name: 'jt400', version: '11.0'
}

plugins/os-ibmi/gradle.properties
@@ -0,0 +1,4 @@
pluginId=sysmon-ibmi
pluginClass=sysmon.plugins.os_ibmi.IbmIPlugin
pluginDependencies=
pluginDescription=Collects IBM-i OS metrics.
|
|
@ -0,0 +1,66 @@
|
|||
package sysmon.plugins.os_ibmi;
|
||||
|
||||
import com.ibm.as400.access.AS400;
|
||||
import com.ibm.as400.access.SystemStatus;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.pf4j.Plugin;
|
||||
import org.pf4j.PluginWrapper;
|
||||
import oshi.SystemInfo;
|
||||
import oshi.hardware.HardwareAbstractionLayer;
|
||||
|
||||
public class IbmIPlugin extends Plugin {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(IbmIPlugin.class);
|
||||
|
||||
private static SystemStatus systemStatus;
|
||||
private static AS400 as400;
|
||||
|
||||
public IbmIPlugin(PluginWrapper wrapper) {
|
||||
super(wrapper);
|
||||
}
|
||||
|
||||
|
||||
public static SystemStatus getSystemStatus() {
|
||||
|
||||
try {
|
||||
if (as400 == null) {
|
||||
as400 = IbmIPlugin.getAS400();
|
||||
}
|
||||
if(systemStatus == null) {
|
||||
systemStatus = new SystemStatus(as400);
|
||||
}
|
||||
} catch (Exception exception) {
|
||||
log.error("getSystemStatus() - {}", exception.getMessage());
|
||||
return null;
|
||||
}
|
||||
|
||||
return systemStatus;
|
||||
}
|
||||
|
||||
|
||||
public static AS400 getAS400() {
|
||||
|
||||
String osArch = System.getProperty("os.arch").toLowerCase();
|
||||
String osName = System.getProperty("os.name").toLowerCase();
|
||||
|
||||
if(!osArch.equals("ppc64") && !osName.equals("os/400")) {
|
||||
log.info("getAS400() - OS Arch: {}", osArch);
|
||||
log.info("getAS400() - OS Name: {}", osName);
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
as400 = new AS400("localhost", "*CURRENT");
|
||||
//as400 = new AS400("10.32.64.142");
|
||||
} catch (Exception exception) {
|
||||
log.error("getAS400() - {}", exception.getMessage());
|
||||
return null;
|
||||
}
|
||||
|
||||
return as400;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
|
@ -0,0 +1,142 @@
|
|||
package sysmon.plugins.os_ibmi;
|
||||
|
||||
import com.ibm.as400.access.*;
|
||||
import org.pf4j.Extension;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import sysmon.shared.Measurement;
|
||||
import sysmon.shared.MetricExtension;
|
||||
import sysmon.shared.MetricResult;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
//@Extension
|
||||
public class TestExtension implements MetricExtension {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(TestExtension.class);
|
||||
|
||||
// Extension details
|
||||
private final String name = "ibmi_test";
|
||||
private final String provides = "ibmi_test";
|
||||
private final String description = "IBM i Test Metrics";
|
||||
|
||||
// Configuration / Options
|
||||
private boolean enabled = true;
|
||||
private boolean threaded = false;
|
||||
|
||||
private SystemStatus systemStatus;
|
||||
|
||||
|
||||
@Override
|
||||
public boolean isEnabled() {
|
||||
return enabled;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isThreaded() {
|
||||
return threaded;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isSupported() {
|
||||
systemStatus = IbmIPlugin.getSystemStatus();
|
||||
return systemStatus != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getInterval() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getProvides() {
|
||||
return provides;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setConfiguration(Map<String, Object> map) {
|
||||
if (map.containsKey("enabled")) {
|
||||
enabled = (boolean) map.get("enabled");
|
||||
}
|
||||
if(map.containsKey("threaded")) {
|
||||
threaded = (boolean) map.get("threaded");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetricResult getMetrics() {
|
||||
|
||||
if(systemStatus == null) {
|
||||
log.warn("getMetrics() - no system or status");
|
||||
return null;
|
||||
}
|
||||
|
||||
/* const hSql = "select
|
||||
SERVER_NAME,
|
||||
HTTP_FUNCTION,
|
||||
SERVER_NORMAL_CONNECTIONS,
|
||||
SERVER_ACTIVE_THREADS,
|
||||
SERVER_IDLE_THREADS,
|
||||
BYTES_RECEIVED,
|
||||
BYTES_SENT,
|
||||
NONCACHE_PROCESSING_TIME,
|
||||
CACHE_PROCESSING_TIME
|
||||
from
|
||||
QSYS2.HTTP_SERVER_INFO";
|
||||
*/
|
||||
|
||||
try {
|
||||
int jobsInSystem = systemStatus.getJobsInSystem();
|
||||
log.info("Jobs In System: {}", jobsInSystem);
|
||||
|
||||
int batchJobsRunning = systemStatus.getBatchJobsRunning();
|
||||
log.info("Batch Jobs Running: {}", batchJobsRunning);
|
||||
|
||||
int activeThreads = systemStatus.getActiveThreadsInSystem();
|
||||
log.info("Active Threads: {}", activeThreads);
|
||||
|
||||
int activeJobs = systemStatus.getActiveJobsInSystem();
|
||||
log.info("Active Jobs: {}", activeJobs);
|
||||
|
||||
int onlineUsers = systemStatus.getUsersCurrentSignedOn();
|
||||
log.info("Online Users: {}", onlineUsers);
|
||||
|
||||
// The storage capacity of the system auxiliary storage pool (ASP1) in MBytes.
|
||||
long systemAsp = systemStatus.getSystemASP();
|
||||
|
||||
System.out.println("Current Processing Capacity :" + systemStatus.getCurrentProcessingCapacity());
|
||||
System.out.println("ASPUsed:" + systemStatus.getPercentSystemASPUsed());
|
||||
System.out.println("Temp Addresses Used:" + systemStatus.getPercentTemporaryAddresses());
|
||||
|
||||
|
||||
|
||||
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
|
||||
put("jobs_total", jobsInSystem);
|
||||
put("jobs_running", batchJobsRunning);
|
||||
put("jobs_active", activeJobs);
|
||||
put("threads", activeThreads);
|
||||
put("users", onlineUsers);
|
||||
|
||||
}};
|
||||
return new MetricResult(name, new Measurement(new HashMap<>(), fieldsMap));
|
||||
|
||||
} catch (AS400SecurityException | ErrorCompletingRequestException | InterruptedException | IOException | ObjectDoesNotExistException e) {
|
||||
log.error("getMetrics() {}", e.getMessage());
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
plugins/os-linux/README.md
@@ -0,0 +1,7 @@
# Linux Plugins

## Components

### Network Sockets

Collects statistics from */proc/net/sockstats*.
|
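The sockstat extension itself is not included in this compare, and the procfs file is normally `/proc/net/sockstat` (singular). Purely as a hypothetical sketch of what parsing it involves:

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;

// Hypothetical sketch, not the plugin's actual implementation.
public class SockstatSketch {
    public static void main(String[] args) throws IOException {
        List<String> lines = Files.readAllLines(Paths.get("/proc/net/sockstat"));
        for (String line : lines) {
            // The first line typically looks like: "sockets: used 873"
            if (line.startsWith("sockets:")) {
                String[] parts = line.trim().split("\\s+");
                System.out.println("sockets_used=" + Long.parseLong(parts[2]));
            }
        }
    }
}
```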
plugins/os-linux/build.gradle
@@ -0,0 +1,2 @@
plugins {
}

plugins/os-linux/gradle.properties
@@ -0,0 +1,5 @@
pluginId=sysmon-linux
pluginClass=sysmon.plugins.os_linux.LinuxPlugin
pluginDependencies=
pluginDescription=Linux OS Metrics.
|
||||
|
|
(LinuxNetstatExtension.java)
@@ -0,0 +1,108 @@
package sysmon.plugins.os_linux;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import sysmon.shared.PluginHelper;

import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;

// Disabled
//@Extension
public class LinuxNetstatExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(LinuxNetstatExtension.class);

    // Extension details
    private final String name = "linux_network_netstat";
    private final String provides = "network_netstat";
    private final String description = "Linux Netstat Metrics";

    // Configuration / Options
    private boolean enabled = true;
    private boolean threaded = false;


    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public boolean isThreaded() {
        return threaded;
    }

    @Override
    public boolean isSupported() {

        if(!System.getProperty("os.name").toLowerCase().contains("linux")) {
            log.warn("Requires Linux.");
            return false;
        }

        if(PluginHelper.notExecutable("netstat")) {
            log.warn("Requires the 'netstat' command.");
            return false;
        }

        return true;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getInterval() {
        return null;
    }

    @Override
    public String getProvides() {
        return provides;
    }

    @Override
    public String getDescription() {
        return description;
    }

    @Override
    public void setConfiguration(Map<String, Object> map) {
        if (map.containsKey("enabled")) {
            enabled = (boolean) map.get("enabled");
        }
        if(map.containsKey("threaded")) {
            threaded = (boolean) map.get("threaded");
        }
    }

    @Override
    public MetricResult getMetrics() throws Exception {

        HashMap<String, String> tagsMap;
        HashMap<String, Object> fieldsMap;

        try (InputStream inputStream = PluginHelper.executeCommand("netstat -s")) {
            LinuxNetstatParser parser = processCommandOutput(inputStream);
            tagsMap = parser.getTags();
            fieldsMap = parser.getFields();
        }

        return new MetricResult(name, new Measurement(tagsMap, fieldsMap));
    }


    protected LinuxNetstatParser processCommandOutput(InputStream input) throws IOException {
        return new LinuxNetstatParser(input);
    }

}
(LinuxNetstatParser.java)
@@ -0,0 +1,164 @@
package sysmon.plugins.os_linux;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.HashMap;

public class LinuxNetstatParser {

    private static final Logger log = LoggerFactory.getLogger(LinuxNetstatParser.class);

    private long ipTotalPacketsReceived;
    private long ipForwarded;
    private long ipIncomingPacketsDiscarded;
    private long ipOutgoingPacketsDropped;

    private long tcpConnectionsEstablished;
    private long tcpSegmentsReceived;
    private long tcpSegmentsSent;

    private long udpPacketsReceived;
    private long udpPacketsSent;


    public LinuxNetstatParser(InputStream inputStream) throws IOException {

        BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
        while (reader.ready()) {
            String line = reader.readLine();
            log.debug("LinuxNetstatParser() - Line: " + line);

            if(line.startsWith("Ip:")) {
                parseIp(reader);
            }

            if(line.startsWith("Tcp:")) {
                parseTcp(reader);
            }

            if(line.startsWith("Udp:")) {
                parseUdp(reader);
            }

        }

        inputStream.close();
    }


    protected void parseIp(BufferedReader reader) throws IOException {

        while (reader.ready()) {
            reader.mark(64);
            String line = reader.readLine();

            if(!line.startsWith(" ")) {
                reader.reset();
                return;
            }

            line = line.trim();

            if(line.matches("(\\d+) total packets received")) {
                ipTotalPacketsReceived = getFirstLong(line);
            }

            if(line.matches("(\\d+) forwarded")) {
                ipForwarded = getFirstLong(line);
            }

            if(line.matches("(\\d+) incoming packets discarded")) {
                ipIncomingPacketsDiscarded = getFirstLong(line);
            }

            if(line.matches("(\\d+) outgoing packets dropped")) {
                ipOutgoingPacketsDropped = getFirstLong(line);
            }

        }

    }


    protected void parseTcp(BufferedReader reader) throws IOException {

        while (reader.ready()) {
            reader.mark(64);
            String line = reader.readLine();

            if(!line.startsWith(" ")) {
                reader.reset();
                return;
            }

            line = line.trim();

            if(line.matches("(\\d+) connections established")) {
                tcpConnectionsEstablished = getFirstLong(line);
            }

            if(line.matches("(\\d+) segments received")) {
                tcpSegmentsReceived = getFirstLong(line);
            }

            if(line.matches("(\\d+) segments sent out")) {
                tcpSegmentsSent = getFirstLong(line);
            }

        }

    }

    protected void parseUdp(BufferedReader reader) throws IOException {

        while (reader.ready()) {
            reader.mark(64);
            String line = reader.readLine();

            if(!line.startsWith(" ")) {
                reader.reset();
                return;
            }

            line = line.trim();

            if(line.matches("(\\d+) packets received")) {
                udpPacketsReceived = getFirstLong(line);
            }

            if(line.matches("(\\d+) packets sent")) {
                udpPacketsSent = getFirstLong(line);
            }
        }

    }


    public HashMap<String, String> getTags() {
        return new HashMap<>();
    }

    public HashMap<String, Object> getFields() {
        return new HashMap<String, Object>() {{
            put("ip_forwarded", ipForwarded);
            put("ip_received", ipTotalPacketsReceived);
            put("ip_dropped", ipOutgoingPacketsDropped);
            put("ip_discarded", ipIncomingPacketsDiscarded);
            put("tcp_connections", tcpConnectionsEstablished);
            put("tcp_pkts_recv", tcpSegmentsReceived);
            put("tcp_pkts_sent", tcpSegmentsSent);
            put("udp_pkts_recv", udpPacketsReceived);
            put("udp_pkts_sent", udpPacketsSent);
        }};
    }

    private Long getFirstLong(String line) {
        return Long.parseLong(line.substring(0, line.indexOf(" ")));
    }

}
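A note on the parser above: it peeks at each line with BufferedReader.mark()/reset() so that the header of the next section (for example `Icmp:`) can be pushed back and re-read by the loop in the constructor. A small self-contained sketch of that pattern, assuming nothing from sysmon (the sample input is made up):

```java
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

public class PushbackDemo {
    public static void main(String[] args) throws IOException {
        BufferedReader reader = new BufferedReader(new StringReader("Ip:\n    1 total\nTcp:\n"));
        String section = reader.readLine();               // "Ip:"
        while (reader.ready()) {
            reader.mark(64);                               // remember position before peeking
            String line = reader.readLine();
            if (!line.startsWith(" ")) {                   // next section header reached
                reader.reset();                            // push the header back for the caller
                break;
            }
            System.out.println(section + " -> " + line.trim());
        }
        System.out.println("next: " + reader.readLine());  // "Tcp:" is still available
    }
}
```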
(LinuxPlugin.java)
@@ -0,0 +1,17 @@
package sysmon.plugins.os_linux;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.pf4j.Plugin;
import org.pf4j.PluginWrapper;


public class LinuxPlugin extends Plugin {

    private static final Logger log = LoggerFactory.getLogger(LinuxPlugin.class);

    public LinuxPlugin(PluginWrapper wrapper) {
        super(wrapper);
    }

}
(LinuxSocketExtension.java)
@@ -0,0 +1,97 @@
package sysmon.plugins.os_linux;

import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import sysmon.shared.PluginHelper;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Extension
public class LinuxSocketExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(LinuxSocketExtension.class);

    // Extension details
    private final String name = "linux_network_sockets";
    private final String provides = "network_sockets";
    private final String description = "Linux Network Socket Metrics";

    // Configuration / Options
    private boolean enabled = true;
    private boolean threaded = false;


    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public boolean isThreaded() {
        return threaded;
    }

    @Override
    public boolean isSupported() {

        if(!System.getProperty("os.name").toLowerCase().contains("linux")) {
            log.debug("Requires Linux.");
            return false;
        }

        return true;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getInterval() {
        return null;
    }

    @Override
    public String getProvides() {
        return provides;
    }

    @Override
    public String getDescription() {
        return description;
    }

    @Override
    public void setConfiguration(Map<String, Object> map) {
        if (map.containsKey("enabled")) {
            enabled = (boolean) map.get("enabled");
        }
        if(map.containsKey("threaded")) {
            threaded = (boolean) map.get("threaded");
        }
    }

    @Override
    public MetricResult getMetrics() {

        LinuxSocketStat sockStat = processSockOutput(PluginHelper.readFile("/proc/net/sockstat"));

        HashMap<String, String> tagsMap = sockStat.getTags();
        HashMap<String, Object> fieldsMap = sockStat.getFields();

        log.debug("getMetrics() - tags: {}, fields: {}", tagsMap, fieldsMap);
        return new MetricResult(name, new Measurement(tagsMap, fieldsMap));
    }

    protected LinuxSocketStat processSockOutput(List<String> inputLines) {
        return new LinuxSocketStat(inputLines);
    }

}
(LinuxSocketStat.java)
@@ -0,0 +1,96 @@
package sysmon.plugins.os_linux;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class LinuxSocketStat {

    private static final Logger log = LoggerFactory.getLogger(LinuxSocketStat.class);

    private static final Pattern pattern1 = Pattern.compile("^sockets: used (\\d+)");
    private static final Pattern pattern2 = Pattern.compile("^TCP: inuse (\\d+) orphan (\\d+) tw (\\d+) alloc (\\d+) mem (\\d+)");
    private static final Pattern pattern3 = Pattern.compile("^UDP: inuse (\\d+) mem (\\d+)");

    private long sockets;
    private long tcp_inuse;
    private long tcp_orphan;
    private long tcp_tw;
    private long tcp_alloc;
    private long tcp_mem;
    private long udp_inuse;
    private long udp_mem;

    /*
    sockets: used 1238
    TCP: inuse 52 orphan 0 tw 18 alloc 55 mem 7
    UDP: inuse 11 mem 10
    UDPLITE: inuse 0
    RAW: inuse 0
    FRAG: inuse 0 memory 0
    */


    LinuxSocketStat(List<String> lines) {

        Matcher matcher;
        for(String line : lines) {
            String proto = line.substring(0, line.indexOf(':'));

            switch (proto) {
                case "sockets":
                    matcher = pattern1.matcher(line);
                    if (matcher.matches() && matcher.groupCount() == 1) {
                        sockets = Long.parseLong(matcher.group(1));
                    }
                    break;

                case "TCP":
                    matcher = pattern2.matcher(line);
                    if (matcher.matches() && matcher.groupCount() == 5) {
                        tcp_inuse = Long.parseLong(matcher.group(1));
                        tcp_orphan = Long.parseLong(matcher.group(2));
                        tcp_tw = Long.parseLong(matcher.group(3));
                        tcp_alloc = Long.parseLong(matcher.group(4));
                        tcp_mem = Long.parseLong(matcher.group(5));
                    }
                    break;

                case "UDP":
                    matcher = pattern3.matcher(line);
                    if (matcher.matches() && matcher.groupCount() == 2) {
                        udp_inuse = Long.parseLong(matcher.group(1));
                        udp_mem = Long.parseLong(matcher.group(2));
                    }
                    break;

            }

        }

    }


    public HashMap<String, String> getTags() {
        return new HashMap<>();
    }


    public HashMap<String, Object> getFields() {
        return new HashMap<String, Object>() {{
            put("sockets", sockets);
            put("tcp_inuse", tcp_inuse);
            put("tcp_alloc", tcp_alloc);
            put("tcp_orphan", tcp_orphan);
            put("tcp_mem", tcp_mem);
            put("tcp_tw", tcp_tw);
            put("udp_inuse", udp_inuse);
            put("udp_mem", udp_mem);
        }};
    }

}
26
plugins/os-linux/src/test/groovy/LinuxNetstatTest.groovy
Normal file

@@ -0,0 +1,26 @@
import spock.lang.Specification
import sysmon.plugins.os_linux.LinuxNetstatParser

class LinuxNetstatTest extends Specification {

    void "test netstat parsing"() {

        setup:
        InputStream inputStream = getClass().getResourceAsStream('/netstat-linux.txt')

        when:
        LinuxNetstatParser parser = new LinuxNetstatParser(inputStream)

        then:
        parser.getFields().size() > 0
        parser.getFields().get('ip_received') == 109772L
        parser.getFields().get('ip_dropped') == 70L
        parser.getFields().get('ip_discarded') == 0L
        parser.getFields().get('tcp_pkts_sent') == 89891L
        parser.getFields().get('tcp_pkts_recv') == 86167L
        parser.getFields().get('udp_pkts_sent') == 10682L
        parser.getFields().get('udp_pkts_recv') == 31928L

    }

}
29
plugins/os-linux/src/test/groovy/LinuxNetworkTest.groovy
Normal file

@@ -0,0 +1,29 @@
import spock.lang.Specification
import sysmon.plugins.os_linux.LinuxSocketExtension
import sysmon.plugins.os_linux.LinuxSocketStat

class LinuxNetworkTest extends Specification {

    void "test /proc/net/sockstat parsing"() {

        setup:
        def testFile = new File(getClass().getResource('/proc_net_sockstat.txt').toURI())
        List<String> lines = testFile.readLines("UTF-8")

        when:
        LinuxSocketExtension extension = new LinuxSocketExtension()
        LinuxSocketStat stats = extension.processSockOutput(lines)

        then:
        stats.getFields().get("sockets") == 1238L
        stats.getFields().get("tcp_inuse") == 52L
        stats.getFields().get("tcp_orphan") == 0L
        stats.getFields().get("tcp_alloc") == 55L
        stats.getFields().get("tcp_mem") == 7L
        stats.getFields().get("tcp_tw") == 18L
        stats.getFields().get("udp_inuse") == 11L
        stats.getFields().get("udp_mem") == 10L

    }

}
112
plugins/os-linux/src/test/resources/netstat-linux.txt
Normal file

@@ -0,0 +1,112 @@
Ip:
    Forwarding: 1
    109772 total packets received
    1 with invalid addresses
    0 forwarded
    0 incoming packets discarded
    109769 incoming packets delivered
    103916 requests sent out
    70 outgoing packets dropped
    1 dropped because of missing route
Icmp:
    52 ICMP messages received
    0 input ICMP message failed
    ICMP input histogram:
        destination unreachable: 40
        echo requests: 12
    108 ICMP messages sent
    0 ICMP messages failed
    ICMP output histogram:
        destination unreachable: 96
        echo replies: 12
IcmpMsg:
        InType3: 40
        InType8: 12
        OutType0: 12
        OutType3: 96
Tcp:
    3142 active connection openings
    5 passive connection openings
    2105 failed connection attempts
    193 connection resets received
    70 connections established
    86167 segments received
    89891 segments sent out
    184 segments retransmitted
    3 bad segments received
    2735 resets sent
Udp:
    31928 packets received
    96 packets to unknown port received
    0 packet receive errors
    10682 packets sent
    0 receive buffer errors
    0 send buffer errors
    IgnoredMulti: 22
UdpLite:
TcpExt:
    30 packets pruned from receive queue because of socket buffer overrun
    178 TCP sockets finished time wait in fast timer
    426 delayed acks sent
    1 delayed acks further delayed because of locked socket
    Quick ack mode was activated 1059 times
    45809 packet headers predicted
    7293 acknowledgments not containing data payload received
    7659 predicted acknowledgments
    TCPSackRecovery: 3
    Detected reordering 4 times using SACK
    TCPDSACKUndo: 1
    1 congestion windows recovered without slow start after partial ack
    TCPLostRetransmit: 82
    3 timeouts after reno fast retransmit
    1 timeouts in loss state
    3 fast retransmits
    3 retransmits in slow start
    TCPTimeouts: 129
    TCPLossProbes: 69
    TCPLossProbeRecovery: 10
    TCPBacklogCoalesce: 450
    TCPDSACKOldSent: 991
    TCPDSACKOfoSent: 6
    TCPDSACKRecv: 45
    202 connections reset due to unexpected data
    147 connections reset due to early user close
    13 connections aborted due to timeout
    TCPDSACKIgnoredNoUndo: 10
    TCPSackShifted: 1
    TCPSackMerged: 1
    TCPSackShiftFallback: 9
    TCPRcvCoalesce: 5338
    TCPOFOQueue: 793
    TCPOFOMerge: 6
    TCPChallengeACK: 3
    TCPSYNChallenge: 3
    TCPSpuriousRtxHostQueues: 6
    TCPAutoCorking: 710
    TCPFromZeroWindowAdv: 1
    TCPToZeroWindowAdv: 1
    TCPWantZeroWindowAdv: 4
    TCPSynRetrans: 98
    TCPOrigDataSent: 19048
    TCPHystartTrainDetect: 3
    TCPHystartTrainCwnd: 54
    TCPHystartDelayDetect: 1
    TCPHystartDelayCwnd: 24
    TCPACKSkippedSeq: 1
    TCPKeepAlive: 2595
    TCPDelivered: 20025
    TCPAckCompressed: 260
    TcpTimeoutRehash: 116
IpExt:
    InMcastPkts: 2257
    OutMcastPkts: 480
    InBcastPkts: 98
    OutBcastPkts: 78
    InOctets: 147193028
    OutOctets: 14723163
    InMcastOctets: 478599
    OutMcastOctets: 73462
    InBcastOctets: 10094
    OutBcastOctets: 5580
    InNoECTPkts: 177661
MPTcpExt:
(proc_net_sockstat.txt)
@@ -0,0 +1,6 @@
sockets: used 1238
TCP: inuse 52 orphan 0 tw 18 alloc 55 mem 7
UDP: inuse 11 mem 10
UDPLITE: inuse 0
RAW: inuse 0
FRAG: inuse 0 memory 0
@@ -1,6 +0,0 @@
pluginId=sysmon-power
pluginClass=sysmon.plugins.power.PowerPlugin
pluginVersion=0.0.1
pluginProvider=System Monitor
pluginDependencies=
pluginDescription=Collects IBM Power specific metrics.
(PowerPlugin.java)
@@ -1,7 +0,0 @@
package sysmon.plugins.power;

import org.pf4j.Plugin;

public class PowerPlugin extends Plugin {

}
@@ -3,9 +3,9 @@ import org.redline_rpm.header.Os
plugins {
    id 'application'

    id "net.nemerosa.versioning" version "2.15.1"
    id "com.github.johnrengelman.shadow" version "7.1.2"
    id "com.netflix.nebula.ospackage" version "11.3.0"
    id "net.nemerosa.versioning" version "2.15.1"
    id "nebula.ospackage" version "9.1.1"
}

dependencies {

@@ -28,7 +28,7 @@ def projectName = "sysmon-server"
application {
    // Define the main class for the application.
    mainClass.set('sysmon.server.Application')
    applicationDefaultJvmArgs = [ "-Xms64m", "-Xmx64m", "-XX:+ExitOnOutOfMemoryError", "-XX:+AlwaysPreTouch" ]
    applicationDefaultJvmArgs = [ "-server", "-Xms64m", "-Xmx64m", "-XX:+ExitOnOutOfMemoryError", "-XX:+AlwaysPreTouch" ]
}

run {

@@ -40,6 +40,7 @@ tasks.named('test') {
    useJUnitPlatform()
}

apply plugin: 'nebula.ospackage'
ospackage {
    packageName = projectName
    release = '1'
@@ -11,10 +11,10 @@

dir="/opt/sysmon/server"
cmd="/opt/sysmon/server/bin/server"
args="" # Add '-d' for debug output
args="-d"
user=""

name="sysmon-server"
name=`basename $0`
pid_file="/var/run/$name.pid"
stdout_log="/var/log/$name.log"
stderr_log="/var/log/$name.err"
(ComboResultToPointProcessor.java)
@@ -8,6 +8,7 @@ import sysmon.shared.ComboResult;
import sysmon.shared.Measurement;
import sysmon.shared.MetricResult;

import java.util.Map;
import java.util.concurrent.TimeUnit;

public class ComboResultToPointProcessor implements Processor {

@@ -25,18 +26,34 @@ public class ComboResultToPointProcessor implements Processor {

        BatchPoints.Builder batchPoints = BatchPoints
                .database(ComboResultToPointProcessor.influxDbName)
                .precision(TimeUnit.SECONDS);
                .precision(TimeUnit.MILLISECONDS);

        for(MetricResult metricResult : comboResult.getMetricResults()) {

            for(Measurement measurement : metricResult.getMeasurements()) {

                Point.Builder point = Point.measurement(metricResult.getName())
                        .time(metricResult.getTimestamp(), TimeUnit.SECONDS)
                        .tag("hostname", metricResult.getHostname())
                        .tag(measurement.getTags())
                        .fields(measurement.getFields());
                        .time(metricResult.getTimestamp(), TimeUnit.MILLISECONDS)
                        .tag("hostname", metricResult.getHostname());

                for (Map.Entry<String,String> entry : measurement.getTags().entrySet()) {
                    //log.info("process() - tag: " + entry.getKey() + "=" + entry.getValue());
                    point.tag(entry.getKey(), entry.getValue());
                }

                for (Map.Entry<String,Object> entry : measurement.getFields().entrySet()) {
                    //log.info("process() - field: " + entry.getKey() + "=" + entry.getValue());
                    if(entry.getValue() instanceof Number) {
                        Number num = (Number) entry.getValue();
                        point.addField(entry.getKey(), num);
                    } else if(entry.getValue() instanceof Boolean) {
                        Boolean bol = (Boolean) entry.getValue();
                        point.addField(entry.getKey(), bol);
                    } else {
                        String str = (String) entry.getValue();
                        point.addField(entry.getKey(), str);
                    }
                }
                batchPoints.point(point.build());
            }
(MetricResultToPointProcessor.java)
@@ -8,6 +8,7 @@ import sysmon.shared.Measurement;
import sysmon.shared.MetricResult;

import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

public class MetricResultToPointProcessor implements Processor {

@@ -28,18 +29,33 @@ public class MetricResultToPointProcessor implements Processor {

        BatchPoints.Builder batchPoints = BatchPoints
                .database(MetricResultToPointProcessor.influxDbName)
                .precision(TimeUnit.SECONDS)
                .precision(TimeUnit.MILLISECONDS)
                .tag("hostname", metricResult.getHostname());

        for(Measurement measurement : measurementList) {

            Point.Builder point = Point.measurement(metricResult.getName())
                    .time(metricResult.getTimestamp(), TimeUnit.SECONDS)
                    .fields(measurement.getFields())
                    .tag(measurement.getTags());
                    .time(metricResult.getTimestamp(), TimeUnit.MILLISECONDS);

            for (Map.Entry<String,String> entry : measurement.getTags().entrySet()) {
                //log.info("process() - tag: " + entry.getKey() + "=" + entry.getValue());
                point.tag(entry.getKey(), entry.getValue());
            }

            for (Map.Entry<String,Object> entry : measurement.getFields().entrySet()) {
                //log.info("process() - field: " + entry.getKey() + "=" + entry.getValue());
                if(entry.getValue() instanceof Number) {
                    Number num = (Number) entry.getValue();
                    point.addField(entry.getKey(), num);
                } else if(entry.getValue() instanceof Boolean) {
                    Boolean bol = (Boolean) entry.getValue();
                    point.addField(entry.getKey(), bol);
                } else {
                    String str = (String) entry.getValue();
                    point.addField(entry.getKey(), str);
                }
            }
            batchPoints.point(point.build());

        }

        exchange.getIn().setBody(batchPoints.build());
(ServerRouteBuilder.java)
@@ -21,6 +21,15 @@ public class ServerRouteBuilder extends RouteBuilder {
                .host(registry.lookupByNameAndType("http.host", String.class))
                .port(registry.lookupByNameAndType("http.port", Integer.class));

        /*
        rest()
                .get("/")
                .produces("text/html")
                .route()
                .to("log:stdout")
                .endRest();
        */

        rest()
                .post("/metrics")
                .consumes("application/json")

@@ -31,7 +40,7 @@ public class ServerRouteBuilder extends RouteBuilder {
                .setHeader(Exchange.HTTP_RESPONSE_CODE, constant(202))
                .setHeader("Content-Type", constant("application/x-www-form-urlencoded"))
                .to("seda:inbound?discardWhenFull=true")
                .setBody(simple("OK, received."))
                .setBody(simple("OK, received by server."))
                .doCatch(Exception.class)
                .log(LoggingLevel.WARN, "Error: ${exception.message}.")
                .end()
(application.properties)
@@ -15,5 +15,18 @@
## limitations under the License.
## ---------------------------------------------------------------------------

# to configure camel main
# here you can configure options on camel main (see MainConfigurationProperties class)
camel.main.name = sysmon-server

# enable tracing
#camel.main.tracing = true

# bean introspection to log reflection based configuration
#camel.main.beanIntrospectionExtendedStatistics=true
#camel.main.beanIntrospectionLoggingLevel=INFO

# run in lightweight mode to be tiny as possible
camel.main.lightweight = true
# and eager load classes
#camel.main.eager-classloading = true
@@ -9,7 +9,6 @@
plugins {
    id 'groovy'
    id 'java-library'
    id 'maven-publish'
}

repositories {

@@ -34,24 +33,3 @@ tasks.named('test') {
    // Use junit platform for unit tests.
    useJUnitPlatform()
}

publishing {
    publications {
        library(MavenPublication) {
            groupId = 'sysmon'
            artifactId = 'shared'

            from components.java
        }
    }
    repositories {
        maven {
            name = "gitea"
            url = uri("https://git.data.coop/api/packages/$System.env.DRONE_REPO_OWNER/maven")
            credentials {
                username = "$System.env.DRONE_REPO_OWNER"
                password = "$System.env.AUTH_TOKEN"
            }
        }
    }
}
(Measurement.java)
@@ -1,22 +1,22 @@
package sysmon.shared;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;

public class Measurement implements Serializable {

    private static final long serialVersionUID = 1L;

    private Map<String, String> tags = new TreeMap<>();
    private Map<String, Object> fields = new TreeMap<>();
    private HashMap<String, String> tags = new HashMap<>();
    private HashMap<String, Object> fields = new HashMap<>();


    public Measurement() {
    }

    public Measurement(Map<String, String> tags, TreeMap<String, Object> fields) {
    public Measurement(HashMap<String, String> tags, HashMap<String, Object> fields) {
        this.tags = Objects.requireNonNull(tags);
        this.fields = Objects.requireNonNull(fields);
    }

@@ -29,12 +29,12 @@ public class Measurement implements Serializable {
        return fields;
    }

    public void setTags(TreeMap<String, String> tags) {
    public void setTags(HashMap<String, String> tags) {
        Objects.requireNonNull(tags);
        this.tags = tags;
    }

    public void setFields(TreeMap<String, Object> fields) {
    public void setFields(HashMap<String, Object> fields) {
        Objects.requireNonNull(fields);
        this.fields = fields;
    }
(MetricExtension.java)
@@ -12,6 +12,7 @@ public interface MetricExtension extends ExtensionPoint {

    String getName();
    String getInterval();
    String getProvides();
    String getDescription();

    void setConfiguration(Map<String, Object> map);
(MetricResult.java)
@@ -10,7 +10,7 @@ public class MetricResult implements Serializable {

    private String name;
    private String hostname;
    private Long timestamp; // epoch seconds
    private Long timestamp; // epoch milli
    private ArrayList<Measurement> measurements;

    public MetricResult() {

@@ -18,12 +18,12 @@ public class MetricResult implements Serializable {

    public MetricResult(String name) {
        this.name = name;
        this.timestamp = Instant.now().getEpochSecond();
        this.timestamp = Instant.now().toEpochMilli();
    }

    public MetricResult(String name, Measurement measurement) {
        this.name = name;
        this.timestamp = Instant.now().getEpochSecond();
        this.timestamp = Instant.now().toEpochMilli();
        this.measurements = new ArrayList<Measurement>() {{
            add(measurement);
        }};

@@ -31,7 +31,7 @@ public class MetricResult implements Serializable {

    public MetricResult(String name, ArrayList<Measurement> measurements) {
        this.name = name;
        this.timestamp = Instant.now().getEpochSecond();
        this.timestamp = Instant.now().toEpochMilli();
        this.measurements = measurements;
    }

@@ -46,7 +46,7 @@ public class MetricResult implements Serializable {
    }

    public void setHostname(String hostname) {
        this.hostname = hostname.toLowerCase();
        this.hostname = hostname;
    }

    public void setName(String name) {
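For context on the two hunks above: an extension wraps its tags and fields in a Measurement and hands it to a MetricResult; the map types and the timestamp resolution are what differ between the branches. A minimal usage sketch against the main-branch signatures shown here (the metric name and values are made up for illustration):

```java
import sysmon.shared.Measurement;
import sysmon.shared.MetricResult;

import java.util.HashMap;
import java.util.TreeMap;

public class MeasurementUsageSketch {
    public static void main(String[] args) {
        // Fields carry the sampled values; tags (empty here) would identify the series.
        TreeMap<String, Object> fields = new TreeMap<>();
        fields.put("sockets", 1238L);

        Measurement measurement = new Measurement(new HashMap<>(), fields);

        // On main this constructor stamps the result with epoch seconds.
        MetricResult result = new MetricResult("linux_network_sockets", measurement);
        System.out.println(result.getName() + " @ " + result.getTimestamp());
    }
}
```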
(MetricScript.java)
@@ -1,7 +0,0 @@
package sysmon.shared;

public interface MetricScript {

    MetricResult getMetrics();

}
(PluginHelper.java)
@@ -69,10 +69,4 @@ public class PluginHelper {
        return allLines;
    }


    public static double round(double average, int scale) {
        double pow = Math.pow(10, scale);
        return Math.round(average * pow) / pow;
    }

}