Compare commits

...

35 Commits

Author SHA1 Message Date
Mark Nellemann bb6a5f47c6 Update README.md 2024-05-17 06:19:16 +00:00
Mark Nellemann 5204142cb4 Remove ansible example 2023-11-02 20:03:01 +01:00
Mark Nellemann 80066b0c2e Update dependencies and bump version. 2023-11-02 09:40:47 +01:00
Mark Nellemann 462822b7e5 Merge pull request 'updates' (#3) from updates into main (Reviewed-on: #3) 2023-08-13 16:56:51 +00:00
Mark Nellemann 6e05b5bb65 Update 3rd party build dependencies. 2023-08-13 18:54:47 +02:00
Mark Nellemann c358e281ea Merge branch 'main' of git.data.coop:nellemann/sysmon 2023-08-13 18:15:51 +02:00
Mark Nellemann 3096ae450e Make a zip archive of plugins. 2023-06-24 21:32:37 +02:00
Mark Nellemann 55848ee590 Update 3rd party build dependencies. 2023-06-07 07:48:58 +02:00
Mark Nellemann a5e3b4afcd Merge pull request 'Just minor changes to build deps.' (#1) from power into main (Reviewed-on: #1) 2023-05-10 09:04:25 +00:00
Mark Nellemann 27838ab6ec Update 3rd party build dependencies. 2023-05-10 11:02:22 +02:00
Mark Nellemann 7b9d27a124 Changes to reflect updated deps. 2023-03-15 17:27:37 +01:00
Mark Nellemann 91c604e765 Merge branch 'main' into power 2023-03-15 16:29:48 +01:00
Mark Nellemann 25e2f58264 Update dependencies. 2023-03-15 16:28:50 +01:00
Mark Nellemann 11a22e84ba Initial work on power readings. 2023-03-15 16:21:07 +01:00
Mark Nellemann fb94b9e563 Add ansible example 2023-02-09 16:21:29 +01:00
Mark Nellemann 43d3e9babf Lowercase client hostnames, housekeeping, update dashboards. 2023-02-06 19:47:36 +01:00
Mark Nellemann d48934b94c Update dashboards to reflect Power/AIX plugin rename and fix urls. 2023-01-22 11:20:58 +01:00
Mark Nellemann 7ca1714198 Update dashboards to reflect Power/AIX plugin rename and fix urls. 2023-01-22 11:19:42 +01:00
Mark Nellemann 186d678861 Set ID of routes and fix bug when no configuration file is found. 2023-01-22 11:07:15 +01:00
Mark Nellemann ebd058a433 Update links. 2023-01-18 15:50:09 +01:00
Mark Nellemann e0a6499daa Update links and provide screenshots 2023-01-06 08:15:37 +01:00
Mark Nellemann f2d325425a Update links and provide screenshots. 2023-01-06 08:15:18 +01:00
Mark Nellemann 5faeb36000 Cleanup and fix username in drone pipeline. 2023-01-04 15:00:31 +01:00
Mark Nellemann f9192bd223 Merged in scripts (pull request #19: Scripts) 2023-01-04 13:45:49 +00:00
Mark Nellemann 529d73890e Build updates. 2023-01-04 14:44:42 +01:00
Mark Nellemann fd43bee35d Update dependencies. 2023-01-04 12:35:00 +01:00
Mark Nellemann 79f3b3a81d Initial work on script support 2022-12-25 11:29:45 +01:00
Mark Nellemann 31b494163d Work on support for groovy scripts. 2022-12-21 17:12:15 +01:00
Mark Nellemann b07e949fb2 Update README with links to other related projects. 2022-12-17 10:35:05 +01:00
Mark Nellemann 6a6fdf6d25 Lower influx time precision. 2022-12-17 09:48:28 +01:00
Mark Nellemann 9b35a6f3dc Update dashboard. 2022-11-30 16:22:47 +01:00
Mark Nellemann d9cc633626 Bump OSHI dependency and version. 2022-11-30 08:58:07 +01:00
Mark Nellemann 6ffc943a51 Fix incorrect use of OSHI getDiskStores causing lots of reads on AIX. 2022-11-16 12:21:33 +01:00
Mark Nellemann c503032d94 Various small changes. 2022-11-07 17:44:14 +01:00
Mark Nellemann 8b82af339a Small improvements and bump to version 1.0.22 2022-11-07 13:33:35 +01:00
102 changed files with 851 additions and 1898 deletions

.drone.yml (new file)

@ -0,0 +1,26 @@
---
kind: pipeline
name: default
type: docker
steps:
- name: test
image: eclipse-temurin:8-jdk
commands:
- ./gradlew test
- name: build
image: eclipse-temurin:8-jdk
environment:
AUTH_TOKEN: # Gitea access token ENV variable
from_secret: auth # Name of DroneCI secret exposed above
commands:
- ./gradlew build packages shared:publishLibraryPublicationToGiteaRepository
- for file in server/build/distributions/*.deb ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in server/build/distributions/*.rpm ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in client/build/distributions/*.deb ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in client/build/distributions/*.rpm ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in plugins/build/distributions/*.deb ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in plugins/build/distributions/*.rpm ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
when:
event:
- tag

.editorconfig

@ -5,4 +5,7 @@ end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
indent_style = space
indent_size = 4
indent_size = 4
[*.yml]
indent_size = 2

CHANGELOG.md

@ -2,6 +2,31 @@
All notable changes to this project will be documented in this file.
## [1.1.2] - 2023-02-06
- Lowercase client hostnames
## [1.1.1] - 2023-01-22
- Simplify plugin naming
- Initial support for executing (groovy) scripts
- Fixed bug when no config file were found
- Update the default [dashboards](doc/dashboards/)
## [1.1.0] - 2022-12-17
- Lower influx time precision from milliseconds to seconds
- requires you to update server and clients to this version.
- Update *oshi* dependency (for AIX improvements).
## [1.0.24] - 2022-11-16
- Fix incorrect use of OSHI getDiskStores()
- Update dashboards
## [1.0.23] - 2022-11-07
- Update dashboards.
- Lower default interval for most plugins.
- Simplify metrics-results to influx points code.
- Remove logging of skipped disk devices (eg. cd0).
## [1.0.21] - 2022-10-30
- Update dashboard
- Add IP connections
@ -30,9 +55,14 @@ All notable changes to this project will be documented in this file.
### Changed
- Updated 3rd party dependencies.
[1.0.19]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.21%0Dv0.1.18
<!--
[1.1.0]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.1.0%0Dv0.1.24
[1.0.24]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.24%0Dv0.1.23
[1.0.23]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.23%0Dv0.1.21
[1.0.21]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.21%0Dv0.1.18
[1.0.18]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.18%0Dv0.1.13
[0.1.13]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.13%0Dv0.1.11
[0.1.11]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.11%0Dv0.1.10
[0.1.10]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.10%0Dv0.1.9
[0.1.9]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.9%0Dv0.1.8
-->

README.md

@ -1,49 +1,3 @@
# System Monitor
# Repository moved
Open source system monitoring solution with support for plugins.
![Sysmon Icon](doc/sysmon.png)
This software is free to use and is licensed under the [Apache 2.0 License](LICENSE).
- Example dashboards are provided in the [doc/dashboards/](doc/dashboards/) folder, which can be imported into your Grafana installation.
- Screenshots are available in the [downloads](https://bitbucket.org/mnellemann/sysmon/downloads/) section.
## Components
This software consist of a server and client component.
### Server
The server component receives aggregated metrics from *clients* and saves these into InfluxDB.
- More information and documentation on the [sysmon-server](server/README.md).
### Client & Plugins
The client runs on all or some of your hosts and collects metrics, which are then sent to the central sysmon-server component. Plugins are loaded by the client at startup and should also be installed.
- More information and documentation on the [sysmon-client](client/README.md).
. More information and documentation on the [sysmon-plugins](plugins/README.md).
## Known problems
### Correct timezone and clock
- Ensure you have **correct timezone and date/time** and NTPd (or similar) running to keep it accurate!
### Naming collision
You can't have hosts with the same name, as these cannot be distinguished when metrics are
written to InfluxDB (which uses the hostname as key).
### Renaming hosts
If you rename a host, the metrics in InfluxDB will still be available by the old hostname, and new metrics will be written with the new hostname. There is no easy way to migrate the old data, but you can delete it easily:
```text
USE sysmon;
DELETE WHERE hostname = 'unknown';
```
Please visit [github.com/mnellemann/sysmon](https://github.com/mnellemann/sysmon)

build.gradle

@ -27,11 +27,14 @@ subprojects {
mavenCentral()
}
sourceCompatibility = 1.8
targetCompatibility = 1.8
java {
sourceCompatibility = JavaVersion.VERSION_1_8
targetCompatibility = JavaVersion.VERSION_1_8
}
}
tasks.create("packages") {
tasks.register("packages") {
group "build"
dependsOn ":client:buildDeb"
@ -42,4 +45,5 @@ tasks.create("packages") {
dependsOn ":plugins:buildDeb"
dependsOn ":plugins:buildRpm"
dependsOn ":plugins:buildZip"
}

client/build.gradle

@ -3,9 +3,9 @@ import org.redline_rpm.header.Os
plugins {
id 'application'
id "com.github.johnrengelman.shadow" version "7.1.2"
id "net.nemerosa.versioning" version "2.15.1"
id "nebula.ospackage" version "9.1.1"
id "com.github.johnrengelman.shadow" version "7.1.2"
id "com.netflix.nebula.ospackage" version "11.3.0"
}
dependencies {
@ -23,6 +23,9 @@ dependencies {
exclude(group: "org.slf4j")
}
//implementation "org.apache.groovy:groovy-all:${groovyVersion}" // From version 4.+
implementation "org.codehaus.groovy:groovy:${groovyVersion}"
implementation group: 'org.apache.camel', name: 'camel-core', version: camelVersion
implementation group: 'org.apache.camel', name: 'camel-main', version: camelVersion
implementation group: 'org.apache.camel', name: 'camel-http', version: camelVersion
@ -38,7 +41,7 @@ def projectName = "sysmon-client"
application {
// Define the main class for the application.
mainClass.set('sysmon.client.Application')
applicationDefaultJvmArgs = [ "-server", "-Xms64m", "-Xmx64m", "-XX:+ExitOnOutOfMemoryError", "-XX:+AlwaysPreTouch" ]
applicationDefaultJvmArgs = [ "-Xms64m", "-Xmx64m", "-XX:+ExitOnOutOfMemoryError", "-XX:+AlwaysPreTouch" ]
}
run {
@ -73,7 +76,6 @@ shadowJar {
mergeServiceFiles() // Tell plugin to merge duplicate service files
}
apply plugin: 'nebula.ospackage'
ospackage {
packageName = projectName
release = '1'

(AIX client installation notes)

@ -8,7 +8,7 @@ We require Java 8, which should already be installed on AIX, or is available to
The RPM packages are *"noarch"* Java bytecode, so we can use the **--ignoreos** option to install:
```shell
rpm -i --ignoreos sysmon-client.rpm sysmon-plugins.rpm
rpm -ivh --ignoreos sysmon-client-*.rpm sysmon-plugins-*.rpm
```
### Run automatically at boot

(RedHat/RHEL client installation notes)

@ -18,11 +18,9 @@ Use *yum* if *dnf* is not available.
## Installation
[Download](https://bitbucket.org/mnellemann/sysmon/downloads/) the latest client and plugins rpm files and install:
[Download](https://git.data.coop/nellemann/-/packages/generic/sysmon/) the latest client and plugins rpm files and install:
```shell
wget https://bitbucket.org/mnellemann/sysmon/downloads/sysmon-client-1.0.16-1.noarch.rpm
wget https://bitbucket.org/mnellemann/sysmon/downloads/sysmon-plugins-1.0.16-1.noarch.rpm
rpm -ivh sysmon-client-*.noarch.rpm sysmon-plugins-*.noarch.rpm
cp /opt/sysmon/client/doc/sysmon-client.service /etc/systemd/system/
systemctl daemon-reload

ExampleScript.groovy (new file)

@ -0,0 +1,19 @@
class ExampleScript implements MetricScript {
MetricResult getMetrics() {
Map<String,String> tags = new TreeMap<>();
Map<String,Object> fields = new TreeMap<>();
tags.put("type", "temp");
fields.put("sensor1", 23.2);
fields.put("sensor2", 25.8);
Measurement measurement = new Measurement(tags, fields);
return new MetricResult("script_sensors", measurement);
}
}
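The script above is the whole authoring surface for the new scripting feature: drop a Groovy class into the configured scripts directory, implement MetricScript, and return a MetricResult built from tag and field maps. The interface itself is not part of this diff; judging from ExampleScript.groovy and from ScriptWrapper.run() further down, it presumably boils down to a single method, roughly:

```java
// Hypothetical sketch of the sysmon.shared.MetricScript contract, inferred from
// ExampleScript.groovy above and ScriptWrapper.java below; not taken from this diff.
package sysmon.shared;

public interface MetricScript {
    // Called periodically by the client; returns one named measurement
    // (a Measurement of tags and fields wrapped in a MetricResult),
    // or null when there is nothing to report.
    MetricResult getMetrics();
}
```

Any such script found in the configured directory is compiled at client startup and polled on a shared 30-second timer (see addScriptRoute() in ClientRouteBuilder.java below).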

(example scripts README)

@ -0,0 +1,3 @@
# Example Scripts
TODO.

sysmon-client.service

@ -6,6 +6,7 @@ Description=Sysmon Client
#Group=nobody
TimeoutSec=20
Restart=on-failure
# BELOW: Specify sysmon-server URL, add '-n hostname' if needed
ExecStart=/opt/sysmon/client/bin/client -s http://10.20.30.40:9925/metrics
[Install]

(sysmon-client init script)

@ -11,17 +11,20 @@
dir="/opt/sysmon/client"
cmd="/opt/sysmon/client/bin/client"
args="-s http://10.20.30.40:9925/metrics" # Specify sysmon-server URL here
args="-s http://10.20.30.40:9925/metrics" # <- HERE: Specify sysmon-server URL, add '-n hostname' if needed
user=""
name=`basename $0`
name="sysmon-client"
pid_file="/var/run/$name.pid"
stdout_log="/var/log/$name.log"
stderr_log="/var/log/$name.err"
# Uncomment if required
# Java 8+ runtime required - Uncomment and export JAVA_HOME if needed
#JAVA_HOME=/usr/java8_64
#JAVA_HOME=/opt/ibm/ibm-semeru-certified-11-jre
#JAVA_HOME=/opt/ibm-semeru-open-XX-jre
#JAVA_HOME=/opt/ibm-semeru-open-XX-jdk
#JAVA_HOME=/opt/ibm/ibm-semeru-certified-XX-jre
#JAVA_HOME=/opt/ibm/ibm-semeru-certified-XX-jdk
#export JAVA_HOME
get_pid() {

(sysmon-client example configuration, TOML)

@ -1,23 +1,29 @@
###
### Sysmon Client
###
### Example configuration with default values.
### Example configuration with some default values.
###
# Local path for Groovy scripts
scripts = "/opt/sysmon/scripts"
[extension.base_info]
enabled = true
interval = '60m'
[extension.base_disk]
enabled = true
interval = '10s'
[extension.base_filesystem]
enabled = true
interval = '10s'
exclude_type = [ "tmpfs", "ahafs" ]
exclude_mount = [ "/boot/efi" ]
[extension.base_process]
enabled = true
interval = '5m'
include = [
"java", "node", "httpd", "mongod", "mysqld",
"postgres", "influxd", "haproxy", "beam.smp",

Application.java

@ -85,7 +85,7 @@ public class Application implements Callable<Integer> {
try {
configuration.parse(configurationFile.toPath());
} catch (Exception e) {
System.err.println(e.getMessage());
System.err.println("Could not parse configuration file: " + e.getMessage());
return 1;
}
}

ClientRouteBuilder.java

@ -14,32 +14,39 @@ import sysmon.shared.ComboResult;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import javax.script.*;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class ClientRouteBuilder extends RouteBuilder {
private static final Logger log = LoggerFactory.getLogger(ClientRouteBuilder.class);
private final Set<String> scriptFiles = new HashSet<>();
@Override
public void configure() {
Registry registry = getContext().getRegistry();
Configuration configuration = (Configuration) registry.lookupByName("configuration");
Path[] pluginpaths = { Paths.get(registry.lookupByNameAndType("pluginPath", String.class)) };
PluginManager pluginManager = new JarPluginManager(pluginpaths);
Path[] pluginPaths = { Paths.get(registry.lookupByNameAndType("pluginPath", String.class)) };
PluginManager pluginManager = new JarPluginManager(pluginPaths);
pluginManager.loadPlugins();
pluginManager.startPlugins();
List<String> providers = new ArrayList<>();
List<MetricExtension> metricExtensions = pluginManager.getExtensions(MetricExtension.class);
for (MetricExtension ext : metricExtensions) {
final String name = ext.getName();
final String provides = ext.getProvides();
// Load configuration if available
if(configuration.isForExtension(name)) {
@ -48,37 +55,14 @@ public class ClientRouteBuilder extends RouteBuilder {
}
if(ext.isSupported() && ext.isEnabled()) {
// Check that another extension has not already been loaded - TODO: Is this required ?
if(providers.contains(provides)) {
log.warn("Skipping extension (already provided): " + ext.getName());
continue;
}
log.info("Enabling extension: " + ext.getDescription());
providers.add(provides);
// Setup Camel route for this extension
// a unique timer name gives the timer it's own thread, otherwise it's a shared thread for other timers with same name.
String timerName = ext.isThreaded() ? ext.getProvides() : "default";
String timerInterval = (ext.getInterval() != null) ? ext.getInterval() : "30s";
from("timer:"+timerName+"?fixedRate=true&period="+timerInterval)
.bean(ext, "getMetrics")
.outputType(MetricResult.class)
.process(new MetricEnrichProcessor(registry))
.choice().when(exchangeProperty("skip").isEqualTo(true))
.log(LoggingLevel.WARN,"Skipping empty measurement.")
.stop()
.otherwise()
.log("${body}")
.to("seda:metrics?discardWhenFull=true");
addExtensionRoute(ext);
} else {
log.info("Skipping extension (not supported or disabled): " + ext.getDescription());
}
}
from("seda:metrics?purgeWhenStopping=true")
.routeId("aggregation")
.aggregate(constant(true), AggregationStrategies.beanAllowNull(ComboAppender.class, "append"))
.completionTimeout(5000L)
.doTry()
@ -89,6 +73,7 @@ public class ClientRouteBuilder extends RouteBuilder {
.end();
from("seda:outbound?purgeWhenStopping=true")
.routeId("outbound")
.setHeader(Exchange.HTTP_METHOD, constant("POST"))
.doTry()
.marshal(new JacksonDataFormat(ComboResult.class))
@ -98,7 +83,99 @@ public class ClientRouteBuilder extends RouteBuilder {
.log(LoggingLevel.WARN,"Error: ${exception.message}.")
.end();
// Find all local scripts
String scriptsPath = configuration.getScriptPath();
if(scriptsPath != null && Files.isDirectory(Paths.get(scriptsPath))) {
try {
scriptFiles.addAll(listFilesByExtension(scriptsPath, "groovy"));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
// Enable the local scripts
for (String scriptFile : scriptFiles) {
try {
ScriptWrapper scriptWrapper = new ScriptWrapper(scriptsPath, scriptFile);
addScriptRoute(scriptWrapper);
} catch(Exception e) {
log.error("configure() - script error: {}", e.getMessage());
}
}
}
void addScriptRoute(ScriptWrapper script) {
Registry registry = getContext().getRegistry();
from("timer:scripts?fixedRate=true&period=30s")
.routeId(script.toString())
.bean(script, "run")
.outputType(MetricResult.class)
.process(new MetricEnrichProcessor(registry))
.choice().when(exchangeProperty("skip").isEqualTo(true))
.log(LoggingLevel.WARN, "Skipping empty measurement.")
.stop()
.otherwise()
.log("${body}")
.to("seda:metrics?discardWhenFull=true");
}
void addExtensionRoute(MetricExtension ext) {
Registry registry = getContext().getRegistry();
// Setup Camel route for this extension
// a unique timer name gives the timer it's own thread, otherwise it's a shared thread for other timers with same name.
String timerName = ext.isThreaded() ? ext.getName() : "default";
String timerInterval = (ext.getInterval() != null) ? ext.getInterval() : "30s";
from("timer:" + timerName + "?fixedRate=true&period=" + timerInterval)
.routeId(ext.getName())
.bean(ext, "getMetrics")
.outputType(MetricResult.class)
.process(new MetricEnrichProcessor(registry))
.choice().when(exchangeProperty("skip").isEqualTo(true))
.log(LoggingLevel.WARN, "Skipping empty measurement.")
.stop()
.otherwise()
.log("${body}")
.to("seda:metrics?discardWhenFull=true");
}
List<String> findScripts(String location) {
log.info("Looking for scripts in: {}", location);
List<String> scripts = new ArrayList<>();
ScriptEngineManager manager = new ScriptEngineManager();
List<ScriptEngineFactory> factoryList = manager.getEngineFactories();
for (ScriptEngineFactory factory : factoryList) {
log.info("findScripts() - Supporting: {}", factory.getLanguageName());
for(String ex : factory.getExtensions()) {
log.info("findScripts() - Extension: {}", ex);
try {
scripts.addAll(listFilesByExtension(location, ex));
log.warn(scripts.toString());
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
return scripts;
}
Set<String> listFilesByExtension(String dir, String ext) throws IOException {
try (Stream<Path> stream = Files.list(Paths.get(dir))) {
return stream
.filter(file -> !Files.isDirectory(file))
.map(Path::getFileName)
.map(Path::toString)
.filter(s -> s.endsWith(ext))
.collect(Collectors.toSet());
}
}
}

Configuration.java

@ -65,4 +65,14 @@ public final class Configuration {
return map;
}
String getScriptPath() {
if(result == null) {
log.debug("No configuration file loaded ...");
return null;
}
return result.getString("scripts");
}
}

MetricEnrichProcessor.java

@ -32,4 +32,4 @@ public class MetricEnrichProcessor implements Processor {
exchange.getIn().setBody(metricResult);
}
}
}

ScriptWrapper.java (new file)

@ -0,0 +1,47 @@
package sysmon.client;
import groovy.lang.GroovyClassLoader;
import groovy.lang.GroovyObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.MetricResult;
import sysmon.shared.MetricScript;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
public class ScriptWrapper {
private static final Logger log = LoggerFactory.getLogger(ScriptWrapper.class);
private final static GroovyClassLoader loader = new GroovyClassLoader();
private GroovyObject script;
private final String name;
public ScriptWrapper(String scriptPath, String scriptFile) {
name = scriptFile;
try {
Class<?> scriptClass = loader.parseClass(new File(scriptPath, scriptFile));
script = (GroovyObject) scriptClass.getDeclaredConstructor().newInstance();
} catch (IOException |InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) {
log.error("ScriptWrapper() - error: {}", e.getMessage());
}
}
MetricResult run() {
MetricResult result = null;
if (script != null && script instanceof MetricScript) {
result = (MetricResult) script.invokeMethod("getMetrics", null);
}
return result;
}
@Override
public String toString() {
return name;
}
}
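As a usage note, the wrapper is intentionally thin: ClientRouteBuilder creates one instance per discovered script and binds its run() method into a Camel timer route. The same call sequence works outside Camel, which is handy for trying a script by hand. A minimal sketch, assuming the default scripts directory from the example configuration above and the hypothetical file name ExampleScript.groovy:

```java
package sysmon.client;

import sysmon.shared.MetricResult;

// Hypothetical driver for exercising a single script outside the Camel routes.
public class ScriptWrapperDemo {
    public static void main(String[] args) {
        // Path and file name are illustrative; the real path comes from the
        // "scripts" key in the client TOML configuration.
        ScriptWrapper wrapper = new ScriptWrapper("/opt/sysmon/scripts", "ExampleScript.groovy");
        MetricResult result = wrapper.run(); // null if the script could not be loaded or is not a MetricScript
        System.out.println(result);
    }
}
```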

(sysmon-client Camel properties)

@ -15,21 +15,7 @@
## limitations under the License.
## ---------------------------------------------------------------------------
# to configure camel main
# here you can configure options on camel main (see MainConfigurationProperties class)
camel.main.name = sysmon-client
# enable tracing
#camel.main.tracing = true
# bean introspection to log reflection based configuration
#camel.main.beanIntrospectionExtendedStatistics=true
#camel.main.beanIntrospectionLoggingLevel=INFO
# run in lightweight mode to be tiny as possible
camel.main.jmxEnabled = false
camel.main.lightweight = true
# and eager load classes
#camel.main.eager-classloading = true
# limit the seda queue size
camel.component.seda.queue-size=10
camel.component.seda.queue-size = 100

(Grafana dashboard: Sysmon - Host Overview)

@ -2,7 +2,7 @@
"__inputs": [
{
"name": "DS_SYSMON",
"label": "sysmon",
"label": "Database",
"description": "",
"type": "datasource",
"pluginId": "influxdb",
@ -35,6 +35,12 @@
"name": "Stat",
"version": ""
},
{
"type": "panel",
"id": "table",
"name": "Table",
"version": ""
},
{
"type": "panel",
"id": "text",
@ -70,7 +76,7 @@
}
]
},
"description": "https://bitbucket.org/mnellemann/sysmon/ - Metrics from sysmon agent.",
"description": "https://git.data.coop/nellemann/sysmon/ - Metrics from sysmon agent.",
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
@ -91,7 +97,7 @@
},
"id": 28,
"options": {
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information: [bitbucket.org/mnellemann/sysmon](https://bitbucket.org/mnellemann/sysmon)\n ",
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information visit: [git.data.coop/nellemann/sysmon](https://git.data.coop/nellemann/sysmon)\n ",
"mode": "markdown"
},
"pluginVersion": "9.1.6",
@ -115,8 +121,10 @@
"description": "",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
"custom": {
"align": "center",
"displayMode": "auto",
"inspect": false
},
"mappings": [],
"thresholds": {
@ -128,30 +136,26 @@
}
]
},
"unit": "text"
"unit": "string"
},
"overrides": []
},
"gridPos": {
"h": 6,
"h": 3,
"w": 24,
"x": 0,
"y": 3
},
"id": 35,
"options": {
"colorMode": "none",
"graphMode": "none",
"justifyMode": "center",
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
"footer": {
"fields": "",
"reducer": [
"sum"
],
"fields": "/.*/",
"values": false
"show": false
},
"textMode": "value_and_name"
"showHeader": true
},
"pluginVersion": "9.1.6",
"targets": [
@ -184,7 +188,7 @@
"measurement": "base_info",
"orderByTime": "DESC",
"policy": "default",
"query": "SELECT last(\"os_manufacturer\") AS \"manufacturer\", last(\"os_family\") AS \"family\", last(\"os_version\") AS \"version\", last(\"os_codename\") AS \"codename\", last(\"os_build\") AS \"build\", last(\"boot_time\") * 1000 AS \"boottime\" FROM \"base_info\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval), \"hostname\" fill(null) ORDER BY time DESC LIMIT 1000",
"query": "SELECT last(\"os_manufacturer\") AS \"manufacturer\", last(\"os_family\") AS \"family\", last(\"os_version\") AS \"version\", last(\"os_codename\") AS \"codename\", last(\"os_build\") AS \"build\", last(\"boot_time\") * 1000 AS \"boottime\" FROM \"base_info\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY \"hostname\" fill(previous) ORDER BY time DESC LIMIT 1",
"rawQuery": true,
"refId": "A",
"resultFormat": "table",
@ -350,7 +354,8 @@
}
}
],
"type": "stat"
"transparent": true,
"type": "table"
},
{
"datasource": {
@ -415,7 +420,7 @@
"h": 5,
"w": 12,
"x": 0,
"y": 9
"y": 6
},
"id": 2,
"options": {
@ -560,7 +565,7 @@
{
"matcher": {
"id": "byName",
"options": "aix_processor.mode"
"options": "power_processor.mode"
},
"properties": [
{
@ -580,7 +585,7 @@
{
"matcher": {
"id": "byName",
"options": "aix_processor.type"
"options": "power_processor.type"
},
"properties": [
{
@ -600,7 +605,7 @@
{
"matcher": {
"id": "byName",
"options": "aix_processor.ent"
"options": "power_processor.ent"
},
"properties": [
{
@ -616,7 +621,7 @@
{
"matcher": {
"id": "byName",
"options": "aix_processor.lcpu"
"options": "power_processor.lcpu"
},
"properties": [
{
@ -632,7 +637,7 @@
{
"matcher": {
"id": "byName",
"options": "aix_processor.smt"
"options": "power_processor.smt"
},
"properties": [
{
@ -647,7 +652,7 @@
"h": 5,
"w": 12,
"x": 12,
"y": 9
"y": 6
},
"id": 16,
"options": {
@ -686,9 +691,11 @@
"type": "fill"
}
],
"measurement": "aix_processor",
"measurement": "power_processor",
"orderByTime": "ASC",
"policy": "default",
"query": "SELECT last(\"mode\") AS \"mode\", last(\"type\") AS \"type\", last(\"ent\") AS \"ent\", last(\"lcpu\") AS \"lcpu\", last(\"smt\") AS \"smt\" FROM \"power_processor\" WHERE (\"hostname\" =~ /^$hostname$/) AND $timeFilter GROUP BY time($__interval) fill(previous) SLIMIT 1",
"rawQuery": true,
"refId": "A",
"resultFormat": "time_series",
"select": [
@ -800,11 +807,11 @@
"options": {
"include": {
"names": [
"aix_processor.mode",
"aix_processor.type",
"aix_processor.ent",
"aix_processor.lcpu",
"aix_processor.smt"
"power_processor.mode",
"power_processor.type",
"power_processor.ent",
"power_processor.lcpu",
"power_processor.smt"
]
}
}
@ -876,7 +883,7 @@
"h": 6,
"w": 12,
"x": 0,
"y": 14
"y": 11
},
"id": 29,
"options": {
@ -908,7 +915,7 @@
},
{
"params": [
"none"
"null"
],
"type": "fill"
}
@ -932,7 +939,7 @@
},
{
"params": [
10
5
],
"type": "moving_average"
},
@ -956,7 +963,7 @@
},
{
"params": [
10
5
],
"type": "moving_average"
},
@ -980,7 +987,7 @@
},
{
"params": [
10
5
],
"type": "moving_average"
},
@ -1004,7 +1011,7 @@
},
{
"params": [
10
5
],
"type": "moving_average"
},
@ -1028,7 +1035,7 @@
},
{
"params": [
10
5
],
"type": "moving_average"
},
@ -1052,7 +1059,7 @@
},
{
"params": [
10
5
],
"type": "moving_average"
},
@ -1137,7 +1144,7 @@
"h": 6,
"w": 4,
"x": 12,
"y": 14
"y": 11
},
"id": 19,
"options": {
@ -1169,7 +1176,7 @@
},
{
"params": [
"none"
"null"
],
"type": "fill"
}
@ -1189,13 +1196,11 @@
},
{
"params": [],
"type": "mean"
"type": "last"
},
{
"params": [
"1s"
],
"type": "non_negative_derivative"
"params": [],
"type": "non_negative_difference"
},
{
"params": [
@ -1213,13 +1218,11 @@
},
{
"params": [],
"type": "mean"
"type": "last"
},
{
"params": [
"10s"
],
"type": "non_negative_derivative"
"params": [],
"type": "non_negative_difference"
},
{
"params": [
@ -1270,7 +1273,7 @@
{
"matcher": {
"id": "byName",
"options": "aix_processor.entc"
"options": "power_processor.entc"
},
"properties": [
{
@ -1286,7 +1289,7 @@
{
"matcher": {
"id": "byName",
"options": "aix_processor.physc"
"options": "power_processor.physc"
},
"properties": [
{
@ -1298,7 +1301,7 @@
{
"matcher": {
"id": "byName",
"options": "aix_processor.lbusy"
"options": "power_processor.lbusy"
},
"properties": [
{
@ -1317,7 +1320,7 @@
"h": 6,
"w": 8,
"x": 16,
"y": 14
"y": 11
},
"id": 17,
"options": {
@ -1356,7 +1359,7 @@
"type": "fill"
}
],
"measurement": "aix_processor",
"measurement": "power_processor",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
@ -1461,7 +1464,7 @@
"type": "linear"
},
"showPoints": "never",
"spanNulls": true,
"spanNulls": 3600000,
"stacking": {
"group": "A",
"mode": "normal"
@ -1492,7 +1495,7 @@
"h": 9,
"w": 12,
"x": 0,
"y": 20
"y": 17
},
"id": 10,
"options": {
@ -1560,13 +1563,13 @@
},
{
"params": [
"1s"
"$__interval"
],
"type": "non_negative_derivative"
},
{
"params": [
10
5
],
"type": "moving_average"
},
@ -1652,7 +1655,7 @@
"h": 9,
"w": 12,
"x": 12,
"y": 20
"y": 17
},
"id": 30,
"options": {
@ -1720,13 +1723,13 @@
},
{
"params": [
"1s"
"$__interval"
],
"type": "non_negative_derivative"
},
{
"params": [
10
5
],
"type": "moving_average"
},
@ -1813,7 +1816,7 @@
"h": 9,
"w": 12,
"x": 0,
"y": 29
"y": 26
},
"id": 18,
"options": {
@ -1875,15 +1878,15 @@
},
{
"params": [
"1s"
5
],
"type": "non_negative_derivative"
"type": "moving_average"
},
{
"params": [
10
"$__interval"
],
"type": "moving_average"
"type": "non_negative_derivative"
},
{
"params": [
@ -1968,7 +1971,7 @@
"h": 9,
"w": 12,
"x": 12,
"y": 29
"y": 26
},
"id": 31,
"options": {
@ -2030,7 +2033,13 @@
},
{
"params": [
"1s"
5
],
"type": "moving_average"
},
{
"params": [
"$__interval"
],
"type": "non_negative_derivative"
},
@ -2182,7 +2191,7 @@
"h": 5,
"w": 24,
"x": 0,
"y": 38
"y": 35
},
"id": 22,
"options": {
@ -2236,7 +2245,13 @@
},
{
"params": [],
"type": "mean"
"type": "last"
},
{
"params": [
"$__interval"
],
"type": "non_negative_derivative"
},
{
"params": [
@ -2254,11 +2269,13 @@
},
{
"params": [],
"type": "mean"
"type": "last"
},
{
"params": [],
"type": "non_negative_difference"
"params": [
"$__interval"
],
"type": "non_negative_derivative"
},
{
"params": [
@ -2276,11 +2293,13 @@
},
{
"params": [],
"type": "mean"
"type": "last"
},
{
"params": [],
"type": "non_negative_difference"
"params": [
"$__interval"
],
"type": "non_negative_derivative"
},
{
"params": [
@ -2298,11 +2317,13 @@
},
{
"params": [],
"type": "mean"
"type": "last"
},
{
"params": [],
"type": "non_negative_difference"
"params": [
"$__interval"
],
"type": "non_negative_derivative"
},
{
"params": [
@ -2320,11 +2341,13 @@
},
{
"params": [],
"type": "mean"
"type": "last"
},
{
"params": [],
"type": "non_negative_difference"
"params": [
"$__interval"
],
"type": "non_negative_derivative"
},
{
"params": [
@ -2342,11 +2365,13 @@
},
{
"params": [],
"type": "mean"
"type": "last"
},
{
"params": [],
"type": "non_negative_difference"
"params": [
"$__interval"
],
"type": "non_negative_derivative"
},
{
"params": [
@ -2364,11 +2389,13 @@
},
{
"params": [],
"type": "mean"
"type": "last"
},
{
"params": [],
"type": "non_negative_difference"
"params": [
"$__interval"
],
"type": "non_negative_derivative"
},
{
"params": [
@ -2386,11 +2413,13 @@
},
{
"params": [],
"type": "mean"
"type": "last"
},
{
"params": [],
"type": "non_negative_difference"
"params": [
"$__interval"
],
"type": "non_negative_derivative"
},
{
"params": [
@ -2474,7 +2503,7 @@
"h": 10,
"w": 12,
"x": 0,
"y": 43
"y": 40
},
"id": 24,
"options": {
@ -2543,14 +2572,16 @@
"type": "mean"
},
{
"params": [],
"type": "non_negative_difference"
"params": [
5
],
"type": "moving_average"
},
{
"params": [
10
"$__interval"
],
"type": "moving_average"
"type": "non_negative_derivative"
}
]
],
@ -2627,7 +2658,7 @@
"h": 10,
"w": 12,
"x": 12,
"y": 43
"y": 40
},
"id": 25,
"options": {
@ -2693,7 +2724,7 @@
},
{
"params": [],
"type": "last"
"type": "mean"
}
]
],
@ -2772,7 +2803,7 @@
"h": 9,
"w": 12,
"x": 0,
"y": 53
"y": 50
},
"id": 8,
"options": {
@ -2970,7 +3001,7 @@
"h": 9,
"w": 12,
"x": 12,
"y": 53
"y": 50
},
"id": 26,
"options": {
@ -3107,12 +3138,13 @@
]
},
"time": {
"from": "now-2d",
"to": "now-30s"
"from": "now-24h",
"to": "now-10s"
},
"timepicker": {
"nowDelay": "30s",
"nowDelay": "10s",
"refresh_intervals": [
"10s",
"30s",
"1m",
"5m",
@ -3126,6 +3158,6 @@
"timezone": "",
"title": "Sysmon - Host Overview",
"uid": "QkVPjseMt",
"version": 9,
"version": 23,
"weekStart": ""
}
}

(Grafana dashboard: Sysmon - IBM Power)

@ -2,7 +2,7 @@
"__inputs": [
{
"name": "DS_SYSMON",
"label": "sysmon",
"label": "Database",
"description": "",
"type": "datasource",
"pluginId": "influxdb",
@ -15,7 +15,7 @@
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "9.1.3"
"version": "9.1.6"
},
{
"type": "datasource",
@ -58,7 +58,7 @@
}
]
},
"description": "https://bitbucket.org/mnellemann/sysmon/ - Metrics from sysmon agent.",
"description": "https://git.data.coop/nellemann/sysmon/ - Metrics from sysmon agent.",
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
@ -79,10 +79,10 @@
},
"id": 28,
"options": {
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information: [bitbucket.org/mnellemann/sysmon](https://bitbucket.org/mnellemann/sysmon)\n ",
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information visit: [git.data.coop/nellemann/sysmon](https://git.data.coop/nellemann/sysmon)\n ",
"mode": "markdown"
},
"pluginVersion": "9.1.3",
"pluginVersion": "9.1.6",
"targets": [
{
"datasource": {
@ -126,8 +126,8 @@
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"showPoints": "auto",
"spanNulls": 3600000,
"stacking": {
"group": "A",
"mode": "none"
@ -155,7 +155,7 @@
"overrides": []
},
"gridPos": {
"h": 14,
"h": 13,
"w": 12,
"x": 0,
"y": 3
@ -218,6 +218,12 @@
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
},
{
"params": [
"5min"
@ -270,7 +276,7 @@
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"spanNulls": 3600000,
"stacking": {
"group": "A",
"mode": "none"
@ -299,7 +305,7 @@
"overrides": []
},
"gridPos": {
"h": 14,
"h": 13,
"w": 12,
"x": 12,
"y": 3
@ -345,7 +351,7 @@
"type": "fill"
}
],
"measurement": "aix_processor",
"measurement": "power_processor",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
@ -361,6 +367,12 @@
{
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
}
]
],
@ -408,7 +420,7 @@
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"spanNulls": 3600000,
"stacking": {
"group": "A",
"mode": "none"
@ -437,10 +449,10 @@
"overrides": []
},
"gridPos": {
"h": 15,
"h": 14,
"w": 12,
"x": 0,
"y": 17
"y": 16
},
"id": 30,
"options": {
@ -483,7 +495,7 @@
"type": "fill"
}
],
"measurement": "aix_processor",
"measurement": "power_processor",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
@ -499,6 +511,12 @@
{
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
}
]
],
@ -547,7 +565,7 @@
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"spanNulls": 3600000,
"stacking": {
"group": "A",
"mode": "none"
@ -576,10 +594,10 @@
"overrides": []
},
"gridPos": {
"h": 15,
"h": 14,
"w": 12,
"x": 12,
"y": 17
"y": 16
},
"id": 31,
"options": {
@ -622,7 +640,7 @@
"type": "fill"
}
],
"measurement": "aix_processor",
"measurement": "power_processor",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
@ -638,6 +656,12 @@
{
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
}
]
],
@ -669,14 +693,14 @@
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"definition": "SHOW TAG VALUES FROM \"base_memory\" WITH KEY = \"hostname\" WHERE time > now() - 60m",
"definition": "SHOW TAG VALUES FROM \"power_processor\" WITH KEY = \"hostname\" WHERE time > now() - 60m",
"hide": 0,
"includeAll": true,
"label": "Host",
"multi": true,
"name": "hostname",
"options": [],
"query": "SHOW TAG VALUES FROM \"base_memory\" WITH KEY = \"hostname\" WHERE time > now() - 60m",
"query": "SHOW TAG VALUES FROM \"power_processor\" WITH KEY = \"hostname\" WHERE time > now() - 60m",
"refresh": 2,
"regex": "",
"skipUrlSync": false,
@ -690,11 +714,12 @@
},
"time": {
"from": "now-7d",
"to": "now-30s"
"to": "now-10s"
},
"timepicker": {
"nowDelay": "30s",
"nowDelay": "10s",
"refresh_intervals": [
"10s",
"30s",
"1m",
"5m",
@ -706,8 +731,8 @@
]
},
"timezone": "",
"title": "Sysmon - Power Performance",
"title": "Sysmon - IBM Power",
"uid": "3zPCIbN4z",
"version": 7,
"weekStart": ""
}
}

(Grafana dashboard: Sysmon - Process Explorer)

@ -2,7 +2,7 @@
"__inputs": [
{
"name": "DS_SYSMON",
"label": "sysmon",
"label": "Database",
"description": "",
"type": "datasource",
"pluginId": "influxdb",
@ -70,7 +70,7 @@
}
]
},
"description": "https://bitbucket.org/mnellemann/sysmon/ - Metrics from within host / guest / partition.",
"description": "https://git.data.coop/nellemann/sysmon/ - Metrics from within host / guest / partition.",
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
@ -91,7 +91,7 @@
},
"id": 30,
"options": {
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information: [bitbucket.org/mnellemann/sysmon](https://bitbucket.org/mnellemann/sysmon)\n ",
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information visit: [git.data.coop/nellemann/sysmon](https://git.data.coop/nellemann/sysmon)\n ",
"mode": "markdown"
},
"pluginVersion": "9.1.6",
@ -697,7 +697,7 @@
},
{
"params": [
"null"
"none"
],
"type": "fill"
}
@ -722,8 +722,10 @@
"type": "sum"
},
{
"params": [],
"type": "non_negative_difference"
"params": [
"$__interval"
],
"type": "non_negative_derivative"
},
{
"params": [
@ -744,8 +746,10 @@
"type": "sum"
},
{
"params": [],
"type": "non_negative_difference"
"params": [
"$__interval"
],
"type": "non_negative_derivative"
},
{
"params": [
@ -961,7 +965,9 @@
"refresh": "1m",
"schemaVersion": 37,
"style": "dark",
"tags": [],
"tags": [
"sysmon"
],
"templating": {
"list": [
{
@ -1049,6 +1055,6 @@
"timezone": "",
"title": "Sysmon - Process Explorer",
"uid": "Vjut5mS7k",
"version": 3,
"version": 5,
"weekStart": ""
}
}

doc/screenshots/sysmon1.png (new binary file, 265 KiB)

doc/screenshots/sysmon2.png (new binary file, 128 KiB)

gradle.properties

@ -1,9 +1,9 @@
version = 1.0.21
pf4jVersion = 3.7.0
slf4jVersion = 2.0.3
camelVersion = 3.14.5
groovyVersion = 3.0.13
picocliVersion = 4.6.3
oshiVersion = 6.3.1
version = 1.1.4
pf4jVersion = 3.9.0
slf4jVersion = 2.0.9
camelVersion = 3.14.9
groovyVersion = 3.0.18
picocliVersion = 4.7.5
oshiVersion = 6.4.7
spockVersion = 2.3-groovy-3.0
tomljVersion = 1.1.0

gradle/wrapper/gradle-wrapper.jar (binary file, diff not shown)

gradle/wrapper/gradle-wrapper.properties

@ -1,5 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.1-bin.zip
networkTimeout=10000
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

gradlew (vendored)

@ -55,7 +55,7 @@
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
@ -80,10 +80,10 @@ do
esac
done
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
APP_NAME="Gradle"
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
@ -143,12 +143,16 @@ fi
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
@ -205,6 +209,12 @@ set -- \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.

gradlew.bat (vendored)

@ -14,7 +14,7 @@
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@ -25,7 +25,8 @@
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
if "%DIRNAME%"=="" set DIRNAME=.
@rem This is normally unused
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@ -40,7 +41,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
if %ERRORLEVEL% equ 0 goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
@ -75,13 +76,15 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal

plugins/README.md

@ -2,6 +2,5 @@
Collection of standard sysmon plugins for use with the client.
- [base](os-base/README.md) - Base OS metrics (uses [oshi](https://github.com/oshi/oshi))
- [aix](os-aix/README.md) - AIX (and IBM Power) specific metrics
- [linux](os-linux/README.md) - Linux specific metrics
- [base](base/README.md) - Base OS metrics (uses [oshi](https://github.com/oshi/oshi))
- [power](power/README.md) - IBM Power specific metrics

(base plugin README)

@ -1,6 +1,6 @@
# Base Plugin
The base plugin uses the [oshi](https://github.com/oshi/oshi) library to get it's metrics.
The base plugins uses the [oshi](https://github.com/oshi/oshi) library to get it's metrics.
## Processor Extension
@ -51,6 +51,7 @@ Metrics reported are:
```toml
[extension.base_filesystem]
enabled = true
interval = "10s"
exclude_type = [ "tmpfs", "ahafs" ]
exclude_mount = [ "/boot/efi" ]
```
@ -79,5 +80,6 @@ The **include** option let's you specify what processes to report for.
```toml
[extension.base_process]
enabled = true # true or false
interval = "10s"
include = [ "java", "influxd", "grafana-server" ]
```

(sysmon-base plugin descriptor)

@ -1,5 +1,5 @@
pluginId=sysmon-base
pluginClass=sysmon.plugins.os_base.BasePlugin
pluginClass=sysmon.plugins.base.BasePlugin
pluginDependencies=
pluginDescription=Base OS metrics where supported.

BaseDiskExtension.java

@ -1,4 +1,4 @@
package sysmon.plugins.os_base;
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
@ -9,10 +9,7 @@ import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.*;
@Extension
public class BaseDiskExtension implements MetricExtension {
@ -21,14 +18,16 @@ public class BaseDiskExtension implements MetricExtension {
// Extension details
private final String name = "base_disk";
private final String provides = "disk";
private final String description = "Base Disk Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "10s";
private HardwareAbstractionLayer hardwareAbstractionLayer;
private List<HWDiskStore> diskStores;
private int refreshCounter = 0;
@Override
@ -54,12 +53,7 @@ public class BaseDiskExtension implements MetricExtension {
@Override
public String getInterval() {
return null;
}
@Override
public String getProvides() {
return provides;
return interval;
}
@Override
@ -75,24 +69,37 @@ public class BaseDiskExtension implements MetricExtension {
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
}
@Override
public MetricResult getMetrics() {
ArrayList<Measurement> measurementList = new ArrayList<>();
List<HWDiskStore> diskStores = hardwareAbstractionLayer.getDiskStores();
if(diskStores == null || refreshCounter++ > 360) {
log.debug("getMetrics() - refreshing list of disk stores");
diskStores = hardwareAbstractionLayer.getDiskStores();
refreshCounter = 0;
}
for(HWDiskStore store : diskStores) {
store.updateAttributes();
String name = store.getName();
if (name.matches("h?disk[0-9]+") || name.matches("/dev/x?[sv]d[a-z]") || name.matches("/dev/nvme[0-9]n[0-9]") || name.startsWith("\\\\.\\PHYSICALDRIVE")) {
if (name.matches("h?disk[0-9]+") ||
//name.matches("/dev/dm-[0-9]+") ||
name.matches("/dev/x?[sv]d[a-z]") ||
name.matches("/dev/nvme[0-9]n[0-9]") ||
name.startsWith("\\\\.\\PHYSICALDRIVE")
) {
HashMap<String, String> tagsMap = new HashMap<String, String>() {{
TreeMap<String, String> tagsMap = new TreeMap<String, String>() {{
put("name", name);
}};
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
put("read", store.getReadBytes());
put("write", store.getWriteBytes());
put("iotime", store.getTransferTime());

BaseFilesystemExtension.java

@ -1,19 +1,15 @@
package sysmon.plugins.os_base;
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.hardware.HardwareAbstractionLayer;
import oshi.software.os.OSFileStore;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.*;
@Extension
public class BaseFilesystemExtension implements MetricExtension {
@ -22,12 +18,12 @@ public class BaseFilesystemExtension implements MetricExtension {
// Extension details
private final String name = "base_filesystem";
private final String provides = "filesystem";
private final String description = "Base Filesystem Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "10s";
private List<?> excludeType = new ArrayList<String>() {{
add("tmpfs");
add("ahafs");
@ -36,8 +32,9 @@ public class BaseFilesystemExtension implements MetricExtension {
add("/boot/efi");
}};
private HardwareAbstractionLayer hardwareAbstractionLayer;
private SystemInfo systemInfo;
private List<OSFileStore> fileStores;
private int refreshCounter = 0;
@Override
@ -64,12 +61,7 @@ public class BaseFilesystemExtension implements MetricExtension {
@Override
public String getInterval() {
return null;
}
@Override
public String getProvides() {
return provides;
return interval;
}
@Override
@ -87,6 +79,10 @@ public class BaseFilesystemExtension implements MetricExtension {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
if(map.containsKey("exclude_type")) {
excludeType = (List<?>) map.get("exclude_type");
}
@ -101,7 +97,10 @@ public class BaseFilesystemExtension implements MetricExtension {
ArrayList<String> alreadyProcessed = new ArrayList<>();
ArrayList<Measurement> measurementList = new ArrayList<>();
List<OSFileStore> fileStores = systemInfo.getOperatingSystem().getFileSystem().getFileStores(true);
if(fileStores == null || refreshCounter++ > 360) {
fileStores = systemInfo.getOperatingSystem().getFileSystem().getFileStores(true);
}
for(OSFileStore store : fileStores) {
@ -123,15 +122,17 @@ public class BaseFilesystemExtension implements MetricExtension {
log.debug("Skipping name: " + name);
continue;
}
alreadyProcessed.add(name);
HashMap<String, String> tagsMap = new HashMap<String, String>() {{
alreadyProcessed.add(name);
store.updateAttributes();
TreeMap<String, String> tagsMap = new TreeMap<String, String>() {{
put("name", name);
put("type", type);
put("mount", mount);
}};
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
put("free_bytes", store.getFreeSpace());
put("total_bytes", store.getTotalSpace());
put("free_inodes", store.getFreeInodes());

BaseInfoExtension.java

@ -1,4 +1,4 @@
package sysmon.plugins.os_base;
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
@ -10,6 +10,7 @@ import sysmon.shared.MetricResult;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class BaseInfoExtension implements MetricExtension {
@ -18,7 +19,6 @@ public class BaseInfoExtension implements MetricExtension {
// Extension details
private final String name = "base_info";
private final String provides = "info";
private final String description = "Base System Information";
// Configuration / Options
@ -51,11 +51,6 @@ public class BaseInfoExtension implements MetricExtension {
return name;
}
@Override
public String getProvides() {
return provides;
}
@Override
public String getInterval() { return interval; }
@ -80,7 +75,7 @@ public class BaseInfoExtension implements MetricExtension {
@Override
public MetricResult getMetrics() {
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
put("os_manufacturer", systemInfo.getOperatingSystem().getManufacturer()); // GNU/Linux / IBM
put("os_family", systemInfo.getOperatingSystem().getFamily()); // Freedesktop.org / AIX
put("os_codename", systemInfo.getOperatingSystem().getVersionInfo().getCodeName()); // Flatpak runtime / ppc64
@ -89,7 +84,6 @@ public class BaseInfoExtension implements MetricExtension {
put("boot_time", systemInfo.getOperatingSystem().getSystemBootTime());
}};
log.info(fieldsMap.toString());
return new MetricResult(name, new Measurement(tags, fieldsMap));
}

BaseLoadExtension.java

@ -1,4 +1,4 @@
package sysmon.plugins.os_base;
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
@ -8,8 +8,8 @@ import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class BaseLoadExtension implements MetricExtension {
@ -18,12 +18,12 @@ public class BaseLoadExtension implements MetricExtension {
// Extension details
private final String name = "base_load";
private final String provides = "load";
private final String description = "Base Load Average Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "10s";
private HardwareAbstractionLayer hardwareAbstractionLayer;
@ -50,12 +50,7 @@ public class BaseLoadExtension implements MetricExtension {
@Override
public String getInterval() {
return null;
}
@Override
public String getProvides() {
return provides;
return interval;
}
@Override
@ -71,20 +66,23 @@ public class BaseLoadExtension implements MetricExtension {
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
}
@Override
public MetricResult getMetrics() {
double[] loadAvg = hardwareAbstractionLayer.getProcessor().getSystemLoadAverage(3);
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
put("1min", loadAvg[0]);
put("5min", loadAvg[1]);
put("15min", loadAvg[2]);
}};
log.debug(fieldsMap.toString());
return new MetricResult(name, new Measurement(new HashMap<String, String>(), fieldsMap));
return new MetricResult(name, new Measurement(new TreeMap<>(), fieldsMap));
}
}

BaseMemoryExtension.java

@ -1,4 +1,4 @@
package sysmon.plugins.os_base;
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
@ -8,8 +8,8 @@ import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class BaseMemoryExtension implements MetricExtension {
@ -18,12 +18,12 @@ public class BaseMemoryExtension implements MetricExtension {
// Extension details
private final String name = "base_memory";
private final String provides = "memory";
private final String description = "Base Memory Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "10s";
private HardwareAbstractionLayer hardwareAbstractionLayer;
@ -51,12 +51,7 @@ public class BaseMemoryExtension implements MetricExtension {
@Override
public String getInterval() {
return null;
}
@Override
public String getProvides() {
return provides;
return interval;
}
@Override
@ -72,13 +67,16 @@ public class BaseMemoryExtension implements MetricExtension {
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
}
@Override
public MetricResult getMetrics() {
HashMap<String, String> tagsMap = new HashMap<>();
HashMap<String, Object> fieldsMap = new HashMap<>();
TreeMap<String, String> tagsMap = new TreeMap<>();
TreeMap<String, Object> fieldsMap = new TreeMap<>();
long total = hardwareAbstractionLayer.getMemory().getTotal();
long available = hardwareAbstractionLayer.getMemory().getAvailable();

BaseNetstatExtension.java

@ -1,4 +1,4 @@
package sysmon.plugins.os_base;
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
@ -8,8 +8,8 @@ import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class BaseNetstatExtension implements MetricExtension {
@ -18,12 +18,12 @@ public class BaseNetstatExtension implements MetricExtension {
// Extension details
private final String name = "base_netstat";
private final String provides = "netstat";
private final String description = "Base Netstat Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "10s";
private SystemInfo systemInfo;
@ -51,12 +51,7 @@ public class BaseNetstatExtension implements MetricExtension {
@Override
public String getInterval() {
return null;
}
@Override
public String getProvides() {
return provides;
return interval;
}
@Override
@ -72,12 +67,15 @@ public class BaseNetstatExtension implements MetricExtension {
if (map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
}
@Override
public MetricResult getMetrics() {
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
put("ip_conn_total", systemInfo.getOperatingSystem().getInternetProtocolStats().getConnections().size());
@ -103,7 +101,7 @@ public class BaseNetstatExtension implements MetricExtension {
}};
return new MetricResult(name, new Measurement(new HashMap<>(), fieldsMap));
return new MetricResult(name, new Measurement(new TreeMap<>(), fieldsMap));
}
}


@ -1,4 +1,4 @@
package sysmon.plugins.os_base;
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
@ -9,10 +9,7 @@ import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.*;
@Extension
public class BaseNetworkExtension implements MetricExtension {
@ -21,14 +18,16 @@ public class BaseNetworkExtension implements MetricExtension {
// Extension details
private final String name = "base_network";
private final String provides = "network";
private final String description = "Base Network Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "10s";
private HardwareAbstractionLayer hardwareAbstractionLayer;
private List<NetworkIF> interfaces;
private int refreshCounter = 0;
@Override
@ -54,12 +53,7 @@ public class BaseNetworkExtension implements MetricExtension {
@Override
public String getInterval() {
return null;
}
@Override
public String getProvides() {
return provides;
return interval;
}
@Override
@ -75,21 +69,28 @@ public class BaseNetworkExtension implements MetricExtension {
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
}
@Override
public MetricResult getMetrics() {
ArrayList<Measurement> measurementList = new ArrayList<>();
if(interfaces == null || refreshCounter++ > 360) {
log.debug("getMetrics() - refreshing list of network interfaces");
interfaces = hardwareAbstractionLayer.getNetworkIFs();
refreshCounter = 0;
}
List<NetworkIF> interfaces = hardwareAbstractionLayer.getNetworkIFs();
for(NetworkIF netif : interfaces) {
HashMap<String, String> tagsMap = new HashMap<String, String>() {{
TreeMap<String, String> tagsMap = new TreeMap<String, String>() {{
put("name", netif.getName());
}};
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
put("rx_pkts", netif.getPacketsRecv());
put("tx_pkts", netif.getPacketsSent());
put("rx_bytes", netif.getBytesRecv());


@ -1,9 +1,8 @@
package sysmon.plugins.os_base;
package sysmon.plugins.base;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.pf4j.Plugin;
import org.pf4j.PluginWrapper;
import oshi.SystemInfo;
import oshi.hardware.HardwareAbstractionLayer;
@ -15,9 +14,6 @@ public class BasePlugin extends Plugin {
private static SystemInfo systemInfo;
private static HardwareAbstractionLayer hardwareAbstractionLayer;
public BasePlugin(PluginWrapper wrapper) {
super(wrapper);
}
public static HardwareAbstractionLayer getHardwareAbstractionLayer() {


@ -1,4 +1,4 @@
package sysmon.plugins.os_base;
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
@ -18,12 +18,13 @@ public class BaseProcessExtension implements MetricExtension {
// Extension details
private final String name = "base_process";
private final String provides = "process";
private final String description = "Base Process Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "60s";
private List<?> includeList = new ArrayList<Object>() {{
add("java");
add("node");
@ -37,10 +38,13 @@ public class BaseProcessExtension implements MetricExtension {
add("corosync");
add("rsyslogd");
add("postgres");
add("mariadbd");
add("memcached");
add("db2sysc");
add("dsmserv");
add("mmfsd");
add("systemd");
add("nginx");
}};
private final long minUptimeInSeconds = 600;
@ -70,12 +74,7 @@ public class BaseProcessExtension implements MetricExtension {
@Override
public String getInterval() {
return null;
}
@Override
public String getProvides() {
return provides;
return interval;
}
@Override
@ -91,6 +90,9 @@ public class BaseProcessExtension implements MetricExtension {
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
if(map.containsKey("include")) {
includeList = (List<?>) map.get("include");
}
@ -121,12 +123,12 @@ public class BaseProcessExtension implements MetricExtension {
}
log.debug("pid: " + p.getProcessID() + ", name: " + p.getName() + ", virt: " + p.getVirtualSize() + " rss: " + p.getResidentSetSize());
HashMap<String, String> tagsMap = new HashMap<String, String>() {{
TreeMap<String, String> tagsMap = new TreeMap<String, String>() {{
put("pid", String.valueOf(p.getProcessID()));
put("name", p.getName());
}};
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
put("mem_rss", p.getResidentSetSize());
put("mem_vsz", p.getVirtualSize());
put("kernel_time", p.getKernelTime());


@ -1,4 +1,4 @@
package sysmon.plugins.os_base;
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
@ -8,9 +8,10 @@ import oshi.hardware.HardwareAbstractionLayer;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import sysmon.shared.PluginHelper;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class BaseProcessorExtension implements MetricExtension {
@ -19,12 +20,12 @@ public class BaseProcessorExtension implements MetricExtension {
// Extension details
private final String name = "base_processor";
private final String provides = "processor";
private final String description = "Base Processor Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "10s";
private HardwareAbstractionLayer hardwareAbstractionLayer;
private long[] oldTicks;
@ -52,12 +53,7 @@ public class BaseProcessorExtension implements MetricExtension {
@Override
public String getInterval() {
return null;
}
@Override
public String getProvides() {
return provides;
return interval;
}
@Override
@ -73,13 +69,16 @@ public class BaseProcessorExtension implements MetricExtension {
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
}
@Override
public MetricResult getMetrics() {
HashMap<String, String> tagsMap = new HashMap<>();
HashMap<String, Object> fieldsMap = new HashMap<>();
TreeMap<String, String> tagsMap = new TreeMap<>();
TreeMap<String, Object> fieldsMap = new TreeMap<>();
long[] ticks = hardwareAbstractionLayer.getProcessor().getSystemCpuLoadTicks();
if(oldTicks == null || oldTicks.length != ticks.length) {
@ -100,15 +99,15 @@ public class BaseProcessorExtension implements MetricExtension {
long nonBusy = idle + iowait;
long total = busy + nonBusy;
fieldsMap.put("system", ((float) system / (float) total) * 100);
fieldsMap.put("user", ((float) user / (float) total) * 100);
fieldsMap.put("nice", ((float) nice / (float) total) * 100);
fieldsMap.put("iowait", ((float) iowait / (float) total) * 100);
fieldsMap.put("steal", ((float) steal / (float) total) * 100);
fieldsMap.put("irq", ((float) irq / (float) total) * 100);
fieldsMap.put("softirq", ((float) softirq / (float) total) * 100);
fieldsMap.put("idle", ((float) idle / (float) total) * 100);
fieldsMap.put("busy", ((float) busy / (float) total) * 100);
fieldsMap.put("system", PluginHelper.round(((double) system / (double) total) * 100, 2));
fieldsMap.put("user", PluginHelper.round(((double) user / (double) total) * 100, 2));
fieldsMap.put("nice", PluginHelper.round(((double) nice / (double) total) * 100, 2));
fieldsMap.put("iowait", PluginHelper.round(((double) iowait / (double) total) * 100, 2));
fieldsMap.put("steal", PluginHelper.round(((double) steal / (double) total) * 100, 2));
fieldsMap.put("irq", PluginHelper.round(((double) irq / (double) total) * 100, 2));
fieldsMap.put("softirq", PluginHelper.round(((double) softirq / (double) total) * 100, 2));
fieldsMap.put("idle", PluginHelper.round(((double) idle / (double) total) * 100, 2));
fieldsMap.put("busy", PluginHelper.round(((double) busy / (double) total) * 100, 2));
oldTicks = ticks;
log.debug(fieldsMap.toString());
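
The CPU percentages are now rounded to two decimals via PluginHelper.round(), whose implementation is not shown in this diff. A typical helper of that shape, included purely as an assumption, might be:

import java.math.BigDecimal;
import java.math.RoundingMode;

// Assumed shape of PluginHelper.round(); the real implementation may differ.
public final class RoundHelperSketch {
    public static double round(double value, int places) {
        return BigDecimal.valueOf(value)
                .setScale(places, RoundingMode.HALF_UP)
                .doubleValue();
    }

    public static void main(String[] args) {
        System.out.println(round(12.3456789, 2)); // 12.35
    }
}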


@ -1,7 +1,7 @@
import org.redline_rpm.header.Os
plugins {
id "nebula.ospackage" version "9.1.1"
id "com.netflix.nebula.ospackage" version "11.3.0"
}
@ -82,7 +82,6 @@ tasks.clean.dependsOn(tasks.customCleanUp)
def projectName = "sysmon-plugins"
apply plugin: 'nebula.ospackage'
ospackage {
packageName = projectName
release = '1'
@ -111,3 +110,12 @@ task buildRpmAix(type: Rpm) {
packageName = "${projectName}-AIX"
os = Os.AIX
}
task buildZip(type: Zip) {
subprojects.each {
dependsOn("${it.name}:copyJar")
}
from "output"
setArchivesBaseName(projectName as String)
setArchiveVersion(project.property("version") as String)
}
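
With this task in place, running gradle buildZip (after each plugin's copyJar task has populated the output directory) should produce a sysmon-plugins-<version>.zip archive, typically under build/distributions; the exact output location follows standard Gradle Zip-task conventions and is not spelled out in this diff.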


@ -0,0 +1,39 @@
{
"k10temp-pci-00c3":{
"Adapter": "PCI adapter",
"Tctl":{
"temp1_input": 56.250
}
},
"nvme-pci-0400":{
"Adapter": "PCI adapter",
"Composite":{
"temp1_input": 35.850,
"temp1_max": 74.850,
"temp1_min": -20.150,
"temp1_crit": 79.850,
"temp1_alarm": 0.000
}
},
"iwlwifi_1-virtual-0":{
"Adapter": "Virtual device",
"temp1":{
"temp1_input": 37.000
}
},
"amdgpu-pci-0500":{
"Adapter": "PCI adapter",
"vddgfx":{
"in0_input": 0.681
},
"vddnb":{
"in1_input": 0.712
},
"edge":{
"temp1_input": 37.000
},
"PPT":{
"power1_average": 0.000
}
}
}


@ -0,0 +1,39 @@
{
"k10temp-pci-00c3":{
"Adapter": "PCI adapter",
"Tctl":{
"temp1_input": 53.875
}
},
"nvme-pci-0400":{
"Adapter": "PCI adapter",
"Composite":{
"temp1_input": 36.850,
"temp1_max": 74.850,
"temp1_min": -20.150,
"temp1_crit": 79.850,
"temp1_alarm": 0.000
}
},
"iwlwifi_1-virtual-0":{
"Adapter": "Virtual device",
"temp1":{
"temp1_input": 41.000
}
},
"amdgpu-pci-0500":{
"Adapter": "PCI adapter",
"vddgfx":{
"in0_input": 1.281
},
"vddnb":{
"in1_input": 0.712
},
"edge":{
"temp1_input": 42.000
},
"PPT":{
"power1_average": 0.000
}
}
}
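
The two JSON documents above follow the output format of the lm-sensors command "sensors -j" (chip, adapter, feature, sub-feature values) and presumably serve as test fixtures for parsing temperature and power readings; their exact consumer is not visible among the files shown in this diff.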


@ -1,6 +0,0 @@
pluginId=sysmon-aix
pluginClass=sysmon.plugins.os_aix.AixPlugin
pluginVersion=0.0.1
pluginProvider=System Monitor
pluginDependencies=
pluginDescription=Collects AIX OS metrics.


@ -1,109 +0,0 @@
package sysmon.plugins.os_aix;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import sysmon.shared.PluginHelper;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
// Disabled
//@Extension
public class AixNetstatExtension implements MetricExtension {
private static final Logger log = LoggerFactory.getLogger(AixNetstatExtension.class);
// Extension details
private final String name = "aix_network_netstat";
private final String provides = "network_netstat";
private final String description = "AIX Netstat Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
@Override
public boolean isEnabled() {
return enabled;
}
@Override
public boolean isThreaded() {
return threaded;
}
@Override
public boolean isSupported() {
if(!System.getProperty("os.name").toLowerCase().contains("aix")) {
log.warn("Requires AIX.");
return false;
}
if(PluginHelper.notExecutable("netstat")) {
log.warn("Requires the 'netstat' command.");
return false;
}
return true;
}
@Override
public String getName() {
return name;
}
@Override
public String getInterval() {
return null;
}
@Override
public String getProvides() {
return provides;
}
@Override
public String getDescription() {
return description;
}
@Override
public void setConfiguration(Map<String, Object> map) {
if (map.containsKey("enabled")) {
enabled = (boolean) map.get("enabled");
}
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
}
@Override
public MetricResult getMetrics() throws Exception {
HashMap<String, String> tagsMap;
HashMap<String, Object> fieldsMap;
try (InputStream buf = PluginHelper.executeCommand("netstat -s -f inet")) {
AixNetstatParser parser = processCommandOutput(buf);
tagsMap = parser.getTags();
fieldsMap = parser.getFields();
}
log.debug(fieldsMap.toString());
return new MetricResult(name, new Measurement(tagsMap, fieldsMap));
}
protected AixNetstatParser processCommandOutput(InputStream input) throws IOException {
return new AixNetstatParser(input);
}
}


@ -1,155 +0,0 @@
package sysmon.plugins.os_aix;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.HashMap;
public class AixNetstatParser {
private static final Logger log = LoggerFactory.getLogger(AixNetstatParser.class);
private long ipTotalPacketsReceived;
private long ipForwarded;
private long tcpConnectionsEstablished;
private long tcpPacketsReceved;
private long tcpPacketsSent;
private long udpPacketsReceived;
private long udpPacketsSent;
public AixNetstatParser(InputStream inputStream) throws IOException {
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
while (reader.ready()) {
String line = reader.readLine();
log.debug("AixNetstatParser() - Line: " + line);
if(line.startsWith("tcp:")) {
parseTcp(reader);
}
if(line.startsWith("udp:")) {
parseUdp(reader);
}
if(line.startsWith("ip:")) {
parseIp(reader);
}
}
inputStream.close();
}
protected void parseIp(BufferedReader reader) throws IOException {
while (reader.ready()) {
reader.mark(64);
String line = reader.readLine();
if(!line.startsWith(" ")) {
reader.reset();
return;
}
line = line.trim();
if(line.matches("(\\d+) total packets received")) {
ipTotalPacketsReceived = getFirstLong(line);
}
if(line.matches("(\\d+) packets forwarded")) {
ipForwarded = getFirstLong(line);
}
}
}
protected void parseTcp(BufferedReader reader) throws IOException {
while (reader.ready()) {
reader.mark(64);
String line = reader.readLine();
if(!line.startsWith(" ")) {
reader.reset();
return;
}
line = line.trim();
if(line.matches("(\\d+) connections established \\(including accepts\\)")) {
tcpConnectionsEstablished = getFirstLong(line);
}
if(line.matches("(\\d+) packets received")) {
tcpPacketsReceved = getFirstLong(line);
}
if(line.matches("(\\d+) packets sent")) {
tcpPacketsSent = getFirstLong(line);
}
}
}
protected void parseUdp(BufferedReader reader) throws IOException {
while (reader.ready()) {
reader.mark(64);
String line = reader.readLine();
if(!line.startsWith(" ")) {
reader.reset();
return;
}
line = line.trim();
if(line.matches("(\\d+) datagrams received")) {
udpPacketsReceived = getFirstLong(line);
}
if(line.matches("(\\d+) datagrams output")) {
udpPacketsSent = getFirstLong(line);
}
}
}
public HashMap<String, String> getTags() {
return new HashMap<>();
}
public HashMap<String, Object> getFields() {
HashMap<String, Object> fields = new HashMap<>();
fields.put("ip_forwarded", ipForwarded);
fields.put("ip_received", ipTotalPacketsReceived);
fields.put("tcp_connections", tcpConnectionsEstablished);
fields.put("tcp_pkts_recv", tcpPacketsReceved);
fields.put("tcp_pkts_sent", tcpPacketsSent);
fields.put("udp_pkts_recv", udpPacketsReceived);
fields.put("udp_pkts_sent", udpPacketsSent);
return fields;
}
private Long getFirstLong(String line) {
return Long.parseLong(line.substring(0, line.indexOf(" ")));
}
}


@ -1,18 +0,0 @@
package sysmon.plugins.os_aix;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.pf4j.Plugin;
import org.pf4j.PluginWrapper;
public class AixPlugin extends Plugin {
private static final Logger log = LoggerFactory.getLogger(AixPlugin.class);
public AixPlugin(PluginWrapper wrapper) {
super(wrapper);
}
}


@ -1,25 +0,0 @@
import spock.lang.Specification
import sysmon.plugins.os_aix.AixNetstatParser
class AixNetstatTest extends Specification {
void "test netstat parsing"() {
setup:
InputStream inputStream = getClass().getResourceAsStream('/netstat-aix.txt')
when:
AixNetstatParser parser = new AixNetstatParser(inputStream)
then:
parser.getFields().size() > 0
parser.getFields().get('ip_received') == 76229L
parser.getFields().get('ip_forwarded') == 24L
parser.getFields().get('tcp_connections') == 85L
parser.getFields().get('tcp_pkts_sent') == 31274L
parser.getFields().get('tcp_pkts_recv') == 39830L
parser.getFields().get('udp_pkts_sent') == 26332L
parser.getFields().get('udp_pkts_recv') == 34559L
}
}


@ -1,157 +0,0 @@
icmp:
12 calls to icmp_error
0 errors not generated because old message was icmp
Output histogram:
destination unreachable: 12
0 messages with bad code fields
0 messages < minimum length
0 bad checksums
0 messages with bad length
Input histogram:
destination unreachable: 3
0 message responses generated
igmp:
0 messages received
0 messages received with too few bytes
0 messages received with bad checksum
0 membership queries received
0 membership queries received with invalid field(s)
0 membership reports received
0 membership reports received with invalid field(s)
0 membership reports received for groups to which we belong
2 membership reports sent
tcp:
31274 packets sent
27328 data packets (82928168 bytes)
86 data packets (108992 bytes) retransmitted
2938 ack-only packets (2698 delayed)
0 URG only packets
0 window probe packets
784 window update packets
138 control packets
3812 large sends
74913716 bytes sent using largesend
64069 bytes is the biggest largesend
39830 packets received
22701 acks (for 82928732 bytes)
112 duplicate acks
0 acks for unsent data
15579 packets (5876585 bytes) received in-sequence
62 completely duplicate packets (320 bytes)
57 old duplicate packets
0 packets with some dup. data (0 bytes duped)
75 out-of-order packets (6408 bytes)
0 packets (0 bytes) of data after window
0 window probes
1723 window update packets
0 packets received after close
0 packets with bad hardware assisted checksum
0 discarded for bad checksums
0 discarded for bad header offset fields
0 discarded because packet too short
1 discarded by listeners
0 discarded due to listener's queue full
3207 ack packet headers correctly predicted
15050 data packet headers correctly predicted
63 connection requests
23 connection accepts
85 connections established (including accepts)
114 connections closed (including 0 drops)
0 connections with ECN capability
0 times responded to ECN
0 embryonic connections dropped
20314 segments updated rtt (of 16791 attempts)
0 segments with congestion window reduced bit set
0 segments with congestion experienced bit set
0 resends due to path MTU discovery
2 path MTU discovery terminations due to retransmits
25 retransmit timeouts
0 connections dropped by rexmit timeout
4 fast retransmits
1 when congestion window less than 4 segments
28 newreno retransmits
4 times avoided false fast retransmits
0 persist timeouts
0 connections dropped due to persist timeout
0 keepalive timeouts
0 keepalive probes sent
0 connections dropped by keepalive
0 times SACK blocks array is extended
0 times SACK holes array is extended
0 packets dropped due to memory allocation failure
0 connections in timewait reused
0 delayed ACKs for SYN
0 delayed ACKs for FIN
0 send_and_disconnects
0 spliced connections
0 spliced connections closed
0 spliced connections reset
0 spliced connections timeout
0 spliced connections persist timeout
0 spliced connections keepalive timeout
0 TCP checksum offload disabled during retransmit
0 Connections dropped due to bad ACKs
0 Connections dropped due to duplicate SYN packets
0 fastpath loopback connections
0 fastpath loopback sent packets (0 bytes)
0 fastpath loopback received packets (0 bytes)
0 fake SYN segments dropped
0 fake RST segments dropped
0 data injection segments dropped
0 TCPTR maximum connections dropped
0 TCPTR connections dropped for no memory
0 TCPTR maximum per host connections dropped
0 connections dropped due to max assembly queue depth
udp:
34559 datagrams received
0 incomplete headers
0 bad data length fields
0 bad checksums
1849 dropped due to no socket
8218 broadcast/multicast datagrams dropped due to no socket
0 socket buffer overflows
24492 delivered
26332 datagrams output
ip:
76229 total packets received
0 bad header checksums
0 with size smaller than minimum
0 with data size < data length
0 with header length < data size
0 with data length < header length
0 with bad options
0 with incorrect version number
0 fragments received
0 fragments dropped (dup or out of space)
0 fragments dropped after timeout
0 packets reassembled ok
72552 packets for this host
3 packets for unknown/unsupported protocol
24 packets forwarded
0 packets not forwardable
0 redirects sent
55784 packets sent from this host
0 packets sent with fabricated ip header
0 output packets dropped due to no bufs, etc.
0 output packets discarded due to no route
0 output datagrams fragmented
0 fragments created
0 datagrams that can't be fragmented
0 IP Multicast packets dropped due to no receiver
0 successful path MTU discovery cycles
0 path MTU rediscovery cycles attempted
0 path MTU discovery no-response estimates
0 path MTU discovery response timeouts
0 path MTU discovery decreases detected
0 path MTU discovery packets sent
0 path MTU discovery memory allocation failures
0 ipintrq overflows
0 with illegal source
0 packets processed by threads
0 packets dropped by threads
0 packets dropped due to the full socket receive buffer
0 dead gateway detection packets sent
0 dead gateway detection packet allocation failures
0 dead gateway detection gateway allocation failures
0 incoming packets dropped due to MLS filters
0 packets not sent due to MLS filters


@ -1,4 +0,0 @@
# IBM i Plugin
This is just for testing purposes.


@ -1,7 +0,0 @@
plugins {
}
dependencies {
// https://sourceforge.net/projects/jt400/ and http://jt400.sourceforge.net/
implementation group: 'net.sf.jt400', name: 'jt400', version: '11.0'
}


@ -1,4 +0,0 @@
pluginId=sysmon-ibmi
pluginClass=sysmon.plugins.os_ibmi.IbmIPlugin
pluginDependencies=
pluginDescription=Collects IBM-i OS metrics.


@ -1,66 +0,0 @@
package sysmon.plugins.os_ibmi;
import com.ibm.as400.access.AS400;
import com.ibm.as400.access.SystemStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.pf4j.Plugin;
import org.pf4j.PluginWrapper;
import oshi.SystemInfo;
import oshi.hardware.HardwareAbstractionLayer;
public class IbmIPlugin extends Plugin {
private static final Logger log = LoggerFactory.getLogger(IbmIPlugin.class);
private static SystemStatus systemStatus;
private static AS400 as400;
public IbmIPlugin(PluginWrapper wrapper) {
super(wrapper);
}
public static SystemStatus getSystemStatus() {
try {
if (as400 == null) {
as400 = IbmIPlugin.getAS400();
}
if(systemStatus == null) {
systemStatus = new SystemStatus(as400);
}
} catch (Exception exception) {
log.error("getSystemStatus() - {}", exception.getMessage());
return null;
}
return systemStatus;
}
public static AS400 getAS400() {
String osArch = System.getProperty("os.arch").toLowerCase();
String osName = System.getProperty("os.name").toLowerCase();
if(!osArch.equals("ppc64") && !osName.equals("os/400")) {
log.info("getAS400() - OS Arch: {}", osArch);
log.info("getAS400() - OS Name: {}", osName);
return null;
}
try {
as400 = new AS400("localhost", "*CURRENT");
//as400 = new AS400("10.32.64.142");
} catch (Exception exception) {
log.error("getAS400() - {}", exception.getMessage());
return null;
}
return as400;
}
}


@ -1,142 +0,0 @@
package sysmon.plugins.os_ibmi;
import com.ibm.as400.access.*;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
//@Extension
public class TestExtension implements MetricExtension {
private static final Logger log = LoggerFactory.getLogger(TestExtension.class);
// Extension details
private final String name = "ibmi_test";
private final String provides = "ibmi_test";
private final String description = "IBM i Test Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private SystemStatus systemStatus;
@Override
public boolean isEnabled() {
return enabled;
}
@Override
public boolean isThreaded() {
return threaded;
}
@Override
public boolean isSupported() {
systemStatus = IbmIPlugin.getSystemStatus();
return systemStatus != null;
}
@Override
public String getName() {
return name;
}
@Override
public String getInterval() {
return null;
}
@Override
public String getProvides() {
return provides;
}
@Override
public String getDescription() {
return description;
}
@Override
public void setConfiguration(Map<String, Object> map) {
if (map.containsKey("enabled")) {
enabled = (boolean) map.get("enabled");
}
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
}
@Override
public MetricResult getMetrics() {
if(systemStatus == null) {
log.warn("getMetrics() - no system or status");
return null;
}
/* const hSql = "select
SERVER_NAME,
HTTP_FUNCTION,
SERVER_NORMAL_CONNECTIONS,
SERVER_ACTIVE_THREADS,
SERVER_IDLE_THREADS,
BYTES_RECEIVED,
BYTES_SENT,
NONCACHE_PROCESSING_TIME,
CACHE_PROCESSING_TIME
from
QSYS2.HTTP_SERVER_INFO";
*/
try {
int jobsInSystem = systemStatus.getJobsInSystem();
log.info("Jobs In System: {}", jobsInSystem);
int batchJobsRunning = systemStatus.getBatchJobsRunning();
log.info("Batch Jobs Running: {}", batchJobsRunning);
int activeThreads = systemStatus.getActiveThreadsInSystem();
log.info("Active Threads: {}", activeThreads);
int activeJobs = systemStatus.getActiveJobsInSystem();
log.info("Active Jobs: {}", activeJobs);
int onlineUsers = systemStatus.getUsersCurrentSignedOn();
log.info("Online Users: {}", onlineUsers);
// The storage capacity of the system auxiliary storage pool (ASP1) in MBytes.
long systemAsp = systemStatus.getSystemASP();
System.out.println("Current Processing Capacity :" + systemStatus.getCurrentProcessingCapacity());
System.out.println("ASPUsed:" + systemStatus.getPercentSystemASPUsed());
System.out.println("Temp Addresses Used:" + systemStatus.getPercentTemporaryAddresses());
HashMap<String, Object> fieldsMap = new HashMap<String, Object>() {{
put("jobs_total", jobsInSystem);
put("jobs_running", batchJobsRunning);
put("jobs_active", activeJobs);
put("threads", activeThreads);
put("users", onlineUsers);
}};
return new MetricResult(name, new Measurement(new HashMap<>(), fieldsMap));
} catch (AS400SecurityException | ErrorCompletingRequestException | InterruptedException | IOException | ObjectDoesNotExistException e) {
log.error("getMetrics() {}", e.getMessage());
e.printStackTrace();
}
return null;
}
}


@ -1,7 +0,0 @@
# Linux Plugins
## Components
### Network Sockets
Collects statistics from */proc/net/sockstat*.


@ -1,2 +0,0 @@
plugins {
}


@ -1,5 +0,0 @@
pluginId=sysmon-linux
pluginClass=sysmon.plugins.os_linux.LinuxPlugin
pluginDependencies=
pluginDescription=Linux OS Metrics.


@ -1,108 +0,0 @@
package sysmon.plugins.os_linux;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import sysmon.shared.PluginHelper;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
// Disabled
//@Extension
public class LinuxNetstatExtension implements MetricExtension {
private static final Logger log = LoggerFactory.getLogger(LinuxNetstatExtension.class);
// Extension details
private final String name = "linux_network_netstat";
private final String provides = "network_netstat";
private final String description = "Linux Netstat Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
@Override
public boolean isEnabled() {
return enabled;
}
@Override
public boolean isThreaded() {
return threaded;
}
@Override
public boolean isSupported() {
if(!System.getProperty("os.name").toLowerCase().contains("linux")) {
log.warn("Requires Linux.");
return false;
}
if(PluginHelper.notExecutable("netstat")) {
log.warn("Requires the 'netstat' command.");
return false;
}
return true;
}
@Override
public String getName() {
return name;
}
@Override
public String getInterval() {
return null;
}
@Override
public String getProvides() {
return provides;
}
@Override
public String getDescription() {
return description;
}
@Override
public void setConfiguration(Map<String, Object> map) {
if (map.containsKey("enabled")) {
enabled = (boolean) map.get("enabled");
}
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
}
@Override
public MetricResult getMetrics() throws Exception {
HashMap<String, String> tagsMap;
HashMap<String, Object> fieldsMap;
try (InputStream inputStream = PluginHelper.executeCommand("netstat -s")) {
LinuxNetstatParser parser = processCommandOutput(inputStream);
tagsMap = parser.getTags();
fieldsMap = parser.getFields();
}
return new MetricResult(name, new Measurement(tagsMap, fieldsMap));
}
protected LinuxNetstatParser processCommandOutput(InputStream input) throws IOException {
return new LinuxNetstatParser(input);
}
}


@ -1,164 +0,0 @@
package sysmon.plugins.os_linux;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.HashMap;
public class LinuxNetstatParser {
private static final Logger log = LoggerFactory.getLogger(LinuxNetstatParser.class);
private long ipTotalPacketsReceived;
private long ipForwarded;
private long ipIncomingPacketsDiscarded;
private long ipOutgoingPacketsDropped;
private long tcpConnectionsEstablished;
private long tcpSegmentsReceived;
private long tcpSegmentsSent;
private long udpPacketsReceived;
private long udpPacketsSent;
public LinuxNetstatParser(InputStream inputStream) throws IOException {
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
while (reader.ready()) {
String line = reader.readLine();
log.debug("LinuxNetstatParser() - Line: " + line);
if(line.startsWith("Ip:")) {
parseIp(reader);
}
if(line.startsWith("Tcp:")) {
parseTcp(reader);
}
if(line.startsWith("Udp:")) {
parseUdp(reader);
}
}
inputStream.close();
}
protected void parseIp(BufferedReader reader) throws IOException {
while (reader.ready()) {
reader.mark(64);
String line = reader.readLine();
if(!line.startsWith(" ")) {
reader.reset();
return;
}
line = line.trim();
if(line.matches("(\\d+) total packets received")) {
ipTotalPacketsReceived = getFirstLong(line);
}
if(line.matches("(\\d+) forwarded")) {
ipForwarded = getFirstLong(line);
}
if(line.matches("(\\d+) incoming packets discarded")) {
ipIncomingPacketsDiscarded = getFirstLong(line);
}
if(line.matches("(\\d+) outgoing packets dropped")) {
ipOutgoingPacketsDropped = getFirstLong(line);
}
}
}
protected void parseTcp(BufferedReader reader) throws IOException {
while (reader.ready()) {
reader.mark(64);
String line = reader.readLine();
if(!line.startsWith(" ")) {
reader.reset();
return;
}
line = line.trim();
if(line.matches("(\\d+) connections established")) {
tcpConnectionsEstablished = getFirstLong(line);
}
if(line.matches("(\\d+) segments received")) {
tcpSegmentsReceived = getFirstLong(line);
}
if(line.matches("(\\d+) segments sent out")) {
tcpSegmentsSent = getFirstLong(line);
}
}
}
protected void parseUdp(BufferedReader reader) throws IOException {
while (reader.ready()) {
reader.mark(64);
String line = reader.readLine();
if(!line.startsWith(" ")) {
reader.reset();
return;
}
line = line.trim();
if(line.matches("(\\d+) packets received")) {
udpPacketsReceived = getFirstLong(line);
}
if(line.matches("(\\d+) packets sent")) {
udpPacketsSent = getFirstLong(line);
}
}
}
public HashMap<String, String> getTags() {
return new HashMap<>();
}
public HashMap<String, Object> getFields() {
return new HashMap<String, Object>() {{
put("ip_forwarded", ipForwarded);
put("ip_received", ipTotalPacketsReceived);
put("ip_dropped", ipOutgoingPacketsDropped);
put("ip_discarded", ipIncomingPacketsDiscarded);
put("tcp_connections", tcpConnectionsEstablished);
put("tcp_pkts_recv", tcpSegmentsReceived);
put("tcp_pkts_sent", tcpSegmentsSent);
put("udp_pkts_recv", udpPacketsReceived);
put("udp_pkts_sent", udpPacketsSent);
}};
}
private Long getFirstLong(String line) {
return Long.parseLong(line.substring(0, line.indexOf(" ")));
}
}


@ -1,17 +0,0 @@
package sysmon.plugins.os_linux;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.pf4j.Plugin;
import org.pf4j.PluginWrapper;
public class LinuxPlugin extends Plugin {
private static final Logger log = LoggerFactory.getLogger(LinuxPlugin.class);
public LinuxPlugin(PluginWrapper wrapper) {
super(wrapper);
}
}


@ -1,97 +0,0 @@
package sysmon.plugins.os_linux;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import sysmon.shared.PluginHelper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Extension
public class LinuxSocketExtension implements MetricExtension {
private static final Logger log = LoggerFactory.getLogger(LinuxSocketExtension.class);
// Extension details
private final String name = "linux_network_sockets";
private final String provides = "network_sockets";
private final String description = "Linux Network Socket Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
@Override
public boolean isEnabled() {
return enabled;
}
@Override
public boolean isThreaded() {
return threaded;
}
@Override
public boolean isSupported() {
if(!System.getProperty("os.name").toLowerCase().contains("linux")) {
log.debug("Requires Linux.");
return false;
}
return true;
}
@Override
public String getName() {
return name;
}
@Override
public String getInterval() {
return null;
}
@Override
public String getProvides() {
return provides;
}
@Override
public String getDescription() {
return description;
}
@Override
public void setConfiguration(Map<String, Object> map) {
if (map.containsKey("enabled")) {
enabled = (boolean) map.get("enabled");
}
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
}
@Override
public MetricResult getMetrics() {
LinuxSocketStat sockStat = processSockOutput(PluginHelper.readFile("/proc/net/sockstat"));
HashMap<String, String> tagsMap = sockStat.getTags();
HashMap<String, Object> fieldsMap = sockStat.getFields();
log.debug("getMetrics() - tags: {}, fields: {}", tagsMap, fieldsMap);
return new MetricResult(name, new Measurement(tagsMap, fieldsMap));
}
protected LinuxSocketStat processSockOutput(List<String> inputLines) {
return new LinuxSocketStat(inputLines);
}
}


@ -1,96 +0,0 @@
package sysmon.plugins.os_linux;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class LinuxSocketStat {
private static final Logger log = LoggerFactory.getLogger(LinuxSocketStat.class);
private static final Pattern pattern1 = Pattern.compile("^sockets: used (\\d+)");
private static final Pattern pattern2 = Pattern.compile("^TCP: inuse (\\d+) orphan (\\d+) tw (\\d+) alloc (\\d+) mem (\\d+)");
private static final Pattern pattern3 = Pattern.compile("^UDP: inuse (\\d+) mem (\\d+)");
private long sockets;
private long tcp_inuse;
private long tcp_orphan;
private long tcp_tw;
private long tcp_alloc;
private long tcp_mem;
private long udp_inuse;
private long udp_mem;
/*
sockets: used 1238
TCP: inuse 52 orphan 0 tw 18 alloc 55 mem 7
UDP: inuse 11 mem 10
UDPLITE: inuse 0
RAW: inuse 0
FRAG: inuse 0 memory 0
*/
LinuxSocketStat(List<String> lines) {
Matcher matcher;
for(String line : lines) {
String proto = line.substring(0, line.indexOf(':'));
switch (proto) {
case "sockets":
matcher = pattern1.matcher(line);
if (matcher.matches() && matcher.groupCount() == 1) {
sockets = Long.parseLong(matcher.group(1));
}
break;
case "TCP":
matcher = pattern2.matcher(line);
if (matcher.matches() && matcher.groupCount() == 5) {
tcp_inuse = Long.parseLong(matcher.group(1));
tcp_orphan = Long.parseLong(matcher.group(2));
tcp_tw = Long.parseLong(matcher.group(3));
tcp_alloc = Long.parseLong(matcher.group(4));
tcp_mem = Long.parseLong(matcher.group(5));
}
break;
case "UDP":
matcher = pattern3.matcher(line);
if (matcher.matches() && matcher.groupCount() == 2) {
udp_inuse = Long.parseLong(matcher.group(1));
udp_mem = Long.parseLong(matcher.group(2));
}
break;
}
}
}
public HashMap<String, String> getTags() {
return new HashMap<>();
}
public HashMap<String, Object> getFields() {
return new HashMap<String, Object>() {{
put("sockets", sockets);
put("tcp_inuse", tcp_inuse);
put("tcp_alloc", tcp_alloc);
put("tcp_orphan", tcp_orphan);
put("tcp_mem", tcp_mem);
put("tcp_tw", tcp_tw);
put("udp_inuse", udp_inuse);
put("udp_mem", udp_mem);
}};
}
}


@ -1,26 +0,0 @@
import spock.lang.Specification
import sysmon.plugins.os_linux.LinuxNetstatParser
class LinuxNetstatTest extends Specification {
void "test netstat parsing"() {
setup:
InputStream inputStream = getClass().getResourceAsStream('/netstat-linux.txt')
when:
LinuxNetstatParser parser = new LinuxNetstatParser(inputStream)
then:
parser.getFields().size() > 0
parser.getFields().get('ip_received') == 109772L
parser.getFields().get('ip_dropped') == 70L
parser.getFields().get('ip_discarded') == 0L
parser.getFields().get('tcp_pkts_sent') == 89891L
parser.getFields().get('tcp_pkts_recv') == 86167L
parser.getFields().get('udp_pkts_sent') == 10682L
parser.getFields().get('udp_pkts_recv') == 31928L
}
}


@ -1,29 +0,0 @@
import spock.lang.Specification
import sysmon.plugins.os_linux.LinuxSocketExtension
import sysmon.plugins.os_linux.LinuxSocketStat
class LinuxNetworkTest extends Specification {
void "test /proc/net/sockstat parsing"() {
setup:
def testFile = new File(getClass().getResource('/proc_net_sockstat.txt').toURI())
List<String> lines = testFile.readLines("UTF-8")
when:
LinuxSocketExtension extension = new LinuxSocketExtension()
LinuxSocketStat stats = extension.processSockOutput(lines)
then:
stats.getFields().get("sockets") == 1238L
stats.getFields().get("tcp_inuse") == 52L
stats.getFields().get("tcp_orphan") == 0L
stats.getFields().get("tcp_alloc") == 55L
stats.getFields().get("tcp_mem") == 7l
stats.getFields().get("tcp_tw") == 18L
stats.getFields().get("udp_inuse") == 11L
stats.getFields().get("udp_mem") == 10L
}
}


@ -1,112 +0,0 @@
Ip:
Forwarding: 1
109772 total packets received
1 with invalid addresses
0 forwarded
0 incoming packets discarded
109769 incoming packets delivered
103916 requests sent out
70 outgoing packets dropped
1 dropped because of missing route
Icmp:
52 ICMP messages received
0 input ICMP message failed
ICMP input histogram:
destination unreachable: 40
echo requests: 12
108 ICMP messages sent
0 ICMP messages failed
ICMP output histogram:
destination unreachable: 96
echo replies: 12
IcmpMsg:
InType3: 40
InType8: 12
OutType0: 12
OutType3: 96
Tcp:
3142 active connection openings
5 passive connection openings
2105 failed connection attempts
193 connection resets received
70 connections established
86167 segments received
89891 segments sent out
184 segments retransmitted
3 bad segments received
2735 resets sent
Udp:
31928 packets received
96 packets to unknown port received
0 packet receive errors
10682 packets sent
0 receive buffer errors
0 send buffer errors
IgnoredMulti: 22
UdpLite:
TcpExt:
30 packets pruned from receive queue because of socket buffer overrun
178 TCP sockets finished time wait in fast timer
426 delayed acks sent
1 delayed acks further delayed because of locked socket
Quick ack mode was activated 1059 times
45809 packet headers predicted
7293 acknowledgments not containing data payload received
7659 predicted acknowledgments
TCPSackRecovery: 3
Detected reordering 4 times using SACK
TCPDSACKUndo: 1
1 congestion windows recovered without slow start after partial ack
TCPLostRetransmit: 82
3 timeouts after reno fast retransmit
1 timeouts in loss state
3 fast retransmits
3 retransmits in slow start
TCPTimeouts: 129
TCPLossProbes: 69
TCPLossProbeRecovery: 10
TCPBacklogCoalesce: 450
TCPDSACKOldSent: 991
TCPDSACKOfoSent: 6
TCPDSACKRecv: 45
202 connections reset due to unexpected data
147 connections reset due to early user close
13 connections aborted due to timeout
TCPDSACKIgnoredNoUndo: 10
TCPSackShifted: 1
TCPSackMerged: 1
TCPSackShiftFallback: 9
TCPRcvCoalesce: 5338
TCPOFOQueue: 793
TCPOFOMerge: 6
TCPChallengeACK: 3
TCPSYNChallenge: 3
TCPSpuriousRtxHostQueues: 6
TCPAutoCorking: 710
TCPFromZeroWindowAdv: 1
TCPToZeroWindowAdv: 1
TCPWantZeroWindowAdv: 4
TCPSynRetrans: 98
TCPOrigDataSent: 19048
TCPHystartTrainDetect: 3
TCPHystartTrainCwnd: 54
TCPHystartDelayDetect: 1
TCPHystartDelayCwnd: 24
TCPACKSkippedSeq: 1
TCPKeepAlive: 2595
TCPDelivered: 20025
TCPAckCompressed: 260
TcpTimeoutRehash: 116
IpExt:
InMcastPkts: 2257
OutMcastPkts: 480
InBcastPkts: 98
OutBcastPkts: 78
InOctets: 147193028
OutOctets: 14723163
InMcastOctets: 478599
OutMcastOctets: 73462
InBcastOctets: 10094
OutBcastOctets: 5580
InNoECTPkts: 177661
MPTcpExt:


@ -1,6 +0,0 @@
sockets: used 1238
TCP: inuse 52 orphan 0 tw 18 alloc 55 mem 7
UDP: inuse 11 mem 10
UDPLITE: inuse 0
RAW: inuse 0
FRAG: inuse 0 memory 0


@ -1,6 +1,6 @@
# AIX Plugin
# IBM Power Plugin
## LPAR Processor Extension
## Power LPAR Processor Extension
The processor extension works for both AIX and Linux on the Power ppc64/ppc64le architecture.


@ -0,0 +1,6 @@
pluginId=sysmon-power
pluginClass=sysmon.plugins.power.PowerPlugin
pluginVersion=0.0.1
pluginProvider=System Monitor
pluginDependencies=
pluginDescription=Collects IBM Power specific metrics.


@ -0,0 +1,7 @@
package sysmon.plugins.power;
import org.pf4j.Plugin;
public class PowerPlugin extends Plugin {
}


@ -1,4 +1,4 @@
package sysmon.plugins.os_aix;
package sysmon.plugins.power;
import org.pf4j.Extension;
import org.slf4j.Logger;
@ -10,23 +10,22 @@ import sysmon.shared.PluginHelper;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class AixProcessorExtension implements MetricExtension {
public class PowerProcessorExtension implements MetricExtension {
private static final Logger log = LoggerFactory.getLogger(AixProcessorExtension.class);
private static final Logger log = LoggerFactory.getLogger(PowerProcessorExtension.class);
// Extension details
private final String name = "aix_processor";
private final String provides = "lpar_processor";
private final String description = "AIX Processor Metrics";
private final String name = "power_processor";
private final String description = "IBM Power Processor Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = true;
private String interval = "10s";
@Override
public boolean isEnabled() {
@ -62,12 +61,7 @@ public class AixProcessorExtension implements MetricExtension {
@Override
public String getInterval() {
return null;
}
@Override
public String getProvides() {
return provides;
return interval;
}
@Override
@ -83,16 +77,19 @@ public class AixProcessorExtension implements MetricExtension {
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
}
@Override
public MetricResult getMetrics() throws Exception {
HashMap<String, String> tagsMap = null;
HashMap<String, Object> fieldsMap = null;
TreeMap<String, String> tagsMap = null;
TreeMap<String, Object> fieldsMap = null;
try (InputStream buf = PluginHelper.executeCommand("lparstat 3 1")) {
AixProcessorStat processorStat = processCommandOutput(buf);
PowerProcessorStat processorStat = processCommandOutput(buf);
tagsMap = processorStat.getTags();
fieldsMap = processorStat.getFields();
} catch (IOException e) {
@ -104,8 +101,8 @@ public class AixProcessorExtension implements MetricExtension {
}
protected AixProcessorStat processCommandOutput(InputStream input) throws IOException {
return new AixProcessorStat(input);
protected PowerProcessorStat processCommandOutput(InputStream input) throws IOException {
return new PowerProcessorStat(input);
}
}


@ -1,4 +1,4 @@
package sysmon.plugins.os_aix;
package sysmon.plugins.power;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -8,14 +8,14 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Objects;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class AixProcessorStat {
public class PowerProcessorStat {
private static final Logger log = LoggerFactory.getLogger(AixProcessorStat.class);
private static final Logger log = LoggerFactory.getLogger(PowerProcessorStat.class);
// System configuration: type=Shared mode=Uncapped smt=8 lcpu=8 mem=4096MB psize=19 ent=0.50
private static final Pattern patternAixShared = Pattern.compile("^System configuration: type=(\\S+) mode=(\\S+) smt=(\\d+) lcpu=(\\d+) mem=(\\d+)MB psize=(\\d+) ent=(\\d+\\.?\\d*)");
@ -46,7 +46,7 @@ public class AixProcessorStat {
private final float lbusy; // Indicates the percentage of logical processor(s) utilization that occurred while executing at the user and system level.
public AixProcessorStat(InputStream inputStream) throws IOException {
public PowerProcessorStat(InputStream inputStream) throws IOException {
String lastLine = null;
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
@ -154,12 +154,12 @@ public class AixProcessorStat {
return 100 - idle;
}
public HashMap<String, String> getTags() {
return new HashMap<>();
public TreeMap<String, String> getTags() {
return new TreeMap<>();
}
public HashMap<String, Object> getFields() {
return new HashMap<String, Object>() {{
public TreeMap<String, Object> getFields() {
return new TreeMap<String, Object>() {{
put("lcpu", lcpu);
put("ent", ent);
put("user", user);


@ -1,8 +1,8 @@
import sysmon.plugins.os_aix.AixProcessorExtension
import sysmon.plugins.os_aix.AixProcessorStat
import sysmon.plugins.power.PowerProcessorExtension
import sysmon.plugins.power.PowerProcessorStat
import spock.lang.Specification
class AixProcessorTest extends Specification {
class PowerProcessorTest extends Specification {
void "test AIX lparstat shared output processing"() {
@ -10,8 +10,8 @@ class AixProcessorTest extends Specification {
InputStream inputStream = getClass().getResourceAsStream('/lparstat-aix-shared.txt')
when:
AixProcessorExtension extension = new AixProcessorExtension()
AixProcessorStat stats = extension.processCommandOutput(inputStream)
PowerProcessorExtension extension = new PowerProcessorExtension()
PowerProcessorStat stats = extension.processCommandOutput(inputStream)
then:
stats.getUser() == 83.7f
@ -30,8 +30,8 @@ class AixProcessorTest extends Specification {
InputStream inputStream = getClass().getResourceAsStream('/lparstat-aix-dedicated-donating.txt')
when:
AixProcessorExtension extension = new AixProcessorExtension()
AixProcessorStat stats = extension.processCommandOutput(inputStream)
PowerProcessorExtension extension = new PowerProcessorExtension()
PowerProcessorStat stats = extension.processCommandOutput(inputStream)
then:
stats.getUser() == 0.1f
@ -51,8 +51,8 @@ class AixProcessorTest extends Specification {
InputStream inputStream = getClass().getResourceAsStream('/lparstat-aix-dedicated-capped.txt')
when:
AixProcessorExtension extension = new AixProcessorExtension()
AixProcessorStat stats = extension.processCommandOutput(inputStream)
PowerProcessorExtension extension = new PowerProcessorExtension()
PowerProcessorStat stats = extension.processCommandOutput(inputStream)
then:
stats.getUser() == 0.0f
@ -71,8 +71,8 @@ class AixProcessorTest extends Specification {
InputStream inputStream = getClass().getResourceAsStream('/lparstat-linux.txt')
when:
AixProcessorExtension extension = new AixProcessorExtension()
AixProcessorStat stats = extension.processCommandOutput(inputStream)
PowerProcessorExtension extension = new PowerProcessorExtension()
PowerProcessorStat stats = extension.processCommandOutput(inputStream)
then:
stats.getUser() == 0.03f


@ -3,9 +3,9 @@ import org.redline_rpm.header.Os
plugins {
id 'application'
id "com.github.johnrengelman.shadow" version "7.1.2"
id "net.nemerosa.versioning" version "2.15.1"
id "nebula.ospackage" version "9.1.1"
id "com.github.johnrengelman.shadow" version "7.1.2"
id "com.netflix.nebula.ospackage" version "11.3.0"
}
dependencies {
@ -28,7 +28,7 @@ def projectName = "sysmon-server"
application {
// Define the main class for the application.
mainClass.set('sysmon.server.Application')
applicationDefaultJvmArgs = [ "-server", "-Xms64m", "-Xmx64m", "-XX:+ExitOnOutOfMemoryError", "-XX:+AlwaysPreTouch" ]
applicationDefaultJvmArgs = [ "-Xms64m", "-Xmx64m", "-XX:+ExitOnOutOfMemoryError", "-XX:+AlwaysPreTouch" ]
}
run {
@ -40,7 +40,6 @@ tasks.named('test') {
useJUnitPlatform()
}
apply plugin: 'nebula.ospackage'
ospackage {
packageName = projectName
release = '1'


@ -11,10 +11,10 @@
dir="/opt/sysmon/server"
cmd="/opt/sysmon/server/bin/server"
args="-d"
args="" # Add '-d' for debug output
user=""
name=`basename $0`
name="sysmon-server"
pid_file="/var/run/$name.pid"
stdout_log="/var/log/$name.log"
stderr_log="/var/log/$name.err"


@ -8,7 +8,6 @@ import sysmon.shared.ComboResult;
import sysmon.shared.Measurement;
import sysmon.shared.MetricResult;
import java.util.Map;
import java.util.concurrent.TimeUnit;
public class ComboResultToPointProcessor implements Processor {
@ -26,34 +25,18 @@ public class ComboResultToPointProcessor implements Processor {
BatchPoints.Builder batchPoints = BatchPoints
.database(ComboResultToPointProcessor.influxDbName)
.precision(TimeUnit.MILLISECONDS);
.precision(TimeUnit.SECONDS);
for(MetricResult metricResult : comboResult.getMetricResults()) {
for(Measurement measurement : metricResult.getMeasurements()) {
Point.Builder point = Point.measurement(metricResult.getName())
.time(metricResult.getTimestamp(), TimeUnit.MILLISECONDS)
.tag("hostname", metricResult.getHostname());
.time(metricResult.getTimestamp(), TimeUnit.SECONDS)
.tag("hostname", metricResult.getHostname())
.tag(measurement.getTags())
.fields(measurement.getFields());
for (Map.Entry<String,String> entry : measurement.getTags().entrySet()) {
//log.info("process() - tag: " + entry.getKey() + "=" + entry.getValue());
point.tag(entry.getKey(), entry.getValue());
}
for (Map.Entry<String,Object> entry : measurement.getFields().entrySet()) {
//log.info("process() - field: " + entry.getKey() + "=" + entry.getValue());
if(entry.getValue() instanceof Number) {
Number num = (Number) entry.getValue();
point.addField(entry.getKey(), num);
} else if(entry.getValue() instanceof Boolean) {
Boolean bol = (Boolean) entry.getValue();
point.addField(entry.getKey(), bol);
} else {
String str = (String) entry.getValue();
point.addField(entry.getKey(), str);
}
}
batchPoints.point(point.build());
}


@ -8,7 +8,6 @@ import sysmon.shared.Measurement;
import sysmon.shared.MetricResult;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
public class MetricResultToPointProcessor implements Processor {
@ -29,33 +28,18 @@ public class MetricResultToPointProcessor implements Processor {
BatchPoints.Builder batchPoints = BatchPoints
.database(MetricResultToPointProcessor.influxDbName)
.precision(TimeUnit.MILLISECONDS)
.precision(TimeUnit.SECONDS)
.tag("hostname", metricResult.getHostname());
for(Measurement measurement : measurementList) {
Point.Builder point = Point.measurement(metricResult.getName())
.time(metricResult.getTimestamp(), TimeUnit.MILLISECONDS);
.time(metricResult.getTimestamp(), TimeUnit.SECONDS)
.fields(measurement.getFields())
.tag(measurement.getTags());
for (Map.Entry<String,String> entry : measurement.getTags().entrySet()) {
//log.info("process() - tag: " + entry.getKey() + "=" + entry.getValue());
point.tag(entry.getKey(), entry.getValue());
}
for (Map.Entry<String,Object> entry : measurement.getFields().entrySet()) {
//log.info("process() - field: " + entry.getKey() + "=" + entry.getValue());
if(entry.getValue() instanceof Number) {
Number num = (Number) entry.getValue();
point.addField(entry.getKey(), num);
} else if(entry.getValue() instanceof Boolean) {
Boolean bol = (Boolean) entry.getValue();
point.addField(entry.getKey(), bol);
} else {
String str = (String) entry.getValue();
point.addField(entry.getKey(), str);
}
}
batchPoints.point(point.build());
}
exchange.getIn().setBody(batchPoints.build());


@ -21,15 +21,6 @@ public class ServerRouteBuilder extends RouteBuilder {
.host(registry.lookupByNameAndType("http.host", String.class))
.port(registry.lookupByNameAndType("http.port", Integer.class));
/*
rest()
.get("/")
.produces("text/html")
.route()
.to("log:stdout")
.endRest();
*/
rest()
.post("/metrics")
.consumes("application/json")
@ -40,7 +31,7 @@ public class ServerRouteBuilder extends RouteBuilder {
.setHeader(Exchange.HTTP_RESPONSE_CODE, constant(202))
.setHeader("Content-Type", constant("application/x-www-form-urlencoded"))
.to("seda:inbound?discardWhenFull=true")
.setBody(simple("OK, received by server."))
.setBody(simple("OK, received."))
.doCatch(Exception.class)
.log(LoggingLevel.WARN, "Error: ${exception.message}.")
.end()


@ -15,18 +15,5 @@
## limitations under the License.
## ---------------------------------------------------------------------------
# to configure camel main
# here you can configure options on camel main (see MainConfigurationProperties class)
camel.main.name = sysmon-server
# enable tracing
#camel.main.tracing = true
# bean introspection to log reflection based configuration
#camel.main.beanIntrospectionExtendedStatistics=true
#camel.main.beanIntrospectionLoggingLevel=INFO
# run in lightweight mode to be tiny as possible
camel.main.lightweight = true
# and eager load classes
#camel.main.eager-classloading = true


@ -16,4 +16,4 @@ new File(rootDir, "plugins").listFiles().each {
include ":plugins:${it.name}"
}
}
}


@ -9,6 +9,7 @@
plugins {
id 'groovy'
id 'java-library'
id 'maven-publish'
}
repositories {
@ -33,3 +34,24 @@ tasks.named('test') {
// Use junit platform for unit tests.
useJUnitPlatform()
}
publishing {
publications {
library(MavenPublication) {
groupId = 'sysmon'
artifactId = 'shared'
from components.java
}
}
repositories {
maven {
name = "gitea"
url = uri("https://git.data.coop/api/packages/$System.env.DRONE_REPO_OWNER/maven")
credentials {
username = "$System.env.DRONE_REPO_OWNER"
password = "$System.env.AUTH_TOKEN"
}
}
}
}
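
With the maven-publish plugin applied, running the standard publish task should upload the shared artifact to the Gitea Maven registry configured above, provided the DRONE_REPO_OWNER and AUTH_TOKEN environment variables are set as they would be in the CI pipeline.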


@ -1,22 +1,22 @@
package sysmon.shared;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
public class Measurement implements Serializable {
private static final long serialVersionUID = 1L;
private HashMap<String, String> tags = new HashMap<>();
private HashMap<String, Object> fields = new HashMap<>();
private Map<String, String> tags = new TreeMap<>();
private Map<String, Object> fields = new TreeMap<>();
public Measurement() {
}
public Measurement(HashMap<String, String> tags, HashMap<String, Object> fields) {
public Measurement(Map<String, String> tags, TreeMap<String, Object> fields) {
this.tags = Objects.requireNonNull(tags);
this.fields = Objects.requireNonNull(fields);
}
@ -29,12 +29,12 @@ public class Measurement implements Serializable {
return fields;
}
public void setTags(HashMap<String, String> tags) {
public void setTags(TreeMap<String, String> tags) {
Objects.requireNonNull(tags);
this.tags = tags;
}
public void setFields(HashMap<String, Object> fields) {
public void setFields(TreeMap<String, Object> fields) {
Objects.requireNonNull(fields);
this.fields = fields;
}
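
A hypothetical caller of the updated Measurement API, assuming the sysmon.shared classes are on the classpath; the values are made up:

import java.util.TreeMap;
import sysmon.shared.Measurement;

// Illustrative construction matching the new constructor signature:
// Map-typed tags and TreeMap-typed fields, both backed by TreeMap.
public class MeasurementDemo {
    public static void main(String[] args) {
        TreeMap<String, String> tags = new TreeMap<>();
        tags.put("name", "eth0");
        TreeMap<String, Object> fields = new TreeMap<>();
        fields.put("rx_bytes", 123456L);
        fields.put("tx_bytes", 654321L);
        Measurement measurement = new Measurement(tags, fields);
        System.out.println(measurement.getTags() + " " + measurement.getFields());
    }
}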


@ -12,7 +12,6 @@ public interface MetricExtension extends ExtensionPoint {
String getName();
String getInterval();
String getProvides();
String getDescription();
void setConfiguration(Map<String, Object> map);


@ -10,7 +10,7 @@ public class MetricResult implements Serializable {
private String name;
private String hostname;
private Long timestamp; // epoch milli
private Long timestamp; // epoch seconds
private ArrayList<Measurement> measurements;
public MetricResult() {
@ -18,12 +18,12 @@ public class MetricResult implements Serializable {
public MetricResult(String name) {
this.name = name;
this.timestamp = Instant.now().toEpochMilli();
this.timestamp = Instant.now().getEpochSecond();
}
public MetricResult(String name, Measurement measurement) {
this.name = name;
this.timestamp = Instant.now().toEpochMilli();
this.timestamp = Instant.now().getEpochSecond();
this.measurements = new ArrayList<Measurement>() {{
add(measurement);
}};
@ -31,7 +31,7 @@ public class MetricResult implements Serializable {
public MetricResult(String name, ArrayList<Measurement> measurements) {
this.name = name;
this.timestamp = Instant.now().toEpochMilli();
this.timestamp = Instant.now().getEpochSecond();
this.measurements = measurements;
}
@ -46,7 +46,7 @@ public class MetricResult implements Serializable {
}
public void setHostname(String hostname) {
this.hostname = hostname;
this.hostname = hostname.toLowerCase();
}
public void setName(String name) {
@ -85,5 +85,5 @@ public class MetricResult implements Serializable {
return sb.toString();
}
}
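
The timestamp change from epoch milliseconds to epoch seconds goes hand in hand with the server processors above switching BatchPoints and Point precision to TimeUnit.SECONDS: the unit of the stored timestamp and the declared precision must agree. A small sketch, assuming the influxdb-java client already used by the server module:

import java.time.Instant;
import java.util.concurrent.TimeUnit;
import org.influxdb.dto.Point;

// Sketch: an epoch-second timestamp written with matching SECONDS precision.
public class SecondsPrecisionDemo {
    public static void main(String[] args) {
        long ts = Instant.now().getEpochSecond();
        Point point = Point.measurement("base_memory")
                .time(ts, TimeUnit.SECONDS)
                .tag("hostname", "myhost")
                .addField("available", 123456L)
                .build();
        System.out.println(point.lineProtocol());
    }
}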

Some files were not shown because too many files have changed in this diff.