Compare commits

..

No commits in common. "main" and "v0.0.9" have entirely different histories.
main ... v0.0.9

139 changed files with 4460 additions and 7051 deletions

View file

@ -1,26 +0,0 @@
---
kind: pipeline
name: default
type: docker
steps:
- name: test
image: eclipse-temurin:8-jdk
commands:
- ./gradlew test
- name: build
image: eclipse-temurin:8-jdk
environment:
AUTH_TOKEN: # Gitea access token ENV variable
from_secret: auth # Name of DroneCI secret exposed above
commands:
- ./gradlew build packages shared:publishLibraryPublicationToGiteaRepository
- for file in server/build/distributions/*.deb ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in server/build/distributions/*.rpm ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in client/build/distributions/*.deb ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in client/build/distributions/*.rpm ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in plugins/build/distributions/*.deb ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in plugins/build/distributions/*.rpm ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
when:
event:
- tag

View file

@ -1,11 +0,0 @@
root = true
[*]
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
indent_style = space
indent_size = 4
[*.yml]
indent_size = 2

1
.gitignore vendored
View file

@ -3,7 +3,6 @@
.classpath
.project
.gradle
.vscode
output
build
bin

View file

@ -1,68 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
## [1.1.2] - 2023-02-06
- Lowercase client hostnames
## [1.1.1] - 2023-01-22
- Simplify plugin naming
- Initial support for executing (groovy) scripts
- Fixed bug when no config file were found
- Update the default [dashboards](doc/dashboards/)
## [1.1.0] - 2022-12-17
- Lower influx time precision from milliseconds to seconds
- requires you to update server and clients to this version.
- Update *oshi* dependency (for AIX improvements).
## [1.0.24] - 2022-11-16
- Fix incorrect use of OSHI getDiskStores()
- Update dashboards
## [1.0.23] - 2022-11-07
- Update dashboards.
- Lower default interval for most plugins.
- Simplify metrics-results to influx points code.
- Remove logging of skipped disk devices (eg. cd0).
## [1.0.21] - 2022-10-30
- Update dashboard
- Add IP connections
## [1.0.18] - 2022-10-24
- Bump version to 1.x to indicate stable release.
- Update 3rd party dependencies.
## [0.1.13] - 2022-06-27
## [0.1.11] - 2022-03-02
### Changed
- (plugins) Removed groovy dependency from build.gradle (it increased size and was not needed).
## [0.1.10] - 2022-03-01
### Added
- (client) More debug options.
- (plugins/linux) Re-enabled network socket-statistics extension.
### Changed
- Updated the oshi dependency to v. 6.1.4.
- (plugins/aix) Improved AIX lparstat parsing.
- (plugins/aix) More debug output from (Power) processor extension.
- (plugins/base) More debug output from plugins-base disk extension.
## [0.1.9] - 2022-02-15
### Changed
- Updated 3rd party dependencies.
<!--
[1.1.0]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.1.0%0Dv0.1.24
[1.0.24]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.24%0Dv0.1.23
[1.0.23]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.23%0Dv0.1.21
[1.0.21]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.21%0Dv0.1.18
[1.0.18]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.18%0Dv0.1.13
[0.1.13]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.13%0Dv0.1.11
[0.1.11]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.11%0Dv0.1.10
[0.1.10]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.10%0Dv0.1.9
[0.1.9]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.9%0Dv0.1.8
-->

202
LICENSE
View file

@ -1,202 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -1,3 +1,47 @@
# Repository moved
# System Monitor
Please visit [github.com/mnellemann/sysmon](https://github.com/mnellemann/sysmon)
Java based system monitoring solution with support for plugins.
- Example dashboards are provided in the [doc/](doc) folder, which can be imported into your Grafana installation.
- Screenshots are available in the [downloads](https://bitbucket.org/mnellemann/sysmon/downloads/) section.
## Components
### Client
Runs on your hosts and collects metrics, which are sent to the central *server*.
[More information](client/README.md).
### Server
Receives aggregated metrics from clients and saves these into InfluxDB.
[More information](server/README.md).
### Plugins
Loaded by the client and provides extensions for doing the actual collecting of metrics.
[More information](plugins/README.md).
## Known problems
### Correct timezone and clock
- Ensure you have **correct timezone and date/time** and NTPd (or similar) running to keep it accurate!
### Naming collision
You can't have hosts with the same name, as these cannot be distinguished when metrics are
written to InfluxDB (which uses the hostname as key).
### Renaming hosts
If you rename a host, the metrics in InfluxDB will still be available by the old hostname, and new metrics will be written with the new hostname. There is no easy way to migrate the old data, but you can delete it easily:
```text
USE sysmon;
DELETE WHERE hostname = 'unknown';
```

View file

@ -1,4 +1,4 @@
image: eclipse-temurin:8-jdk
image: openjdk:8
pipelines:
branches:

View file

@ -13,13 +13,13 @@ subprojects {
apply plugin: 'groovy'
dependencies {
testImplementation "org.spockframework:spock-core:${spockVersion}"
testImplementation 'org.spockframework:spock-core:2.0-groovy-3.0'
testImplementation "org.slf4j:slf4j-api:${slf4jVersion}"
testImplementation "org.slf4j:slf4j-simple:${slf4jVersion}"
implementation "org.slf4j:slf4j-api:${slf4jVersion}"
implementation "org.slf4j:slf4j-simple:${slf4jVersion}"
implementation "org.tomlj:tomlj:${tomljVersion}"
}
repositories {
@ -27,14 +27,11 @@ subprojects {
mavenCentral()
}
java {
sourceCompatibility = JavaVersion.VERSION_1_8
targetCompatibility = JavaVersion.VERSION_1_8
}
sourceCompatibility = 1.8
targetCompatibility = 1.8
}
tasks.register("packages") {
tasks.create("packages") {
group "build"
dependsOn ":client:buildDeb"
@ -45,5 +42,5 @@ tasks.register("packages") {
dependsOn ":plugins:buildDeb"
dependsOn ":plugins:buildRpm"
dependsOn ":plugins:buildZip"
}

View file

@ -1,12 +1,6 @@
# Client / Agent
# Client
This is the client/agent component of sysmon, which you install (together with sysmon-plugins) on your hosts.
## Installation
Download *.deb* or *.rpm* packages for sysmon-client *and* sysmon-plugins, and install.
See the [doc/systemd.md](doc/systemd.md) or [doc/sysv-init.md](doc/sysv-init.md) files for further instructions on running as a system service.
Client component.
## Development

View file

@ -3,9 +3,9 @@ import org.redline_rpm.header.Os
plugins {
id 'application'
id "net.nemerosa.versioning" version "2.15.1"
id "com.github.johnrengelman.shadow" version "7.1.2"
id "com.netflix.nebula.ospackage" version "11.3.0"
id "com.github.johnrengelman.shadow" version "7.0.0"
id "net.nemerosa.versioning" version "2.14.0"
id "nebula.ospackage" version "8.6.1"
}
dependencies {
@ -17,15 +17,12 @@ dependencies {
annotationProcessor "info.picocli:picocli-codegen:${picocliVersion}"
implementation "info.picocli:picocli:${picocliVersion}"
implementation "org.tomlj:tomlj:${tomljVersion}"
implementation 'org.tomlj:tomlj:1.0.0'
runtimeOnly(group: 'com.github.oshi', name: 'oshi-core', version: oshiVersion) {
exclude(group: "org.slf4j")
}
//implementation "org.apache.groovy:groovy-all:${groovyVersion}" // From version 4.+
implementation "org.codehaus.groovy:groovy:${groovyVersion}"
implementation group: 'org.apache.camel', name: 'camel-core', version: camelVersion
implementation group: 'org.apache.camel', name: 'camel-main', version: camelVersion
implementation group: 'org.apache.camel', name: 'camel-http', version: camelVersion
@ -33,7 +30,6 @@ dependencies {
implementation group: 'org.apache.camel', name: 'camel-bean', version: camelVersion
implementation group: 'org.apache.camel', name: 'camel-timer', version: camelVersion
implementation group: 'org.apache.camel', name: 'camel-stream', version: camelVersion
}
def projectName = "sysmon-client"
@ -41,13 +37,11 @@ def projectName = "sysmon-client"
application {
// Define the main class for the application.
mainClass.set('sysmon.client.Application')
applicationDefaultJvmArgs = [ "-Xms64m", "-Xmx64m", "-XX:+ExitOnOutOfMemoryError", "-XX:+AlwaysPreTouch" ]
applicationDefaultJvmArgs = [ "-server", "-XX:+UseG1GC", "-Xmx32m" ]
}
run {
systemProperty 'sysmon.pluginsDir', '../plugins/output/'
systemProperty 'sysmon.cfgFile', 'doc/sysmon-client.toml'
systemProperty 'sysmon.debug', '1'
systemProperty 'pf4j.pluginsDir', '../plugins/output/'
}
tasks.named('test') {
@ -76,6 +70,7 @@ shadowJar {
mergeServiceFiles() // Tell plugin to merge duplicate service files
}
apply plugin: 'nebula.ospackage'
ospackage {
packageName = projectName
release = '1'

21
client/doc/AIX.md Normal file
View file

@ -0,0 +1,21 @@
# AIX Notes
Works on IBM Power VIO (Virtual IO) servers, as well as regular IBM Power AIX installations.
## Installation
We require Java 8, which should already be installed.
The RPM packages are *"noarch"* Java bytecode, so we can use the **--ignoreos** option to install:
```shell
rpm -i --ignoreos sysmon-client.rpm sysmon-plugins.rpm
```
## Run automatically at boot
Change the *sysmon-server* URL for your environment.
```shell
mkitab 'sysmon:2:respawn:env JAVA_HOME=/usr/java8_64 /opt/sysmon/client/bin/client -s http://10.20.30.40:9925/metrics >/tmp/sysmon.log 2>&1'
init q
```

View file

@ -1,37 +0,0 @@
# AIX Notes
Works on IBM Power VIO (Virtual IO) servers, as well as regular IBM Power AIX installations.
## Installation
We require Java 8, which should already be installed on AIX, or is available to install.
The RPM packages are *"noarch"* Java bytecode, so we can use the **--ignoreos** option to install:
```shell
rpm -ivh --ignoreos sysmon-client-*.rpm sysmon-plugins-*.rpm
```
### Run automatically at boot
See the [sysv-init.md](sysv-init.md) file for instructions, or run from inittab:
```shell
mkitab "sysmon:2:respawn:env JAVA_HOME=/usr/java8_64 /opt/sysmon/client/bin/client -s http://10.x.y.z:9925/metrics"
init q
```
## Upgrades
To upgrade the packages:
```shell
rpm -Uvh --ignoreos sysmon-*.noarch.rpm
```
To restart sysmon-client process after upgrade:
```shell
/etc/rc.d/init.d/sysmon-client stop; /etc/rc.d/init.d/sysmon-client start
# or, if running from inittab:
kill -HUP `ps -e -o pid,comm,args | grep sysmon-client | grep java | awk '{print $1}'`
```

View file

@ -1,43 +0,0 @@
# Instruction for RedHat / CentOS / AlmaLinux Systems
Please note that the software versions referenced in this document might have changed and might not be available/working unless updated.
More details are available in the [README.md](../README.md) file.
## Requirements
Java 8 (or later) runtime is required.
```shell
sudo dnf install java-11-openjdk-headless
```
Use *yum* if *dnf* is not available.
## Installation
[Download](https://git.data.coop/nellemann/-/packages/generic/sysmon/) the latest client and plugins rpm files and install:
```shell
rpm -ivh sysmon-client-*.noarch.rpm sysmon-plugins-*.noarch.rpm
cp /opt/sysmon/client/doc/sysmon-client.service /etc/systemd/system/
systemctl daemon-reload
```
Now edit the **/etc/systemd/system/sysmon-client.service** file and change the URL so that it points to *your* sysmon-server.
````
# Modify below line in /etc/systemd/system/sysmon-client.service
ExecStart=/opt/sysmon/client/bin/client -s http://10.20.30.40:9925/metrics
````
Now enable and start the sysmon-client service:
```shell
systemctl enable sysmon-client
systemctl start sysmon-client
```
Check logs for errors with: ```journalctl -u sysmon-client```

View file

@ -1,19 +0,0 @@
class ExampleScript implements MetricScript {
MetricResult getMetrics() {
Map<String,String> tags = new TreeMap<>();
Map<String,Object> fields = new TreeMap<>();
tags.put("type", "temp");
fields.put("sensor1", 23.2);
fields.put("sensor2", 25.8);
Measurement measurement = new Measurement(tags, fields);
return new MetricResult("script_sensors", measurement);
}
}

View file

@ -1,3 +0,0 @@
# Example Scripts
TODO.

View file

@ -1,12 +1,9 @@
[Unit]
Description=Sysmon Client
Description=Sysmon Client Service
[Service]
#User=nobody
#Group=nobody
TimeoutSec=20
Restart=on-failure
# BELOW: Specify sysmon-server URL, add '-n hostname' if needed
TimeoutStartSec=0
Restart=always
ExecStart=/opt/sysmon/client/bin/client -s http://10.20.30.40:9925/metrics
[Install]

View file

@ -1,106 +0,0 @@
#!/bin/sh
### BEGIN INIT INFO
# Provides:
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Start daemon at boot time
# Description: Enable service provided by daemon.
### END INIT INFO
dir="/opt/sysmon/client"
cmd="/opt/sysmon/client/bin/client"
args="-s http://10.20.30.40:9925/metrics" # <- HERE: Specify sysmon-server URL, add '-n hostname' if needed
user=""
name="sysmon-client"
pid_file="/var/run/$name.pid"
stdout_log="/var/log/$name.log"
stderr_log="/var/log/$name.err"
# Java 8+ runtime required - Uncomment and export JAVA_HOME if needed
#JAVA_HOME=/usr/java8_64
#JAVA_HOME=/opt/ibm-semeru-open-XX-jre
#JAVA_HOME=/opt/ibm-semeru-open-XX-jdk
#JAVA_HOME=/opt/ibm/ibm-semeru-certified-XX-jre
#JAVA_HOME=/opt/ibm/ibm-semeru-certified-XX-jdk
#export JAVA_HOME
get_pid() {
cat "$pid_file"
}
is_running() {
[ -f "$pid_file" ] && ps -p $(get_pid) > /dev/null 2>&1
}
case "$1" in
start)
if is_running; then
echo "Already started"
else
echo "Starting $name"
cd "$dir" || exit 1
if [ -z "$user" ]; then
$cmd $args >> "$stdout_log" 2>> "$stderr_log" &
else
sudo -u "$user" $cmd $args >> "$stdout_log" 2>> "$stderr_log" &
fi
echo $! > "$pid_file"
if ! is_running; then
echo "Unable to start, see $stdout_log and $stderr_log"
exit 1
fi
fi
;;
stop)
if is_running; then
echo "Stopping $name.."
kill $(get_pid)
for i in 1 2 3 4 5 6 7 8 9 10
# for i in `seq 10`
do
if ! is_running; then
break
fi
sleep 1
done
echo
if is_running; then
echo "Not stopped; may still be shutting down or shutdown may have failed"
exit 1
else
echo "Stopped"
if [ -f "$pid_file" ]; then
rm "$pid_file"
fi
fi
else
echo "Not running"
fi
;;
restart)
$0 stop
if is_running; then
echo "Unable to stop, will not attempt to start"
exit 1
fi
$0 start
;;
status)
if is_running; then
echo "Running"
else
echo "Stopped"
exit 1
fi
;;
*)
echo "Usage: $0 {start|stop|restart|status}"
exit 1
;;
esac
exit 0

View file

@ -1,32 +0,0 @@
###
### Sysmon Client
###
### Example configuration with some default values.
###
# Local path for Groovy scripts
scripts = "/opt/sysmon/scripts"
[extension.base_info]
enabled = true
interval = '60m'
[extension.base_disk]
enabled = true
interval = '10s'
[extension.base_filesystem]
enabled = true
interval = '10s'
exclude_type = [ "tmpfs", "ahafs" ]
exclude_mount = [ "/boot/efi" ]
[extension.base_process]
enabled = true
interval = '5m'
include = [
"java", "node", "httpd", "mongod", "mysqld",
"postgres", "influxd", "haproxy", "beam.smp",
"filebeat", "corosync", "rsyslogd", "memcached",
"db2sysc", "dsmserv", "mmfsd",
]

View file

@ -1,4 +1,4 @@
# Linux systemd notes
# SystemD Notes
Edit the *sysmon-client.service* file and change the sysmon-server URL accordingly to your environment.

View file

@ -1,18 +0,0 @@
# SysV init Notes
- Copy the *sysmon-client.sh* into *sysmon-client* in the correct location for init scripts on your operating system.
- Edit the file and specify the sysmon-server URL in the *args* variable.
- Edit the file and uncomment *JAVA_HOME* if required
- SymLink to the required run-levels.
## AIX & VIO
```shell
# Remember to edit and set JAVA_HOME to eg. /usr/java8_64
cp sysmon-client.sh /etc/rc.d/init.d/sysmon-client
chmod +x /etc/rc.d/init.d/sysmon-client
ln -s /etc/rc.d/init.d/sysmon-client /etc/rc.d/rc2.d/Ssysmon-client
ln -s /etc/rc.d/init.d/sysmon-client /etc/rc.d/rc2.d/Ksysmon-client
```

View file

@ -4,18 +4,21 @@
package sysmon.client;
import org.apache.camel.main.Main;
import org.slf4j.simple.SimpleLogger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.net.InetAddress;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.concurrent.Callable;
@CommandLine.Command(name = "sysmon-client", mixinStandardHelpOptions = true)
public class Application implements Callable<Integer> {
private static final Logger log = LoggerFactory.getLogger(Application.class);
@CommandLine.Option(names = { "-s", "--server-url" }, description = "Server URL (default: ${DEFAULT-VALUE}).", defaultValue = "http://127.0.0.1:9925/metrics", paramLabel = "<url>")
private URL serverUrl;
@ -25,15 +28,6 @@ public class Application implements Callable<Integer> {
@CommandLine.Option(names = { "-p", "--plugin-dir" }, description = "Plugin jar path (default: ${DEFAULT-VALUE}).", paramLabel = "<path>", defaultValue = "/opt/sysmon/plugins")
private String pluginPath;
@CommandLine.Option(names = { "-c", "--conf" }, description = "Configuration file [default: '/etc/sysmon-client.toml'].", paramLabel = "<file>", defaultValue = "/etc/sysmon-client.toml")
private File configurationFile;
//@CommandLine.Option(names = { "-d", "--debug" }, description = "Enable debugging (default: ${DEFAULT_VALUE}).")
//private boolean enableDebug = false;
@CommandLine.Option(names = { "-d", "--debug" }, description = "Enable debugging (default: ${DEFAULT_VALUE}).")
private boolean[] enableDebug = new boolean[0];
public static void main(String... args) {
int exitCode = new CommandLine(new Application()).execute(args);
System.exit(exitCode);
@ -41,60 +35,26 @@ public class Application implements Callable<Integer> {
@Override
public Integer call() {
String sysmonDebug = System.getProperty("sysmon.debug");
if(sysmonDebug != null) {
System.setProperty(SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "INFO");
}
switch (enableDebug.length) {
case 1:
System.setProperty(SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "INFO");
break;
case 2:
System.setProperty(SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "DEBUG");
break;
case 3:
System.setProperty(SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "TRACE");
break;
}
String sysmonCfgFile = System.getProperty("sysmon.cfgFile");
if(sysmonCfgFile != null) {
configurationFile = new File(sysmonCfgFile);
}
String sysmonPluginsDir = System.getProperty("sysmon.pluginsDir");
if(sysmonPluginsDir != null) {
pluginPath = sysmonPluginsDir;
}
public Integer call() throws IOException {
if(hostname == null || hostname.isEmpty()) {
try {
hostname = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
System.err.println("Could not detect hostname. Use the '-n' or '--hostname' option to specify.");
System.err.println("Could not detect hostname. Use the '-n' or '--hostname' option to specify it.");
return -1;
}
}
Configuration configuration = new Configuration();
if(configurationFile.exists()) {
try {
configuration.parse(configurationFile.toPath());
} catch (Exception e) {
System.err.println("Could not parse configuration file: " + e.getMessage());
return 1;
}
String pf4jPluginsDir = System.getProperty("pf4j.pluginsDir");
if(pf4jPluginsDir != null) {
pluginPath = pf4jPluginsDir;
}
Main main = new Main();
main.bind("pluginPath", pluginPath);
main.bind("myServerUrl", serverUrl.toString());
main.bind("myHostname", hostname);
main.bind("configuration", configuration);
main.configure().addRoutesBuilder(ClientRouteBuilder.class);
// now keep the application running until the JVM is terminated (ctrl + c or sigterm)
@ -102,7 +62,6 @@ public class Application implements Callable<Integer> {
main.run();
} catch (Exception e) {
System.err.println(e.getMessage());
return 1;
}
return 0;

View file

@ -1,181 +1,87 @@
package sysmon.client;
import org.apache.camel.Exchange;
import org.apache.camel.LoggingLevel;
import org.apache.camel.builder.AggregationStrategies;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.jackson.JacksonDataFormat;
import org.apache.camel.model.dataformat.JsonLibrary;
import org.apache.camel.spi.Registry;
import org.pf4j.JarPluginManager;
import org.pf4j.PluginManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.ComboResult;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import javax.script.*;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class ClientRouteBuilder extends RouteBuilder {
private static final Logger log = LoggerFactory.getLogger(ClientRouteBuilder.class);
private final Set<String> scriptFiles = new HashSet<>();
@Override
public void configure() {
Registry registry = getContext().getRegistry();
Configuration configuration = (Configuration) registry.lookupByName("configuration");
Path[] pluginPaths = { Paths.get(registry.lookupByNameAndType("pluginPath", String.class)) };
PluginManager pluginManager = new JarPluginManager(pluginPaths);
Path[] pluginpaths = { Paths.get(registry.lookupByNameAndType("pluginPath", String.class)) };
PluginManager pluginManager = new JarPluginManager(pluginpaths);
pluginManager.loadPlugins();
pluginManager.startPlugins();
List<String> providers = new ArrayList<>();
List<MetricExtension> metricExtensions = pluginManager.getExtensions(MetricExtension.class);
for (MetricExtension ext : metricExtensions) {
final String name = ext.getName();
// Load configuration if available
if(configuration.isForExtension(name)) {
log.info("Loading configuring for extension: " + ext.getDescription());
ext.setConfiguration(configuration.getForExtension(name));
if(ext.isSupported()) {
String provides = ext.getProvides();
if(providers.contains(provides)) {
log.warn("Skipping extension (already provided): " + ext.getName());
continue;
}
if(ext.isSupported() && ext.isEnabled()) {
addExtensionRoute(ext);
} else {
log.info("Skipping extension (not supported or disabled): " + ext.getDescription());
}
}
from("seda:metrics?purgeWhenStopping=true")
.routeId("aggregation")
.aggregate(constant(true), AggregationStrategies.beanAllowNull(ComboAppender.class, "append"))
.completionTimeout(5000L)
.doTry()
.to("seda:outbound?discardWhenFull=true")
.log("Aggregating ${body} before sending to server.")
.doCatch(Exception.class)
.log(LoggingLevel.WARN, "Error: ${exception.message}.")
.end();
from("seda:outbound?purgeWhenStopping=true")
.routeId("outbound")
.setHeader(Exchange.HTTP_METHOD, constant("POST"))
.doTry()
.marshal(new JacksonDataFormat(ComboResult.class))
.to((String)registry.lookupByName("myServerUrl"))
.log("${body}")
.doCatch(Exception.class)
.log(LoggingLevel.WARN,"Error: ${exception.message}.")
.end();
// Find all local scripts
String scriptsPath = configuration.getScriptPath();
if(scriptsPath != null && Files.isDirectory(Paths.get(scriptsPath))) {
try {
scriptFiles.addAll(listFilesByExtension(scriptsPath, "groovy"));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
// Enable the local scripts
for (String scriptFile : scriptFiles) {
try {
ScriptWrapper scriptWrapper = new ScriptWrapper(scriptsPath, scriptFile);
addScriptRoute(scriptWrapper);
} catch(Exception e) {
log.error("configure() - script error: {}", e.getMessage());
}
}
}
log.info(">>> Enabling extension: " + ext.getDescription());
providers.add(provides);
void addScriptRoute(ScriptWrapper script) {
Registry registry = getContext().getRegistry();
from("timer:scripts?fixedRate=true&period=30s")
.routeId(script.toString())
.bean(script, "run")
.outputType(MetricResult.class)
.process(new MetricEnrichProcessor(registry))
.choice().when(exchangeProperty("skip").isEqualTo(true))
.log(LoggingLevel.WARN, "Skipping empty measurement.")
.stop()
.otherwise()
.log("${body}")
.to("seda:metrics?discardWhenFull=true");
}
void addExtensionRoute(MetricExtension ext) {
Registry registry = getContext().getRegistry();
// TODO: Make timer thread configurable
// Setup Camel route for this extension
// a unique timer name gives the timer it's own thread, otherwise it's a shared thread for other timers with same name.
String timerName = ext.isThreaded() ? ext.getName() : "default";
String timerInterval = (ext.getInterval() != null) ? ext.getInterval() : "30s";
from("timer:" + timerName + "?fixedRate=true&period=" + timerInterval)
.routeId(ext.getName())
//from("timer:"+provides+"?fixedRate=true&period=30s")
from("timer:extensions?fixedRate=true&period=30s")
.bean(ext, "getMetrics")
//.doTry()
.outputType(MetricResult.class)
.process(new MetricEnrichProcessor(registry))
.choice().when(exchangeProperty("skip").isEqualTo(true))
.log(LoggingLevel.WARN, "Skipping empty measurement.")
.log("Skipping empty measurement.")
.stop()
.otherwise()
.log("${body}")
.to("seda:metrics?discardWhenFull=true");
}
List<String> findScripts(String location) {
log.info("Looking for scripts in: {}", location);
List<String> scripts = new ArrayList<>();
ScriptEngineManager manager = new ScriptEngineManager();
List<ScriptEngineFactory> factoryList = manager.getEngineFactories();
for (ScriptEngineFactory factory : factoryList) {
log.info("findScripts() - Supporting: {}", factory.getLanguageName());
for(String ex : factory.getExtensions()) {
log.info("findScripts() - Extension: {}", ex);
try {
scripts.addAll(listFilesByExtension(location, ex));
log.warn(scripts.toString());
} catch (IOException e) {
throw new RuntimeException(e);
} else {
log.info(">>> Skipping extension (not supported here): " + ext.getDescription());
}
}
}
return scripts;
}
Set<String> listFilesByExtension(String dir, String ext) throws IOException {
try (Stream<Path> stream = Files.list(Paths.get(dir))) {
return stream
.filter(file -> !Files.isDirectory(file))
.map(Path::getFileName)
.map(Path::toString)
.filter(s -> s.endsWith(ext))
.collect(Collectors.toSet());
}
// TODO: Make 'concurrentConsumers' configurable
from("seda:metrics?concurrentConsumers=1")
.setHeader(Exchange.HTTP_METHOD, constant("POST"))
//.setHeader(Exchange.CONTENT_TYPE, constant("application/json"))
.doTry()
//.process(new MetricProcessor())
.marshal().json(JsonLibrary.Jackson, MetricResult.class)
.to((String)registry.lookupByName("myServerUrl"))
.doCatch(Exception.class)
.log("Error: ${exception.message}")
//.log("Error sending metric to collector: ${body}")
.end();
}
}

View file

@ -1,18 +0,0 @@
package sysmon.client;
import sysmon.shared.ComboResult;
import sysmon.shared.MetricResult;
public class ComboAppender {
public ComboResult append(ComboResult comboResult, MetricResult metricResult) {
if (comboResult == null) {
comboResult = new ComboResult();
}
comboResult.getMetricResults().add(metricResult);
return comboResult;
}
}

View file

@ -1,78 +0,0 @@
package sysmon.client;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tomlj.Toml;
import org.tomlj.TomlParseResult;
import org.tomlj.TomlTable;
import java.io.IOException;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
public final class Configuration {
private final static Logger log = LoggerFactory.getLogger(Configuration.class);
private TomlParseResult result;
void parse(Path configurationFile) throws IOException {
log.info("Parsing configuration file: " + configurationFile);
result = Toml.parse(configurationFile);
result.errors().forEach(error -> log.error(error.toString()));
}
boolean isForExtension(String extName) {
if(result == null) {
return false;
}
String key = String.format("extension.%s", extName);
return result.contains(key);
}
Map<String, Object> getForExtension(String extName) {
if(result == null) {
log.debug("No configuration file loaded ...");
return null;
}
Map<String, Object> map = new HashMap<>();
String key = String.format("extension.%s", extName);
TomlTable table = result.getTableOrEmpty(key);
table.keySet().forEach( k -> {
if(table.isBoolean(k)) {
map.put(k, table.getBoolean(k));
} else if(table.isString(k)) {
map.put(k, table.getString(k));
} else if(table.isLong(k)) {
map.put(k, table.getLong(k));
} else if(table.isDouble(k)) {
map.put(k, table.getDouble(k));
} else if(table.isArray(k)) {
map.put(k, Objects.requireNonNull(table.getArray(k)).toList());
} else if(table.isTable(k)) {
map.put(k, table.getTable(k));
}
});
return map;
}
String getScriptPath() {
if(result == null) {
log.debug("No configuration file loaded ...");
return null;
}
return result.getString("scripts");
}
}

View file

@ -1,14 +0,0 @@
package sysmon.client;
import org.apache.camel.Exchange;
import org.apache.camel.processor.aggregate.AbstractListAggregationStrategy;
import sysmon.shared.MetricResult;
public class ListOfResultsStrategy extends AbstractListAggregationStrategy<MetricResult> {
@Override
public MetricResult getValue(Exchange exchange) {
return exchange.getIn().getBody(MetricResult.class);
}
}

View file

@ -1,47 +0,0 @@
package sysmon.client;
import groovy.lang.GroovyClassLoader;
import groovy.lang.GroovyObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.MetricResult;
import sysmon.shared.MetricScript;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
public class ScriptWrapper {
private static final Logger log = LoggerFactory.getLogger(ScriptWrapper.class);
private final static GroovyClassLoader loader = new GroovyClassLoader();
private GroovyObject script;
private final String name;
public ScriptWrapper(String scriptPath, String scriptFile) {
name = scriptFile;
try {
Class<?> scriptClass = loader.parseClass(new File(scriptPath, scriptFile));
script = (GroovyObject) scriptClass.getDeclaredConstructor().newInstance();
} catch (IOException |InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) {
log.error("ScriptWrapper() - error: {}", e.getMessage());
}
}
MetricResult run() {
MetricResult result = null;
if (script != null && script instanceof MetricScript) {
result = (MetricResult) script.invokeMethod("getMetrics", null);
}
return result;
}
@Override
public String toString() {
return name;
}
}

View file

@ -15,7 +15,30 @@
## limitations under the License.
## ---------------------------------------------------------------------------
# to configure camel main
# here you can configure options on camel main (see MainConfigurationProperties class)
camel.main.name = sysmon-client
camel.main.jmxEnabled = false
# enable tracing
#camel.main.tracing = true
# bean introspection to log reflection based configuration
#camel.main.beanIntrospectionExtendedStatistics=true
#camel.main.beanIntrospectionLoggingLevel=INFO
# run in lightweight mode to be tiny as possible
camel.main.lightweight = true
camel.component.seda.queue-size = 100
# and eager load classes
#camel.main.eager-classloading = true
# use object pooling to reduce JVM garbage collection
#camel.main.exchange-factory = pooled
#camel.main.exchange-factory-statistics-enabled = true
# can be used to not start the route
# camel.main.auto-startup = false
# configure beans
#camel.beans.metricProcessor = #class:org.sysmon.client.MetricProcessor
#camel.dataformat.json-jackson.use-list = true

View file

@ -3,6 +3,4 @@ org.slf4j.simpleLogger.showDateTime=true
org.slf4j.simpleLogger.showShortLogName=true
org.slf4j.simpleLogger.dateTimeFormat=yyyy-MM-dd HH:mm:ss.SSS
org.slf4j.simpleLogger.levelInBrackets=true
org.slf4j.simpleLogger.defaultLogLevel=warn
org.slf4j.simpleLogger.showThreadName=false
org.slf4j.simpleLogger.showLogName=false
org.slf4j.simpleLogger.defaultLogLevel=info

File diff suppressed because it is too large Load diff

View file

@ -1,21 +1,26 @@
{
"__inputs": [
{
"name": "DS_SYSMON",
"label": "Database",
"name": "DS_INFLUXDB-SYSMON",
"label": "InfluxDB-sysmon",
"description": "",
"type": "datasource",
"pluginId": "influxdb",
"pluginName": "InfluxDB"
}
],
"__elements": {},
"__requires": [
{
"type": "panel",
"id": "gauge",
"name": "Gauge",
"version": ""
},
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "9.1.6"
"version": "8.0.6"
},
{
"type": "datasource",
@ -35,12 +40,6 @@
"name": "Table",
"version": ""
},
{
"type": "panel",
"id": "text",
"name": "Text",
"version": ""
},
{
"type": "panel",
"id": "timeseries",
@ -52,93 +51,40 @@
"list": [
{
"builtIn": 1,
"datasource": {
"type": "datasource",
"uid": "grafana"
},
"datasource": "-- Grafana --",
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"description": "https://git.data.coop/nellemann/sysmon/ - Metrics from within host / guest / partition.",
"description": "Metrics from within host / guest / partition.",
"editable": true,
"fiscalYearStartMonth": 0,
"gnetId": null,
"graphTooltip": 0,
"id": null,
"iteration": 1631013505736,
"links": [],
"liveNow": false,
"panels": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"gridPos": {
"h": 3,
"w": 24,
"x": 0,
"y": 0
},
"id": 30,
"options": {
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information visit: [git.data.coop/nellemann/sysmon](https://git.data.coop/nellemann/sysmon)\n ",
"mode": "markdown"
},
"pluginVersion": "9.1.6",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"refId": "A"
}
],
"transparent": true,
"type": "text"
},
{
"collapsed": false,
"datasource": {
"type": "influxdb",
"uid": "5KYZifB7z"
},
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
"x": 0,
"y": 3
"y": 0
},
"id": 4,
"panels": [],
"repeat": "hostname",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "5KYZifB7z"
},
"refId": "A"
}
],
"title": "${hostname}",
"type": "row"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"datasource": "${DS_INFLUXDB-SYSMON}",
"description": "",
"fieldConfig": {
"defaults": {
@ -147,8 +93,7 @@
},
"custom": {
"align": "left",
"displayMode": "auto",
"inspect": false
"displayMode": "auto"
},
"mappings": [],
"thresholds": {
@ -168,27 +113,16 @@
"h": 6,
"w": 12,
"x": 0,
"y": 4
"y": 1
},
"id": 16,
"options": {
"footer": {
"fields": "",
"reducer": [
"sum"
],
"show": false
},
"showHeader": true,
"sortBy": []
},
"pluginVersion": "9.1.6",
"pluginVersion": "8.0.6",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
@ -279,18 +213,17 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Details",
"type": "table"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"datasource": "${DS_INFLUXDB-SYSMON}",
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
"mode": "palette-classic"
},
"mappings": [],
"thresholds": {
@ -335,7 +268,7 @@
"h": 6,
"w": 7,
"x": 12,
"y": 4
"y": 1
},
"id": 19,
"options": {
@ -353,13 +286,9 @@
"text": {},
"textMode": "auto"
},
"pluginVersion": "9.1.6",
"pluginVersion": "8.0.6",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
@ -369,7 +298,7 @@
},
{
"params": [
"null"
"linear"
],
"type": "fill"
}
@ -438,18 +367,17 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Memory Metrics",
"type": "stat"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"datasource": "${DS_INFLUXDB-SYSMON}",
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
"mode": "continuous-GrYlRd"
},
"mappings": [],
"thresholds": {
@ -494,14 +422,10 @@
"h": 6,
"w": 5,
"x": 19,
"y": 4
"y": 1
},
"id": 26,
"options": {
"colorMode": "value",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
@ -509,16 +433,13 @@
"fields": "",
"values": false
},
"text": {},
"textMode": "auto"
"showThresholdLabels": false,
"showThresholdMarkers": true,
"text": {}
},
"pluginVersion": "9.1.6",
"pluginVersion": "8.0.6",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
@ -528,7 +449,7 @@
},
{
"params": [
"null"
"previous"
],
"type": "fill"
}
@ -550,6 +471,10 @@
"params": [],
"type": "sum"
},
{
"params": [],
"type": "non_negative_difference"
},
{
"params": [
"read"
@ -568,6 +493,10 @@
"params": [],
"type": "sum"
},
{
"params": [],
"type": "non_negative_difference"
},
{
"params": [
"write"
@ -597,14 +526,13 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Disk Metrics",
"type": "stat"
"type": "gauge"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"datasource": "${DS_INFLUXDB-SYSMON}",
"description": "",
"fieldConfig": {
"defaults": {
@ -612,8 +540,6 @@
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@ -659,29 +585,23 @@
"h": 11,
"w": 12,
"x": 0,
"y": 10
"y": 7
},
"id": 24,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
"placement": "bottom"
},
"tooltip": {
"mode": "multi",
"sort": "none"
"mode": "multi"
}
},
"pluginVersion": "8.0.6",
"targets": [
{
"alias": "$col",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
@ -697,7 +617,7 @@
},
{
"params": [
"none"
"null"
],
"type": "fill"
}
@ -722,10 +642,8 @@
"type": "sum"
},
{
"params": [
"$__interval"
],
"type": "non_negative_derivative"
"params": [],
"type": "non_negative_difference"
},
{
"params": [
@ -746,10 +664,8 @@
"type": "sum"
},
{
"params": [
"$__interval"
],
"type": "non_negative_derivative"
"params": [],
"type": "non_negative_difference"
},
{
"params": [
@ -784,10 +700,7 @@
"type": "timeseries"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"datasource": "${DS_INFLUXDB-SYSMON}",
"description": "",
"fieldConfig": {
"defaults": {
@ -795,8 +708,6 @@
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@ -849,29 +760,23 @@
"h": 11,
"w": 12,
"x": 12,
"y": 10
"y": 7
},
"id": 25,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
"placement": "bottom"
},
"tooltip": {
"mode": "multi",
"sort": "none"
"mode": "multi"
}
},
"pluginVersion": "8.0.6",
"targets": [
{
"alias": "$col",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
@ -963,20 +868,18 @@
}
],
"refresh": "1m",
"schemaVersion": 37,
"schemaVersion": 30,
"style": "dark",
"tags": [
"sysmon"
],
"tags": [],
"templating": {
"list": [
{
"allValue": null,
"current": {},
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"datasource": "${DS_INFLUXDB-SYSMON}",
"definition": "SHOW TAG VALUES FROM \"base_memory\" WITH KEY = \"hostname\" WHERE time > now() - 60m",
"description": null,
"error": null,
"hide": 0,
"includeAll": false,
"label": "Host",
@ -987,19 +890,19 @@
"refresh": 2,
"regex": "",
"skipUrlSync": false,
"sort": 5,
"sort": 0,
"tagValuesQuery": "",
"tagsQuery": "",
"type": "query",
"useTags": false
},
{
"allValue": null,
"current": {},
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"datasource": "${DS_INFLUXDB-SYSMON}",
"definition": "SHOW TAG VALUES FROM \"base_process\" WITH KEY = \"name\" WHERE hostname =~ /$hostname/AND time > now() - 60m",
"description": null,
"error": null,
"hide": 0,
"includeAll": false,
"label": "Process",
@ -1010,33 +913,33 @@
"refresh": 1,
"regex": "",
"skipUrlSync": false,
"sort": 5,
"sort": 0,
"type": "query"
},
{
"allValue": "",
"current": {},
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"definition": "SELECT DISTINCT(\"pid\") FROM (SELECT * FROM \"base_process\" WHERE time > now() - 60m AND \"hostname\" =~ /$hostname/ AND \"name\" =~ /$process/)",
"datasource": "${DS_INFLUXDB-SYSMON}",
"definition": "SHOW TAG VALUES FROM \"base_process\" WITH KEY = \"pid\" WHERE hostname =~ /$hostname/AND \"name\" =~ /$process/ AND time > now() - 60m",
"description": null,
"error": null,
"hide": 0,
"includeAll": true,
"label": "PID",
"multi": false,
"name": "pid",
"options": [],
"query": "SELECT DISTINCT(\"pid\") FROM (SELECT * FROM \"base_process\" WHERE time > now() - 60m AND \"hostname\" =~ /$hostname/ AND \"name\" =~ /$process/)",
"refresh": 2,
"query": "SHOW TAG VALUES FROM \"base_process\" WITH KEY = \"pid\" WHERE hostname =~ /$hostname/AND \"name\" =~ /$process/ AND time > now() - 60m",
"refresh": 1,
"regex": "",
"skipUrlSync": false,
"sort": 3,
"sort": 0,
"type": "query"
}
]
},
"time": {
"from": "now-6h",
"from": "now-3h",
"to": "now-30s"
},
"timepicker": {
@ -1055,6 +958,5 @@
"timezone": "",
"title": "Sysmon - Process Explorer",
"uid": "Vjut5mS7k",
"version": 5,
"weekStart": ""
"version": 11
}

3
doc/ansible/README.md Normal file
View file

@ -0,0 +1,3 @@
# Example Ansible Playbooks
For installing on AIX and RPM-based Linux.

View file

@ -0,0 +1,63 @@
---
#
# Example ansible playbook for installation of sysmon client on AIX.
# More information at: https://bitbucket.org/mnellemann/sysmon
#
# ansible-galaxy collection install community.general
# ansible-playbook -i aixhost, -u root sysmon-client-aix.yml
#
# NOTE: Ensure correct timezone and time
- name: "Install Sysmon Client and Plugins on AIX"
hosts: all
gather_facts: yes
vars:
server_url: http://sysmon-server:9925/metrics
tasks:
- name: Sysmon Client | Ensure hostname resolves
ansible.builtin.lineinfile:
path: /etc/hosts
state: present
line: "127.0.1.1 {{ ansible_hostname }}"
- name: Sysmon Client | Copy sysmon-client.rpm
ansible.builtin.copy:
src: "{{ item }}"
dest: /opt/sysmon-client.rpm
with_fileglob:
- ../../client/build/distributions/sysmon-client-*.noarch.rpm
- name: Sysmon Client | Copy sysmon-plugins.rpm
ansible.builtin.copy:
src: "{{ item }}"
dest: /opt/sysmon-plugins.rpm
with_fileglob:
- ../../plugins/build/distributions/sysmon-plugins-*.noarch.rpm
- name: Sysmon Client | Install sysmon-client.rpm
ansible.builtin.command: /usr/bin/rpm -i --ignoreos /opt/sysmon-client.rpm
args:
creates: /opt/sysmon/client
- name: Sysmon Client | Install sysmon-plugins.rpm
ansible.builtin.command: /usr/bin/rpm -i --ignoreos /opt/sysmon-plugins.rpm
args:
creates: /opt/sysmon/plugins
- name: Sysmon Client | Create inittab entry for sysmon-client
community.general.aix_inittab:
name: sysmon
runlevel: '2'
action: respawn
command: env JAVA_HOME=/usr/java8_64 /opt/sysmon/client/bin/client -s {{ server_url }} >/tmp/sysmon.log 2>&1
state: present
become: yes
notify:
- reload inittab
handlers:
- name: reload inittab
command: init q

View file

@ -0,0 +1,61 @@
---
#
# Example ansible playbook for installation of sysmon client on Linux (RPM based).
# More information at: https://bitbucket.org/mnellemann/sysmon
#
# ansible-playbook -i linuxhost, -u root sysmon-client-linux.yml
#
# NOTE: Ensure correct timezone and time
- name: "Install Sysmon Client and Plugins on Linux (RPM based)"
hosts: all
gather_facts: no
vars:
server_url: http://sysmon-server:9925/metrics
tasks:
- name: Sysmon Client | Copy sysmon-client.rpm
ansible.builtin.copy:
src: "{{ item }}"
dest: /opt/sysmon-client.rpm
with_fileglob:
- ../../client/build/distributions/sysmon-client-*.noarch.rpm
- name: Sysmon Client | Copy sysmon-plugins.rpm
ansible.builtin.copy:
src: "{{ item }}"
dest: /opt/sysmon-plugins.rpm
with_fileglob:
- ../../plugins/build/distributions/sysmon-plugins-*.noarch.rpm
- name: Sysmon Client | Install OpenJDK (headless)
yum:
name: "java-11-openjdk-headless"
state: present
- name: Sysmon Client | Install sysmon-client.rpm
yum:
name: /opt/sysmon-client.rpm
state: present
disable_gpg_check: true
- name: Sysmon Plugins | Install sysmon-plugins.rpm
yum:
name: /opt/sysmon-plugins.rpm
state: present
disable_gpg_check: true
- name: Sysmon Client | Create service file
template: src=sysmon-client.service.j2 dest=/lib/systemd/system/sysmon-client.service mode=644
notify:
- reload systemctl
- name: Sysmon Client | Start service
service: name=sysmon-client.service state=started enabled=yes
handlers:
- name: reload systemctl
command: systemctl daemon-reload

View file

@ -0,0 +1,10 @@
[Unit]
Description=Sysmon Client Service
[Service]
TimeoutStartSec=3
Restart=always
ExecStart=/opt/sysmon/client/bin/client -s {{server_url}}
[Install]
WantedBy=default.target

View file

@ -0,0 +1,33 @@
---
#
# Example ansible playbook for timezone and NTP setup on AIX.
#
# ansible-playbook -i aixhost, -u root timezone-aix.yml
#
- name: "Timezone and NTP on AIX"
hosts: all
gather_facts: no
vars:
timezone: Europe/Copenhagen
ntp_server: dk.pool.ntp.org
tasks:
- name: Configure timezone
ansible.builtin.replace:
path: /etc/environment
regexp: '^TZ=(.*)$'
replace: "TZ={{ timezone }}"
- name: Update time from NTP server
ansible.builtin.command: "env TZ={{ timezone }} /usr/sbin/ntpdate {{ ntp_server }}"
- name: Create cron entry for updating time periodically
ansible.builtin.cron:
name: ntpdate
weekday: "*"
minute: "1"
hour: "*"
user: root
job: "/usr/sbin/ntpdate {{ ntp_server }}"

File diff suppressed because it is too large Load diff

View file

@ -1,738 +0,0 @@
{
"__inputs": [
{
"name": "DS_SYSMON",
"label": "Database",
"description": "",
"type": "datasource",
"pluginId": "influxdb",
"pluginName": "InfluxDB"
}
],
"__elements": {},
"__requires": [
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "9.1.6"
},
{
"type": "datasource",
"id": "influxdb",
"name": "InfluxDB",
"version": "1.0.0"
},
{
"type": "panel",
"id": "text",
"name": "Text",
"version": ""
},
{
"type": "panel",
"id": "timeseries",
"name": "Time series",
"version": ""
}
],
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "datasource",
"uid": "grafana"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"description": "https://git.data.coop/nellemann/sysmon/ - Metrics from sysmon agent.",
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": null,
"links": [],
"liveNow": false,
"panels": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"gridPos": {
"h": 3,
"w": 24,
"x": 0,
"y": 0
},
"id": 28,
"options": {
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information visit: [git.data.coop/nellemann/sysmon](https://git.data.coop/nellemann/sysmon)\n ",
"mode": "markdown"
},
"pluginVersion": "9.1.6",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"refId": "A"
}
],
"transparent": true,
"type": "text"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"description": "Load average as reported by OS.",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": 3600000,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": {
"h": 13,
"w": 12,
"x": 0,
"y": 3
},
"id": 2,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "9.1.3",
"targets": [
{
"alias": "$tag_hostname - $col",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"hostname"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
],
"measurement": "base_load",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"5min"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
},
{
"params": [
"5min"
],
"type": "alias"
}
]
],
"tags": [
{
"key": "hostname",
"operator": "=~",
"value": "/^$hostname$/"
}
]
}
],
"title": "System Load Average",
"type": "timeseries"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"description": "Shows the number of physical processors consumed.",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": 3600000,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"min": 0,
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": {
"h": 13,
"w": 12,
"x": 12,
"y": 3
},
"id": 17,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "9.1.3",
"targets": [
{
"alias": "$tag_hostname",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"hostname"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
],
"measurement": "power_processor",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"physc"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
}
]
],
"tags": [
{
"key": "hostname",
"operator": "=~",
"value": "/^$hostname$/"
}
]
}
],
"title": "Power - Shared Processors - Physical Cores Consumed",
"type": "timeseries"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"description": "Percentage of the entitled capacity consumed.",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": 3600000,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"min": 0,
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "percent"
},
"overrides": []
},
"gridPos": {
"h": 14,
"w": 12,
"x": 0,
"y": 16
},
"id": 30,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "9.1.3",
"targets": [
{
"alias": "$tag_hostname",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"hostname"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
],
"measurement": "power_processor",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"entc"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
}
]
],
"tags": [
{
"key": "hostname",
"operator": "=~",
"value": "/^$hostname$/"
}
]
}
],
"title": "Power - Shared Processors - Entitled Capacity Consumed",
"type": "timeseries"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"description": "Percentage of logical processor(s) utilization",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"axisSoftMin": 0,
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": 3600000,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"min": 0,
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "percent"
},
"overrides": []
},
"gridPos": {
"h": 14,
"w": 12,
"x": 12,
"y": 16
},
"id": 31,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "9.1.3",
"targets": [
{
"alias": "$tag_hostname",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"hostname"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
],
"measurement": "power_processor",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"lbusy"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
}
]
],
"tags": [
{
"key": "hostname",
"operator": "=~",
"value": "/^$hostname$/"
}
]
}
],
"title": "Power - Shared Processors - Logical Processor Utilization",
"type": "timeseries"
}
],
"refresh": "30s",
"schemaVersion": 37,
"style": "dark",
"tags": [
"sysmon",
"Power"
],
"templating": {
"list": [
{
"current": {},
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"definition": "SHOW TAG VALUES FROM \"power_processor\" WITH KEY = \"hostname\" WHERE time > now() - 60m",
"hide": 0,
"includeAll": true,
"label": "Host",
"multi": true,
"name": "hostname",
"options": [],
"query": "SHOW TAG VALUES FROM \"power_processor\" WITH KEY = \"hostname\" WHERE time > now() - 60m",
"refresh": 2,
"regex": "",
"skipUrlSync": false,
"sort": 5,
"tagValuesQuery": "",
"tagsQuery": "",
"type": "query",
"useTags": false
}
]
},
"time": {
"from": "now-7d",
"to": "now-10s"
},
"timepicker": {
"nowDelay": "10s",
"refresh_intervals": [
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
]
},
"timezone": "",
"title": "Sysmon - IBM Power",
"uid": "3zPCIbN4z",
"version": 7,
"weekStart": ""
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 17 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 28 KiB

View file

@ -1,74 +0,0 @@
<?xml version="1.0" encoding="iso-8859-1"?>
<!-- Generator: Adobe Illustrator 19.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 512.001 512.001" style="enable-background:new 0 0 512.001 512.001;" xml:space="preserve">
<polygon style="fill:#88ACB5;" points="306.111,381.102 306.111,435.57 256,457.357 205.889,435.57 205.889,381.102 "/>
<g>
<rect x="205.889" y="435.571" style="fill:#A7CBCF;" width="100.221" height="49.564"/>
<path style="fill:#A7CBCF;" d="M492.566,26.09h-32.681v336.471l43.945-21.787V37.354C503.83,31.166,498.764,26.09,492.566,26.09z"
/>
</g>
<path style="fill:#C7E7EB;" d="M459.885,26.09H19.434c-6.198,0-11.264,5.076-11.264,11.264v303.42l32.681,21.787h430.298V37.354
C471.149,31.166,466.083,26.09,459.885,26.09z"/>
<path style="fill:#367596;" d="M471.149,340.774l-11.264,66.473h32.681c6.198,0,11.264-5.066,11.264-11.264v-55.209H471.149z"/>
<path style="fill:#5195AF;" d="M8.17,340.774v55.209c0,6.198,5.066,11.264,11.264,11.264h440.451
c6.198,0,11.264-5.066,11.264-11.264v-55.209H8.17z"/>
<path style="fill:#FFFFFF;" d="M256,69.668c-84.662,0-158.638,45.698-198.646,113.764C97.361,251.498,171.338,297.197,256,297.197
s158.639-45.698,198.646-113.764C414.638,115.366,340.662,69.668,256,69.668z"/>
<path style="fill:#27467A;" d="M256,69.668v227.528c62.881,0,113.764-50.883,113.764-113.764S318.881,69.668,256,69.668z"/>
<path style="fill:#367596;" d="M256,69.667c-62.881,0-113.764,50.883-113.764,113.764S193.119,297.196,256,297.196
c44.817,0,81.083-50.883,81.083-113.764S300.817,69.667,256,69.667z"/>
<circle style="fill:#111449;" cx="256" cy="183.34" r="46.298"/>
<path d="M492.565,17.826H19.435C8.718,17.826,0,26.544,0,37.261v358.623c0,10.717,8.718,19.435,19.435,19.435H197.88v62.515h-77.06
c-4.512,0-8.17,3.657-8.17,8.17c0,4.513,3.658,8.17,8.17,8.17h270.36c4.513,0,8.17-3.657,8.17-8.17c0-4.513-3.657-8.17-8.17-8.17
h-77.059V415.32h178.445c10.717,0,19.435-8.718,19.435-19.435V37.261C512,26.544,503.282,17.826,492.565,17.826z M297.779,477.835
H214.22V415.32h83.559V477.835z M495.66,395.884c0,1.678-1.417,3.095-3.095,3.095H19.435c-1.678,0-3.095-1.417-3.095-3.095V37.261
c0-1.678,1.417-3.095,3.095-3.095h473.129c1.678,0,3.095,1.417,3.095,3.095V395.884z"/>
<path d="M470.036,332.504H41.965c-4.512,0-8.17,3.657-8.17,8.17c0,4.513,3.658,8.17,8.17,8.17h428.07c4.513,0,8.17-3.657,8.17-8.17
C478.206,336.161,474.548,332.504,470.036,332.504z"/>
<path d="M135.806,272.794C172.129,294.04,213.691,305.27,256,305.27c42.31,0,83.871-11.23,120.194-32.476
c35.241-20.612,64.804-50.115,85.496-85.318c1.502-2.557,1.502-5.725,0-8.281c-20.692-35.203-50.257-64.706-85.496-85.319
C339.871,72.63,298.31,61.4,256,61.4s-83.872,11.23-120.194,32.475c-35.241,20.613-64.805,50.116-85.496,85.319
c-1.502,2.557-1.502,5.725,0,8.281C71.001,222.679,100.566,252.182,135.806,272.794z M252.818,288.877
c-56.759-1.689-102.412-48.382-102.412-105.542c0-57.161,45.654-103.854,102.412-105.543c1.061-0.015,2.119-0.052,3.182-0.052
c1.063,0,2.121,0.037,3.182,0.052c56.758,1.689,102.412,48.382,102.412,105.543c0,57.16-45.654,103.852-102.412,105.542
c-1.061,0.015-2.119,0.052-3.182,0.052C254.937,288.93,253.879,288.893,252.818,288.877z M445.109,183.336
c-25.232,40.845-62.884,71.925-106.353,89.465c24.078-22.288,39.179-54.143,39.179-89.465s-15.102-67.177-39.18-89.466
C382.223,111.41,419.877,142.49,445.109,183.336z M173.246,93.87c-24.079,22.289-39.18,54.145-39.18,89.466
c0,35.32,15.101,67.175,39.18,89.465c-43.469-17.54-81.122-48.619-106.353-89.465C92.123,142.49,129.776,111.41,173.246,93.87z"/>
<path d="M310.379,183.335c0-7.773-1.621-15.299-4.818-22.371c-1.857-4.11-6.696-5.938-10.81-4.08
c-4.111,1.858-5.938,6.697-4.08,10.81c2.234,4.944,3.367,10.205,3.367,15.641c0,20.975-17.064,38.038-38.038,38.038
s-38.038-17.064-38.038-38.038s17.064-38.038,38.038-38.038c5.435,0,10.698,1.133,15.642,3.368c4.112,1.861,8.951,0.032,10.81-4.08
c1.858-4.111,0.032-8.951-4.08-10.81c-7.073-3.198-14.601-4.819-22.372-4.819c-29.985,0-54.379,24.395-54.379,54.379
s24.394,54.379,54.379,54.379C285.984,237.713,310.379,213.318,310.379,183.335z"/>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 4.2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 265 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 128 KiB

View file

@ -1,52 +0,0 @@
<mxfile host="65bd71144e" scale="1" border="15">
<diagram id="JBJC25AnoTCSJF4dnfuA" name="Page-1">
<mxGraphModel dx="1761" dy="1167" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="1169" pageHeight="827" math="0" shadow="0">
<root>
<mxCell id="0"/>
<mxCell id="1" parent="0"/>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-1" value="Linux" style="rounded=1;whiteSpace=wrap;html=1;sketch=1;fillColor=#f5f5f5;fontColor=#333333;strokeColor=#666666;" parent="1" vertex="1">
<mxGeometry x="250" y="50" width="120" height="60" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-2" value="AIX" style="rounded=1;whiteSpace=wrap;html=1;sketch=1;fillColor=#f5f5f5;fontColor=#333333;strokeColor=#666666;" parent="1" vertex="1">
<mxGeometry x="180" y="140" width="120" height="60" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-6" style="edgeStyle=orthogonalEdgeStyle;rounded=1;orthogonalLoop=1;jettySize=auto;html=1;sketch=1;" parent="1" source="n2YpyNFSe_BwzrgFeqL7-3" target="n2YpyNFSe_BwzrgFeqL7-4" edge="1">
<mxGeometry relative="1" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-3" value="Sysmon Server" style="rounded=1;whiteSpace=wrap;html=1;sketch=1;fillColor=#e1d5e7;strokeColor=#9673a6;" parent="1" vertex="1">
<mxGeometry x="510" y="120" width="120" height="60" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-7" style="edgeStyle=orthogonalEdgeStyle;rounded=1;orthogonalLoop=1;jettySize=auto;html=1;sketch=1;" parent="1" source="n2YpyNFSe_BwzrgFeqL7-4" target="n2YpyNFSe_BwzrgFeqL7-5" edge="1">
<mxGeometry relative="1" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-4" value="InfluxDB&lt;br&gt;ver 1.x" style="shape=cylinder3;whiteSpace=wrap;html=1;boundedLbl=1;backgroundOutline=1;size=15;sketch=1;fillColor=#ffe6cc;strokeColor=#d79b00;" parent="1" vertex="1">
<mxGeometry x="690" y="110" width="60" height="80" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-5" value="Grafana&lt;br&gt;Dashboards &amp;amp; Alerting" style="shape=document;whiteSpace=wrap;html=1;boundedLbl=1;sketch=1;fillColor=#fff2cc;strokeColor=#d6b656;" parent="1" vertex="1">
<mxGeometry x="800" y="110" width="120" height="80" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-8" value="Other OS'es" style="rounded=1;whiteSpace=wrap;html=1;sketch=1;fillColor=#f5f5f5;fontColor=#333333;strokeColor=#666666;" parent="1" vertex="1">
<mxGeometry x="310" y="210" width="120" height="60" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-17" style="edgeStyle=orthogonalEdgeStyle;rounded=1;sketch=1;orthogonalLoop=1;jettySize=auto;html=1;" parent="1" source="n2YpyNFSe_BwzrgFeqL7-9" target="n2YpyNFSe_BwzrgFeqL7-3" edge="1">
<mxGeometry relative="1" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-9" value="Agent" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;sketch=1;" parent="1" vertex="1">
<mxGeometry x="320" y="90" width="50" height="20" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-15" style="edgeStyle=orthogonalEdgeStyle;rounded=1;sketch=1;orthogonalLoop=1;jettySize=auto;html=1;" parent="1" source="n2YpyNFSe_BwzrgFeqL7-10" target="n2YpyNFSe_BwzrgFeqL7-3" edge="1">
<mxGeometry relative="1" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-10" value="Agent" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;sketch=1;" parent="1" vertex="1">
<mxGeometry x="250" y="180" width="50" height="20" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-16" style="edgeStyle=orthogonalEdgeStyle;rounded=1;sketch=1;orthogonalLoop=1;jettySize=auto;html=1;" parent="1" source="n2YpyNFSe_BwzrgFeqL7-11" target="n2YpyNFSe_BwzrgFeqL7-3" edge="1">
<mxGeometry relative="1" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-11" value="Agent" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;sketch=1;" parent="1" vertex="1">
<mxGeometry x="380" y="250" width="50" height="20" as="geometry"/>
</mxCell>
</root>
</mxGraphModel>
</diagram>
</mxfile>

Binary file not shown.

Before

Width:  |  Height:  |  Size: 138 KiB

View file

@ -1,9 +1,6 @@
version = 1.1.4
pf4jVersion = 3.9.0
slf4jVersion = 2.0.9
camelVersion = 3.14.9
groovyVersion = 3.0.18
picocliVersion = 4.7.5
oshiVersion = 6.4.7
spockVersion = 2.3-groovy-3.0
tomljVersion = 1.1.0
version=0.0.9
pf4jVersion=3.6.0
slf4jVersion=1.7.32
camelVersion=3.11.1
picocliVersion=4.6.1
oshiVersion=5.8.2

Binary file not shown.

View file

@ -1,6 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.1-bin.zip
networkTimeout=10000
distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

269
gradlew vendored
View file

@ -1,7 +1,7 @@
#!/bin/sh
#!/usr/bin/env sh
#
# Copyright © 2015-2021 the original authors.
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -17,101 +17,67 @@
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
MAX_FD="maximum"
warn () {
echo "$*"
} >&2
}
die () {
echo
echo "$*"
echo
exit 1
} >&2
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
@ -121,9 +87,9 @@ CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD=$JAVA_HOME/bin/java
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
@ -132,7 +98,7 @@ Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
@ -140,105 +106,80 @@ location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=$( cygpath --unix "$JAVACMD" )
JAVACMD=`cygpath --unix "$JAVACMD"`
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=`expr $i + 1`
done
case $i in
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
# double quotes to make sure that they get re-expanded; and
# * put everything else in single quotes, so that it's not re-expanded.
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=`save "$@"`
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
exec "$JAVACMD" "$@"

15
gradlew.bat vendored
View file

@ -14,7 +14,7 @@
@rem limitations under the License.
@rem
@if "%DEBUG%"=="" @echo off
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@ -25,8 +25,7 @@
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%"=="" set DIRNAME=.
@rem This is normally unused
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@ -41,7 +40,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
@ -76,15 +75,13 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
:end
@rem End local scope for the variables with windows NT shell
if %ERRORLEVEL% equ 0 goto mainEnd
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal

View file

@ -1,6 +1,7 @@
# System Monitor Plugins
Collection of standard sysmon plugins for use with the client.
Collection of standard sysmon plugins.
- [base](base/README.md) - Base OS metrics (uses [oshi](https://github.com/oshi/oshi))
- [power](power/README.md) - IBM Power specific metrics
- [base](os-base/) - base OS metrics
- [aix](os-aix/) - AIX (and Power) specific metrics
- [linux](os-linux/) - Linux specific metrics

View file

@ -1,85 +0,0 @@
# Base Plugin
The base plugins uses the [oshi](https://github.com/oshi/oshi) library to get it's metrics.
## Processor Extension
Reports the following metrics seen:
- **system** -CPU time (in ms) spend on system processes.
- **user** - CPU time (in ms) spend on user processes.
- **nice** - CPU time (in ms) spend on user processes running at lower priority.
- **iowait** - CPU time (in ms) spend waiting (for i/o).
- **steal** - CPU time (in ms) stolen by hypervisor and given to other virtual systems.
- **irq** - CPU time (in ms) spend by kernel on interrupt requests.
- **softirq** - CPU (in ms) time spend by kernel on soft interrupt requests.
- **idle** - CPU time (in ms) spend idling (doing nothing).
- **busy** - CPU time (in ms) spend working.
## Memory Extension
Reports the following metrics (in bytes):
- **available** - Estimation of how much memory is available for starting new applications, without swapping.
- **total** - The total amount of (installed) memory.
- **usage** - Percentage of memory used out of the total amount of memory.
- **paged** - ...
- **virtual** - ...
## Disk Extension
Metrics reported are:
- **reads** - The total number of bytes read.
- **writes** - The total number of bytes written.
- **iotime** - Time spent on IO in milliseconds.
- **queue** - Length of disk IO queue.
## Filesystem Extension
### Metrics
- **free_bytes** - Free bytes for filesystem.
- **total_bytes** - Total bytes for filesystem.
- **free_inodes** - Free inodes for filesystem.
- **total_inodes** - Total inodes for filesystem.
### Configuration
```toml
[extension.base_filesystem]
enabled = true
interval = "10s"
exclude_type = [ "tmpfs", "ahafs" ]
exclude_mount = [ "/boot/efi" ]
```
## Process Extension
Reports metrics on one or more running processes.
- **mem_rss** - Resident set memory in bytes.
- **mem_vsz** - Virtual memory in bytes.
- **kernel_time** - Time spent (in milliseconds) in kernel space.
- **user_time** - Time used (in milliseconds) in user space.
- **read_bytes** - Bytes read by process.
- **write_bytes** - Bytes written by process.
- **files** - Files currently open by process.
- **threads** - Running threads.
- **user** - User running the process.
- **group** - Group running the process
- **prio** - Process priority.
### Configuration
The **include** option let's you specify what processes to report for.
```toml
[extension.base_process]
enabled = true # true or false
interval = "10s"
include = [ "java", "influxd", "grafana-server" ]
```

View file

@ -1,120 +0,0 @@
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.hardware.HWDiskStore;
import oshi.hardware.HardwareAbstractionLayer;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.*;
@Extension
public class BaseDiskExtension implements MetricExtension {
private static final Logger log = LoggerFactory.getLogger(BaseDiskExtension.class);
// Extension details
private final String name = "base_disk";
private final String description = "Base Disk Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "10s";
private HardwareAbstractionLayer hardwareAbstractionLayer;
private List<HWDiskStore> diskStores;
private int refreshCounter = 0;
@Override
public boolean isEnabled() {
return enabled;
}
@Override
public boolean isThreaded() {
return threaded;
}
@Override
public boolean isSupported() {
hardwareAbstractionLayer = BasePlugin.getHardwareAbstractionLayer();
return hardwareAbstractionLayer != null;
}
@Override
public String getName() {
return name;
}
@Override
public String getInterval() {
return interval;
}
@Override
public String getDescription() {
return description;
}
@Override
public void setConfiguration(Map<String, Object> map) {
if (map.containsKey("enabled")) {
enabled = (boolean) map.get("enabled");
}
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
}
@Override
public MetricResult getMetrics() {
ArrayList<Measurement> measurementList = new ArrayList<>();
if(diskStores == null || refreshCounter++ > 360) {
log.debug("getMetrics() - refreshing list of disk stores");
diskStores = hardwareAbstractionLayer.getDiskStores();
refreshCounter = 0;
}
for(HWDiskStore store : diskStores) {
store.updateAttributes();
String name = store.getName();
if (name.matches("h?disk[0-9]+") ||
//name.matches("/dev/dm-[0-9]+") ||
name.matches("/dev/x?[sv]d[a-z]") ||
name.matches("/dev/nvme[0-9]n[0-9]") ||
name.startsWith("\\\\.\\PHYSICALDRIVE")
) {
TreeMap<String, String> tagsMap = new TreeMap<String, String>() {{
put("name", name);
}};
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
put("read", store.getReadBytes());
put("write", store.getWriteBytes());
put("iotime", store.getTransferTime());
put("queue", store.getCurrentQueueLength());
}};
log.debug("getMetrics() - tags: {}, fields: {}", tagsMap, fieldsMap);
measurementList.add(new Measurement(tagsMap, fieldsMap));
} else {
log.debug("getMetrics() - skipping device: {}", name);
}
}
return new MetricResult(name, measurementList);
}
}

View file

@ -1,148 +0,0 @@
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.software.os.OSFileStore;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.*;
@Extension
public class BaseFilesystemExtension implements MetricExtension {
private static final Logger log = LoggerFactory.getLogger(BaseDiskExtension.class);
// Extension details
private final String name = "base_filesystem";
private final String description = "Base Filesystem Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "10s";
private List<?> excludeType = new ArrayList<String>() {{
add("tmpfs");
add("ahafs");
}};
private List<?> excludeMount = new ArrayList<String>() {{
add("/boot/efi");
}};
private SystemInfo systemInfo;
private List<OSFileStore> fileStores;
private int refreshCounter = 0;
@Override
public boolean isEnabled() {
return enabled;
}
@Override
public boolean isThreaded() {
return threaded;
}
@Override
public boolean isSupported() {
systemInfo = BasePlugin.getSystemInfo();
//hardwareAbstractionLayer = BasePlugin.getHardwareAbstractionLayer();
return systemInfo != null;
}
@Override
public String getName() {
return name;
}
@Override
public String getInterval() {
return interval;
}
@Override
public String getDescription() {
return description;
}
@Override
public void setConfiguration(Map<String, Object> map) {
if (map.containsKey("enabled")) {
enabled = (boolean) map.get("enabled");
}
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
if(map.containsKey("exclude_type")) {
excludeType = (List<?>) map.get("exclude_type");
}
if(map.containsKey("exclude_mount")) {
excludeMount = (List<?>) map.get("exclude_mount");
}
}
@Override
public MetricResult getMetrics() {
ArrayList<String> alreadyProcessed = new ArrayList<>();
ArrayList<Measurement> measurementList = new ArrayList<>();
if(fileStores == null || refreshCounter++ > 360) {
fileStores = systemInfo.getOperatingSystem().getFileSystem().getFileStores(true);
}
for(OSFileStore store : fileStores) {
String name = store.getName();
String type = store.getType();
String mount = store.getMount();
if(excludeType.contains(type)) {
log.debug("Excluding type: " + type);
continue;
}
if(excludeMount.contains(mount)) {
log.debug("Excluding mount: " + mount);
continue;
}
if(alreadyProcessed.contains(name)) {
log.debug("Skipping name: " + name);
continue;
}
alreadyProcessed.add(name);
store.updateAttributes();
TreeMap<String, String> tagsMap = new TreeMap<String, String>() {{
put("name", name);
put("type", type);
put("mount", mount);
}};
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
put("free_bytes", store.getFreeSpace());
put("total_bytes", store.getTotalSpace());
put("free_inodes", store.getFreeInodes());
put("total_inodes", store.getTotalInodes());
}};
measurementList.add(new Measurement(tagsMap, fieldsMap));
}
return new MetricResult(name, measurementList);
}
}

View file

@ -1,90 +0,0 @@
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class BaseInfoExtension implements MetricExtension {
private static final Logger log = LoggerFactory.getLogger(BaseInfoExtension.class);
// Extension details
private final String name = "base_info";
private final String description = "Base System Information";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "60m";
private HashMap<String, String> tags = new HashMap<>();
private SystemInfo systemInfo;
@Override
public boolean isEnabled() {
return enabled;
}
@Override
public boolean isThreaded() {
return threaded;
}
@Override
public boolean isSupported() {
systemInfo = BasePlugin.getSystemInfo();
return systemInfo != null;
}
@Override
public String getName() {
return name;
}
@Override
public String getInterval() { return interval; }
@Override
public String getDescription() {
return description;
}
@Override
public void setConfiguration(Map<String, Object> map) {
if (map.containsKey("enabled")) {
enabled = (boolean) map.get("enabled");
}
if (map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if (map.containsKey("interval")) {
interval = (String) map.get("interval");
}
}
@Override
public MetricResult getMetrics() {
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
put("os_manufacturer", systemInfo.getOperatingSystem().getManufacturer()); // GNU/Linux / IBM
put("os_family", systemInfo.getOperatingSystem().getFamily()); // Freedesktop.org / AIX
put("os_codename", systemInfo.getOperatingSystem().getVersionInfo().getCodeName()); // Flatpak runtime / ppc64
put("os_version", systemInfo.getOperatingSystem().getVersionInfo().getVersion()); // 21.08.4 / 7.2
put("os_build", systemInfo.getOperatingSystem().getVersionInfo().getBuildNumber()); // 5.13.0-7620-generic / 2045B_72V
put("boot_time", systemInfo.getOperatingSystem().getSystemBootTime());
}};
return new MetricResult(name, new Measurement(tags, fieldsMap));
}
}

View file

@ -1,88 +0,0 @@
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.hardware.HardwareAbstractionLayer;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class BaseLoadExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(BaseLoadExtension.class);

    // Extension details
    private final String name = "base_load";
    private final String description = "Base Load Average Metrics";

    // Configuration / Options
    private boolean enabled = true;
    private boolean threaded = false;
    private String interval = "10s";

    private HardwareAbstractionLayer hardwareAbstractionLayer;

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public boolean isThreaded() {
        return threaded;
    }

    /** Supported wherever the plugin could initialize OSHI. */
    @Override
    public boolean isSupported() {
        hardwareAbstractionLayer = BasePlugin.getHardwareAbstractionLayer();
        return hardwareAbstractionLayer != null;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getInterval() {
        return interval;
    }

    @Override
    public String getDescription() {
        return description;
    }

    /**
     * Applies optional configuration keys: 'enabled' (boolean),
     * 'threaded' (boolean) and 'interval' (string, e.g. "10s").
     */
    @Override
    public void setConfiguration(Map<String, Object> map) {
        if (map.containsKey("enabled")) {
            enabled = (boolean) map.get("enabled");
        }
        if (map.containsKey("threaded")) {
            threaded = (boolean) map.get("threaded");
        }
        if (map.containsKey("interval")) {
            interval = (String) map.get("interval");
        }
    }

    /**
     * Collects the 1, 5 and 15 minute system load averages.
     * Built with a plain TreeMap instead of double-brace initialization,
     * which creates an anonymous subclass pinning this extension instance.
     * NOTE(review): OSHI may report negative elements when a value is
     * unavailable on the platform — confirm downstream tolerates that.
     */
    @Override
    public MetricResult getMetrics() {
        double[] loadAvg = hardwareAbstractionLayer.getProcessor().getSystemLoadAverage(3);
        TreeMap<String, Object> fieldsMap = new TreeMap<>();
        fieldsMap.put("1min", loadAvg[0]);
        fieldsMap.put("5min", loadAvg[1]);
        fieldsMap.put("15min", loadAvg[2]);
        log.debug(fieldsMap.toString());
        return new MetricResult(name, new Measurement(new TreeMap<>(), fieldsMap));
    }
}

View file

@ -1,107 +0,0 @@
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class BaseNetstatExtension implements MetricExtension {
private static final Logger log = LoggerFactory.getLogger(BaseNetstatExtension.class);
// Extension details
private final String name = "base_netstat";
private final String description = "Base Netstat Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "10s";
private SystemInfo systemInfo;
@Override
public boolean isEnabled() {
return enabled;
}
@Override
public boolean isThreaded() {
return threaded;
}
@Override
public boolean isSupported() {
systemInfo = BasePlugin.getSystemInfo();
return systemInfo != null;
}
@Override
public String getName() {
return name;
}
@Override
public String getInterval() {
return interval;
}
@Override
public String getDescription() {
return description;
}
@Override
public void setConfiguration(Map<String, Object> map) {
if (map.containsKey("enabled")) {
enabled = (boolean) map.get("enabled");
}
if (map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
}
@Override
public MetricResult getMetrics() {
TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
put("ip_conn_total", systemInfo.getOperatingSystem().getInternetProtocolStats().getConnections().size());
put("tcp4_conn_active", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv4Stats().getConnectionsActive());
put("tcp4_conn_passive", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv4Stats().getConnectionsPassive());
put("tcp4_conn_established", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv4Stats().getConnectionsEstablished());
put("tcp4_conn_failures", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv4Stats().getConnectionFailures());
put("tcp4_conn_reset", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv4Stats().getConnectionsReset());
put("tcp6_conn_active", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv6Stats().getConnectionsActive());
put("tcp6_conn_passive", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv6Stats().getConnectionsPassive());
put("tcp6_conn_established", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv6Stats().getConnectionsEstablished());
put("tcp6_conn_failures", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv6Stats().getConnectionFailures());
put("tcp6_conn_reset", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv6Stats().getConnectionsReset());
put("udp4_data_sent", systemInfo.getOperatingSystem().getInternetProtocolStats().getUDPv4Stats().getDatagramsSent());
put("udp4_data_recv", systemInfo.getOperatingSystem().getInternetProtocolStats().getUDPv4Stats().getDatagramsReceived());
put("udp4_data_recv_error", systemInfo.getOperatingSystem().getInternetProtocolStats().getUDPv4Stats().getDatagramsReceivedErrors());
put("udp6_data_sent", systemInfo.getOperatingSystem().getInternetProtocolStats().getUDPv6Stats().getDatagramsSent());
put("udp6_data_recv", systemInfo.getOperatingSystem().getInternetProtocolStats().getUDPv6Stats().getDatagramsReceived());
put("udp6_data_recv_error", systemInfo.getOperatingSystem().getInternetProtocolStats().getUDPv6Stats().getDatagramsReceivedErrors());
}};
return new MetricResult(name, new Measurement(new TreeMap<>(), fieldsMap));
}
}

View file

@ -1,109 +0,0 @@
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.hardware.HardwareAbstractionLayer;
import oshi.hardware.NetworkIF;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.*;
@Extension
public class BaseNetworkExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(BaseNetworkExtension.class);

    // Extension details
    private final String name = "base_network";
    private final String description = "Base Network Metrics";

    // Configuration / Options
    private boolean enabled = true;
    private boolean threaded = false;
    private String interval = "10s";

    private HardwareAbstractionLayer hardwareAbstractionLayer;
    private List<NetworkIF> interfaces;
    // Number of polls since the interface list was last rebuilt.
    private int refreshCounter = 0;

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public boolean isThreaded() {
        return threaded;
    }

    /** Supported wherever the plugin could initialize OSHI. */
    @Override
    public boolean isSupported() {
        hardwareAbstractionLayer = BasePlugin.getHardwareAbstractionLayer();
        return hardwareAbstractionLayer != null;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getInterval() {
        return interval;
    }

    @Override
    public String getDescription() {
        return description;
    }

    /**
     * Applies optional configuration keys: 'enabled' (boolean),
     * 'threaded' (boolean) and 'interval' (string, e.g. "10s").
     */
    @Override
    public void setConfiguration(Map<String, Object> map) {
        if (map.containsKey("enabled")) {
            enabled = (boolean) map.get("enabled");
        }
        if (map.containsKey("threaded")) {
            threaded = (boolean) map.get("threaded");
        }
        if (map.containsKey("interval")) {
            interval = (String) map.get("interval");
        }
    }

    /**
     * Collects per-interface packet, byte and error counters.
     * The interface list is rebuilt only every 360 polls (hot-plug is rare),
     * but each cached NetworkIF is refreshed via updateAttributes() on every
     * poll — previously the cached objects kept the counters sampled when
     * the list was built, so metrics were stale between rebuilds.
     */
    @Override
    public MetricResult getMetrics() {
        ArrayList<Measurement> measurementList = new ArrayList<>();

        if (interfaces == null || refreshCounter++ > 360) {
            log.debug("getMetrics() - refreshing list of network interfaces");
            interfaces = hardwareAbstractionLayer.getNetworkIFs();
            refreshCounter = 0;
        }

        for (NetworkIF netif : interfaces) {
            // Re-sample this interface's counters before reading them.
            netif.updateAttributes();
            TreeMap<String, String> tagsMap = new TreeMap<>();
            tagsMap.put("name", netif.getName());
            TreeMap<String, Object> fieldsMap = new TreeMap<>();
            fieldsMap.put("rx_pkts", netif.getPacketsRecv());
            fieldsMap.put("tx_pkts", netif.getPacketsSent());
            fieldsMap.put("rx_bytes", netif.getBytesRecv());
            fieldsMap.put("tx_bytes", netif.getBytesSent());
            fieldsMap.put("rx_errs", netif.getInErrors());
            fieldsMap.put("tx_errs", netif.getOutErrors());
            measurementList.add(new Measurement(tagsMap, fieldsMap));
        }

        return new MetricResult(name, measurementList);
    }
}

View file

@ -1,154 +0,0 @@
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.software.os.OSProcess;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.*;
@Extension
public class BaseProcessExtension implements MetricExtension {

    // Fixed: previously constructed with BaseProcessorExtension.class, so
    // this extension's log lines were attributed to the wrong logger.
    private static final Logger log = LoggerFactory.getLogger(BaseProcessExtension.class);

    // Extension details
    private final String name = "base_process";
    private final String description = "Base Process Metrics";

    // Configuration / Options
    private boolean enabled = true;
    private boolean threaded = false;
    private String interval = "60s";

    // Default set of interesting process names; overridable via 'include'.
    private List<?> includeList = Arrays.asList(
            "java",
            "node",
            "httpd",
            "mongod",
            "mysqld",
            "influxd",
            "haproxy",
            "beam.smp",
            "filebeat",
            "corosync",
            "rsyslogd",
            "postgres",
            "mariadbd",
            "memcached",
            "db2sysc",
            "dsmserv",
            "mmfsd",
            "systemd",
            "nginx"
    );

    // Processes younger than this are skipped to avoid noise from
    // short-lived commands.
    private final long minUptimeInSeconds = 600;

    private SystemInfo systemInfo;

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public boolean isThreaded() {
        return threaded;
    }

    /** Supported wherever the plugin could initialize OSHI. */
    @Override
    public boolean isSupported() {
        systemInfo = BasePlugin.getSystemInfo();
        return systemInfo != null;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getInterval() {
        return interval;
    }

    @Override
    public String getDescription() {
        return description;
    }

    /**
     * Applies optional configuration keys: 'enabled' (boolean),
     * 'threaded' (boolean), 'interval' (string) and 'include'
     * (list of process names to report on).
     */
    @Override
    public void setConfiguration(Map<String, Object> map) {
        if (map.containsKey("enabled")) {
            enabled = (boolean) map.get("enabled");
        }
        if (map.containsKey("threaded")) {
            threaded = (boolean) map.get("threaded");
        }
        if (map.containsKey("interval")) {
            interval = (String) map.get("interval");
        }
        if (map.containsKey("include")) {
            includeList = (List<?>) map.get("include");
        }
    }

    /**
     * Collects memory, CPU-time, I/O and thread metrics for each process
     * that passes the filters: non-zero RSS (skips kernel threads), uptime
     * above minUptimeInSeconds, and — when the include list is non-empty —
     * a name present in that list.
     */
    @Override
    public MetricResult getMetrics() {
        ArrayList<Measurement> measurementList = new ArrayList<>();
        List<OSProcess> processList = systemInfo.getOperatingSystem().getProcesses();

        for (OSProcess p : processList) {

            // Skip all the kernel processes
            if (p.getResidentSetSize() < 1) {
                continue;
            }

            // Skip short-lived processes (uptime is in milliseconds)
            if (p.getUpTime() < (minUptimeInSeconds * 1000)) {
                continue;
            }

            // Skip process names not found in our includeList, only if the list is not empty or null
            if (includeList != null && !includeList.isEmpty() && !includeList.contains(p.getName())) {
                continue;
            }

            log.debug("pid: " + p.getProcessID() + ", name: " + p.getName() + ", virt: " + p.getVirtualSize() + " rss: " + p.getResidentSetSize());

            TreeMap<String, String> tagsMap = new TreeMap<>();
            tagsMap.put("pid", String.valueOf(p.getProcessID()));
            tagsMap.put("name", p.getName());

            TreeMap<String, Object> fieldsMap = new TreeMap<>();
            fieldsMap.put("mem_rss", p.getResidentSetSize());
            fieldsMap.put("mem_vsz", p.getVirtualSize());
            fieldsMap.put("kernel_time", p.getKernelTime());
            fieldsMap.put("user_time", p.getUserTime());
            fieldsMap.put("read_bytes", p.getBytesRead());
            fieldsMap.put("write_bytes", p.getBytesWritten());
            fieldsMap.put("files", p.getOpenFiles());
            fieldsMap.put("threads", p.getThreadCount());
            fieldsMap.put("user", p.getUser());
            fieldsMap.put("group", p.getGroup());
            fieldsMap.put("prio", p.getPriority());

            measurementList.add(new Measurement(tagsMap, fieldsMap));
        }

        return new MetricResult(name, measurementList);
    }
}

View file

@ -1,7 +1,7 @@
import org.redline_rpm.header.Os
plugins {
id "com.netflix.nebula.ospackage" version "11.3.0"
id "nebula.ospackage" version "8.6.1"
}
@ -10,7 +10,7 @@ subprojects {
apply plugin: 'groovy'
dependencies {
testImplementation "org.spockframework:spock-core:${spockVersion}"
testImplementation 'org.spockframework:spock-core:2.0-groovy-3.0'
testImplementation "org.slf4j:slf4j-api:${slf4jVersion}"
testImplementation "org.slf4j:slf4j-simple:${slf4jVersion}"
testImplementation project(':shared')
@ -25,10 +25,10 @@ subprojects {
compileOnly(group: 'com.github.oshi', name: 'oshi-core', version: oshiVersion) {
exclude(group: "org.slf4j")
}
}
task uberJar(type: Jar) {
duplicatesStrategy DuplicatesStrategy.EXCLUDE
from sourceSets.main.output
dependsOn configurations.runtimeClasspath
from {
@ -48,7 +48,7 @@ subprojects {
attributes(
'Plugin-Id' : "${pluginId}",
'Plugin-Class' : "${pluginClass}",
'Plugin-Version' : "${archiveVersion}",
'Plugin-Version' : "${version}",
'Plugin-Provider' : "System Monitor",
'Plugin-Description': "${pluginDescription}"
)
@ -82,6 +82,7 @@ tasks.clean.dependsOn(tasks.customCleanUp)
def projectName = "sysmon-plugins"
apply plugin: 'nebula.ospackage'
ospackage {
packageName = projectName
release = '1'
@ -110,12 +111,3 @@ task buildRpmAix(type: Rpm) {
packageName = "${projectName}-AIX"
os = Os.AIX
}
task buildZip(type: Zip) {
subprojects.each {
dependsOn("${it.name}:copyJar")
}
from "output"
setArchivesBaseName(projectName as String)
setArchiveVersion(project.property("version") as String)
}

View file

@ -1,39 +0,0 @@
{
"k10temp-pci-00c3":{
"Adapter": "PCI adapter",
"Tctl":{
"temp1_input": 56.250
}
},
"nvme-pci-0400":{
"Adapter": "PCI adapter",
"Composite":{
"temp1_input": 35.850,
"temp1_max": 74.850,
"temp1_min": -20.150,
"temp1_crit": 79.850,
"temp1_alarm": 0.000
}
},
"iwlwifi_1-virtual-0":{
"Adapter": "Virtual device",
"temp1":{
"temp1_input": 37.000
}
},
"amdgpu-pci-0500":{
"Adapter": "PCI adapter",
"vddgfx":{
"in0_input": 0.681
},
"vddnb":{
"in1_input": 0.712
},
"edge":{
"temp1_input": 37.000
},
"PPT":{
"power1_average": 0.000
}
}
}

View file

@ -1,39 +0,0 @@
{
"k10temp-pci-00c3":{
"Adapter": "PCI adapter",
"Tctl":{
"temp1_input": 53.875
}
},
"nvme-pci-0400":{
"Adapter": "PCI adapter",
"Composite":{
"temp1_input": 36.850,
"temp1_max": 74.850,
"temp1_min": -20.150,
"temp1_crit": 79.850,
"temp1_alarm": 0.000
}
},
"iwlwifi_1-virtual-0":{
"Adapter": "Virtual device",
"temp1":{
"temp1_input": 41.000
}
},
"amdgpu-pci-0500":{
"Adapter": "PCI adapter",
"vddgfx":{
"in0_input": 1.281
},
"vddnb":{
"in1_input": 0.712
},
"edge":{
"temp1_input": 42.000
},
"PPT":{
"power1_average": 0.000
}
}
}

View file

@ -1,6 +1,6 @@
# IBM Power Plugin
# AIX Plugin
## Power LPAR Processor Extension
## LPAR Processor Extension
The processor extension works for both AIX and Linux on the Power ppc64/ppc64le architecture.

View file

@ -0,0 +1,6 @@
pluginId=sysmon-aix
pluginClass=sysmon.plugins.os_aix.AixPlugin
pluginVersion=0.0.1
pluginProvider=System Monitor
pluginDependencies=
pluginDescription=Collects AIX OS metrics.

View file

@ -0,0 +1,75 @@
package sysmon.plugins.os_aix;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import sysmon.shared.PluginHelper;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
// Disabled
//@Extension
public class AixNetstatExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(AixNetstatExtension.class);

    /**
     * Supported only on AIX hosts where the 'netstat' command is available.
     */
    @Override
    public boolean isSupported() {

        final String osName = System.getProperty("os.name").toLowerCase();
        if (!osName.contains("aix")) {
            log.warn("Requires AIX.");
            return false;
        }

        if (!PluginHelper.canExecute("netstat")) {
            log.warn("Requires the 'netstat' command.");
            return false;
        }

        return true;
    }

    @Override
    public String getName() {
        return "aix_network_netstat";
    }

    @Override
    public String getProvides() {
        return "network_netstat";
    }

    @Override
    public String getDescription() {
        return "AIX Netstat Metrics";
    }

    /**
     * Runs 'netstat -s -f inet', parses its output and returns the
     * aggregated IP/TCP/UDP counters as a single measurement.
     */
    @Override
    public MetricResult getMetrics() throws Exception {

        HashMap<String, String> tags;
        HashMap<String, Object> fields;
        try (InputStream buf = PluginHelper.executeCommand("netstat -s -f inet")) {
            AixNetstatParser parser = processCommandOutput(buf);
            tags = parser.getTags();
            fields = parser.getFields();
        }

        log.debug(fields.toString());
        return new MetricResult(getName(), new Measurement(tags, fields));
    }

    /** Parses a stream of 'netstat' output; factored out for unit testing. */
    protected AixNetstatParser processCommandOutput(InputStream input) throws IOException {
        return new AixNetstatParser(input);
    }
}

View file

@ -0,0 +1,156 @@
package sysmon.plugins.os_aix;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.Map;
public class AixNetstatParser {

    private static final Logger log = LoggerFactory.getLogger(AixNetstatParser.class);

    // Read-ahead limit for mark()/reset() when peeking at the next line.
    // The previous limit of 64 bytes could be exceeded by longer netstat
    // lines, invalidating the mark and making reset() throw IOException.
    private static final int PEEK_LIMIT = 512;

    private long ipTotalPacketsReceived;
    private long ipForwarded;
    private long tcpConnectionsEstablished;
    private long tcpPacketsReceived;
    private long tcpPacketsSent;
    private long udpPacketsReceived;
    private long udpPacketsSent;

    /**
     * Parses the output of 'netstat -s -f inet', dispatching each protocol
     * section header ("ip:", "tcp:", "udp:") to its section parser.
     *
     * @param inputStream raw command output; closed when parsing finishes
     * @throws IOException on read failure
     */
    public AixNetstatParser(InputStream inputStream) throws IOException {

        BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
        while (reader.ready()) {
            String line = reader.readLine();
            log.debug("AixNetstatParser() - Line: " + line);

            if (line.startsWith("tcp:")) {
                parseTcp(reader);
            }

            if (line.startsWith("udp:")) {
                parseUdp(reader);
            }

            if (line.startsWith("ip:")) {
                parseIp(reader);
            }
        }

        inputStream.close();
    }

    /**
     * Consumes the indented counter lines of the "ip:" section; stops (and
     * pushes the line back) at the first non-indented line, which starts
     * the next section.
     */
    protected void parseIp(BufferedReader reader) throws IOException {

        while (reader.ready()) {
            reader.mark(PEEK_LIMIT);
            String line = reader.readLine();
            if (!line.startsWith(" ")) {
                reader.reset();
                return;
            }

            line = line.trim();
            if (line.matches("(\\d+) total packets received")) {
                ipTotalPacketsReceived = getFirstLong(line);
            }
            if (line.matches("(\\d+) packets forwarded")) {
                ipForwarded = getFirstLong(line);
            }
        }
    }

    /** Consumes the indented counter lines of the "tcp:" section. */
    protected void parseTcp(BufferedReader reader) throws IOException {

        while (reader.ready()) {
            reader.mark(PEEK_LIMIT);
            String line = reader.readLine();
            if (!line.startsWith(" ")) {
                reader.reset();
                return;
            }

            line = line.trim();
            if (line.matches("(\\d+) connections established \\(including accepts\\)")) {
                tcpConnectionsEstablished = getFirstLong(line);
            }
            if (line.matches("(\\d+) packets received")) {
                tcpPacketsReceived = getFirstLong(line);
            }
            if (line.matches("(\\d+) packets sent")) {
                tcpPacketsSent = getFirstLong(line);
            }
        }
    }

    /** Consumes the indented counter lines of the "udp:" section. */
    protected void parseUdp(BufferedReader reader) throws IOException {

        while (reader.ready()) {
            reader.mark(PEEK_LIMIT);
            String line = reader.readLine();
            if (!line.startsWith(" ")) {
                reader.reset();
                return;
            }

            line = line.trim();
            if (line.matches("(\\d+) datagrams received")) {
                udpPacketsReceived = getFirstLong(line);
            }
            if (line.matches("(\\d+) datagrams output")) {
                udpPacketsSent = getFirstLong(line);
            }
        }
    }

    /** Tags are currently unused; always an empty map. */
    public HashMap<String, String> getTags() {
        return new HashMap<>();
    }

    /** Returns the aggregated counters keyed by metric field name. */
    public HashMap<String, Object> getFields() {
        HashMap<String, Object> fields = new HashMap<>();
        fields.put("ip_forwarded", ipForwarded);
        fields.put("ip_received", ipTotalPacketsReceived);
        fields.put("tcp_connections", tcpConnectionsEstablished);
        fields.put("tcp_pkts_recv", tcpPacketsReceived);
        fields.put("tcp_pkts_sent", tcpPacketsSent);
        fields.put("udp_pkts_recv", udpPacketsReceived);
        fields.put("udp_pkts_sent", udpPacketsSent);
        return fields;
    }

    /** Extracts the leading integer of a "&lt;count&gt; &lt;description&gt;" line. */
    private Long getFirstLong(String line) {
        return Long.parseLong(line.substring(0, line.indexOf(" ")));
    }
}

View file

@ -0,0 +1,18 @@
package sysmon.plugins.os_aix;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.pf4j.Plugin;
import org.pf4j.PluginWrapper;
/**
 * PF4J plugin entry point for the AIX metric extensions.
 * Lifecycle (start/stop) is handled entirely by the base {@link Plugin};
 * the individual extensions are discovered via their {@code @Extension}
 * annotations.
 */
public class AixPlugin extends Plugin {

    private static final Logger log = LoggerFactory.getLogger(AixPlugin.class);

    public AixPlugin(PluginWrapper wrapper) {
        super(wrapper);
    }
}

View file

@ -0,0 +1,75 @@
package sysmon.plugins.os_aix;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import sysmon.shared.PluginHelper;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Extension
public class AixProcessorExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(AixProcessorExtension.class);

    /**
     * Supported on POWER (ppc64 / ppc64le) hosts that provide the
     * 'lparstat' command.
     */
    @Override
    public boolean isSupported() {

        final String osArch = System.getProperty("os.arch").toLowerCase();
        final boolean isPower = osArch.startsWith("ppc64");
        if (!isPower) {
            log.warn("Requires CPU Architecture ppc64 or ppc64le, this is: " + osArch);
            return false;
        }

        final boolean hasLparstat = PluginHelper.canExecute("lparstat");
        if (!hasLparstat) {
            log.warn("Requires the 'lparstat' command.");
            return false;
        }

        return true;
    }

    @Override
    public String getName() {
        return "aix_processor";
    }

    @Override
    public String getProvides() {
        return "processor_lpar";
    }

    @Override
    public String getDescription() {
        return "AIX Processor Metrics";
    }

    /**
     * Samples LPAR processor utilization by running 'lparstat 5 1' and
     * parsing its output into a single measurement.
     */
    @Override
    public MetricResult getMetrics() throws Exception {

        final AixProcessorStat processorStat;
        try (InputStream buf = PluginHelper.executeCommand("lparstat 5 1")) {
            processorStat = processCommandOutput(buf);
        }

        return new MetricResult(getName(), new Measurement(processorStat.getTags(), processorStat.getFields()));
    }

    /** Parses a stream of 'lparstat' output; factored out for unit testing. */
    protected AixProcessorStat processCommandOutput(InputStream input) throws IOException {
        return new AixProcessorStat(input);
    }
}

View file

@ -1,4 +1,4 @@
package sysmon.plugins.power;
package sysmon.plugins.os_aix;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -7,35 +7,32 @@ import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.Objects;
import java.util.TreeMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class PowerProcessorStat {
public class AixProcessorStat {
private static final Logger log = LoggerFactory.getLogger(PowerProcessorStat.class);
private static final Logger log = LoggerFactory.getLogger(AixProcessorStat.class);
// System configuration: type=Shared mode=Uncapped smt=8 lcpu=8 mem=4096MB psize=19 ent=0.50
private static final Pattern patternAixShared = Pattern.compile("^System configuration: type=(\\S+) mode=(\\S+) smt=(\\d+) lcpu=(\\d+) mem=(\\d+)MB psize=(\\d+) ent=(\\d+\\.?\\d*)");
private final Pattern patternAixShared = Pattern.compile("^System configuration: type=(\\S+) mode=(\\S+) smt=(\\d+) lcpu=(\\d+) mem=(\\d+)MB psize=(\\d+) ent=(\\d+\\.?\\d*)");
// System configuration: type=Dedicated mode=Capped smt=4 lcpu=12 mem=24576MB
// System configuration: type=Dedicated mode=Donating smt=8 lcpu=16 mem=4096MB
private static final Pattern patternAixDedicated = Pattern.compile("^System configuration: type=(\\S+) mode=(\\S+) smt=(\\d+) lcpu=(\\d+) mem=(\\d+)MB");
private final Pattern patternAixDedicated = Pattern.compile("^System configuration: type=(\\S+) mode=(\\S+) smt=(\\d+) lcpu=(\\d+) mem=(\\d+)MB");
// type=Shared mode=Uncapped smt=8 lcpu=4 mem=4101120 kB cpus=24 ent=4.00
private static final Pattern patternLinux = Pattern.compile("^type=(\\S+) mode=(\\S+) smt=(\\d+) lcpu=(\\d+) mem=(\\d+) kB cpus=(\\d+) ent=(\\d+\\.?\\d*)");
private final Pattern patternLinux = Pattern.compile("^type=(\\S+) mode=(\\S+) smt=(\\d+) lcpu=(\\d+) mem=(\\d+) kB cpus=(\\d+) ent=(\\d+\\.?\\d*)");
private String type; // Indicates the partition type. The value can be either dedicated or shared.
private String mode; // Indicates whether the partition processor capacity is capped uncapped.
private int smt; // Indicates whether simultaneous multithreading is enabled or disabled in the partition.
private int lcpu; // Indicates the number of online logical processors.
//private int psize; // Indicates the number of online physical processors in the pool.
private float ent; // Indicates the entitled processing capacity in processor units (shared mode only).
private String type;
private String mode;
private int smt;
private int lcpu;
private int psize;
private float ent;
private final float user; // Indicates the percentage of the entitled processing capacity used while executing at the user level (application).
private final float sys; // Indicates the percentage of the entitled processing capacity used while executing at the system level (kernel).
@ -46,13 +43,12 @@ public class PowerProcessorStat {
private final float lbusy; // Indicates the percentage of logical processor(s) utilization that occurred while executing at the user and system level.
public PowerProcessorStat(InputStream inputStream) throws IOException {
public AixProcessorStat(InputStream inputStream) throws IOException {
String lastLine = null;
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
while(reader.ready()) {
String line = reader.readLine();
log.trace("AixProcessorStat() - {}", line);
if (line.startsWith("System configuration:")) {
Matcher matcher = patternAixShared.matcher(line);
@ -61,7 +57,7 @@ public class PowerProcessorStat {
mode = matcher.group(2);
smt = Integer.parseInt(matcher.group(3));
lcpu = Integer.parseInt(matcher.group(4));
//psize = Integer.parseInt(matcher.group(6));
psize = Integer.parseInt(matcher.group(5));
ent = Float.parseFloat(matcher.group(7));
}
matcher = patternAixDedicated.matcher(line);
@ -80,8 +76,8 @@ public class PowerProcessorStat {
type = matcher.group(1);
mode = matcher.group(2);
smt = Integer.parseInt(matcher.group(3));
//psize = Integer.parseInt(matcher.group(4));
lcpu = Integer.parseInt(matcher.group(4));
psize = Integer.parseInt(matcher.group(6));
ent = Float.parseFloat(matcher.group(7));
}
}
@ -90,28 +86,18 @@ public class PowerProcessorStat {
}
//String lparstat = lines.get(lines.size() -1);
String[] splitStr = Objects.requireNonNull(lastLine).trim().split("\\s+");
if(type == null ||
(mode.equalsIgnoreCase("Capped") && splitStr.length < 4) ||
(type.equalsIgnoreCase("Shared") && splitStr.length < 9) ||
(type.equalsIgnoreCase("Dedicated") && mode.equalsIgnoreCase("Donating") && splitStr.length < 8)
) {
log.error("lparstat parse error - mode: {}, type: {}, content: {}", mode, type, Arrays.toString(splitStr));
throw new UnsupportedOperationException("lparstat parse error.");
String[] splitStr = lastLine.trim().split("\\s+");
if(type.equalsIgnoreCase("shared") && splitStr.length < 9 ||
type.equalsIgnoreCase("dedicated") && splitStr.length < 8) {
throw new UnsupportedOperationException("lparstat string error: " + lastLine);
}
this.user = Float.parseFloat(splitStr[0]);
this.sys = Float.parseFloat(splitStr[1]);
this.wait = Float.parseFloat(splitStr[2]);
this.idle = Float.parseFloat(splitStr[3]);
if(mode.equalsIgnoreCase("Uncapped") || mode.equalsIgnoreCase("Donating")) {
this.physc = Float.parseFloat(splitStr[4]);
} else {
this.physc = 0f;
}
if(type.equalsIgnoreCase("Shared")) {
if(type.equalsIgnoreCase("shared")) {
this.entc = Float.parseFloat(splitStr[5]);
this.lbusy = Float.parseFloat(splitStr[6]);
} else {
@ -154,24 +140,23 @@ public class PowerProcessorStat {
return 100 - idle;
}
public TreeMap<String, String> getTags() {
return new TreeMap<>();
public HashMap<String, String> getTags() {
return new HashMap<>();
}
public TreeMap<String, Object> getFields() {
return new TreeMap<String, Object>() {{
put("lcpu", lcpu);
put("ent", ent);
put("user", user);
put("sys", sys);
put("idle", idle);
put("wait", wait);
put("physc", physc);
put("entc", entc);
put("lbusy", lbusy);
put("mode", mode);
put("type", type);
put("smt", smt);
}};
public HashMap<String, Object> getFields() {
HashMap<String, Object> fields = new HashMap<>();
fields.put("lcpu", lcpu);
fields.put("ent", ent);
fields.put("user", user);
fields.put("sys", sys);
fields.put("idle", idle);
fields.put("wait", wait);
fields.put("physc", physc);
fields.put("entc", entc);
fields.put("lbusy", lbusy);
fields.put("mode", mode);
fields.put("type", type);
return fields;
}
}

View file

@ -0,0 +1,25 @@
import spock.lang.Specification
import sysmon.plugins.os_aix.AixNetstatParser
// Spock specification for AixNetstatParser: feeds a captured
// 'netstat -s -f inet' sample (test resource /netstat-aix.txt) through the
// parser and checks the aggregated counters.
class AixNetstatTest extends Specification {

    void "test netstat parsing"() {

        setup:
        // Fixture captured from a real AIX host.
        InputStream inputStream = getClass().getResourceAsStream('/netstat-aix.txt');

        when:
        AixNetstatParser parser = new AixNetstatParser(inputStream)

        then:
        // Expected values are the counts present in the fixture file.
        parser.getFields().size() > 0
        parser.getFields().get('ip_received') == 76229L
        parser.getFields().get('ip_forwarded') == 24L
        parser.getFields().get('tcp_connections') == 85L
        parser.getFields().get('tcp_pkts_sent') == 31274L
        parser.getFields().get('tcp_pkts_recv') == 39830L
        parser.getFields().get('udp_pkts_sent') == 26332L
        parser.getFields().get('udp_pkts_recv') == 34559L
    }

}

View file

@ -0,0 +1,65 @@
import sysmon.plugins.os_aix.AixProcessorExtension
import sysmon.plugins.os_aix.AixProcessorStat
import spock.lang.Specification
// Spock specification for AixProcessorStat parsing, covering the three
// 'lparstat' output variants: AIX shared LPAR, AIX dedicated LPAR, and
// Linux on POWER. Each test feeds a captured fixture through
// AixProcessorExtension.processCommandOutput().
class AixProcessorTest extends Specification {

    void "test AIX lparstat shared output processing"() {

        setup:
        InputStream inputStream = getClass().getResourceAsStream('/lparstat-aix-shared.txt');

        when:
        AixProcessorExtension extension = new AixProcessorExtension()
        AixProcessorStat stats = extension.processCommandOutput(inputStream)

        then:
        // Utilization percentages and entitlement from the shared-LPAR fixture.
        stats.getUser() == 83.7f
        stats.getSys() == 3.3f
        stats.getWait() == 0.0f
        stats.getIdle() == 13.0f
        stats.getFields().get("ent") == 0.50f
        stats.getFields().get("type") == "Shared"
    }

    void "test AIX lparstat dedicated output processing"() {

        setup:
        InputStream inputStream = getClass().getResourceAsStream('/lparstat-aix-dedicated.txt');

        when:
        AixProcessorExtension extension = new AixProcessorExtension()
        AixProcessorStat stats = extension.processCommandOutput(inputStream)

        then:
        // Dedicated LPARs report physc but no entitlement.
        stats.getUser() == 0.1f
        stats.getSys() == 0.2f
        stats.getWait() == 0.0f
        stats.getIdle() == 99.7f
        stats.getFields().get("physc") == 0.07f
        stats.getFields().get("type") == "Dedicated"
    }

    void "test Linux lparstat output processing"() {

        setup:
        InputStream inputStream = getClass().getResourceAsStream('/lparstat-linux.txt');

        when:
        AixProcessorExtension extension = new AixProcessorExtension()
        AixProcessorStat stats = extension.processCommandOutput(inputStream)

        then:
        // Linux-on-POWER variant of the 'System configuration' header.
        stats.getUser() == 0.03f
        stats.getSys() == 0.0f
        stats.getWait() == 0.0f
        stats.getIdle() == 99.97f
        stats.getFields().get("ent") == 4.00f
        stats.getFields().get("mode") == "Uncapped"
        stats.getFields().get("type") == "Shared"
    }

}

View file

@ -0,0 +1,157 @@
icmp:
12 calls to icmp_error
0 errors not generated because old message was icmp
Output histogram:
destination unreachable: 12
0 messages with bad code fields
0 messages < minimum length
0 bad checksums
0 messages with bad length
Input histogram:
destination unreachable: 3
0 message responses generated
igmp:
0 messages received
0 messages received with too few bytes
0 messages received with bad checksum
0 membership queries received
0 membership queries received with invalid field(s)
0 membership reports received
0 membership reports received with invalid field(s)
0 membership reports received for groups to which we belong
2 membership reports sent
tcp:
31274 packets sent
27328 data packets (82928168 bytes)
86 data packets (108992 bytes) retransmitted
2938 ack-only packets (2698 delayed)
0 URG only packets
0 window probe packets
784 window update packets
138 control packets
3812 large sends
74913716 bytes sent using largesend
64069 bytes is the biggest largesend
39830 packets received
22701 acks (for 82928732 bytes)
112 duplicate acks
0 acks for unsent data
15579 packets (5876585 bytes) received in-sequence
62 completely duplicate packets (320 bytes)
57 old duplicate packets
0 packets with some dup. data (0 bytes duped)
75 out-of-order packets (6408 bytes)
0 packets (0 bytes) of data after window
0 window probes
1723 window update packets
0 packets received after close
0 packets with bad hardware assisted checksum
0 discarded for bad checksums
0 discarded for bad header offset fields
0 discarded because packet too short
1 discarded by listeners
0 discarded due to listener's queue full
3207 ack packet headers correctly predicted
15050 data packet headers correctly predicted
63 connection requests
23 connection accepts
85 connections established (including accepts)
114 connections closed (including 0 drops)
0 connections with ECN capability
0 times responded to ECN
0 embryonic connections dropped
20314 segments updated rtt (of 16791 attempts)
0 segments with congestion window reduced bit set
0 segments with congestion experienced bit set
0 resends due to path MTU discovery
2 path MTU discovery terminations due to retransmits
25 retransmit timeouts
0 connections dropped by rexmit timeout
4 fast retransmits
1 when congestion window less than 4 segments
28 newreno retransmits
4 times avoided false fast retransmits
0 persist timeouts
0 connections dropped due to persist timeout
0 keepalive timeouts
0 keepalive probes sent
0 connections dropped by keepalive
0 times SACK blocks array is extended
0 times SACK holes array is extended
0 packets dropped due to memory allocation failure
0 connections in timewait reused
0 delayed ACKs for SYN
0 delayed ACKs for FIN
0 send_and_disconnects
0 spliced connections
0 spliced connections closed
0 spliced connections reset
0 spliced connections timeout
0 spliced connections persist timeout
0 spliced connections keepalive timeout
0 TCP checksum offload disabled during retransmit
0 Connections dropped due to bad ACKs
0 Connections dropped due to duplicate SYN packets
0 fastpath loopback connections
0 fastpath loopback sent packets (0 bytes)
0 fastpath loopback received packets (0 bytes)
0 fake SYN segments dropped
0 fake RST segments dropped
0 data injection segments dropped
0 TCPTR maximum connections dropped
0 TCPTR connections dropped for no memory
0 TCPTR maximum per host connections dropped
0 connections dropped due to max assembly queue depth
udp:
34559 datagrams received
0 incomplete headers
0 bad data length fields
0 bad checksums
1849 dropped due to no socket
8218 broadcast/multicast datagrams dropped due to no socket
0 socket buffer overflows
24492 delivered
26332 datagrams output
ip:
76229 total packets received
0 bad header checksums
0 with size smaller than minimum
0 with data size < data length
0 with header length < data size
0 with data length < header length
0 with bad options
0 with incorrect version number
0 fragments received
0 fragments dropped (dup or out of space)
0 fragments dropped after timeout
0 packets reassembled ok
72552 packets for this host
3 packets for unknown/unsupported protocol
24 packets forwarded
0 packets not forwardable
0 redirects sent
55784 packets sent from this host
0 packets sent with fabricated ip header
0 output packets dropped due to no bufs, etc.
0 output packets discarded due to no route
0 output datagrams fragmented
0 fragments created
0 datagrams that can't be fragmented
0 IP Multicast packets dropped due to no receiver
0 successful path MTU discovery cycles
0 path MTU rediscovery cycles attempted
0 path MTU discovery no-response estimates
0 path MTU discovery response timeouts
0 path MTU discovery decreases detected
0 path MTU discovery packets sent
0 path MTU discovery memory allocation failures
0 ipintrq overflows
0 with illegal source
0 packets processed by threads
0 packets dropped by threads
0 packets dropped due to the full socket receive buffer
0 dead gateway detection packets sent
0 dead gateway detection packet allocation failures
0 dead gateway detection gateway allocation failures
0 incoming packets dropped due to MLS filters
0 packets not sent due to MLS filters

38
plugins/os-base/README.md Normal file
View file

@ -0,0 +1,38 @@
# Base Plugin
The base plugin uses the [oshi](https://github.com/oshi/oshi) library to get its metrics.
## Processor Extension
Reports the following metrics:
- **system** - CPU time spent on system processes.
- **user** - CPU time spent on user processes.
- **nice** - CPU time spent on user processes running at lower priority.
- **iowait** - CPU time spent waiting (for i/o).
- **steal** - CPU time stolen by the hypervisor and given to other virtual systems.
- **irq** - CPU time spent by the kernel on interrupt requests.
- **softirq** - CPU time spent by the kernel on soft interrupt requests.
- **idle** - CPU time spent idling (doing nothing).
- **busy** - CPU time spent working.
## Memory Extension
Reports the following metrics (in bytes):
- **available** - Estimation of how much memory is available for starting new applications, without swapping.
- **total** - The total amount of (installed) memory.
- **usage** - Percentage of memory used out of the total amount of memory.
- **paged** - ...
- **virtual** - ...
## Disk Extension
Metrics reported are:
- **reads** - The total number of bytes read.
- **writes** - The total number of bytes written.
- **iotime** - Time spent on IO in milliseconds.
- **queue** - Length of the IO queue.

View file

@ -1,5 +1,5 @@
pluginId=sysmon-base
pluginClass=sysmon.plugins.base.BasePlugin
pluginClass=sysmon.plugins.os_base.BasePlugin
pluginDependencies=
pluginDescription=Base OS metrics where supported.

View file

@ -0,0 +1,79 @@
package sysmon.plugins.os_base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.hardware.HWDiskStore;
import oshi.hardware.HardwareAbstractionLayer;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Extension
public class BaseDiskExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(BaseDiskExtension.class);

    // Populated by isSupported(); null when oshi cannot provide hardware info on this platform.
    private HardwareAbstractionLayer hardwareAbstractionLayer;

    @Override
    public boolean isSupported() {
        hardwareAbstractionLayer = BasePlugin.getHardwareAbstractionLayer();
        return hardwareAbstractionLayer != null;
    }

    @Override
    public String getName() {
        return "base_disk";
    }

    @Override
    public String getProvides() {
        return "disk";
    }

    @Override
    public String getDescription() {
        return "Base Disk Metrics";
    }

    /**
     * Aggregates read bytes, written bytes, transfer time and queue length
     * across physical disk devices (AIX hdiskN, Linux /dev/sd*, /dev/vd*,
     * /dev/xvd*, /dev/nvme*), skipping partitions and other pseudo devices.
     */
    @Override
    public MetricResult getMetrics() {

        long writeBytes = 0L;
        long readBytes = 0L;
        long transferTime = 0L;
        long queueLength = 0L;

        HashMap<String, String> tagsMap = new HashMap<>();
        HashMap<String, Object> fieldsMap = new HashMap<>();

        List<HWDiskStore> diskStores = hardwareAbstractionLayer.getDiskStores();
        for(HWDiskStore store : diskStores) {
            String name = store.getName();
            if (name.matches("hdisk[0-9]+") || name.matches("/dev/x?[sv]d[a-z]{1}") || name.matches("/dev/nvme[0-9]n[0-9]")) {
                log.debug("Using device: " + name);
                writeBytes += store.getWriteBytes();
                readBytes += store.getReadBytes();
                transferTime += store.getTransferTime();
                // Bug fix: accumulate across all matching devices like the other
                // counters; plain '=' reported only the last device's queue length.
                queueLength += store.getCurrentQueueLength();
            }
        }

        fieldsMap.put("reads", readBytes);
        fieldsMap.put("writes", writeBytes);
        fieldsMap.put("iotime", transferTime);
        fieldsMap.put("queue", queueLength);

        log.debug(fieldsMap.toString());
        return new MetricResult(getName(), new Measurement(tagsMap, fieldsMap));
    }

}

View file

@ -1,43 +1,24 @@
package sysmon.plugins.base;
package sysmon.plugins.os_base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.hardware.HardwareAbstractionLayer;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class BaseMemoryExtension implements MetricExtension {
private static final Logger log = LoggerFactory.getLogger(BaseMemoryExtension.class);
// Extension details
private final String name = "base_memory";
private final String description = "Base Memory Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "10s";
private HardwareAbstractionLayer hardwareAbstractionLayer;
@Override
public boolean isEnabled() {
return enabled;
}
@Override
public boolean isThreaded() {
return threaded;
}
@Override
public boolean isSupported() {
hardwareAbstractionLayer = BasePlugin.getHardwareAbstractionLayer();
@ -46,37 +27,24 @@ public class BaseMemoryExtension implements MetricExtension {
@Override
public String getName() {
return name;
return "base_memory";
}
@Override
public String getInterval() {
return interval;
public String getProvides() {
return "memory";
}
@Override
public String getDescription() {
return description;
}
@Override
public void setConfiguration(Map<String, Object> map) {
if (map.containsKey("enabled")) {
enabled = (boolean) map.get("enabled");
}
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
return "Base Memory Metrics";
}
@Override
public MetricResult getMetrics() {
TreeMap<String, String> tagsMap = new TreeMap<>();
TreeMap<String, Object> fieldsMap = new TreeMap<>();
HashMap<String, String> tagsMap = new HashMap<>();
HashMap<String, Object> fieldsMap = new HashMap<>();
long total = hardwareAbstractionLayer.getMemory().getTotal();
long available = hardwareAbstractionLayer.getMemory().getAvailable();
@ -89,7 +57,7 @@ public class BaseMemoryExtension implements MetricExtension {
fieldsMap.put("virtual", hardwareAbstractionLayer.getMemory().getVirtualMemory().getVirtualInUse());
log.debug(fieldsMap.toString());
return new MetricResult(name, new Measurement(tagsMap, fieldsMap));
return new MetricResult(getName(), new Measurement(tagsMap, fieldsMap));
}

View file

@ -0,0 +1,81 @@
package sysmon.plugins.os_base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.hardware.HardwareAbstractionLayer;
import oshi.hardware.NetworkIF;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Extension
public class BaseNetworkExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(BaseNetworkExtension.class);

    // Set by isSupported(); stays null on platforms oshi does not support.
    private HardwareAbstractionLayer hardwareAbstractionLayer;

    @Override
    public boolean isSupported() {
        hardwareAbstractionLayer = BasePlugin.getHardwareAbstractionLayer();
        return hardwareAbstractionLayer != null;
    }

    @Override
    public String getName() {
        return "base_network";
    }

    @Override
    public String getProvides() {
        return "network";
    }

    @Override
    public String getDescription() {
        return "Base Network Metrics";
    }

    /**
     * Sums packet, byte and error counters over every network interface
     * reported by oshi and returns them as a single measurement.
     */
    @Override
    public MetricResult getMetrics() {

        long bytesReceived = 0L;
        long bytesSent = 0L;
        long packetsReceived = 0L;
        long packetsSent = 0L;
        long errorsIn = 0L;
        long errorsOut = 0L;

        List<NetworkIF> interfaces = hardwareAbstractionLayer.getNetworkIFs();
        for(NetworkIF nic : interfaces) {
            packetsReceived += nic.getPacketsRecv();
            packetsSent += nic.getPacketsSent();
            bytesReceived += nic.getBytesRecv();
            bytesSent += nic.getBytesSent();
            errorsIn += nic.getInErrors();
            errorsOut += nic.getOutErrors();
        }

        HashMap<String, String> tagsMap = new HashMap<>();
        HashMap<String, Object> fieldsMap = new HashMap<>();
        fieldsMap.put("rxPackets", packetsReceived);
        fieldsMap.put("txPackets", packetsSent);
        fieldsMap.put("rxBytes", bytesReceived);
        fieldsMap.put("txBytes", bytesSent);
        fieldsMap.put("rxErrors", errorsIn);
        fieldsMap.put("txErrors", errorsOut);

        log.debug(fieldsMap.toString());
        return new MetricResult(getName(), new Measurement(tagsMap, fieldsMap));
    }

}

View file

@ -1,8 +1,9 @@
package sysmon.plugins.base;
package sysmon.plugins.os_base;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.pf4j.Plugin;
import org.pf4j.PluginWrapper;
import oshi.SystemInfo;
import oshi.hardware.HardwareAbstractionLayer;
@ -14,6 +15,9 @@ public class BasePlugin extends Plugin {
private static SystemInfo systemInfo;
private static HardwareAbstractionLayer hardwareAbstractionLayer;
public BasePlugin(PluginWrapper wrapper) {
super(wrapper);
}
public static HardwareAbstractionLayer getHardwareAbstractionLayer() {
@ -26,8 +30,7 @@ public class BasePlugin extends Plugin {
}
} catch (UnsupportedOperationException e) {
log.warn("getHardwareAbstractionLayer() - {}", e.getMessage());
return null;
log.warn(e.getMessage());
}
return hardwareAbstractionLayer;
@ -40,10 +43,9 @@ public class BasePlugin extends Plugin {
if(systemInfo == null) {
systemInfo = new SystemInfo();
}
systemInfo.getOperatingSystem();
} catch (UnsupportedOperationException e) {
log.warn("getSystemInfo() - {}", e.getMessage());
return null;
log.warn(e.getMessage());
}
return systemInfo;

View file

@ -0,0 +1,103 @@
package sysmon.plugins.os_base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.software.os.OSProcess;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@Extension
public class BaseProcessExtension implements MetricExtension {

    // Bug fix: the logger was created for BaseProcessorExtension (the CPU
    // extension), which mislabeled every log line emitted by this class.
    private static final Logger log = LoggerFactory.getLogger(BaseProcessExtension.class);

    // TODO: configurable include-list and/or exclude-list of process names
    private final List<String> includeList = new ArrayList<String>() {{
        add("java");
        add("nginx");
        add("influxd");
        add("dockerd");
        add("containerd");
        add("mysqld");
        add("postgres");
        add("grafana-server");
    }};

    // Set by isSupported(); stays null on platforms oshi does not support.
    private SystemInfo systemInfo;

    @Override
    public boolean isSupported() {
        systemInfo = BasePlugin.getSystemInfo();
        return systemInfo != null;
    }

    @Override
    public String getName() {
        return "base_process";
    }

    @Override
    public String getProvides() {
        return "process";
    }

    @Override
    public String getDescription() {
        return "Base Process Metrics";
    }

    /**
     * Emits one measurement per running process whose name is on the
     * include-list, tagged by pid and name, with memory, CPU-time, IO,
     * file/thread and ownership fields.
     */
    @Override
    public MetricResult getMetrics() {

        ArrayList<Measurement> measurementList = new ArrayList<>();
        List<OSProcess> processList = systemInfo.getOperatingSystem().getProcesses();
        for(OSProcess p : processList) {

            // Skip all the kernel processes
            if(p.getResidentSetSize() < 1) {
                continue;
            }

            String name = p.getName();
            if(!includeList.contains(name)) {
                continue;
            }

            log.debug("pid: " + p.getProcessID() + ", name: " + name + ", virt: " + p.getVirtualSize() + " rss: " + p.getResidentSetSize() + " cmd: " + p.getCommandLine());

            HashMap<String, String> tagsMap = new HashMap<>();
            HashMap<String, Object> fieldsMap = new HashMap<>();

            tagsMap.put("pid", String.valueOf(p.getProcessID()));
            tagsMap.put("name", name);

            fieldsMap.put("mem_rss", p.getResidentSetSize());
            fieldsMap.put("mem_vsz", p.getVirtualSize());
            fieldsMap.put("kernel_time", p.getKernelTime());
            fieldsMap.put("user_time", p.getUserTime());
            fieldsMap.put("read_bytes", p.getBytesRead());
            fieldsMap.put("write_bytes", p.getBytesWritten());
            fieldsMap.put("files", p.getOpenFiles());
            fieldsMap.put("threads", p.getThreadCount());
            fieldsMap.put("user", p.getUser());
            fieldsMap.put("group", p.getGroup());
            fieldsMap.put("prio", p.getPriority());

            measurementList.add(new Measurement(tagsMap, fieldsMap));
        }

        //log.info("Size of measurements: " + measurementList.size());
        return new MetricResult(getName(), measurementList);
    }

}

View file

@ -1,45 +1,32 @@
package sysmon.plugins.base;
package sysmon.plugins.os_base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.hardware.CentralProcessor;
import oshi.hardware.HardwareAbstractionLayer;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import sysmon.shared.PluginHelper;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class BaseProcessorExtension implements MetricExtension {
private static final Logger log = LoggerFactory.getLogger(BaseProcessorExtension.class);
// Extension details
private final String name = "base_processor";
private final String description = "Base Processor Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "10s";
private HardwareAbstractionLayer hardwareAbstractionLayer;
private long[] oldTicks;
@Override
public boolean isEnabled() {
return enabled;
}
@Override
public boolean isThreaded() {
return threaded;
}
@Override
public boolean isSupported() {
hardwareAbstractionLayer = BasePlugin.getHardwareAbstractionLayer();
@ -48,37 +35,25 @@ public class BaseProcessorExtension implements MetricExtension {
@Override
public String getName() {
return name;
return "base_processor";
}
@Override
public String getInterval() {
return interval;
public String getProvides() {
return "processor";
}
@Override
public String getDescription() {
return description;
return "Base Processor Metrics";
}
@Override
public void setConfiguration(Map<String, Object> map) {
if (map.containsKey("enabled")) {
enabled = (boolean) map.get("enabled");
}
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
}
@Override
public MetricResult getMetrics() {
TreeMap<String, String> tagsMap = new TreeMap<>();
TreeMap<String, Object> fieldsMap = new TreeMap<>();
HashMap<String, String> tagsMap = new HashMap<>();
HashMap<String, Object> fieldsMap = new HashMap<>();
long[] ticks = hardwareAbstractionLayer.getProcessor().getSystemCpuLoadTicks();
if(oldTicks == null || oldTicks.length != ticks.length) {
@ -99,19 +74,19 @@ public class BaseProcessorExtension implements MetricExtension {
long nonBusy = idle + iowait;
long total = busy + nonBusy;
fieldsMap.put("system", PluginHelper.round(((double) system / (double) total) * 100, 2));
fieldsMap.put("user", PluginHelper.round(((double) user / (double) total) * 100, 2));
fieldsMap.put("nice", PluginHelper.round(((double) nice / (double) total) * 100, 2));
fieldsMap.put("iowait", PluginHelper.round(((double) iowait / (double) total) * 100, 2));
fieldsMap.put("steal", PluginHelper.round(((double) steal / (double) total) * 100, 2));
fieldsMap.put("irq", PluginHelper.round(((double) irq / (double) total) * 100, 2));
fieldsMap.put("softirq", PluginHelper.round(((double) softirq / (double) total) * 100, 2));
fieldsMap.put("idle", PluginHelper.round(((double) idle / (double) total) * 100, 2));
fieldsMap.put("busy", PluginHelper.round(((double) busy / (double) total) * 100, 2));
fieldsMap.put("system", ((float) system / (float) total) * 100);
fieldsMap.put("user", ((float) user / (float) total) * 100);
fieldsMap.put("nice", ((float) nice / (float) total) * 100);
fieldsMap.put("iowait", ((float) iowait / (float) total) * 100);
fieldsMap.put("steal", ((float) steal / (float) total) * 100);
fieldsMap.put("irq", ((float) irq / (float) total) * 100);
fieldsMap.put("softirq", ((float) softirq / (float) total) * 100);
fieldsMap.put("idle", ((float) idle / (float) total) * 100);
fieldsMap.put("busy", ((float) busy / (float) total) * 100);
oldTicks = ticks;
log.debug(fieldsMap.toString());
return new MetricResult(name, new Measurement(tagsMap, fieldsMap));
return new MetricResult(getName(), new Measurement(tagsMap, fieldsMap));
}
}

View file

@ -0,0 +1,4 @@
# IBM i Plugin
TODO. Nothing here yet.

View file

@ -0,0 +1,7 @@
// Gradle build for the IBM i plugin module.
plugins {
}
dependencies {
    // IBM Toolbox for Java (JTOpen) - AS400 / IBM i access classes.
    // https://mvnrepository.com/artifact/net.sf.jt400/jt400
    implementation group: 'net.sf.jt400', name: 'jt400', version: '10.6'
}

View file

@ -0,0 +1,4 @@
pluginId=sysmon-ibmi
pluginClass=sysmon.plugins.os_ibmi.IbmIPlugin
pluginDependencies=
pluginDescription=Collects IBM-i OS metrics.

View file

@ -0,0 +1,18 @@
package sysmon.plugins.os_ibmi;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.pf4j.Plugin;
import org.pf4j.PluginWrapper;
// pf4j plugin entry point for the IBM i plugin; extensions in this
// module are discovered by the framework via their @Extension annotation.
public class IbmIPlugin extends Plugin {

    // Currently unused; kept for parity with the other plugin entry points.
    private static final Logger log = LoggerFactory.getLogger(IbmIPlugin.class);

    public IbmIPlugin(PluginWrapper wrapper) {
        super(wrapper);
    }

}

View file

@ -0,0 +1,94 @@
package sysmon.plugins.os_ibmi;
import com.ibm.as400.access.*;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.io.IOException;
// Disable for now...
//@Extension
public class TestExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(TestExtension.class);

    private AS400 as400;
    private SystemStatus systemStatus;

    /**
     * Attempts to connect to an IBM i host and create a SystemStatus handle.
     * NOTE(review): always returns true, even when the connection failed —
     * getMetrics() then degrades to a warning. Confirm this is intended.
     */
    @Override
    public boolean isSupported() {

        String osArch = System.getProperty("os.arch").toLowerCase();
        String osName = System.getProperty("os.name").toLowerCase();

        // Use the configured logger instead of raw System.err prints.
        log.info("OS Arch: " + osArch);
        log.info("OS Name: " + osName);

        try {
            //as400 = new AS400("localhost", "CURRENT");
            // FIXME(review): hard-coded host address; should come from configuration.
            as400 = new AS400("10.32.64.142");
            systemStatus = new SystemStatus(as400);
        } catch (Exception exception) {
            log.error(exception.getMessage());
        }

        // Bug fix: if the AS400 constructor threw, 'as400' is null and the
        // original code crashed with a NullPointerException here.
        if(as400 == null) {
            return true;
        }

        if(as400.isLocal()) {
            log.info("as400 isLocal() true");
        } else {
            log.info("as400 isLocal() FALSE");
        }

        return true;
    }

    @Override
    public String getName() {
        return "ibmi-test";
    }

    @Override
    public String getProvides() {
        return "test";
    }

    @Override
    public String getDescription() {
        return "IBM i Test Extension";
    }

    /**
     * Logs a handful of system-status counters from the IBM i host.
     * Always returns null — this extension is experimental and does not
     * yet produce a MetricResult.
     */
    @Override
    public MetricResult getMetrics() {

        if(systemStatus == null) {
            log.warn("getMetrics() - no system or status");
            return null;
        }

        try {
            int jobsInSystem = systemStatus.getJobsInSystem();
            log.info("Jobs In System: " + jobsInSystem);

            int batchJobsRunning = systemStatus.getBatchJobsRunning();
            log.info("Batch Jobs Running: " + batchJobsRunning);

            int activeThreads = systemStatus.getActiveThreadsInSystem();
            log.info("Active Threads: " + activeThreads);

            int activeJobs = systemStatus.getActiveJobsInSystem();
            log.info("Active Jobs: " + activeJobs);

            int onlineUsers = systemStatus.getUsersCurrentSignedOn();
            log.info("Online Users: " + onlineUsers);
        } catch (AS400SecurityException | ErrorCompletingRequestException | InterruptedException | IOException | ObjectDoesNotExistException e) {
            log.error(e.getMessage());
            e.printStackTrace();
        }

        return null;
    }

}

View file

@ -0,0 +1,7 @@
# Linux Plugins
## Components
### Network Sockets
Collects statistics from */proc/net/sockstat*.

View file

@ -0,0 +1,2 @@
plugins {
}

Some files were not shown because too many files have changed in this diff Show more