Compare commits

..

96 commits
v0.0.9 ... main

Author SHA1 Message Date
Mark Nellemann bb6a5f47c6 Update README.md 2024-05-17 06:19:16 +00:00
Mark Nellemann 5204142cb4 Remove ansible example
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2023-11-02 20:03:01 +01:00
Mark Nellemann 80066b0c2e Update dependencies and bump version. 2023-11-02 09:40:47 +01:00
Mark Nellemann 462822b7e5 Merge pull request 'updates' (#3) from updates into main
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
Reviewed-on: #3
2023-08-13 16:56:51 +00:00
Mark Nellemann 6e05b5bb65 Update 3rd party build dependencies.
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/pr Build is passing
2023-08-13 18:54:47 +02:00
Mark Nellemann c358e281ea Merge branch 'main' of git.data.coop:nellemann/sysmon 2023-08-13 18:15:51 +02:00
Mark Nellemann 3096ae450e Make a zip archive of plugins.
All checks were successful
continuous-integration/drone/push Build is passing
2023-06-24 21:32:37 +02:00
Mark Nellemann 55848ee590 Update 3rd party build dependencies.
All checks were successful
continuous-integration/drone/push Build is passing
2023-06-07 07:48:58 +02:00
Mark Nellemann a5e3b4afcd Merge pull request 'Just minor changes to build deps.' (#1) from power into main
Reviewed-on: #1
2023-05-10 09:04:25 +00:00
Mark Nellemann 27838ab6ec Update 3rd party build dependencies. 2023-05-10 11:02:22 +02:00
Mark Nellemann 7b9d27a124 Changes to reflect updated deps.
All checks were successful
continuous-integration/drone/push Build is passing
2023-03-15 17:27:37 +01:00
Mark Nellemann 91c604e765 Merge branch 'main' into power
All checks were successful
continuous-integration/drone/push Build is passing
2023-03-15 16:29:48 +01:00
Mark Nellemann 25e2f58264 Update dependencies.
All checks were successful
continuous-integration/drone/push Build is passing
2023-03-15 16:28:50 +01:00
Mark Nellemann 11a22e84ba Initial work on power readings.
All checks were successful
continuous-integration/drone/push Build is passing
2023-03-15 16:21:07 +01:00
Mark Nellemann fb94b9e563 Add ansible example
All checks were successful
continuous-integration/drone/push Build is passing
2023-02-09 16:21:29 +01:00
Mark Nellemann 43d3e9babf Lowercase client hostnames, housekeeping, update dashboards.
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2023-02-06 19:47:36 +01:00
Mark Nellemann d48934b94c Update dashboards to reflect Power/AIX plugin rename and fix urls.
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2023-01-22 11:20:58 +01:00
Mark Nellemann 7ca1714198 Update dashboards to reflect Power/AIX plugin rename and fix urls.
All checks were successful
continuous-integration/drone/push Build is passing
2023-01-22 11:19:42 +01:00
Mark Nellemann 186d678861 Set ID of routes and fix bug when no configuration file is found.
All checks were successful
continuous-integration/drone/push Build is passing
2023-01-22 11:07:15 +01:00
Mark Nellemann ebd058a433 Update links.
All checks were successful
continuous-integration/drone/push Build is passing
2023-01-18 15:50:09 +01:00
Mark Nellemann e0a6499daa Update links and provide screenshots
All checks were successful
continuous-integration/drone/push Build is passing
2023-01-06 08:15:37 +01:00
Mark Nellemann f2d325425a Update links and provide screenshots. 2023-01-06 08:15:18 +01:00
Mark Nellemann 5faeb36000 Cleanup and fix username in drone pipeline.
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2023-01-04 15:00:31 +01:00
Mark Nellemann f9192bd223 Merged in scripts (pull request #19)
Scripts
2023-01-04 13:45:49 +00:00
Mark Nellemann 529d73890e Build updates. 2023-01-04 14:44:42 +01:00
Mark Nellemann fd43bee35d Update dependencies. 2023-01-04 12:35:00 +01:00
Mark Nellemann 79f3b3a81d Initial work on script support 2022-12-25 11:29:45 +01:00
Mark Nellemann 31b494163d Work on support for groovy scripts. 2022-12-21 17:12:15 +01:00
Mark Nellemann b07e949fb2 Update README with links to other related projects. 2022-12-17 10:35:05 +01:00
Mark Nellemann 6a6fdf6d25 Lower influx time precision. 2022-12-17 09:48:28 +01:00
Mark Nellemann 9b35a6f3dc Update dashboard. 2022-11-30 16:22:47 +01:00
Mark Nellemann d9cc633626 Bump OSHI dependency and version. 2022-11-30 08:58:07 +01:00
Mark Nellemann 6ffc943a51 Fix incorrect use of OSHI getDiskStores causing lots of reads on AIX. 2022-11-16 12:21:33 +01:00
Mark Nellemann c503032d94 Various small changes. 2022-11-07 17:44:14 +01:00
Mark Nellemann 8b82af339a Small improvements and bump to version 1.0.22 2022-11-07 13:33:35 +01:00
Mark Nellemann 56c47fa1fb Update oshi to fixed version. 2022-10-30 13:03:53 +01:00
Mark Nellemann 63d2fbd780 Update dashboards and README's 2022-10-28 17:10:22 +02:00
Mark Nellemann f38d6912b0 Version 1.0.20 - revert oshi update due to aix problem. 2022-10-28 11:37:43 +02:00
Mark Nellemann 8151af7772 Fix bad export 2022-10-28 10:49:17 +02:00
Mark Nellemann 997158f17b Version 1.0.19 - updated dashboards. 2022-10-28 10:17:51 +02:00
Mark Nellemann 7b90b20f3f Update 3rd party dependencies and version. 2022-10-24 15:28:20 +02:00
Mark Nellemann ed0bd5c2fa Update 3rd party dependencies and documentation. 2022-09-23 14:49:13 +02:00
Mark Nellemann 249238e7a5 Update documentation 2022-09-19 14:41:11 +02:00
Mark Nellemann 8bc02798d2 Add doc on installation on rpm Linux systems 2022-09-19 14:18:42 +02:00
Mark Nellemann ba0313f84a Update doc/instructions.
Update example dashboards.
Add more processes to default list.
2022-09-19 14:15:48 +02:00
Mark Nellemann dd475c6d23 Update dashboard to include system information and only submit this once an hour per. default.
Bump version to 1.0.x
2022-09-06 15:08:52 +02:00
Mark Nellemann abef741218 Update documentation on sysv init script setup. 2022-08-27 10:30:18 +02:00
Mark Nellemann 36aadbd5cc Dashboard updates 2022-08-20 10:02:05 +02:00
Mark Nellemann 0d9dda9a5c Bump version. 2022-08-09 10:38:47 +02:00
Mark Nellemann b0fa5c4523 Update 3rd party dependencies. 2022-08-09 10:36:19 +02:00
Mark Nellemann 526b37c099 Update oshi library and bump version. 2022-06-27 08:04:25 +02:00
Mark Nellemann 0e9c1152e1 Add editorconfig settings. 2022-04-20 12:33:37 +02:00
Mark Nellemann d491ff451e Add more process to the default include list. 2022-04-07 10:41:08 +02:00
Mark Nellemann c96274fa4f Detect windows disks. 2022-03-30 13:36:54 +02:00
Mark Nellemann b5ae2e29dc Bump version to 0.1.11
Removed groovy dependency from plugins.
2022-03-02 18:41:58 +01:00
Mark Nellemann be233d52b9 Improve AIX lparstat parsing
Bump version 0.1.10
Update dependencies
2022-03-01 20:07:43 +01:00
Mark Nellemann 484834ff35 Improve AIX lparstat parsing
Bump version 0.1.10
Update dependencies
2022-02-25 15:45:11 +01:00
Mark Nellemann 16525059aa Cleanup and updates to README files. 2022-02-16 12:34:51 +01:00
Mark Nellemann 17b8f7f2ab Merged in development (pull request #18)
Update deps
2022-02-15 13:12:52 +00:00
Mark Nellemann ab78168c54 Merge branch 'development' of bitbucket.org:mnellemann/sysmon into development 2022-02-14 13:03:50 +01:00
Mark Nellemann 09906666c8 Update deps 2022-02-14 13:03:43 +01:00
Mark Nellemann 459926f3dc Merged in development (pull request #17)
- Update 3rd party dependencies
2022-01-25 08:38:28 +00:00
Mark Nellemann 580c9493ef Merged master into development 2022-01-25 08:37:52 +00:00
Mark Nellemann 75f6c0724f - Update 3rd party dependencies 2022-01-25 09:37:02 +01:00
Mark Nellemann d6e309cfa6 Merged in development (pull request #16)
Development
2022-01-11 10:24:50 +00:00
Mark Nellemann 0332732401 Misc. small changes.
- Update 3rd party dependencies
2022-01-11 11:23:06 +01:00
Mark Nellemann 8b426931e7 Misc. small changes.
- Add os details from base (oshi)
- Add netstat from base (oshi)
- Update 3rd party dependencies
- Bump version
2021-12-05 21:17:16 +01:00
Mark Nellemann a0ff6f44d3 Merged in development (pull request #15)
More data from oshi
2021-11-06 19:34:36 +00:00
Mark Nellemann 132619bb51 Add os details from base (oshi)
Add netstat from base (oshi)
Update 3rd party dependencies
Bump version
2021-11-06 20:32:54 +01:00
Mark Nellemann 0dfd307f2a Merged in development (pull request #14)
Development
2021-10-14 15:35:24 +00:00
Mark Nellemann c755c964e0 Merge remote-tracking branch 'origin/development' into development 2021-10-14 17:34:20 +02:00
Mark Nellemann 4eb40f2530 Add SMT mode. 2021-10-14 17:34:11 +02:00
Mark Nellemann 9a6ebfe5c3 Merged master into development 2021-10-08 11:12:36 +02:00
Mark Nellemann bc9b9b9a3b Add system load average metrics. 2021-10-08 09:47:35 +02:00
Mark Nellemann 75c15f3fc9 Merged in development (pull request #13)
Improve robustness, logging and example dashboards
2021-09-22 13:51:14 +00:00
Mark Nellemann 75c76af01f README.md edited online with Bitbucket 2021-09-22 13:48:10 +00:00
Mark Nellemann 33533e0885 Improve robustness, logging and example dashboards 2021-09-22 15:46:48 +02:00
Mark Nellemann 0f42e8dcc6 Cleanup. 2021-09-22 10:07:31 +02:00
Mark Nellemann ab938338b8 Merged master into development 2021-09-17 10:42:44 +00:00
Mark Nellemann 79a2fa0616 Merged in comboresults (pull request #12)
Combine results in one combo package that can be sent from client to server.
2021-09-17 09:58:40 +00:00
Mark Nellemann 924281d354 Merged master into comboresults 2021-09-17 11:56:49 +02:00
Mark Nellemann 905d38cd45 Merged in development (pull request #11)
Development
2021-09-17 09:55:55 +00:00
Mark Nellemann db2f31b346 Combine results in one combo package that can be sent from client to server. 2021-09-17 11:53:57 +02:00
Mark Nellemann 61081518eb Merged master into development 2021-09-15 12:38:51 +02:00
Mark Nellemann 1a77edfe81 Cleanup. 2021-09-14 12:46:21 +02:00
Mark Nellemann 6ce3e0252d Merge remote-tracking branch 'origin/development' into development 2021-09-14 09:00:45 +02:00
Mark Nellemann 4243a7f5ee In base-process - skip short-lived processed to not clutter the influx database. 2021-09-14 09:00:34 +02:00
Mark Nellemann fd546f9f52 Merged in development (pull request #10)
Development
2021-09-13 15:16:16 +00:00
Mark Nellemann 88071199bb Merged master into development 2021-09-13 17:14:45 +02:00
Mark Nellemann ea5d17bc5c Cleanup and option for debug logging. 2021-09-13 17:13:58 +02:00
Mark Nellemann 731d8b8d10 Improvements to network and disk, plus some cleanup and refactoring. 2021-09-11 21:55:48 +02:00
Mark Nellemann 39d3127437 Merged in development (pull request #9)
Support for configuring extensions.
2021-09-10 20:00:13 +00:00
Mark Nellemann 203c5daf3e Some cleanup. 2021-09-10 21:58:14 +02:00
Mark Nellemann 710f32e32b Merged master into development 2021-09-10 12:17:10 +02:00
Mark Nellemann f4d940a1f8 Merge branch 'development' of bitbucket.org:mnellemann/sysmon into development 2021-09-10 12:15:59 +02:00
Mark Nellemann 0361331268 Support for configuring extensions. 2021-09-10 12:15:33 +02:00
139 changed files with 7058 additions and 4467 deletions

26
.drone.yml Normal file
View file

@ -0,0 +1,26 @@
---
kind: pipeline
name: default
type: docker
steps:
- name: test
image: eclipse-temurin:8-jdk
commands:
- ./gradlew test
- name: build
image: eclipse-temurin:8-jdk
environment:
AUTH_TOKEN: # Gitea access token ENV variable
from_secret: auth # Name of DroneCI secret exposed above
commands:
- ./gradlew build packages shared:publishLibraryPublicationToGiteaRepository
- for file in server/build/distributions/*.deb ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in server/build/distributions/*.rpm ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in client/build/distributions/*.deb ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in client/build/distributions/*.rpm ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in plugins/build/distributions/*.deb ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
- for file in plugins/build/distributions/*.rpm ; do curl --user "${DRONE_REPO_OWNER}:$${AUTH_TOKEN}" --upload-file "$${file}" "https://git.data.coop/api/packages/${DRONE_REPO_OWNER}/generic/${DRONE_REPO_NAME}/${DRONE_TAG}/$(basename $file)" ; done
when:
event:
- tag

11
.editorconfig Normal file
View file

@ -0,0 +1,11 @@
root = true
[*]
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
indent_style = space
indent_size = 4
[*.yml]
indent_size = 2

1
.gitignore vendored
View file

@ -3,6 +3,7 @@
.classpath
.project
.gradle
.vscode
output
build
bin

68
CHANGELOG.md Normal file
View file

@ -0,0 +1,68 @@
# Changelog
All notable changes to this project will be documented in this file.
## [1.1.2] - 2023-02-06
- Lowercase client hostnames
## [1.1.1] - 2023-01-22
- Simplify plugin naming
- Initial support for executing (groovy) scripts
- Fixed bug when no config file were found
- Update the default [dashboards](doc/dashboards/)
## [1.1.0] - 2022-12-17
- Lower influx time precision from milliseconds to seconds
- requires you to update server and clients to this version.
- Update *oshi* dependency (for AIX improvements).
## [1.0.24] - 2022-11-16
- Fix incorrect use of OSHI getDiskStores()
- Update dashboards
## [1.0.23] - 2022-11-07
- Update dashboards.
- Lower default interval for most plugins.
- Simplify metrics-results to influx points code.
- Remove logging of skipped disk devices (eg. cd0).
## [1.0.21] - 2022-10-30
- Update dashboard
- Add IP connections
## [1.0.18] - 2022-10-24
- Bump version to 1.x to indicate stable release.
- Update 3rd party dependencies.
## [0.1.13] - 2022-06-27
## [0.1.11] - 2022-03-02
### Changed
- (plugins) Removed groovy dependency from build.gradle (it increased size and was not needed).
## [0.1.10] - 2022-03-01
### Added
- (client) More debug options.
- (plugins/linux) Re-enabled network socket-statistics extension.
### Changed
- Updated the oshi dependency to v. 6.1.4.
- (plugins/aix) Improved AIX lparstat parsing.
- (plugins/aix) More debug output from (Power) processor extension.
- (plugins/base) More debug output from plugins-base disk extension.
## [0.1.9] - 2022-02-15
### Changed
- Updated 3rd party dependencies.
<!--
[1.1.0]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.1.0%0Dv0.1.24
[1.0.24]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.24%0Dv0.1.23
[1.0.23]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.23%0Dv0.1.21
[1.0.21]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.21%0Dv0.1.18
[1.0.18]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v1.0.18%0Dv0.1.13
[0.1.13]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.13%0Dv0.1.11
[0.1.11]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.11%0Dv0.1.10
[0.1.10]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.10%0Dv0.1.9
[0.1.9]: https://bitbucket.org/mnellemann/sysmon/branches/compare/v0.1.9%0Dv0.1.8
-->

202
LICENSE Normal file
View file

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -1,47 +1,3 @@
# System Monitor
# Repository moved
Java based system monitoring solution with support for plugins.
- Example dashboards are provided in the [doc/](doc) folder, which can be imported into your Grafana installation.
- Screenshots are available in the [downloads](https://bitbucket.org/mnellemann/sysmon/downloads/) section.
## Components
### Client
Runs on your hosts and collects metrics, which are sent to the central *server*.
[More information](client/README.md).
### Server
Receives aggregated metrics from clients and saves these into InfluxDB.
[More information](server/README.md).
### Plugins
Loaded by the client and provides extensions for doing the actual collecting of metrics.
[More information](plugins/README.md).
## Known problems
### Correct timezone and clock
- Ensure you have **correct timezone and date/time** and NTPd (or similar) running to keep it accurate!
### Naming collision
You can't have hosts with the same name, as these cannot be distinguished when metrics are
written to InfluxDB (which uses the hostname as key).
### Renaming hosts
If you rename a host, the metrics in InfluxDB will still be available by the old hostname, and new metrics will be written with the new hostname. There is no easy way to migrate the old data, but you can delete it easily:
```text
USE sysmon;
DELETE WHERE hostname = 'unknown';
```
Please visit [github.com/mnellemann/sysmon](https://github.com/mnellemann/sysmon)

View file

@ -1,4 +1,4 @@
image: openjdk:8
image: eclipse-temurin:8-jdk
pipelines:
branches:

View file

@ -13,13 +13,13 @@ subprojects {
apply plugin: 'groovy'
dependencies {
testImplementation 'org.spockframework:spock-core:2.0-groovy-3.0'
testImplementation "org.spockframework:spock-core:${spockVersion}"
testImplementation "org.slf4j:slf4j-api:${slf4jVersion}"
testImplementation "org.slf4j:slf4j-simple:${slf4jVersion}"
implementation "org.slf4j:slf4j-api:${slf4jVersion}"
implementation "org.slf4j:slf4j-simple:${slf4jVersion}"
implementation "org.tomlj:tomlj:${tomljVersion}"
}
repositories {
@ -27,11 +27,14 @@ subprojects {
mavenCentral()
}
sourceCompatibility = 1.8
targetCompatibility = 1.8
java {
sourceCompatibility = JavaVersion.VERSION_1_8
targetCompatibility = JavaVersion.VERSION_1_8
}
}
tasks.create("packages") {
tasks.register("packages") {
group "build"
dependsOn ":client:buildDeb"
@ -42,5 +45,5 @@ tasks.create("packages") {
dependsOn ":plugins:buildDeb"
dependsOn ":plugins:buildRpm"
dependsOn ":plugins:buildZip"
}

View file

@ -1,6 +1,12 @@
# Client
# Client / Agent
Client component.
This is the client/agent component of sysmon, which you install (together with sysmon-plugins) on your hosts.
## Installation
Download *.deb* or *.rpm* packages for sysmon-client *and* sysmon-plugins, and install.
See the [doc/systemd.md](doc/systemd.md) or [doc/sysv-init.md](doc/sysv-init.md) files for further instructions on running as a system service.
## Development

View file

@ -3,9 +3,9 @@ import org.redline_rpm.header.Os
plugins {
id 'application'
id "com.github.johnrengelman.shadow" version "7.0.0"
id "net.nemerosa.versioning" version "2.14.0"
id "nebula.ospackage" version "8.6.1"
id "net.nemerosa.versioning" version "2.15.1"
id "com.github.johnrengelman.shadow" version "7.1.2"
id "com.netflix.nebula.ospackage" version "11.3.0"
}
dependencies {
@ -17,12 +17,15 @@ dependencies {
annotationProcessor "info.picocli:picocli-codegen:${picocliVersion}"
implementation "info.picocli:picocli:${picocliVersion}"
implementation 'org.tomlj:tomlj:1.0.0'
implementation "org.tomlj:tomlj:${tomljVersion}"
runtimeOnly(group: 'com.github.oshi', name: 'oshi-core', version: oshiVersion) {
exclude(group: "org.slf4j")
}
//implementation "org.apache.groovy:groovy-all:${groovyVersion}" // From version 4.+
implementation "org.codehaus.groovy:groovy:${groovyVersion}"
implementation group: 'org.apache.camel', name: 'camel-core', version: camelVersion
implementation group: 'org.apache.camel', name: 'camel-main', version: camelVersion
implementation group: 'org.apache.camel', name: 'camel-http', version: camelVersion
@ -30,6 +33,7 @@ dependencies {
implementation group: 'org.apache.camel', name: 'camel-bean', version: camelVersion
implementation group: 'org.apache.camel', name: 'camel-timer', version: camelVersion
implementation group: 'org.apache.camel', name: 'camel-stream', version: camelVersion
}
def projectName = "sysmon-client"
@ -37,11 +41,13 @@ def projectName = "sysmon-client"
application {
// Define the main class for the application.
mainClass.set('sysmon.client.Application')
applicationDefaultJvmArgs = [ "-server", "-XX:+UseG1GC", "-Xmx32m" ]
applicationDefaultJvmArgs = [ "-Xms64m", "-Xmx64m", "-XX:+ExitOnOutOfMemoryError", "-XX:+AlwaysPreTouch" ]
}
run {
systemProperty 'pf4j.pluginsDir', '../plugins/output/'
systemProperty 'sysmon.pluginsDir', '../plugins/output/'
systemProperty 'sysmon.cfgFile', 'doc/sysmon-client.toml'
systemProperty 'sysmon.debug', '1'
}
tasks.named('test') {
@ -70,7 +76,6 @@ shadowJar {
mergeServiceFiles() // Tell plugin to merge duplicate service files
}
apply plugin: 'nebula.ospackage'
ospackage {
packageName = projectName
release = '1'

View file

@ -1,21 +0,0 @@
# AIX Notes
Works on IBM Power VIO (Virtual IO) servers, as well as regular IBM Power AIX installations.
## Installation
We require Java 8, which should already be installed.
The RPM packages are *"noarch"* Java bytecode, so we can use the **--ignoreos** option to install:
```shell
rpm -i --ignoreos sysmon-client.rpm sysmon-plugins.rpm
```
## Run automatically at boot
Change the *sysmon-server* URL for your environment.
```shell
mkitab 'sysmon:2:respawn:env JAVA_HOME=/usr/java8_64 /opt/sysmon/client/bin/client -s http://10.20.30.40:9925/metrics >/tmp/sysmon.log 2>&1'
init q
```

37
client/doc/readme-aix.md Normal file
View file

@ -0,0 +1,37 @@
# AIX Notes
Works on IBM Power VIO (Virtual IO) servers, as well as regular IBM Power AIX installations.
## Installation
We require Java 8, which should already be installed on AIX, or is available to install.
The RPM packages are *"noarch"* Java bytecode, so we can use the **--ignoreos** option to install:
```shell
rpm -ivh --ignoreos sysmon-client-*.rpm sysmon-plugins-*.rpm
```
### Run automatically at boot
See the [sysv-init.md](sysv-init.md) file for instructions, or run from inittab:
```shell
mkitab "sysmon:2:respawn:env JAVA_HOME=/usr/java8_64 /opt/sysmon/client/bin/client -s http://10.x.y.z:9925/metrics"
init q
```
## Upgrades
To upgrade the packages:
```shell
rpm -Uvh --ignoreos sysmon-*.noarch.rpm
```
To restart sysmon-client process after upgrade:
```shell
/etc/rc.d/init.d/sysmon-client stop; /etc/rc.d/init.d/sysmon-client start
# or, if running from inittab:
kill -HUP `ps -e -o pid,comm,args | grep sysmon-client | grep java | awk '{print $1}'`
```

View file

@ -0,0 +1,43 @@
# Instruction for RedHat / CentOS / AlmaLinux Systems
Please note that the software versions referenced in this document may have changed since it was written; update the package names and commands accordingly before use.
More details are available in the [README.md](../README.md) file.
## Requirements
Java 8 (or later) runtime is required.
```shell
sudo dnf install java-11-openjdk-headless
```
Use *yum* if *dnf* is not available.
## Installation
[Download](https://git.data.coop/nellemann/-/packages/generic/sysmon/) the latest client and plugins rpm files and install:
```shell
rpm -ivh sysmon-client-*.noarch.rpm sysmon-plugins-*.noarch.rpm
cp /opt/sysmon/client/doc/sysmon-client.service /etc/systemd/system/
systemctl daemon-reload
```
Now edit the **/etc/systemd/system/sysmon-client.service** file and change the URL so that it points to *your* sysmon-server.
````
# Modify below line in /etc/systemd/system/sysmon-client.service
ExecStart=/opt/sysmon/client/bin/client -s http://10.20.30.40:9925/metrics
````
Now enable and start the sysmon-client service:
```shell
systemctl enable sysmon-client
systemctl start sysmon-client
```
Check logs for errors with: ```journalctl -u sysmon-client```

View file

@ -0,0 +1,19 @@
/**
 * Example metric script: reports two fake temperature sensor readings
 * under the "script_sensors" measurement.
 */
class ExampleScript implements MetricScript {

    MetricResult getMetrics() {
        def tags = new TreeMap<String, String>()
        def fields = new TreeMap<String, Object>()

        tags.put("type", "temp")
        fields.put("sensor1", 23.2)
        fields.put("sensor2", 25.8)

        return new MetricResult("script_sensors", new Measurement(tags, fields))
    }
}

View file

@ -0,0 +1,3 @@
# Example Scripts
TODO.

View file

@ -1,9 +1,12 @@
[Unit]
Description=Sysmon Client Service
Description=Sysmon Client
[Service]
TimeoutStartSec=0
Restart=always
#User=nobody
#Group=nobody
TimeoutSec=20
Restart=on-failure
# BELOW: Specify sysmon-server URL, add '-n hostname' if needed
ExecStart=/opt/sysmon/client/bin/client -s http://10.20.30.40:9925/metrics
[Install]

106
client/doc/sysmon-client.sh Normal file
View file

@ -0,0 +1,106 @@
#!/bin/sh
#
# SysV init script for the sysmon-client daemon.
# Starts/stops the client as a background process, tracking it via a pid file.

### BEGIN INIT INFO
# Provides:          sysmon-client
# Required-Start:    $remote_fs $syslog
# Required-Stop:     $remote_fs $syslog
# Default-Start:     2 3 4 5
# Default-Stop:      0 1 6
# Short-Description: Start daemon at boot time
# Description:       Enable service provided by daemon.
### END INIT INFO

dir="/opt/sysmon/client"
cmd="/opt/sysmon/client/bin/client"
args="-s http://10.20.30.40:9925/metrics" # <- HERE: Specify sysmon-server URL, add '-n hostname' if needed
user=""

name="sysmon-client"
pid_file="/var/run/$name.pid"
stdout_log="/var/log/$name.log"
stderr_log="/var/log/$name.err"

# Java 8+ runtime required - Uncomment and export JAVA_HOME if needed
#JAVA_HOME=/usr/java8_64
#JAVA_HOME=/opt/ibm-semeru-open-XX-jre
#JAVA_HOME=/opt/ibm-semeru-open-XX-jdk
#JAVA_HOME=/opt/ibm/ibm-semeru-certified-XX-jre
#JAVA_HOME=/opt/ibm/ibm-semeru-certified-XX-jdk
#export JAVA_HOME

# Print the recorded pid (pid file must exist).
get_pid() {
    cat "$pid_file"
}

# True when a pid file exists and that pid is a live process.
is_running() {
    [ -f "$pid_file" ] && ps -p "$(get_pid)" > /dev/null 2>&1
}

case "$1" in
    start)
        if is_running; then
            echo "Already started"
        else
            echo "Starting $name"
            cd "$dir" || exit 1
            if [ -z "$user" ]; then
                $cmd $args >> "$stdout_log" 2>> "$stderr_log" &
            else
                sudo -u "$user" $cmd $args >> "$stdout_log" 2>> "$stderr_log" &
            fi
            echo $! > "$pid_file"
            if ! is_running; then
                echo "Unable to start, see $stdout_log and $stderr_log"
                exit 1
            fi
        fi
        ;;
    stop)
        if is_running; then
            echo "Stopping $name.."
            kill "$(get_pid)"
            # Give the process up to 10 seconds to exit cleanly.
            for i in 1 2 3 4 5 6 7 8 9 10
            do
                if ! is_running; then
                    break
                fi
                sleep 1
            done
            echo
            if is_running; then
                echo "Not stopped; may still be shutting down or shutdown may have failed"
                exit 1
            else
                echo "Stopped"
                if [ -f "$pid_file" ]; then
                    rm "$pid_file"
                fi
            fi
        else
            echo "Not running"
        fi
        ;;
    restart)
        $0 stop
        if is_running; then
            echo "Unable to stop, will not attempt to start"
            exit 1
        fi
        $0 start
        ;;
    status)
        if is_running; then
            echo "Running"
        else
            echo "Stopped"
            exit 1
        fi
        ;;
    *)
        echo "Usage: $0 {start|stop|restart|status}"
        exit 1
        ;;
esac

exit 0

View file

@ -0,0 +1,32 @@
###
### Sysmon Client
###
### Example configuration with some default values.
###
# Local path for Groovy scripts
scripts = "/opt/sysmon/scripts"
[extension.base_info]
enabled = true
interval = '60m'
[extension.base_disk]
enabled = true
interval = '10s'
[extension.base_filesystem]
enabled = true
interval = '10s'
exclude_type = [ "tmpfs", "ahafs" ]
exclude_mount = [ "/boot/efi" ]
[extension.base_process]
enabled = true
interval = '5m'
include = [
"java", "node", "httpd", "mongod", "mysqld",
"postgres", "influxd", "haproxy", "beam.smp",
"filebeat", "corosync", "rsyslogd", "memcached",
"db2sysc", "dsmserv", "mmfsd",
]

View file

@ -1,4 +1,4 @@
# SystemD Notes
# Linux systemd notes
Edit the *sysmon-client.service* file and change the sysmon-server URL accordingly to your environment.
@ -9,4 +9,4 @@ cp sysmon-client.service /etc/systemd/system/
systemctl daemon-reload
systemctl enable sysmon-client
systemctl restart sysmon-client
```
```

18
client/doc/sysv-init.md Normal file
View file

@ -0,0 +1,18 @@
# SysV init Notes
- Copy *sysmon-client.sh* to the init-script location for your operating system, renaming it to *sysmon-client*.
- Edit the file and specify the sysmon-server URL in the *args* variable.
- Edit the file and uncomment *JAVA_HOME* if required
- SymLink to the required run-levels.
## AIX & VIO
```shell
# Remember to edit and set JAVA_HOME to eg. /usr/java8_64
cp sysmon-client.sh /etc/rc.d/init.d/sysmon-client
chmod +x /etc/rc.d/init.d/sysmon-client
ln -s /etc/rc.d/init.d/sysmon-client /etc/rc.d/rc2.d/Ssysmon-client
ln -s /etc/rc.d/init.d/sysmon-client /etc/rc.d/rc2.d/Ksysmon-client
```

View file

@ -4,21 +4,18 @@
package sysmon.client;
import org.apache.camel.main.Main;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.simple.SimpleLogger;
import picocli.CommandLine;
import java.io.IOException;
import java.net.InetAddress;
import java.io.File;
import java.net.URL;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.concurrent.Callable;
@CommandLine.Command(name = "sysmon-client", mixinStandardHelpOptions = true)
public class Application implements Callable<Integer> {
private static final Logger log = LoggerFactory.getLogger(Application.class);
@CommandLine.Option(names = { "-s", "--server-url" }, description = "Server URL (default: ${DEFAULT-VALUE}).", defaultValue = "http://127.0.0.1:9925/metrics", paramLabel = "<url>")
private URL serverUrl;
@ -28,6 +25,15 @@ public class Application implements Callable<Integer> {
@CommandLine.Option(names = { "-p", "--plugin-dir" }, description = "Plugin jar path (default: ${DEFAULT-VALUE}).", paramLabel = "<path>", defaultValue = "/opt/sysmon/plugins")
private String pluginPath;
@CommandLine.Option(names = { "-c", "--conf" }, description = "Configuration file [default: '/etc/sysmon-client.toml'].", paramLabel = "<file>", defaultValue = "/etc/sysmon-client.toml")
private File configurationFile;
//@CommandLine.Option(names = { "-d", "--debug" }, description = "Enable debugging (default: ${DEFAULT_VALUE}).")
//private boolean enableDebug = false;
@CommandLine.Option(names = { "-d", "--debug" }, description = "Enable debugging (default: ${DEFAULT_VALUE}).")
private boolean[] enableDebug = new boolean[0];
public static void main(String... args) {
int exitCode = new CommandLine(new Application()).execute(args);
System.exit(exitCode);
@ -35,26 +41,60 @@ public class Application implements Callable<Integer> {
@Override
public Integer call() throws IOException {
public Integer call() {
String sysmonDebug = System.getProperty("sysmon.debug");
if(sysmonDebug != null) {
System.setProperty(SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "INFO");
}
switch (enableDebug.length) {
case 1:
System.setProperty(SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "INFO");
break;
case 2:
System.setProperty(SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "DEBUG");
break;
case 3:
System.setProperty(SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "TRACE");
break;
}
String sysmonCfgFile = System.getProperty("sysmon.cfgFile");
if(sysmonCfgFile != null) {
configurationFile = new File(sysmonCfgFile);
}
String sysmonPluginsDir = System.getProperty("sysmon.pluginsDir");
if(sysmonPluginsDir != null) {
pluginPath = sysmonPluginsDir;
}
if(hostname == null || hostname.isEmpty()) {
try {
hostname = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
System.err.println("Could not detect hostname. Use the '-n' or '--hostname' option to specify it.");
System.err.println("Could not detect hostname. Use the '-n' or '--hostname' option to specify.");
return -1;
}
}
String pf4jPluginsDir = System.getProperty("pf4j.pluginsDir");
if(pf4jPluginsDir != null) {
pluginPath = pf4jPluginsDir;
Configuration configuration = new Configuration();
if(configurationFile.exists()) {
try {
configuration.parse(configurationFile.toPath());
} catch (Exception e) {
System.err.println("Could not parse configuration file: " + e.getMessage());
return 1;
}
}
Main main = new Main();
main.bind("pluginPath", pluginPath);
main.bind("myServerUrl", serverUrl.toString());
main.bind("myHostname", hostname);
main.bind("configuration", configuration);
main.configure().addRoutesBuilder(ClientRouteBuilder.class);
// now keep the application running until the JVM is terminated (ctrl + c or sigterm)
@ -62,6 +102,7 @@ public class Application implements Callable<Integer> {
main.run();
} catch (Exception e) {
System.err.println(e.getMessage());
return 1;
}
return 0;

View file

@ -1,87 +1,181 @@
package sysmon.client;
import org.apache.camel.Exchange;
import org.apache.camel.LoggingLevel;
import org.apache.camel.builder.AggregationStrategies;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.model.dataformat.JsonLibrary;
import org.apache.camel.component.jackson.JacksonDataFormat;
import org.apache.camel.spi.Registry;
import org.pf4j.JarPluginManager;
import org.pf4j.PluginManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.ComboResult;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import javax.script.*;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class ClientRouteBuilder extends RouteBuilder {
private static final Logger log = LoggerFactory.getLogger(ClientRouteBuilder.class);
private final Set<String> scriptFiles = new HashSet<>();
@Override
public void configure() {
Registry registry = getContext().getRegistry();
Configuration configuration = (Configuration) registry.lookupByName("configuration");
Path[] pluginpaths = { Paths.get(registry.lookupByNameAndType("pluginPath", String.class)) };
PluginManager pluginManager = new JarPluginManager(pluginpaths);
Path[] pluginPaths = { Paths.get(registry.lookupByNameAndType("pluginPath", String.class)) };
PluginManager pluginManager = new JarPluginManager(pluginPaths);
pluginManager.loadPlugins();
pluginManager.startPlugins();
List<String> providers = new ArrayList<>();
List<MetricExtension> metricExtensions = pluginManager.getExtensions(MetricExtension.class);
for (MetricExtension ext : metricExtensions) {
final String name = ext.getName();
if(ext.isSupported()) {
String provides = ext.getProvides();
if(providers.contains(provides)) {
log.warn("Skipping extension (already provided): " + ext.getName());
continue;
}
log.info(">>> Enabling extension: " + ext.getDescription());
providers.add(provides);
// TODO: Make timer thread configurable
// Setup Camel route for this extension
// a unique timer name gives the timer its own thread, otherwise it's a shared thread for other timers with same name.
//from("timer:"+provides+"?fixedRate=true&period=30s")
from("timer:extensions?fixedRate=true&period=30s")
.bean(ext, "getMetrics")
//.doTry()
.outputType(MetricResult.class)
.process(new MetricEnrichProcessor(registry))
.choice().when(exchangeProperty("skip").isEqualTo(true))
.log("Skipping empty measurement.")
.stop()
.otherwise()
.to("seda:metrics?discardWhenFull=true");
} else {
log.info(">>> Skipping extension (not supported here): " + ext.getDescription());
// Load configuration if available
if(configuration.isForExtension(name)) {
log.info("Loading configuring for extension: " + ext.getDescription());
ext.setConfiguration(configuration.getForExtension(name));
}
if(ext.isSupported() && ext.isEnabled()) {
addExtensionRoute(ext);
} else {
log.info("Skipping extension (not supported or disabled): " + ext.getDescription());
}
}
// TODO: Make 'concurrentConsumers' configurable
from("seda:metrics?concurrentConsumers=1")
.setHeader(Exchange.HTTP_METHOD, constant("POST"))
//.setHeader(Exchange.CONTENT_TYPE, constant("application/json"))
from("seda:metrics?purgeWhenStopping=true")
.routeId("aggregation")
.aggregate(constant(true), AggregationStrategies.beanAllowNull(ComboAppender.class, "append"))
.completionTimeout(5000L)
.doTry()
//.process(new MetricProcessor())
.marshal().json(JsonLibrary.Jackson, MetricResult.class)
.to((String)registry.lookupByName("myServerUrl"))
.to("seda:outbound?discardWhenFull=true")
.log("Aggregating ${body} before sending to server.")
.doCatch(Exception.class)
.log("Error: ${exception.message}")
//.log("Error sending metric to collector: ${body}")
.log(LoggingLevel.WARN, "Error: ${exception.message}.")
.end();
from("seda:outbound?purgeWhenStopping=true")
.routeId("outbound")
.setHeader(Exchange.HTTP_METHOD, constant("POST"))
.doTry()
.marshal(new JacksonDataFormat(ComboResult.class))
.to((String)registry.lookupByName("myServerUrl"))
.log("${body}")
.doCatch(Exception.class)
.log(LoggingLevel.WARN,"Error: ${exception.message}.")
.end();
// Find all local scripts
String scriptsPath = configuration.getScriptPath();
if(scriptsPath != null && Files.isDirectory(Paths.get(scriptsPath))) {
try {
scriptFiles.addAll(listFilesByExtension(scriptsPath, "groovy"));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
// Enable the local scripts
for (String scriptFile : scriptFiles) {
try {
ScriptWrapper scriptWrapper = new ScriptWrapper(scriptsPath, scriptFile);
addScriptRoute(scriptWrapper);
} catch(Exception e) {
log.error("configure() - script error: {}", e.getMessage());
}
}
}
void addScriptRoute(ScriptWrapper script) {
Registry registry = getContext().getRegistry();
from("timer:scripts?fixedRate=true&period=30s")
.routeId(script.toString())
.bean(script, "run")
.outputType(MetricResult.class)
.process(new MetricEnrichProcessor(registry))
.choice().when(exchangeProperty("skip").isEqualTo(true))
.log(LoggingLevel.WARN, "Skipping empty measurement.")
.stop()
.otherwise()
.log("${body}")
.to("seda:metrics?discardWhenFull=true");
}
void addExtensionRoute(MetricExtension ext) {
Registry registry = getContext().getRegistry();
// Setup Camel route for this extension
// a unique timer name gives the timer its own thread, otherwise it's a shared thread for other timers with same name.
String timerName = ext.isThreaded() ? ext.getName() : "default";
String timerInterval = (ext.getInterval() != null) ? ext.getInterval() : "30s";
from("timer:" + timerName + "?fixedRate=true&period=" + timerInterval)
.routeId(ext.getName())
.bean(ext, "getMetrics")
.outputType(MetricResult.class)
.process(new MetricEnrichProcessor(registry))
.choice().when(exchangeProperty("skip").isEqualTo(true))
.log(LoggingLevel.WARN, "Skipping empty measurement.")
.stop()
.otherwise()
.log("${body}")
.to("seda:metrics?discardWhenFull=true");
}
List<String> findScripts(String location) {
log.info("Looking for scripts in: {}", location);
List<String> scripts = new ArrayList<>();
ScriptEngineManager manager = new ScriptEngineManager();
List<ScriptEngineFactory> factoryList = manager.getEngineFactories();
for (ScriptEngineFactory factory : factoryList) {
log.info("findScripts() - Supporting: {}", factory.getLanguageName());
for(String ex : factory.getExtensions()) {
log.info("findScripts() - Extension: {}", ex);
try {
scripts.addAll(listFilesByExtension(location, ex));
log.warn(scripts.toString());
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
return scripts;
}
Set<String> listFilesByExtension(String dir, String ext) throws IOException {
try (Stream<Path> stream = Files.list(Paths.get(dir))) {
return stream
.filter(file -> !Files.isDirectory(file))
.map(Path::getFileName)
.map(Path::toString)
.filter(s -> s.endsWith(ext))
.collect(Collectors.toSet());
}
}
}

View file

@ -0,0 +1,18 @@
package sysmon.client;
import sysmon.shared.ComboResult;
import sysmon.shared.MetricResult;
/**
 * Camel aggregation bean that collects individual {@link MetricResult}s
 * into a single {@link ComboResult} batch.
 */
public class ComboAppender {

    /**
     * Appends a metric result to the running aggregate.
     *
     * @param comboResult the aggregate so far; {@code null} on the first invocation
     * @param metricResult the new result to add
     * @return the aggregate containing the appended result
     */
    public ComboResult append(ComboResult comboResult, MetricResult metricResult) {
        ComboResult target = (comboResult == null) ? new ComboResult() : comboResult;
        target.getMetricResults().add(metricResult);
        return target;
    }
}

View file

@ -0,0 +1,78 @@
package sysmon.client;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tomlj.Toml;
import org.tomlj.TomlParseResult;
import org.tomlj.TomlTable;
import java.io.IOException;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
/**
 * Thin wrapper around a parsed TOML configuration file.
 *
 * <p>Exposes per-extension configuration tables (keys under
 * {@code [extension.<name>]}) and the optional top-level {@code scripts} path.
 * All lookups are safe to call before {@link #parse} — they return
 * {@code false}/{@code null} when no file has been loaded.
 */
public final class Configuration {

    private final static Logger log = LoggerFactory.getLogger(Configuration.class);

    // Null until parse() has been called successfully.
    private TomlParseResult result;

    /**
     * Parses the given TOML file. Syntax errors are logged but not thrown;
     * valid parts of the file remain usable.
     *
     * @param configurationFile path to the TOML configuration file
     * @throws IOException if the file cannot be read
     */
    void parse(Path configurationFile) throws IOException {
        log.info("Parsing configuration file: {}", configurationFile);
        result = Toml.parse(configurationFile);
        result.errors().forEach(error -> log.error(error.toString()));
    }

    /**
     * Returns true when the loaded configuration contains a table
     * for the named extension ({@code [extension.<extName>]}).
     */
    boolean isForExtension(String extName) {
        if (result == null) {
            return false;
        }
        String key = String.format("extension.%s", extName);
        return result.contains(key);
    }

    /**
     * Returns the named extension's configuration as a map of simple values
     * (booleans, strings, longs, doubles, lists, nested tables), or
     * {@code null} when no configuration file has been loaded.
     */
    Map<String, Object> getForExtension(String extName) {
        if (result == null) {
            log.debug("No configuration file loaded ...");
            return null;
        }
        Map<String, Object> map = new HashMap<>();
        String key = String.format("extension.%s", extName);
        TomlTable table = result.getTableOrEmpty(key);
        table.keySet().forEach(k -> {
            if (table.isBoolean(k)) {
                map.put(k, table.getBoolean(k));
            } else if (table.isString(k)) {
                map.put(k, table.getString(k));
            } else if (table.isLong(k)) {
                map.put(k, table.getLong(k));
            } else if (table.isDouble(k)) {
                map.put(k, table.getDouble(k));
            } else if (table.isArray(k)) {
                map.put(k, Objects.requireNonNull(table.getArray(k)).toList());
            } else if (table.isTable(k)) {
                map.put(k, table.getTable(k));
            }
        });
        return map;
    }

    /**
     * Returns the top-level {@code scripts} directory path, or {@code null}
     * when unset or when no configuration file has been loaded.
     */
    String getScriptPath() {
        if (result == null) {
            log.debug("No configuration file loaded ...");
            return null;
        }
        return result.getString("scripts");
    }
}

View file

@ -0,0 +1,14 @@
package sysmon.client;
import org.apache.camel.Exchange;
import org.apache.camel.processor.aggregate.AbstractListAggregationStrategy;
import sysmon.shared.MetricResult;
/**
 * Camel aggregation strategy that gathers exchanges into a
 * {@code List<MetricResult>} by extracting each exchange's body.
 */
public class ListOfResultsStrategy extends AbstractListAggregationStrategy<MetricResult> {

    /** Returns the exchange's inbound body converted to a {@link MetricResult}. */
    @Override
    public MetricResult getValue(Exchange exchange) {
        return exchange.getIn().getBody(MetricResult.class);
    }
}

View file

@ -32,4 +32,4 @@ public class MetricEnrichProcessor implements Processor {
exchange.getIn().setBody(metricResult);
}
}
}

View file

@ -0,0 +1,47 @@
package sysmon.client;
import groovy.lang.GroovyClassLoader;
import groovy.lang.GroovyObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.MetricResult;
import sysmon.shared.MetricScript;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
/**
 * Wraps a Groovy script file that implements {@link MetricScript}, compiling
 * it once at construction time and exposing its {@code getMetrics()} result
 * via {@link #run()}.
 *
 * <p>Compilation failures are logged and leave the wrapper inert:
 * {@link #run()} then returns {@code null}.
 */
public class ScriptWrapper {

    private static final Logger log = LoggerFactory.getLogger(ScriptWrapper.class);

    // Shared class loader for all scripts; compiled classes stay cached here.
    private final static GroovyClassLoader loader = new GroovyClassLoader();

    // Null when the script failed to compile or instantiate.
    private GroovyObject script;
    private final String name;

    /**
     * Compiles and instantiates the given Groovy script.
     *
     * @param scriptPath directory containing the script
     * @param scriptFile file name of the script within that directory
     */
    public ScriptWrapper(String scriptPath, String scriptFile) {
        name = scriptFile;
        try {
            Class<?> scriptClass = loader.parseClass(new File(scriptPath, scriptFile));
            script = (GroovyObject) scriptClass.getDeclaredConstructor().newInstance();
        } catch (IOException | ReflectiveOperationException | IllegalArgumentException | SecurityException e) {
            // Pass the throwable so the stack trace is logged, not just the message.
            log.error("ScriptWrapper() - error: {}", e.getMessage(), e);
        }
    }

    /**
     * Invokes the script's {@code getMetrics()} method.
     *
     * @return the script's metric result, or {@code null} when the script
     *         failed to load or does not implement {@link MetricScript}
     */
    MetricResult run() {
        MetricResult result = null;
        // instanceof is null-safe, so no separate null check is needed.
        if (script instanceof MetricScript) {
            result = (MetricResult) script.invokeMethod("getMetrics", null);
        }
        return result;
    }

    @Override
    public String toString() {
        return name;
    }
}

View file

@ -15,30 +15,7 @@
## limitations under the License.
## ---------------------------------------------------------------------------
# to configure camel main
# here you can configure options on camel main (see MainConfigurationProperties class)
camel.main.name = sysmon-client
# enable tracing
#camel.main.tracing = true
# bean introspection to log reflection based configuration
#camel.main.beanIntrospectionExtendedStatistics=true
#camel.main.beanIntrospectionLoggingLevel=INFO
# run in lightweight mode to be tiny as possible
camel.main.jmxEnabled = false
camel.main.lightweight = true
# and eager load classes
#camel.main.eager-classloading = true
# use object pooling to reduce JVM garbage collection
#camel.main.exchange-factory = pooled
#camel.main.exchange-factory-statistics-enabled = true
# can be used to not start the route
# camel.main.auto-startup = false
# configure beans
#camel.beans.metricProcessor = #class:org.sysmon.client.MetricProcessor
#camel.dataformat.json-jackson.use-list = true
camel.component.seda.queue-size = 100

View file

@ -3,4 +3,6 @@ org.slf4j.simpleLogger.showDateTime=true
org.slf4j.simpleLogger.showShortLogName=true
org.slf4j.simpleLogger.dateTimeFormat=yyyy-MM-dd HH:mm:ss.SSS
org.slf4j.simpleLogger.levelInBrackets=true
org.slf4j.simpleLogger.defaultLogLevel=info
org.slf4j.simpleLogger.defaultLogLevel=warn
org.slf4j.simpleLogger.showThreadName=false
org.slf4j.simpleLogger.showLogName=false

File diff suppressed because it is too large Load diff

View file

@ -1,3 +0,0 @@
# Example Ansible Playbooks
For installing on AIX and RPM-based Linux.

View file

@ -1,63 +0,0 @@
---
#
# Example ansible playbook for installation of sysmon client on AIX.
# More information at: https://bitbucket.org/mnellemann/sysmon
#
# ansible-galaxy collection install community.general
# ansible-playbook -i aixhost, -u root sysmon-client-aix.yml
#
# NOTE: Ensure correct timezone and time
- name: "Install Sysmon Client and Plugins on AIX"
hosts: all
gather_facts: yes
vars:
server_url: http://sysmon-server:9925/metrics
tasks:
- name: Sysmon Client | Ensure hostname resolves
ansible.builtin.lineinfile:
path: /etc/hosts
state: present
line: "127.0.1.1 {{ ansible_hostname }}"
- name: Sysmon Client | Copy sysmon-client.rpm
ansible.builtin.copy:
src: "{{ item }}"
dest: /opt/sysmon-client.rpm
with_fileglob:
- ../../client/build/distributions/sysmon-client-*.noarch.rpm
- name: Sysmon Client | Copy sysmon-plugins.rpm
ansible.builtin.copy:
src: "{{ item }}"
dest: /opt/sysmon-plugins.rpm
with_fileglob:
- ../../plugins/build/distributions/sysmon-plugins-*.noarch.rpm
- name: Sysmon Client | Install sysmon-client.rpm
ansible.builtin.command: /usr/bin/rpm -i --ignoreos /opt/sysmon-client.rpm
args:
creates: /opt/sysmon/client
- name: Sysmon Client | Install sysmon-plugins.rpm
ansible.builtin.command: /usr/bin/rpm -i --ignoreos /opt/sysmon-plugins.rpm
args:
creates: /opt/sysmon/plugins
- name: Sysmon Client | Create inittab entry for sysmon-client
community.general.aix_inittab:
name: sysmon
runlevel: '2'
action: respawn
command: env JAVA_HOME=/usr/java8_64 /opt/sysmon/client/bin/client -s {{ server_url }} >/tmp/sysmon.log 2>&1
state: present
become: yes
notify:
- reload inittab
handlers:
- name: reload inittab
command: init q

View file

@ -1,61 +0,0 @@
---
#
# Example ansible playbook for installation of sysmon client on Linux (RPM based).
# More information at: https://bitbucket.org/mnellemann/sysmon
#
# ansible-playbook -i linuxhost, -u root sysmon-client-linux.yml
#
# NOTE: Ensure correct timezone and time
- name: "Install Sysmon Client and Plugins on Linux (RPM based)"
hosts: all
gather_facts: no
vars:
server_url: http://sysmon-server:9925/metrics
tasks:
- name: Sysmon Client | Copy sysmon-client.rpm
ansible.builtin.copy:
src: "{{ item }}"
dest: /opt/sysmon-client.rpm
with_fileglob:
- ../../client/build/distributions/sysmon-client-*.noarch.rpm
- name: Sysmon Client | Copy sysmon-plugins.rpm
ansible.builtin.copy:
src: "{{ item }}"
dest: /opt/sysmon-plugins.rpm
with_fileglob:
- ../../plugins/build/distributions/sysmon-plugins-*.noarch.rpm
- name: Sysmon Client | Install OpenJDK (headless)
yum:
name: "java-11-openjdk-headless"
state: present
- name: Sysmon Client | Install sysmon-client.rpm
yum:
name: /opt/sysmon-client.rpm
state: present
disable_gpg_check: true
- name: Sysmon Plugins | Install sysmon-plugins.rpm
yum:
name: /opt/sysmon-plugins.rpm
state: present
disable_gpg_check: true
- name: Sysmon Client | Create service file
template: src=sysmon-client.service.j2 dest=/lib/systemd/system/sysmon-client.service mode=644
notify:
- reload systemctl
- name: Sysmon Client | Start service
service: name=sysmon-client.service state=started enabled=yes
handlers:
- name: reload systemctl
command: systemctl daemon-reload

View file

@ -1,10 +0,0 @@
[Unit]
Description=Sysmon Client Service
[Service]
TimeoutStartSec=3
Restart=always
ExecStart=/opt/sysmon/client/bin/client -s {{server_url}}
[Install]
WantedBy=default.target

View file

@ -1,33 +0,0 @@
---
#
# Example ansible playbook for timezone and NTP setup on AIX.
#
# ansible-playbook -i aixhost, -u root timezone-aix.yml
#
- name: "Timezone and NTP on AIX"
hosts: all
gather_facts: no
vars:
timezone: Europe/Copenhagen
ntp_server: dk.pool.ntp.org
tasks:
- name: Configure timezone
ansible.builtin.replace:
path: /etc/environment
regexp: '^TZ=(.*)$'
replace: "TZ={{ timezone }}"
- name: Update time from NTP server
ansible.builtin.command: "env TZ={{ timezone }} /usr/sbin/ntpdate {{ ntp_server }}"
- name: Create cron entry for updating time periodically
ansible.builtin.cron:
name: ntpdate
weekday: "*"
minute: "1"
hour: "*"
user: root
job: "/usr/sbin/ntpdate {{ ntp_server }}"

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,738 @@
{
"__inputs": [
{
"name": "DS_SYSMON",
"label": "Database",
"description": "",
"type": "datasource",
"pluginId": "influxdb",
"pluginName": "InfluxDB"
}
],
"__elements": {},
"__requires": [
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "9.1.6"
},
{
"type": "datasource",
"id": "influxdb",
"name": "InfluxDB",
"version": "1.0.0"
},
{
"type": "panel",
"id": "text",
"name": "Text",
"version": ""
},
{
"type": "panel",
"id": "timeseries",
"name": "Time series",
"version": ""
}
],
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "datasource",
"uid": "grafana"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"description": "https://git.data.coop/nellemann/sysmon/ - Metrics from sysmon agent.",
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": null,
"links": [],
"liveNow": false,
"panels": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"gridPos": {
"h": 3,
"w": 24,
"x": 0,
"y": 0
},
"id": 28,
"options": {
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information visit: [git.data.coop/nellemann/sysmon](https://git.data.coop/nellemann/sysmon)\n ",
"mode": "markdown"
},
"pluginVersion": "9.1.6",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"refId": "A"
}
],
"transparent": true,
"type": "text"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"description": "Load average as reported by OS.",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": 3600000,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": {
"h": 13,
"w": 12,
"x": 0,
"y": 3
},
"id": 2,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "9.1.3",
"targets": [
{
"alias": "$tag_hostname - $col",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"hostname"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
],
"measurement": "base_load",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"5min"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
},
{
"params": [
"5min"
],
"type": "alias"
}
]
],
"tags": [
{
"key": "hostname",
"operator": "=~",
"value": "/^$hostname$/"
}
]
}
],
"title": "System Load Average",
"type": "timeseries"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"description": "Shows the number of physical processors consumed.",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": 3600000,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"min": 0,
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": {
"h": 13,
"w": 12,
"x": 12,
"y": 3
},
"id": 17,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "9.1.3",
"targets": [
{
"alias": "$tag_hostname",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"hostname"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
],
"measurement": "power_processor",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"physc"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
}
]
],
"tags": [
{
"key": "hostname",
"operator": "=~",
"value": "/^$hostname$/"
}
]
}
],
"title": "Power - Shared Processors - Physical Cores Consumed",
"type": "timeseries"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"description": "Percentage of the entitled capacity consumed.",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": 3600000,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"min": 0,
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "percent"
},
"overrides": []
},
"gridPos": {
"h": 14,
"w": 12,
"x": 0,
"y": 16
},
"id": 30,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "9.1.3",
"targets": [
{
"alias": "$tag_hostname",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"hostname"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
],
"measurement": "power_processor",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"entc"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
}
]
],
"tags": [
{
"key": "hostname",
"operator": "=~",
"value": "/^$hostname$/"
}
]
}
],
"title": "Power - Shared Processors - Entitled Capacity Consumed",
"type": "timeseries"
},
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"description": "Percentage of logical processor(s) utilization",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"axisSoftMin": 0,
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": 3600000,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"min": 0,
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "percent"
},
"overrides": []
},
"gridPos": {
"h": 14,
"w": 12,
"x": 12,
"y": 16
},
"id": 31,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"pluginVersion": "9.1.3",
"targets": [
{
"alias": "$tag_hostname",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"hostname"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
],
"measurement": "power_processor",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"lbusy"
],
"type": "field"
},
{
"params": [],
"type": "mean"
},
{
"params": [
10
],
"type": "moving_average"
}
]
],
"tags": [
{
"key": "hostname",
"operator": "=~",
"value": "/^$hostname$/"
}
]
}
],
"title": "Power - Shared Processors - Logical Processor Utilization",
"type": "timeseries"
}
],
"refresh": "30s",
"schemaVersion": 37,
"style": "dark",
"tags": [
"sysmon",
"Power"
],
"templating": {
"list": [
{
"current": {},
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"definition": "SHOW TAG VALUES FROM \"power_processor\" WITH KEY = \"hostname\" WHERE time > now() - 60m",
"hide": 0,
"includeAll": true,
"label": "Host",
"multi": true,
"name": "hostname",
"options": [],
"query": "SHOW TAG VALUES FROM \"power_processor\" WITH KEY = \"hostname\" WHERE time > now() - 60m",
"refresh": 2,
"regex": "",
"skipUrlSync": false,
"sort": 5,
"tagValuesQuery": "",
"tagsQuery": "",
"type": "query",
"useTags": false
}
]
},
"time": {
"from": "now-7d",
"to": "now-10s"
},
"timepicker": {
"nowDelay": "10s",
"refresh_intervals": [
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
]
},
"timezone": "",
"title": "Sysmon - IBM Power",
"uid": "3zPCIbN4z",
"version": 7,
"weekStart": ""
}

View file

@ -1,26 +1,21 @@
{
"__inputs": [
{
"name": "DS_INFLUXDB-SYSMON",
"label": "InfluxDB-sysmon",
"name": "DS_SYSMON",
"label": "Database",
"description": "",
"type": "datasource",
"pluginId": "influxdb",
"pluginName": "InfluxDB"
}
],
"__elements": {},
"__requires": [
{
"type": "panel",
"id": "gauge",
"name": "Gauge",
"version": ""
},
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "8.0.6"
"version": "9.1.6"
},
{
"type": "datasource",
@ -40,6 +35,12 @@
"name": "Table",
"version": ""
},
{
"type": "panel",
"id": "text",
"name": "Text",
"version": ""
},
{
"type": "panel",
"id": "timeseries",
@ -51,40 +52,93 @@
"list": [
{
"builtIn": 1,
"datasource": "-- Grafana --",
"datasource": {
"type": "datasource",
"uid": "grafana"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"description": "Metrics from within host / guest / partition.",
"description": "https://git.data.coop/nellemann/sysmon/ - Metrics from within host / guest / partition.",
"editable": true,
"gnetId": null,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": null,
"iteration": 1631013505736,
"links": [],
"liveNow": false,
"panels": [
{
"collapsed": false,
"datasource": null,
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"gridPos": {
"h": 1,
"h": 3,
"w": 24,
"x": 0,
"y": 0
},
"id": 30,
"options": {
"content": "## Metrics are collected by an agent running inside of each LPAR / VM / Host.\n \n For more information visit: [git.data.coop/nellemann/sysmon](https://git.data.coop/nellemann/sysmon)\n ",
"mode": "markdown"
},
"pluginVersion": "9.1.6",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"refId": "A"
}
],
"transparent": true,
"type": "text"
},
{
"collapsed": false,
"datasource": {
"type": "influxdb",
"uid": "5KYZifB7z"
},
"gridPos": {
"h": 1,
"w": 24,
"x": 0,
"y": 3
},
"id": 4,
"panels": [],
"repeat": "hostname",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "5KYZifB7z"
},
"refId": "A"
}
],
"title": "${hostname}",
"type": "row"
},
{
"datasource": "${DS_INFLUXDB-SYSMON}",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"description": "",
"fieldConfig": {
"defaults": {
@ -93,7 +147,8 @@
},
"custom": {
"align": "left",
"displayMode": "auto"
"displayMode": "auto",
"inspect": false
},
"mappings": [],
"thresholds": {
@ -113,16 +168,27 @@
"h": 6,
"w": 12,
"x": 0,
"y": 1
"y": 4
},
"id": 16,
"options": {
"footer": {
"fields": "",
"reducer": [
"sum"
],
"show": false
},
"showHeader": true,
"sortBy": []
},
"pluginVersion": "8.0.6",
"pluginVersion": "9.1.6",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
@ -213,17 +279,18 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Details",
"type": "table"
},
{
"datasource": "${DS_INFLUXDB-SYSMON}",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
@ -268,7 +335,7 @@
"h": 6,
"w": 7,
"x": 12,
"y": 1
"y": 4
},
"id": 19,
"options": {
@ -286,9 +353,13 @@
"text": {},
"textMode": "auto"
},
"pluginVersion": "8.0.6",
"pluginVersion": "9.1.6",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
@ -298,7 +369,7 @@
},
{
"params": [
"linear"
"null"
],
"type": "fill"
}
@ -367,17 +438,18 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Memory Metrics",
"type": "stat"
},
{
"datasource": "${DS_INFLUXDB-SYSMON}",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "continuous-GrYlRd"
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
@ -422,10 +494,14 @@
"h": 6,
"w": 5,
"x": 19,
"y": 1
"y": 4
},
"id": 26,
"options": {
"colorMode": "value",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
@ -433,13 +509,16 @@
"fields": "",
"values": false
},
"showThresholdLabels": false,
"showThresholdMarkers": true,
"text": {}
"text": {},
"textMode": "auto"
},
"pluginVersion": "8.0.6",
"pluginVersion": "9.1.6",
"targets": [
{
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
@ -449,7 +528,7 @@
},
{
"params": [
"previous"
"null"
],
"type": "fill"
}
@ -471,10 +550,6 @@
"params": [],
"type": "sum"
},
{
"params": [],
"type": "non_negative_difference"
},
{
"params": [
"read"
@ -493,10 +568,6 @@
"params": [],
"type": "sum"
},
{
"params": [],
"type": "non_negative_difference"
},
{
"params": [
"write"
@ -526,13 +597,14 @@
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "Disk Metrics",
"type": "gauge"
"type": "stat"
},
{
"datasource": "${DS_INFLUXDB-SYSMON}",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"description": "",
"fieldConfig": {
"defaults": {
@ -540,6 +612,8 @@
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@ -585,23 +659,29 @@
"h": 11,
"w": 12,
"x": 0,
"y": 7
"y": 10
},
"id": 24,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom"
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi"
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "8.0.6",
"targets": [
{
"alias": "$col",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
@ -617,7 +697,7 @@
},
{
"params": [
"null"
"none"
],
"type": "fill"
}
@ -642,8 +722,10 @@
"type": "sum"
},
{
"params": [],
"type": "non_negative_difference"
"params": [
"$__interval"
],
"type": "non_negative_derivative"
},
{
"params": [
@ -664,8 +746,10 @@
"type": "sum"
},
{
"params": [],
"type": "non_negative_difference"
"params": [
"$__interval"
],
"type": "non_negative_derivative"
},
{
"params": [
@ -700,7 +784,10 @@
"type": "timeseries"
},
{
"datasource": "${DS_INFLUXDB-SYSMON}",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"description": "",
"fieldConfig": {
"defaults": {
@ -708,6 +795,8 @@
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
@ -760,23 +849,29 @@
"h": 11,
"w": 12,
"x": 12,
"y": 7
"y": 10
},
"id": 25,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom"
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi"
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "8.0.6",
"targets": [
{
"alias": "$col",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"groupBy": [
{
"params": [
@ -868,18 +963,20 @@
}
],
"refresh": "1m",
"schemaVersion": 30,
"schemaVersion": 37,
"style": "dark",
"tags": [],
"tags": [
"sysmon"
],
"templating": {
"list": [
{
"allValue": null,
"current": {},
"datasource": "${DS_INFLUXDB-SYSMON}",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"definition": "SHOW TAG VALUES FROM \"base_memory\" WITH KEY = \"hostname\" WHERE time > now() - 60m",
"description": null,
"error": null,
"hide": 0,
"includeAll": false,
"label": "Host",
@ -890,19 +987,19 @@
"refresh": 2,
"regex": "",
"skipUrlSync": false,
"sort": 0,
"sort": 5,
"tagValuesQuery": "",
"tagsQuery": "",
"type": "query",
"useTags": false
},
{
"allValue": null,
"current": {},
"datasource": "${DS_INFLUXDB-SYSMON}",
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"definition": "SHOW TAG VALUES FROM \"base_process\" WITH KEY = \"name\" WHERE hostname =~ /$hostname/AND time > now() - 60m",
"description": null,
"error": null,
"hide": 0,
"includeAll": false,
"label": "Process",
@ -913,33 +1010,33 @@
"refresh": 1,
"regex": "",
"skipUrlSync": false,
"sort": 0,
"sort": 5,
"type": "query"
},
{
"allValue": "",
"current": {},
"datasource": "${DS_INFLUXDB-SYSMON}",
"definition": "SHOW TAG VALUES FROM \"base_process\" WITH KEY = \"pid\" WHERE hostname =~ /$hostname/AND \"name\" =~ /$process/ AND time > now() - 60m",
"description": null,
"error": null,
"datasource": {
"type": "influxdb",
"uid": "${DS_SYSMON}"
},
"definition": "SELECT DISTINCT(\"pid\") FROM (SELECT * FROM \"base_process\" WHERE time > now() - 60m AND \"hostname\" =~ /$hostname/ AND \"name\" =~ /$process/)",
"hide": 0,
"includeAll": true,
"label": "PID",
"multi": false,
"name": "pid",
"options": [],
"query": "SHOW TAG VALUES FROM \"base_process\" WITH KEY = \"pid\" WHERE hostname =~ /$hostname/AND \"name\" =~ /$process/ AND time > now() - 60m",
"refresh": 1,
"query": "SELECT DISTINCT(\"pid\") FROM (SELECT * FROM \"base_process\" WHERE time > now() - 60m AND \"hostname\" =~ /$hostname/ AND \"name\" =~ /$process/)",
"refresh": 2,
"regex": "",
"skipUrlSync": false,
"sort": 0,
"sort": 3,
"type": "query"
}
]
},
"time": {
"from": "now-3h",
"from": "now-6h",
"to": "now-30s"
},
"timepicker": {
@ -958,5 +1055,6 @@
"timezone": "",
"title": "Sysmon - Process Explorer",
"uid": "Vjut5mS7k",
"version": 11
}
"version": 5,
"weekStart": ""
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 17 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 28 KiB

View file

@ -0,0 +1,74 @@
<?xml version="1.0" encoding="iso-8859-1"?>
<!-- Generator: Adobe Illustrator 19.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 512.001 512.001" style="enable-background:new 0 0 512.001 512.001;" xml:space="preserve">
<polygon style="fill:#88ACB5;" points="306.111,381.102 306.111,435.57 256,457.357 205.889,435.57 205.889,381.102 "/>
<g>
<rect x="205.889" y="435.571" style="fill:#A7CBCF;" width="100.221" height="49.564"/>
<path style="fill:#A7CBCF;" d="M492.566,26.09h-32.681v336.471l43.945-21.787V37.354C503.83,31.166,498.764,26.09,492.566,26.09z"
/>
</g>
<path style="fill:#C7E7EB;" d="M459.885,26.09H19.434c-6.198,0-11.264,5.076-11.264,11.264v303.42l32.681,21.787h430.298V37.354
C471.149,31.166,466.083,26.09,459.885,26.09z"/>
<path style="fill:#367596;" d="M471.149,340.774l-11.264,66.473h32.681c6.198,0,11.264-5.066,11.264-11.264v-55.209H471.149z"/>
<path style="fill:#5195AF;" d="M8.17,340.774v55.209c0,6.198,5.066,11.264,11.264,11.264h440.451
c6.198,0,11.264-5.066,11.264-11.264v-55.209H8.17z"/>
<path style="fill:#FFFFFF;" d="M256,69.668c-84.662,0-158.638,45.698-198.646,113.764C97.361,251.498,171.338,297.197,256,297.197
s158.639-45.698,198.646-113.764C414.638,115.366,340.662,69.668,256,69.668z"/>
<path style="fill:#27467A;" d="M256,69.668v227.528c62.881,0,113.764-50.883,113.764-113.764S318.881,69.668,256,69.668z"/>
<path style="fill:#367596;" d="M256,69.667c-62.881,0-113.764,50.883-113.764,113.764S193.119,297.196,256,297.196
c44.817,0,81.083-50.883,81.083-113.764S300.817,69.667,256,69.667z"/>
<circle style="fill:#111449;" cx="256" cy="183.34" r="46.298"/>
<path d="M492.565,17.826H19.435C8.718,17.826,0,26.544,0,37.261v358.623c0,10.717,8.718,19.435,19.435,19.435H197.88v62.515h-77.06
c-4.512,0-8.17,3.657-8.17,8.17c0,4.513,3.658,8.17,8.17,8.17h270.36c4.513,0,8.17-3.657,8.17-8.17c0-4.513-3.657-8.17-8.17-8.17
h-77.059V415.32h178.445c10.717,0,19.435-8.718,19.435-19.435V37.261C512,26.544,503.282,17.826,492.565,17.826z M297.779,477.835
H214.22V415.32h83.559V477.835z M495.66,395.884c0,1.678-1.417,3.095-3.095,3.095H19.435c-1.678,0-3.095-1.417-3.095-3.095V37.261
c0-1.678,1.417-3.095,3.095-3.095h473.129c1.678,0,3.095,1.417,3.095,3.095V395.884z"/>
<path d="M470.036,332.504H41.965c-4.512,0-8.17,3.657-8.17,8.17c0,4.513,3.658,8.17,8.17,8.17h428.07c4.513,0,8.17-3.657,8.17-8.17
C478.206,336.161,474.548,332.504,470.036,332.504z"/>
<path d="M135.806,272.794C172.129,294.04,213.691,305.27,256,305.27c42.31,0,83.871-11.23,120.194-32.476
c35.241-20.612,64.804-50.115,85.496-85.318c1.502-2.557,1.502-5.725,0-8.281c-20.692-35.203-50.257-64.706-85.496-85.319
C339.871,72.63,298.31,61.4,256,61.4s-83.872,11.23-120.194,32.475c-35.241,20.613-64.805,50.116-85.496,85.319
c-1.502,2.557-1.502,5.725,0,8.281C71.001,222.679,100.566,252.182,135.806,272.794z M252.818,288.877
c-56.759-1.689-102.412-48.382-102.412-105.542c0-57.161,45.654-103.854,102.412-105.543c1.061-0.015,2.119-0.052,3.182-0.052
c1.063,0,2.121,0.037,3.182,0.052c56.758,1.689,102.412,48.382,102.412,105.543c0,57.16-45.654,103.852-102.412,105.542
c-1.061,0.015-2.119,0.052-3.182,0.052C254.937,288.93,253.879,288.893,252.818,288.877z M445.109,183.336
c-25.232,40.845-62.884,71.925-106.353,89.465c24.078-22.288,39.179-54.143,39.179-89.465s-15.102-67.177-39.18-89.466
C382.223,111.41,419.877,142.49,445.109,183.336z M173.246,93.87c-24.079,22.289-39.18,54.145-39.18,89.466
c0,35.32,15.101,67.175,39.18,89.465c-43.469-17.54-81.122-48.619-106.353-89.465C92.123,142.49,129.776,111.41,173.246,93.87z"/>
<path d="M310.379,183.335c0-7.773-1.621-15.299-4.818-22.371c-1.857-4.11-6.696-5.938-10.81-4.08
c-4.111,1.858-5.938,6.697-4.08,10.81c2.234,4.944,3.367,10.205,3.367,15.641c0,20.975-17.064,38.038-38.038,38.038
s-38.038-17.064-38.038-38.038s17.064-38.038,38.038-38.038c5.435,0,10.698,1.133,15.642,3.368c4.112,1.861,8.951,0.032,10.81-4.08
c1.858-4.111,0.032-8.951-4.08-10.81c-7.073-3.198-14.601-4.819-22.372-4.819c-29.985,0-54.379,24.395-54.379,54.379
s24.394,54.379,54.379,54.379C285.984,237.713,310.379,213.318,310.379,183.335z"/>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
</svg>

After

Width:  |  Height:  |  Size: 4.2 KiB

BIN
doc/screenshots/sysmon1.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 265 KiB

BIN
doc/screenshots/sysmon2.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 128 KiB

52
doc/sysmon.drawio Normal file
View file

@ -0,0 +1,52 @@
<mxfile host="65bd71144e" scale="1" border="15">
<diagram id="JBJC25AnoTCSJF4dnfuA" name="Page-1">
<mxGraphModel dx="1761" dy="1167" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="1169" pageHeight="827" math="0" shadow="0">
<root>
<mxCell id="0"/>
<mxCell id="1" parent="0"/>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-1" value="Linux" style="rounded=1;whiteSpace=wrap;html=1;sketch=1;fillColor=#f5f5f5;fontColor=#333333;strokeColor=#666666;" parent="1" vertex="1">
<mxGeometry x="250" y="50" width="120" height="60" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-2" value="AIX" style="rounded=1;whiteSpace=wrap;html=1;sketch=1;fillColor=#f5f5f5;fontColor=#333333;strokeColor=#666666;" parent="1" vertex="1">
<mxGeometry x="180" y="140" width="120" height="60" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-6" style="edgeStyle=orthogonalEdgeStyle;rounded=1;orthogonalLoop=1;jettySize=auto;html=1;sketch=1;" parent="1" source="n2YpyNFSe_BwzrgFeqL7-3" target="n2YpyNFSe_BwzrgFeqL7-4" edge="1">
<mxGeometry relative="1" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-3" value="Sysmon Server" style="rounded=1;whiteSpace=wrap;html=1;sketch=1;fillColor=#e1d5e7;strokeColor=#9673a6;" parent="1" vertex="1">
<mxGeometry x="510" y="120" width="120" height="60" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-7" style="edgeStyle=orthogonalEdgeStyle;rounded=1;orthogonalLoop=1;jettySize=auto;html=1;sketch=1;" parent="1" source="n2YpyNFSe_BwzrgFeqL7-4" target="n2YpyNFSe_BwzrgFeqL7-5" edge="1">
<mxGeometry relative="1" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-4" value="InfluxDB&lt;br&gt;ver 1.x" style="shape=cylinder3;whiteSpace=wrap;html=1;boundedLbl=1;backgroundOutline=1;size=15;sketch=1;fillColor=#ffe6cc;strokeColor=#d79b00;" parent="1" vertex="1">
<mxGeometry x="690" y="110" width="60" height="80" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-5" value="Grafana&lt;br&gt;Dashboards &amp;amp; Alerting" style="shape=document;whiteSpace=wrap;html=1;boundedLbl=1;sketch=1;fillColor=#fff2cc;strokeColor=#d6b656;" parent="1" vertex="1">
<mxGeometry x="800" y="110" width="120" height="80" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-8" value="Other OS'es" style="rounded=1;whiteSpace=wrap;html=1;sketch=1;fillColor=#f5f5f5;fontColor=#333333;strokeColor=#666666;" parent="1" vertex="1">
<mxGeometry x="310" y="210" width="120" height="60" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-17" style="edgeStyle=orthogonalEdgeStyle;rounded=1;sketch=1;orthogonalLoop=1;jettySize=auto;html=1;" parent="1" source="n2YpyNFSe_BwzrgFeqL7-9" target="n2YpyNFSe_BwzrgFeqL7-3" edge="1">
<mxGeometry relative="1" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-9" value="Agent" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;sketch=1;" parent="1" vertex="1">
<mxGeometry x="320" y="90" width="50" height="20" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-15" style="edgeStyle=orthogonalEdgeStyle;rounded=1;sketch=1;orthogonalLoop=1;jettySize=auto;html=1;" parent="1" source="n2YpyNFSe_BwzrgFeqL7-10" target="n2YpyNFSe_BwzrgFeqL7-3" edge="1">
<mxGeometry relative="1" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-10" value="Agent" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;sketch=1;" parent="1" vertex="1">
<mxGeometry x="250" y="180" width="50" height="20" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-16" style="edgeStyle=orthogonalEdgeStyle;rounded=1;sketch=1;orthogonalLoop=1;jettySize=auto;html=1;" parent="1" source="n2YpyNFSe_BwzrgFeqL7-11" target="n2YpyNFSe_BwzrgFeqL7-3" edge="1">
<mxGeometry relative="1" as="geometry"/>
</mxCell>
<mxCell id="n2YpyNFSe_BwzrgFeqL7-11" value="Agent" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;sketch=1;" parent="1" vertex="1">
<mxGeometry x="380" y="250" width="50" height="20" as="geometry"/>
</mxCell>
</root>
</mxGraphModel>
</diagram>
</mxfile>

BIN
doc/sysmon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 138 KiB

View file

@ -1,6 +1,9 @@
version=0.0.9
pf4jVersion=3.6.0
slf4jVersion=1.7.32
camelVersion=3.11.1
picocliVersion=4.6.1
oshiVersion=5.8.2
version = 1.1.4
pf4jVersion = 3.9.0
slf4jVersion = 2.0.9
camelVersion = 3.14.9
groovyVersion = 3.0.18
picocliVersion = 4.7.5
oshiVersion = 6.4.7
spockVersion = 2.3-groovy-3.0
tomljVersion = 1.1.0

Binary file not shown.

View file

@ -1,5 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.1-bin.zip
networkTimeout=10000
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

285
gradlew vendored
View file

@ -1,7 +1,7 @@
#!/usr/bin/env sh
#!/bin/sh
#
# Copyright 2015 the original author or authors.
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -17,67 +17,101 @@
#
##############################################################################
##
## Gradle start up script for UN*X
##
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
MAX_FD=maximum
warn () {
echo "$*"
}
} >&2
die () {
echo
echo "$*"
echo
exit 1
}
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
@ -87,9 +121,9 @@ CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD="$JAVA_HOME/bin/java"
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
@ -98,7 +132,7 @@ Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
JAVACMD=java
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
@ -106,80 +140,105 @@ location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=`expr $i + 1`
done
case $i in
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=`save "$@"`
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
# double quotes to make sure that they get re-expanded; and
# * put everything else in single quotes, so that it's not re-expanded.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

15
gradlew.bat vendored
View file

@ -14,7 +14,7 @@
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@ -25,7 +25,8 @@
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
if "%DIRNAME%"=="" set DIRNAME=.
@rem This is normally unused
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@ -40,7 +41,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
if %ERRORLEVEL% equ 0 goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
@ -75,13 +76,15 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal

View file

@ -1,7 +1,6 @@
# System Monitor Plugins
Collection of standard sysmon plugins.
Collection of standard sysmon plugins for use with the client.
- [base](os-base/) - base OS metrics
- [aix](os-aix/) - AIX (and Power) specific metrics
- [linux](os-linux/) - Linux specific metrics
- [base](base/README.md) - Base OS metrics (uses [oshi](https://github.com/oshi/oshi))
- [power](power/README.md) - IBM Power specific metrics

85
plugins/base/README.md Normal file
View file

@ -0,0 +1,85 @@
# Base Plugin
The base plugin uses the [oshi](https://github.com/oshi/oshi) library to get its metrics.
## Processor Extension
Reports the following metrics:
- **system** - CPU time (in ms) spent on system processes.
- **user** - CPU time (in ms) spent on user processes.
- **nice** - CPU time (in ms) spent on user processes running at lower priority.
- **iowait** - CPU time (in ms) spent waiting (for i/o).
- **steal** - CPU time (in ms) stolen by the hypervisor and given to other virtual systems.
- **irq** - CPU time (in ms) spent by the kernel on interrupt requests.
- **softirq** - CPU time (in ms) spent by the kernel on soft interrupt requests.
- **idle** - CPU time (in ms) spent idling (doing nothing).
- **busy** - CPU time (in ms) spent working.
## Memory Extension
Reports the following metrics (in bytes):
- **available** - Estimation of how much memory is available for starting new applications, without swapping.
- **total** - The total amount of (installed) memory.
- **usage** - Percentage of memory used out of the total amount of memory.
- **paged** - ...
- **virtual** - ...
## Disk Extension
Metrics reported are:
- **reads** - The total number of bytes read.
- **writes** - The total number of bytes written.
- **iotime** - Time spent on IO in milliseconds.
- **queue** - Length of disk IO queue.
## Filesystem Extension
### Metrics
- **free_bytes** - Free bytes for filesystem.
- **total_bytes** - Total bytes for filesystem.
- **free_inodes** - Free inodes for filesystem.
- **total_inodes** - Total inodes for filesystem.
### Configuration
```toml
[extension.base_filesystem]
enabled = true
interval = "10s"
exclude_type = [ "tmpfs", "ahafs" ]
exclude_mount = [ "/boot/efi" ]
```
## Process Extension
Reports metrics on one or more running processes.
- **mem_rss** - Resident set memory in bytes.
- **mem_vsz** - Virtual memory in bytes.
- **kernel_time** - Time spent (in milliseconds) in kernel space.
- **user_time** - Time used (in milliseconds) in user space.
- **read_bytes** - Bytes read by process.
- **write_bytes** - Bytes written by process.
- **files** - Files currently open by process.
- **threads** - Running threads.
- **user** - User running the process.
- **group** - Group running the process.
- **prio** - Process priority.
### Configuration
The **include** option lets you specify which processes to report on.
```toml
[extension.base_process]
enabled = true # true or false
interval = "10s"
include = [ "java", "influxd", "grafana-server" ]
```

View file

@ -1,5 +1,5 @@
pluginId=sysmon-base
pluginClass=sysmon.plugins.os_base.BasePlugin
pluginClass=sysmon.plugins.base.BasePlugin
pluginDependencies=
pluginDescription=Base OS metrics where supported.

View file

@ -0,0 +1,120 @@
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.hardware.HWDiskStore;
import oshi.hardware.HardwareAbstractionLayer;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.*;
@Extension
public class BaseDiskExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(BaseDiskExtension.class);

    // Extension details
    private final String name = "base_disk";
    private final String description = "Base Disk Metrics";

    // Configuration / Options
    private boolean enabled = true;
    private boolean threaded = false;
    private String interval = "10s";

    private HardwareAbstractionLayer hardwareAbstractionLayer;
    private List<HWDiskStore> diskStores;

    // Invocation counter; the device list is re-read once this passes 360.
    private int refreshCounter = 0;

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public boolean isThreaded() {
        return threaded;
    }

    @Override
    public boolean isSupported() {
        hardwareAbstractionLayer = BasePlugin.getHardwareAbstractionLayer();
        return hardwareAbstractionLayer != null;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getInterval() {
        return interval;
    }

    @Override
    public String getDescription() {
        return description;
    }

    /** Applies configuration; recognized keys: "enabled", "threaded", "interval". */
    @Override
    public void setConfiguration(Map<String, Object> map) {
        if (map.containsKey("enabled")) {
            enabled = (boolean) map.get("enabled");
        }
        if (map.containsKey("threaded")) {
            threaded = (boolean) map.get("threaded");
        }
        if (map.containsKey("interval")) {
            interval = (String) map.get("interval");
        }
    }

    /** True when the device name looks like a physical disk (AIX hdisk, Linux sd/vd/xvd/nvme, Windows PhysicalDrive). */
    private boolean isPhysicalDisk(String deviceName) {
        return deviceName.matches("h?disk[0-9]+")
                || deviceName.matches("/dev/x?[sv]d[a-z]")
                || deviceName.matches("/dev/nvme[0-9]n[0-9]")
                || deviceName.startsWith("\\\\.\\PHYSICALDRIVE");
    }

    /** Reports read/write bytes, transfer time and queue length for each physical-looking disk. */
    @Override
    public MetricResult getMetrics() {

        ArrayList<Measurement> measurements = new ArrayList<>();

        if (diskStores == null || refreshCounter++ > 360) {
            log.debug("getMetrics() - refreshing list of disk stores");
            diskStores = hardwareAbstractionLayer.getDiskStores();
            refreshCounter = 0;
        }

        for (HWDiskStore disk : diskStores) {

            disk.updateAttributes();
            String deviceName = disk.getName();

            if (!isPhysicalDisk(deviceName)) {
                log.debug("getMetrics() - skipping device: {}", deviceName);
                continue;
            }

            TreeMap<String, String> tagsMap = new TreeMap<>();
            tagsMap.put("name", deviceName);

            TreeMap<String, Object> fieldsMap = new TreeMap<>();
            fieldsMap.put("read", disk.getReadBytes());
            fieldsMap.put("write", disk.getWriteBytes());
            fieldsMap.put("iotime", disk.getTransferTime());
            fieldsMap.put("queue", disk.getCurrentQueueLength());

            log.debug("getMetrics() - tags: {}, fields: {}", tagsMap, fieldsMap);
            measurements.add(new Measurement(tagsMap, fieldsMap));
        }

        return new MetricResult(name, measurements);
    }

}

View file

@ -0,0 +1,148 @@
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.software.os.OSFileStore;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.*;
@Extension
public class BaseFilesystemExtension implements MetricExtension {

    // Bug fix: logger was copy-pasted from BaseDiskExtension and logged under the wrong class name.
    private static final Logger log = LoggerFactory.getLogger(BaseFilesystemExtension.class);

    // Extension details
    private final String name = "base_filesystem";
    private final String description = "Base Filesystem Metrics";

    // Configuration / Options
    private boolean enabled = true;
    private boolean threaded = false;
    private String interval = "10s";

    // Filesystem types skipped when reporting; overridable via "exclude_type".
    private List<?> excludeType = new ArrayList<String>() {{
        add("tmpfs");
        add("ahafs");
    }};

    // Mount points skipped when reporting; overridable via "exclude_mount".
    private List<?> excludeMount = new ArrayList<String>() {{
        add("/boot/efi");
    }};

    private SystemInfo systemInfo;
    private List<OSFileStore> fileStores;

    // Invocation counter; the file-store list is re-read once this passes 360.
    private int refreshCounter = 0;

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public boolean isThreaded() {
        return threaded;
    }

    @Override
    public boolean isSupported() {
        systemInfo = BasePlugin.getSystemInfo();
        return systemInfo != null;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getInterval() {
        return interval;
    }

    @Override
    public String getDescription() {
        return description;
    }

    /**
     * Applies configuration; recognized keys:
     * "enabled", "threaded", "interval", "exclude_type", "exclude_mount".
     */
    @Override
    public void setConfiguration(Map<String, Object> map) {
        if (map.containsKey("enabled")) {
            enabled = (boolean) map.get("enabled");
        }
        if (map.containsKey("threaded")) {
            threaded = (boolean) map.get("threaded");
        }
        if (map.containsKey("interval")) {
            interval = (String) map.get("interval");
        }
        if (map.containsKey("exclude_type")) {
            excludeType = (List<?>) map.get("exclude_type");
        }
        if (map.containsKey("exclude_mount")) {
            excludeMount = (List<?>) map.get("exclude_mount");
        }
    }

    /**
     * Reports free/total bytes and inodes for each filesystem, skipping excluded
     * types and mounts. A store name is reported at most once per invocation.
     */
    @Override
    public MetricResult getMetrics() {

        ArrayList<String> alreadyProcessed = new ArrayList<>();
        ArrayList<Measurement> measurementList = new ArrayList<>();

        if (fileStores == null || refreshCounter++ > 360) {
            log.debug("getMetrics() - refreshing list of file stores");
            fileStores = systemInfo.getOperatingSystem().getFileSystem().getFileStores(true);
            // Bug fix: counter was never reset, which forced a refresh on every call after the first 360.
            refreshCounter = 0;
        }

        for (OSFileStore store : fileStores) {

            String name = store.getName();
            String type = store.getType();
            String mount = store.getMount();

            if (excludeType.contains(type)) {
                log.debug("Excluding type: {}", type);
                continue;
            }

            if (excludeMount.contains(mount)) {
                log.debug("Excluding mount: {}", mount);
                continue;
            }

            if (alreadyProcessed.contains(name)) {
                log.debug("Skipping name: {}", name);
                continue;
            }
            alreadyProcessed.add(name);

            store.updateAttributes();
            TreeMap<String, String> tagsMap = new TreeMap<String, String>() {{
                put("name", name);
                put("type", type);
                put("mount", mount);
            }};

            TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
                put("free_bytes", store.getFreeSpace());
                put("total_bytes", store.getTotalSpace());
                put("free_inodes", store.getFreeInodes());
                put("total_inodes", store.getTotalInodes());
            }};

            measurementList.add(new Measurement(tagsMap, fieldsMap));
        }

        return new MetricResult(name, measurementList);
    }

}

View file

@ -0,0 +1,90 @@
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class BaseInfoExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(BaseInfoExtension.class);

    // Extension details
    private final String name = "base_info";
    private final String description = "Base System Information";

    // Configuration / Options
    private boolean enabled = true;
    private boolean threaded = false;
    private String interval = "60m";

    // Static tags attached to every measurement (currently empty).
    private HashMap<String, String> tags = new HashMap<>();

    private SystemInfo systemInfo;

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public boolean isThreaded() {
        return threaded;
    }

    @Override
    public boolean isSupported() {
        systemInfo = BasePlugin.getSystemInfo();
        return systemInfo != null;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getInterval() {
        return interval;
    }

    @Override
    public String getDescription() {
        return description;
    }

    /** Applies configuration; recognized keys: "enabled", "threaded", "interval". */
    @Override
    public void setConfiguration(Map<String, Object> map) {
        if (map.containsKey("enabled")) {
            enabled = (boolean) map.get("enabled");
        }
        if (map.containsKey("threaded")) {
            threaded = (boolean) map.get("threaded");
        }
        if (map.containsKey("interval")) {
            interval = (String) map.get("interval");
        }
    }

    /** Reports static OS identification fields plus the system boot time. */
    @Override
    public MetricResult getMetrics() {

        TreeMap<String, Object> fieldsMap = new TreeMap<>();
        fieldsMap.put("os_manufacturer", systemInfo.getOperatingSystem().getManufacturer());              // e.g. GNU/Linux or IBM
        fieldsMap.put("os_family", systemInfo.getOperatingSystem().getFamily());                          // e.g. Freedesktop.org or AIX
        fieldsMap.put("os_codename", systemInfo.getOperatingSystem().getVersionInfo().getCodeName());     // e.g. Flatpak runtime or ppc64
        fieldsMap.put("os_version", systemInfo.getOperatingSystem().getVersionInfo().getVersion());       // e.g. 21.08.4 or 7.2
        fieldsMap.put("os_build", systemInfo.getOperatingSystem().getVersionInfo().getBuildNumber());     // e.g. 5.13.0-7620-generic or 2045B_72V
        fieldsMap.put("boot_time", systemInfo.getOperatingSystem().getSystemBootTime());

        return new MetricResult(name, new Measurement(tags, fieldsMap));
    }

}

View file

@ -0,0 +1,88 @@
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.hardware.HardwareAbstractionLayer;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class BaseLoadExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(BaseLoadExtension.class);

    // Extension details
    private final String name = "base_load";
    private final String description = "Base Load Average Metrics";

    // Configuration / Options
    private boolean enabled = true;
    private boolean threaded = false;
    private String interval = "10s";

    private HardwareAbstractionLayer hardwareAbstractionLayer;

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public boolean isThreaded() {
        return threaded;
    }

    @Override
    public boolean isSupported() {
        hardwareAbstractionLayer = BasePlugin.getHardwareAbstractionLayer();
        return hardwareAbstractionLayer != null;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getInterval() {
        return interval;
    }

    @Override
    public String getDescription() {
        return description;
    }

    /** Applies configuration; recognized keys: "enabled", "threaded", "interval". */
    @Override
    public void setConfiguration(Map<String, Object> map) {
        if (map.containsKey("enabled")) {
            enabled = (boolean) map.get("enabled");
        }
        if (map.containsKey("threaded")) {
            threaded = (boolean) map.get("threaded");
        }
        if (map.containsKey("interval")) {
            interval = (String) map.get("interval");
        }
    }

    /** Reports the 1, 5 and 15 minute system load averages. */
    @Override
    public MetricResult getMetrics() {

        double[] averages = hardwareAbstractionLayer.getProcessor().getSystemLoadAverage(3);

        TreeMap<String, Object> fieldsMap = new TreeMap<>();
        fieldsMap.put("1min", averages[0]);
        fieldsMap.put("5min", averages[1]);
        fieldsMap.put("15min", averages[2]);

        log.debug(fieldsMap.toString());
        return new MetricResult(name, new Measurement(new TreeMap<>(), fieldsMap));
    }

}

View file

@ -1,24 +1,43 @@
package sysmon.plugins.os_base;
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.hardware.HardwareAbstractionLayer;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class BaseMemoryExtension implements MetricExtension {
private static final Logger log = LoggerFactory.getLogger(BaseMemoryExtension.class);
// Extension details
private final String name = "base_memory";
private final String description = "Base Memory Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "10s";
private HardwareAbstractionLayer hardwareAbstractionLayer;
@Override
public boolean isEnabled() {
return enabled;
}
@Override
public boolean isThreaded() {
return threaded;
}
@Override
public boolean isSupported() {
hardwareAbstractionLayer = BasePlugin.getHardwareAbstractionLayer();
@ -27,24 +46,37 @@ public class BaseMemoryExtension implements MetricExtension {
@Override
public String getName() {
return "base_memory";
return name;
}
@Override
public String getProvides() {
return "memory";
public String getInterval() {
return interval;
}
@Override
public String getDescription() {
return "Base Memory Metrics";
return description;
}
@Override
public void setConfiguration(Map<String, Object> map) {
if (map.containsKey("enabled")) {
enabled = (boolean) map.get("enabled");
}
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
}
@Override
public MetricResult getMetrics() {
HashMap<String, String> tagsMap = new HashMap<>();
HashMap<String, Object> fieldsMap = new HashMap<>();
TreeMap<String, String> tagsMap = new TreeMap<>();
TreeMap<String, Object> fieldsMap = new TreeMap<>();
long total = hardwareAbstractionLayer.getMemory().getTotal();
long available = hardwareAbstractionLayer.getMemory().getAvailable();
@ -57,7 +89,7 @@ public class BaseMemoryExtension implements MetricExtension {
fieldsMap.put("virtual", hardwareAbstractionLayer.getMemory().getVirtualMemory().getVirtualInUse());
log.debug(fieldsMap.toString());
return new MetricResult(getName(), new Measurement(tagsMap, fieldsMap));
return new MetricResult(name, new Measurement(tagsMap, fieldsMap));
}

View file

@ -0,0 +1,107 @@
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class BaseNetstatExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(BaseNetstatExtension.class);

    // Extension details
    private final String name = "base_netstat";
    private final String description = "Base Netstat Metrics";

    // Configuration / Options
    private boolean enabled = true;
    private boolean threaded = false;
    private String interval = "10s";

    private SystemInfo systemInfo;

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public boolean isThreaded() {
        return threaded;
    }

    @Override
    public boolean isSupported() {
        systemInfo = BasePlugin.getSystemInfo();
        return systemInfo != null;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getInterval() {
        return interval;
    }

    @Override
    public String getDescription() {
        return description;
    }

    /** Applies configuration; recognized keys: "enabled", "threaded", "interval". */
    @Override
    public void setConfiguration(Map<String, Object> map) {
        if (map.containsKey("enabled")) {
            enabled = (boolean) map.get("enabled");
        }
        if (map.containsKey("threaded")) {
            threaded = (boolean) map.get("threaded");
        }
        if (map.containsKey("interval")) {
            interval = (String) map.get("interval");
        }
    }

    /** Reports TCP (v4/v6) connection counters, UDP (v4/v6) datagram counters and the total IP connection count. */
    @Override
    public MetricResult getMetrics() {

        TreeMap<String, Object> fieldsMap = new TreeMap<>();
        fieldsMap.put("ip_conn_total", systemInfo.getOperatingSystem().getInternetProtocolStats().getConnections().size());
        fieldsMap.put("tcp4_conn_active", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv4Stats().getConnectionsActive());
        fieldsMap.put("tcp4_conn_passive", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv4Stats().getConnectionsPassive());
        fieldsMap.put("tcp4_conn_established", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv4Stats().getConnectionsEstablished());
        fieldsMap.put("tcp4_conn_failures", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv4Stats().getConnectionFailures());
        fieldsMap.put("tcp4_conn_reset", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv4Stats().getConnectionsReset());
        fieldsMap.put("tcp6_conn_active", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv6Stats().getConnectionsActive());
        fieldsMap.put("tcp6_conn_passive", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv6Stats().getConnectionsPassive());
        fieldsMap.put("tcp6_conn_established", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv6Stats().getConnectionsEstablished());
        fieldsMap.put("tcp6_conn_failures", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv6Stats().getConnectionFailures());
        fieldsMap.put("tcp6_conn_reset", systemInfo.getOperatingSystem().getInternetProtocolStats().getTCPv6Stats().getConnectionsReset());
        fieldsMap.put("udp4_data_sent", systemInfo.getOperatingSystem().getInternetProtocolStats().getUDPv4Stats().getDatagramsSent());
        fieldsMap.put("udp4_data_recv", systemInfo.getOperatingSystem().getInternetProtocolStats().getUDPv4Stats().getDatagramsReceived());
        fieldsMap.put("udp4_data_recv_error", systemInfo.getOperatingSystem().getInternetProtocolStats().getUDPv4Stats().getDatagramsReceivedErrors());
        fieldsMap.put("udp6_data_sent", systemInfo.getOperatingSystem().getInternetProtocolStats().getUDPv6Stats().getDatagramsSent());
        fieldsMap.put("udp6_data_recv", systemInfo.getOperatingSystem().getInternetProtocolStats().getUDPv6Stats().getDatagramsReceived());
        fieldsMap.put("udp6_data_recv_error", systemInfo.getOperatingSystem().getInternetProtocolStats().getUDPv6Stats().getDatagramsReceivedErrors());

        return new MetricResult(name, new Measurement(new TreeMap<>(), fieldsMap));
    }

}

View file

@ -0,0 +1,109 @@
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.hardware.HardwareAbstractionLayer;
import oshi.hardware.NetworkIF;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.*;
@Extension
public class BaseNetworkExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(BaseNetworkExtension.class);

    // Extension details
    private final String name = "base_network";
    private final String description = "Base Network Metrics";

    // Configuration / Options
    private boolean enabled = true;
    private boolean threaded = false;
    private String interval = "10s";

    private HardwareAbstractionLayer hardwareAbstractionLayer;
    private List<NetworkIF> interfaces;

    // Invocation counter; the interface list is re-read once this passes 360.
    private int refreshCounter = 0;

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public boolean isThreaded() {
        return threaded;
    }

    @Override
    public boolean isSupported() {
        hardwareAbstractionLayer = BasePlugin.getHardwareAbstractionLayer();
        return hardwareAbstractionLayer != null;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getInterval() {
        return interval;
    }

    @Override
    public String getDescription() {
        return description;
    }

    /** Applies configuration; recognized keys: "enabled", "threaded", "interval". */
    @Override
    public void setConfiguration(Map<String, Object> map) {
        if (map.containsKey("enabled")) {
            enabled = (boolean) map.get("enabled");
        }
        if (map.containsKey("threaded")) {
            threaded = (boolean) map.get("threaded");
        }
        if (map.containsKey("interval")) {
            interval = (String) map.get("interval");
        }
    }

    /** Reports packet, byte and error counters for every known network interface. */
    @Override
    public MetricResult getMetrics() {

        ArrayList<Measurement> measurements = new ArrayList<>();

        if (interfaces == null || refreshCounter++ > 360) {
            log.debug("getMetrics() - refreshing list of network interfaces");
            interfaces = hardwareAbstractionLayer.getNetworkIFs();
            refreshCounter = 0;
        }

        for (NetworkIF nic : interfaces) {

            TreeMap<String, String> tagsMap = new TreeMap<>();
            tagsMap.put("name", nic.getName());

            TreeMap<String, Object> fieldsMap = new TreeMap<>();
            fieldsMap.put("rx_pkts", nic.getPacketsRecv());
            fieldsMap.put("tx_pkts", nic.getPacketsSent());
            fieldsMap.put("rx_bytes", nic.getBytesRecv());
            fieldsMap.put("tx_bytes", nic.getBytesSent());
            fieldsMap.put("rx_errs", nic.getInErrors());
            fieldsMap.put("tx_errs", nic.getOutErrors());

            measurements.add(new Measurement(tagsMap, fieldsMap));
        }

        return new MetricResult(name, measurements);
    }

}

View file

@ -1,9 +1,8 @@
package sysmon.plugins.os_base;
package sysmon.plugins.base;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.pf4j.Plugin;
import org.pf4j.PluginWrapper;
import oshi.SystemInfo;
import oshi.hardware.HardwareAbstractionLayer;
@ -15,9 +14,6 @@ public class BasePlugin extends Plugin {
private static SystemInfo systemInfo;
private static HardwareAbstractionLayer hardwareAbstractionLayer;
public BasePlugin(PluginWrapper wrapper) {
super(wrapper);
}
public static HardwareAbstractionLayer getHardwareAbstractionLayer() {
@ -30,7 +26,8 @@ public class BasePlugin extends Plugin {
}
} catch (UnsupportedOperationException e) {
log.warn(e.getMessage());
log.warn("getHardwareAbstractionLayer() - {}", e.getMessage());
return null;
}
return hardwareAbstractionLayer;
@ -43,9 +40,10 @@ public class BasePlugin extends Plugin {
if(systemInfo == null) {
systemInfo = new SystemInfo();
}
systemInfo.getOperatingSystem();
} catch (UnsupportedOperationException e) {
log.warn(e.getMessage());
log.warn("getSystemInfo() - {}", e.getMessage());
return null;
}
return systemInfo;

View file

@ -0,0 +1,154 @@
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.software.os.OSProcess;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.*;
@Extension
public class BaseProcessExtension implements MetricExtension {

    // Bug fix: logger was copy-pasted from BaseProcessorExtension and logged under the wrong class name.
    private static final Logger log = LoggerFactory.getLogger(BaseProcessExtension.class);

    // Extension details
    private final String name = "base_process";
    private final String description = "Base Process Metrics";

    // Configuration / Options
    private boolean enabled = true;
    private boolean threaded = false;
    private String interval = "60s";

    // Default set of process names to report on; overridable via "include".
    private List<?> includeList = new ArrayList<Object>() {{
        add("java");
        add("node");
        add("httpd");
        add("mongod");
        add("mysqld");
        add("influxd");
        add("haproxy");
        add("beam.smp");
        add("filebeat");
        add("corosync");
        add("rsyslogd");
        add("postgres");
        add("mariadbd");
        add("memcached");
        add("db2sysc");
        add("dsmserv");
        add("mmfsd");
        add("systemd");
        add("nginx");
    }};

    // Processes younger than this are skipped to avoid noise from short-lived commands.
    private final long minUptimeInSeconds = 600;

    private SystemInfo systemInfo;

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public boolean isThreaded() {
        return threaded;
    }

    @Override
    public boolean isSupported() {
        systemInfo = BasePlugin.getSystemInfo();
        return systemInfo != null;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getInterval() {
        return interval;
    }

    @Override
    public String getDescription() {
        return description;
    }

    /**
     * Applies configuration; recognized keys:
     * "enabled", "threaded", "interval", "include".
     */
    @Override
    public void setConfiguration(Map<String, Object> map) {
        if (map.containsKey("enabled")) {
            enabled = (boolean) map.get("enabled");
        }
        if (map.containsKey("threaded")) {
            threaded = (boolean) map.get("threaded");
        }
        if (map.containsKey("interval")) {
            interval = (String) map.get("interval");
        }
        if (map.containsKey("include")) {
            includeList = (List<?>) map.get("include");
        }
    }

    /**
     * Reports memory, CPU time, I/O, file and thread metrics for every process
     * that matches the include list, has been up long enough, and is not a
     * kernel process (resident set size of zero).
     */
    @Override
    public MetricResult getMetrics() {

        ArrayList<Measurement> measurementList = new ArrayList<>();
        List<OSProcess> processList = systemInfo.getOperatingSystem().getProcesses();

        for (OSProcess p : processList) {

            // Skip all the kernel processes
            if (p.getResidentSetSize() < 1) {
                continue;
            }

            // Skip short-lived processes
            if (p.getUpTime() < (minUptimeInSeconds * 1000)) {
                continue;
            }

            // Skip process names not found in our includeList, only if the list is not empty or null
            if (includeList != null && !includeList.isEmpty() && !includeList.contains(p.getName())) {
                continue;
            }

            log.debug("pid: {}, name: {}, virt: {} rss: {}",
                    p.getProcessID(), p.getName(), p.getVirtualSize(), p.getResidentSetSize());

            TreeMap<String, String> tagsMap = new TreeMap<String, String>() {{
                put("pid", String.valueOf(p.getProcessID()));
                put("name", p.getName());
            }};

            TreeMap<String, Object> fieldsMap = new TreeMap<String, Object>() {{
                put("mem_rss", p.getResidentSetSize());
                put("mem_vsz", p.getVirtualSize());
                put("kernel_time", p.getKernelTime());
                put("user_time", p.getUserTime());
                put("read_bytes", p.getBytesRead());
                put("write_bytes", p.getBytesWritten());
                put("files", p.getOpenFiles());
                put("threads", p.getThreadCount());
                put("user", p.getUser());
                put("group", p.getGroup());
                put("prio", p.getPriority());
            }};

            measurementList.add(new Measurement(tagsMap, fieldsMap));
        }

        return new MetricResult(name, measurementList);
    }

}

View file

@ -1,32 +1,45 @@
package sysmon.plugins.os_base;
package sysmon.plugins.base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.hardware.CentralProcessor;
import oshi.hardware.HardwareAbstractionLayer;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import sysmon.shared.PluginHelper;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
@Extension
public class BaseProcessorExtension implements MetricExtension {
private static final Logger log = LoggerFactory.getLogger(BaseProcessorExtension.class);
// Extension details
private final String name = "base_processor";
private final String description = "Base Processor Metrics";
// Configuration / Options
private boolean enabled = true;
private boolean threaded = false;
private String interval = "10s";
private HardwareAbstractionLayer hardwareAbstractionLayer;
private long[] oldTicks;
@Override
public boolean isEnabled() {
return enabled;
}
@Override
public boolean isThreaded() {
return threaded;
}
@Override
public boolean isSupported() {
hardwareAbstractionLayer = BasePlugin.getHardwareAbstractionLayer();
@ -35,25 +48,37 @@ public class BaseProcessorExtension implements MetricExtension {
@Override
public String getName() {
return "base_processor";
return name;
}
@Override
public String getProvides() {
return "processor";
public String getInterval() {
return interval;
}
@Override
public String getDescription() {
return "Base Processor Metrics";
return description;
}
@Override
public void setConfiguration(Map<String, Object> map) {
if (map.containsKey("enabled")) {
enabled = (boolean) map.get("enabled");
}
if(map.containsKey("threaded")) {
threaded = (boolean) map.get("threaded");
}
if(map.containsKey("interval")) {
interval = (String) map.get("interval");
}
}
@Override
public MetricResult getMetrics() {
HashMap<String, String> tagsMap = new HashMap<>();
HashMap<String, Object> fieldsMap = new HashMap<>();
TreeMap<String, String> tagsMap = new TreeMap<>();
TreeMap<String, Object> fieldsMap = new TreeMap<>();
long[] ticks = hardwareAbstractionLayer.getProcessor().getSystemCpuLoadTicks();
if(oldTicks == null || oldTicks.length != ticks.length) {
@ -74,19 +99,19 @@ public class BaseProcessorExtension implements MetricExtension {
long nonBusy = idle + iowait;
long total = busy + nonBusy;
fieldsMap.put("system", ((float) system / (float) total) * 100);
fieldsMap.put("user", ((float) user / (float) total) * 100);
fieldsMap.put("nice", ((float) nice / (float) total) * 100);
fieldsMap.put("iowait", ((float) iowait / (float) total) * 100);
fieldsMap.put("steal", ((float) steal / (float) total) * 100);
fieldsMap.put("irq", ((float) irq / (float) total) * 100);
fieldsMap.put("softirq", ((float) softirq / (float) total) * 100);
fieldsMap.put("idle", ((float) idle / (float) total) * 100);
fieldsMap.put("busy", ((float) busy / (float) total) * 100);
fieldsMap.put("system", PluginHelper.round(((double) system / (double) total) * 100, 2));
fieldsMap.put("user", PluginHelper.round(((double) user / (double) total) * 100, 2));
fieldsMap.put("nice", PluginHelper.round(((double) nice / (double) total) * 100, 2));
fieldsMap.put("iowait", PluginHelper.round(((double) iowait / (double) total) * 100, 2));
fieldsMap.put("steal", PluginHelper.round(((double) steal / (double) total) * 100, 2));
fieldsMap.put("irq", PluginHelper.round(((double) irq / (double) total) * 100, 2));
fieldsMap.put("softirq", PluginHelper.round(((double) softirq / (double) total) * 100, 2));
fieldsMap.put("idle", PluginHelper.round(((double) idle / (double) total) * 100, 2));
fieldsMap.put("busy", PluginHelper.round(((double) busy / (double) total) * 100, 2));
oldTicks = ticks;
log.debug(fieldsMap.toString());
return new MetricResult(getName(), new Measurement(tagsMap, fieldsMap));
return new MetricResult(name, new Measurement(tagsMap, fieldsMap));
}
}

View file

@ -1,7 +1,7 @@
import org.redline_rpm.header.Os
plugins {
id "nebula.ospackage" version "8.6.1"
id "com.netflix.nebula.ospackage" version "11.3.0"
}
@ -10,7 +10,7 @@ subprojects {
apply plugin: 'groovy'
dependencies {
testImplementation 'org.spockframework:spock-core:2.0-groovy-3.0'
testImplementation "org.spockframework:spock-core:${spockVersion}"
testImplementation "org.slf4j:slf4j-api:${slf4jVersion}"
testImplementation "org.slf4j:slf4j-simple:${slf4jVersion}"
testImplementation project(':shared')
@ -25,10 +25,10 @@ subprojects {
compileOnly(group: 'com.github.oshi', name: 'oshi-core', version: oshiVersion) {
exclude(group: "org.slf4j")
}
}
task uberJar(type: Jar) {
duplicatesStrategy DuplicatesStrategy.EXCLUDE
from sourceSets.main.output
dependsOn configurations.runtimeClasspath
from {
@ -48,7 +48,7 @@ subprojects {
attributes(
'Plugin-Id' : "${pluginId}",
'Plugin-Class' : "${pluginClass}",
'Plugin-Version' : "${version}",
'Plugin-Version' : "${archiveVersion}",
'Plugin-Provider' : "System Monitor",
'Plugin-Description': "${pluginDescription}"
)
@ -82,7 +82,6 @@ tasks.clean.dependsOn(tasks.customCleanUp)
def projectName = "sysmon-plugins"
apply plugin: 'nebula.ospackage'
ospackage {
packageName = projectName
release = '1'
@ -111,3 +110,12 @@ task buildRpmAix(type: Rpm) {
packageName = "${projectName}-AIX"
os = Os.AIX
}
task buildZip(type: Zip) {
subprojects.each {
dependsOn("${it.name}:copyJar")
}
from "output"
setArchivesBaseName(projectName as String)
setArchiveVersion(project.property("version") as String)
}

View file

@ -0,0 +1,39 @@
{
"k10temp-pci-00c3":{
"Adapter": "PCI adapter",
"Tctl":{
"temp1_input": 56.250
}
},
"nvme-pci-0400":{
"Adapter": "PCI adapter",
"Composite":{
"temp1_input": 35.850,
"temp1_max": 74.850,
"temp1_min": -20.150,
"temp1_crit": 79.850,
"temp1_alarm": 0.000
}
},
"iwlwifi_1-virtual-0":{
"Adapter": "Virtual device",
"temp1":{
"temp1_input": 37.000
}
},
"amdgpu-pci-0500":{
"Adapter": "PCI adapter",
"vddgfx":{
"in0_input": 0.681
},
"vddnb":{
"in1_input": 0.712
},
"edge":{
"temp1_input": 37.000
},
"PPT":{
"power1_average": 0.000
}
}
}

View file

@ -0,0 +1,39 @@
{
"k10temp-pci-00c3":{
"Adapter": "PCI adapter",
"Tctl":{
"temp1_input": 53.875
}
},
"nvme-pci-0400":{
"Adapter": "PCI adapter",
"Composite":{
"temp1_input": 36.850,
"temp1_max": 74.850,
"temp1_min": -20.150,
"temp1_crit": 79.850,
"temp1_alarm": 0.000
}
},
"iwlwifi_1-virtual-0":{
"Adapter": "Virtual device",
"temp1":{
"temp1_input": 41.000
}
},
"amdgpu-pci-0500":{
"Adapter": "PCI adapter",
"vddgfx":{
"in0_input": 1.281
},
"vddnb":{
"in1_input": 0.712
},
"edge":{
"temp1_input": 42.000
},
"PPT":{
"power1_average": 0.000
}
}
}

View file

@ -1,6 +0,0 @@
pluginId=sysmon-aix
pluginClass=sysmon.plugins.os_aix.AixPlugin
pluginVersion=0.0.1
pluginProvider=System Monitor
pluginDependencies=
pluginDescription=Collects AIX OS metrics.

View file

@ -1,75 +0,0 @@
package sysmon.plugins.os_aix;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import sysmon.shared.PluginHelper;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
// Disabled
//@Extension
public class AixNetstatExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(AixNetstatExtension.class);

    /** Supported only on AIX hosts where the 'netstat' binary can be executed. */
    @Override
    public boolean isSupported() {
        final String osName = System.getProperty("os.name").toLowerCase();
        if (!osName.contains("aix")) {
            log.warn("Requires AIX.");
            return false;
        }
        if (!PluginHelper.canExecute("netstat")) {
            log.warn("Requires the 'netstat' command.");
            return false;
        }
        return true;
    }

    @Override
    public String getName() {
        return "aix_network_netstat";
    }

    @Override
    public String getProvides() {
        return "network_netstat";
    }

    @Override
    public String getDescription() {
        return "AIX Netstat Metrics";
    }

    /** Runs 'netstat -s -f inet' and turns the parsed counters into a metric result. */
    @Override
    public MetricResult getMetrics() throws Exception {
        final HashMap<String, String> tags;
        final HashMap<String, Object> fields;
        try (InputStream buf = PluginHelper.executeCommand("netstat -s -f inet")) {
            AixNetstatParser parser = processCommandOutput(buf);
            tags = parser.getTags();
            fields = parser.getFields();
        }
        log.debug(fields.toString());
        return new MetricResult(getName(), new Measurement(tags, fields));
    }

    // Separated out so unit tests can feed canned command output.
    protected AixNetstatParser processCommandOutput(InputStream input) throws IOException {
        return new AixNetstatParser(input);
    }
}

View file

@ -1,156 +0,0 @@
package sysmon.plugins.os_aix;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.Map;
public class AixNetstatParser {

    private static final Logger log = LoggerFactory.getLogger(AixNetstatParser.class);

    // Read-ahead limit (in chars) for mark/reset while peeking at section lines.
    // BUG FIX: the previous limit of 64 was shorter than many 'netstat -s' output
    // lines; reading past the mark limit invalidates it, making reset() throw
    // IOException mid-parse.
    private static final int READ_AHEAD_LIMIT = 1024;

    private long ipTotalPacketsReceived;
    private long ipForwarded;
    private long tcpConnectionsEstablished;
    private long tcpPacketsReceived;   // renamed from misspelled 'tcpPacketsReceved' (private, no external use)
    private long tcpPacketsSent;
    private long udpPacketsReceived;
    private long udpPacketsSent;

    /**
     * Parses AIX 'netstat -s -f inet' output. Only the 'ip:', 'tcp:' and 'udp:'
     * sections are consumed; all other sections are skipped.
     *
     * @param inputStream raw command output; closed when parsing completes
     * @throws IOException on read failure
     */
    public AixNetstatParser(InputStream inputStream) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
        while (reader.ready()) {
            String line = reader.readLine();
            if (line == null) {  // defensive: ready() does not guarantee a full line
                break;
            }
            log.debug("AixNetstatParser() - Line: " + line);
            if (line.startsWith("tcp:")) {
                parseTcp(reader);
            }
            if (line.startsWith("udp:")) {
                parseUdp(reader);
            }
            if (line.startsWith("ip:")) {
                parseIp(reader);
            }
        }
        inputStream.close();
    }

    /** Consumes indented 'ip:' section lines; rewinds and returns at the next section header. */
    protected void parseIp(BufferedReader reader) throws IOException {
        while (reader.ready()) {
            reader.mark(READ_AHEAD_LIMIT);
            String line = reader.readLine();
            if (line == null) {
                return;
            }
            if (!line.startsWith(" ")) {
                reader.reset();  // un-read the next section's header line
                return;
            }
            line = line.trim();
            if (line.matches("(\\d+) total packets received")) {
                ipTotalPacketsReceived = getFirstLong(line);
            }
            if (line.matches("(\\d+) packets forwarded")) {
                ipForwarded = getFirstLong(line);
            }
        }
    }

    /** Consumes indented 'tcp:' section lines; rewinds and returns at the next section header. */
    protected void parseTcp(BufferedReader reader) throws IOException {
        while (reader.ready()) {
            reader.mark(READ_AHEAD_LIMIT);
            String line = reader.readLine();
            if (line == null) {
                return;
            }
            if (!line.startsWith(" ")) {
                reader.reset();  // un-read the next section's header line
                return;
            }
            line = line.trim();
            if (line.matches("(\\d+) connections established \\(including accepts\\)")) {
                tcpConnectionsEstablished = getFirstLong(line);
            }
            if (line.matches("(\\d+) packets received")) {
                tcpPacketsReceived = getFirstLong(line);
            }
            if (line.matches("(\\d+) packets sent")) {
                tcpPacketsSent = getFirstLong(line);
            }
        }
    }

    /** Consumes indented 'udp:' section lines; rewinds and returns at the next section header. */
    protected void parseUdp(BufferedReader reader) throws IOException {
        while (reader.ready()) {
            reader.mark(READ_AHEAD_LIMIT);
            String line = reader.readLine();
            if (line == null) {
                return;
            }
            if (!line.startsWith(" ")) {
                reader.reset();  // un-read the next section's header line
                return;
            }
            line = line.trim();
            if (line.matches("(\\d+) datagrams received")) {
                udpPacketsReceived = getFirstLong(line);
            }
            if (line.matches("(\\d+) datagrams output")) {
                udpPacketsSent = getFirstLong(line);
            }
        }
    }

    /** No per-host tags are produced by this parser. */
    public HashMap<String, String> getTags() {
        return new HashMap<>();
    }

    /** Returns the parsed counters keyed by the sysmon field names. */
    public HashMap<String, Object> getFields() {
        HashMap<String, Object> fields = new HashMap<>();
        fields.put("ip_forwarded", ipForwarded);
        fields.put("ip_received", ipTotalPacketsReceived);
        fields.put("tcp_connections", tcpConnectionsEstablished);
        fields.put("tcp_pkts_recv", tcpPacketsReceived);
        fields.put("tcp_pkts_sent", tcpPacketsSent);
        fields.put("udp_pkts_recv", udpPacketsReceived);
        fields.put("udp_pkts_sent", udpPacketsSent);
        return fields;
    }

    // Extracts the leading counter from a line such as "123 packets received".
    private Long getFirstLong(String line) {
        return Long.parseLong(line.substring(0, line.indexOf(" ")));
    }
}

View file

@ -1,18 +0,0 @@
package sysmon.plugins.os_aix;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.pf4j.Plugin;
import org.pf4j.PluginWrapper;
/**
 * PF4J plugin entry point for the AIX metric extensions in this package.
 * The actual metric collection lives in the {@code @Extension} classes.
 */
public class AixPlugin extends Plugin {
    private static final Logger log = LoggerFactory.getLogger(AixPlugin.class);
    public AixPlugin(PluginWrapper wrapper) {
        super(wrapper);
    }
}

View file

@ -1,75 +0,0 @@
package sysmon.plugins.os_aix;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import sysmon.shared.PluginHelper;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Extension
public class AixProcessorExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(AixProcessorExtension.class);

    /** Supported on ppc64/ppc64le hosts that provide the 'lparstat' command. */
    @Override
    public boolean isSupported() {
        final String osArch = System.getProperty("os.arch").toLowerCase();
        boolean isPower = osArch.startsWith("ppc64");
        if (!isPower) {
            log.warn("Requires CPU Architecture ppc64 or ppc64le, this is: " + osArch);
            return false;
        }
        boolean hasLparstat = PluginHelper.canExecute("lparstat");
        if (!hasLparstat) {
            log.warn("Requires the 'lparstat' command.");
            return false;
        }
        return true;
    }

    @Override
    public String getName() {
        return "aix_processor";
    }

    @Override
    public String getProvides() {
        return "processor_lpar";
    }

    @Override
    public String getDescription() {
        return "AIX Processor Metrics";
    }

    /** Samples LPAR processor statistics by running 'lparstat 5 1'. */
    @Override
    public MetricResult getMetrics() throws Exception {
        final HashMap<String, String> tags;
        final HashMap<String, Object> fields;
        try (InputStream buf = PluginHelper.executeCommand("lparstat 5 1")) {
            AixProcessorStat stat = processCommandOutput(buf);
            tags = stat.getTags();
            fields = stat.getFields();
        }
        return new MetricResult(getName(), new Measurement(tags, fields));
    }

    // Separated out so unit tests can feed canned command output.
    protected AixProcessorStat processCommandOutput(InputStream input) throws IOException {
        return new AixProcessorStat(input);
    }
}

View file

@ -1,25 +0,0 @@
import spock.lang.Specification
import sysmon.plugins.os_aix.AixNetstatParser
class AixNetstatTest extends Specification {

    void "test netstat parsing"() {

        setup:
        InputStream inputStream = getClass().getResourceAsStream('/netstat-aix.txt');

        when:
        AixNetstatParser parser = new AixNetstatParser(inputStream)
        def fields = parser.getFields()

        then:
        fields.size() > 0
        fields['ip_received'] == 76229L
        fields['ip_forwarded'] == 24L
        fields['tcp_connections'] == 85L
        fields['tcp_pkts_sent'] == 31274L
        fields['tcp_pkts_recv'] == 39830L
        fields['udp_pkts_sent'] == 26332L
        fields['udp_pkts_recv'] == 34559L
    }
}

View file

@ -1,65 +0,0 @@
import sysmon.plugins.os_aix.AixProcessorExtension
import sysmon.plugins.os_aix.AixProcessorStat
import spock.lang.Specification
class AixProcessorTest extends Specification {

    void "test AIX lparstat shared output processing"() {

        setup:
        InputStream inputStream = getClass().getResourceAsStream('/lparstat-aix-shared.txt');

        when:
        AixProcessorExtension extension = new AixProcessorExtension()
        AixProcessorStat stats = extension.processCommandOutput(inputStream)
        def fields = stats.getFields()

        then:
        stats.getUser() == 83.7f
        stats.getSys() == 3.3f
        stats.getWait() == 0.0f
        stats.getIdle() == 13.0f
        fields['ent'] == 0.50f
        fields['type'] == "Shared"
    }

    void "test AIX lparstat dedicated output processing"() {

        setup:
        InputStream inputStream = getClass().getResourceAsStream('/lparstat-aix-dedicated.txt');

        when:
        AixProcessorExtension extension = new AixProcessorExtension()
        AixProcessorStat stats = extension.processCommandOutput(inputStream)
        def fields = stats.getFields()

        then:
        stats.getUser() == 0.1f
        stats.getSys() == 0.2f
        stats.getWait() == 0.0f
        stats.getIdle() == 99.7f
        fields['physc'] == 0.07f
        fields['type'] == "Dedicated"
    }

    void "test Linux lparstat output processing"() {

        setup:
        InputStream inputStream = getClass().getResourceAsStream('/lparstat-linux.txt');

        when:
        AixProcessorExtension extension = new AixProcessorExtension()
        AixProcessorStat stats = extension.processCommandOutput(inputStream)
        def fields = stats.getFields()

        then:
        stats.getUser() == 0.03f
        stats.getSys() == 0.0f
        stats.getWait() == 0.0f
        stats.getIdle() == 99.97f
        fields['ent'] == 4.00f
        fields['mode'] == "Uncapped"
        fields['type'] == "Shared"
    }
}

View file

@ -1,157 +0,0 @@
icmp:
12 calls to icmp_error
0 errors not generated because old message was icmp
Output histogram:
destination unreachable: 12
0 messages with bad code fields
0 messages < minimum length
0 bad checksums
0 messages with bad length
Input histogram:
destination unreachable: 3
0 message responses generated
igmp:
0 messages received
0 messages received with too few bytes
0 messages received with bad checksum
0 membership queries received
0 membership queries received with invalid field(s)
0 membership reports received
0 membership reports received with invalid field(s)
0 membership reports received for groups to which we belong
2 membership reports sent
tcp:
31274 packets sent
27328 data packets (82928168 bytes)
86 data packets (108992 bytes) retransmitted
2938 ack-only packets (2698 delayed)
0 URG only packets
0 window probe packets
784 window update packets
138 control packets
3812 large sends
74913716 bytes sent using largesend
64069 bytes is the biggest largesend
39830 packets received
22701 acks (for 82928732 bytes)
112 duplicate acks
0 acks for unsent data
15579 packets (5876585 bytes) received in-sequence
62 completely duplicate packets (320 bytes)
57 old duplicate packets
0 packets with some dup. data (0 bytes duped)
75 out-of-order packets (6408 bytes)
0 packets (0 bytes) of data after window
0 window probes
1723 window update packets
0 packets received after close
0 packets with bad hardware assisted checksum
0 discarded for bad checksums
0 discarded for bad header offset fields
0 discarded because packet too short
1 discarded by listeners
0 discarded due to listener's queue full
3207 ack packet headers correctly predicted
15050 data packet headers correctly predicted
63 connection requests
23 connection accepts
85 connections established (including accepts)
114 connections closed (including 0 drops)
0 connections with ECN capability
0 times responded to ECN
0 embryonic connections dropped
20314 segments updated rtt (of 16791 attempts)
0 segments with congestion window reduced bit set
0 segments with congestion experienced bit set
0 resends due to path MTU discovery
2 path MTU discovery terminations due to retransmits
25 retransmit timeouts
0 connections dropped by rexmit timeout
4 fast retransmits
1 when congestion window less than 4 segments
28 newreno retransmits
4 times avoided false fast retransmits
0 persist timeouts
0 connections dropped due to persist timeout
0 keepalive timeouts
0 keepalive probes sent
0 connections dropped by keepalive
0 times SACK blocks array is extended
0 times SACK holes array is extended
0 packets dropped due to memory allocation failure
0 connections in timewait reused
0 delayed ACKs for SYN
0 delayed ACKs for FIN
0 send_and_disconnects
0 spliced connections
0 spliced connections closed
0 spliced connections reset
0 spliced connections timeout
0 spliced connections persist timeout
0 spliced connections keepalive timeout
0 TCP checksum offload disabled during retransmit
0 Connections dropped due to bad ACKs
0 Connections dropped due to duplicate SYN packets
0 fastpath loopback connections
0 fastpath loopback sent packets (0 bytes)
0 fastpath loopback received packets (0 bytes)
0 fake SYN segments dropped
0 fake RST segments dropped
0 data injection segments dropped
0 TCPTR maximum connections dropped
0 TCPTR connections dropped for no memory
0 TCPTR maximum per host connections dropped
0 connections dropped due to max assembly queue depth
udp:
34559 datagrams received
0 incomplete headers
0 bad data length fields
0 bad checksums
1849 dropped due to no socket
8218 broadcast/multicast datagrams dropped due to no socket
0 socket buffer overflows
24492 delivered
26332 datagrams output
ip:
76229 total packets received
0 bad header checksums
0 with size smaller than minimum
0 with data size < data length
0 with header length < data size
0 with data length < header length
0 with bad options
0 with incorrect version number
0 fragments received
0 fragments dropped (dup or out of space)
0 fragments dropped after timeout
0 packets reassembled ok
72552 packets for this host
3 packets for unknown/unsupported protocol
24 packets forwarded
0 packets not forwardable
0 redirects sent
55784 packets sent from this host
0 packets sent with fabricated ip header
0 output packets dropped due to no bufs, etc.
0 output packets discarded due to no route
0 output datagrams fragmented
0 fragments created
0 datagrams that can't be fragmented
0 IP Multicast packets dropped due to no receiver
0 successful path MTU discovery cycles
0 path MTU rediscovery cycles attempted
0 path MTU discovery no-response estimates
0 path MTU discovery response timeouts
0 path MTU discovery decreases detected
0 path MTU discovery packets sent
0 path MTU discovery memory allocation failures
0 ipintrq overflows
0 with illegal source
0 packets processed by threads
0 packets dropped by threads
0 packets dropped due to the full socket receive buffer
0 dead gateway detection packets sent
0 dead gateway detection packet allocation failures
0 dead gateway detection gateway allocation failures
0 incoming packets dropped due to MLS filters
0 packets not sent due to MLS filters

View file

@ -1,38 +0,0 @@
# Base Plugin
The base plugin uses the [oshi](https://github.com/oshi/oshi) library to get its metrics.
## Processor Extension
Reports the following metrics:
- **system** - CPU time spent on system processes.
- **user** - CPU time spent on user processes.
- **nice** - CPU time spent on user processes running at lower priority.
- **iowait** - CPU time spent waiting (for i/o).
- **steal** - CPU time stolen by the hypervisor and given to other virtual systems.
- **irq** - CPU time spent by the kernel on interrupt requests.
- **softirq** - CPU time spent by the kernel on soft interrupt requests.
- **idle** - CPU time spent idling (doing nothing).
- **busy** - CPU time spent working.
## Memory Extension
Reports the following metrics (in bytes):
- **available** - Estimation of how much memory is available for starting new applications, without swapping.
- **total** - The total amount of (installed) memory.
- **usage** - Percentage of memory used out of the total amount of memory.
- **paged** - ...
- **virtual** - ...
## Disk Extension
Metrics reported are:
- **reads** - The total number of bytes read.
- **writes** - The total number of bytes written.
- **iotime** - Time spent on IO in milliseconds.
- **queue** - Length of IO queue.

View file

@ -1,79 +0,0 @@
package sysmon.plugins.os_base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.hardware.HWDiskStore;
import oshi.hardware.HardwareAbstractionLayer;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Extension
public class BaseDiskExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(BaseDiskExtension.class);

    private HardwareAbstractionLayer hardwareAbstractionLayer;

    @Override
    public boolean isSupported() {
        hardwareAbstractionLayer = BasePlugin.getHardwareAbstractionLayer();
        return hardwareAbstractionLayer != null;
    }

    @Override
    public String getName() {
        return "base_disk";
    }

    @Override
    public String getProvides() {
        return "disk";
    }

    @Override
    public String getDescription() {
        return "Base Disk Metrics";
    }

    /**
     * Aggregates read/write bytes, transfer time and queue length across all
     * physical disks whose names match the AIX (hdisk*), Linux (/dev/sd*, /dev/vd*,
     * /dev/xvd*) or NVMe (/dev/nvme*n*) naming patterns.
     */
    @Override
    public MetricResult getMetrics() {
        long writeBytes = 0L;
        long readBytes = 0L;
        long transferTime = 0L;
        long queueLength = 0L;
        HashMap<String, String> tagsMap = new HashMap<>();
        HashMap<String, Object> fieldsMap = new HashMap<>();
        List<HWDiskStore> diskStores = hardwareAbstractionLayer.getDiskStores();
        for (HWDiskStore store : diskStores) {
            String name = store.getName();
            if (name.matches("hdisk[0-9]+") || name.matches("/dev/x?[sv]d[a-z]{1}") || name.matches("/dev/nvme[0-9]n[0-9]")) {
                log.debug("Using device: " + name);
                writeBytes += store.getWriteBytes();
                readBytes += store.getReadBytes();
                transferTime += store.getTransferTime();
                // BUG FIX: was 'queueLength = ...', which reported only the last
                // matching disk's queue instead of the aggregate across all disks.
                queueLength += store.getCurrentQueueLength();
            }
        }
        fieldsMap.put("reads", readBytes);
        fieldsMap.put("writes", writeBytes);
        fieldsMap.put("iotime", transferTime);
        fieldsMap.put("queue", queueLength);
        log.debug(fieldsMap.toString());
        return new MetricResult(getName(), new Measurement(tagsMap, fieldsMap));
    }
}

View file

@ -1,81 +0,0 @@
package sysmon.plugins.os_base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.hardware.HardwareAbstractionLayer;
import oshi.hardware.NetworkIF;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Extension
public class BaseNetworkExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(BaseNetworkExtension.class);

    private HardwareAbstractionLayer hardwareAbstractionLayer;

    @Override
    public boolean isSupported() {
        hardwareAbstractionLayer = BasePlugin.getHardwareAbstractionLayer();
        return hardwareAbstractionLayer != null;
    }

    @Override
    public String getName() {
        return "base_network";
    }

    @Override
    public String getProvides() {
        return "network";
    }

    @Override
    public String getDescription() {
        return "Base Network Metrics";
    }

    /** Sums packet, byte and error counters across every network interface reported by oshi. */
    @Override
    public MetricResult getMetrics() {
        long totalRxBytes = 0L;
        long totalRxPackets = 0L;
        long totalRxErrors = 0L;
        long totalTxBytes = 0L;
        long totalTxPackets = 0L;
        long totalTxErrors = 0L;

        HashMap<String, String> tags = new HashMap<>();
        HashMap<String, Object> fields = new HashMap<>();

        for (NetworkIF netif : hardwareAbstractionLayer.getNetworkIFs()) {
            totalRxPackets += netif.getPacketsRecv();
            totalTxPackets += netif.getPacketsSent();
            totalRxBytes += netif.getBytesRecv();
            totalTxBytes += netif.getBytesSent();
            totalRxErrors += netif.getInErrors();
            totalTxErrors += netif.getOutErrors();
        }

        fields.put("rxPackets", totalRxPackets);
        fields.put("txPackets", totalTxPackets);
        fields.put("rxBytes", totalRxBytes);
        fields.put("txBytes", totalTxBytes);
        fields.put("rxErrors", totalRxErrors);
        fields.put("txErrors", totalTxErrors);

        log.debug(fields.toString());
        return new MetricResult(getName(), new Measurement(tags, fields));
    }
}

View file

@ -1,103 +0,0 @@
package sysmon.plugins.os_base;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.software.os.OSProcess;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@Extension
public class BaseProcessExtension implements MetricExtension {

    // BUG FIX: the logger was created with BaseProcessorExtension.class,
    // mis-attributing this extension's log output to the processor extension.
    private static final Logger log = LoggerFactory.getLogger(BaseProcessExtension.class);

    // TODO: configurable include-list and/or exclude-list of process names
    private final List<String> includeList = new ArrayList<String>() {{
        add("java");
        add("nginx");
        add("influxd");
        add("dockerd");
        add("containerd");
        add("mysqld");
        add("postgres");
        add("grafana-server");
    }};

    private SystemInfo systemInfo;

    @Override
    public boolean isSupported() {
        systemInfo = BasePlugin.getSystemInfo();
        return systemInfo != null;
    }

    @Override
    public String getName() {
        return "base_process";
    }

    @Override
    public String getProvides() {
        return "process";
    }

    @Override
    public String getDescription() {
        return "Base Process Metrics";
    }

    /**
     * Emits one measurement per matching process (see includeList), tagged by
     * pid and name, carrying memory, CPU-time, I/O, file and thread counters.
     */
    @Override
    public MetricResult getMetrics() {
        ArrayList<Measurement> measurementList = new ArrayList<>();
        List<OSProcess> processList = systemInfo.getOperatingSystem().getProcesses();
        for (OSProcess p : processList) {
            // Skip all the kernel processes (they report no resident memory)
            if (p.getResidentSetSize() < 1) {
                continue;
            }
            String name = p.getName();
            if (!includeList.contains(name)) {
                continue;
            }
            log.debug("pid: " + p.getProcessID() + ", name: " + name + ", virt: " + p.getVirtualSize() + " rss: " + p.getResidentSetSize() + " cmd: " + p.getCommandLine());
            HashMap<String, String> tagsMap = new HashMap<>();
            HashMap<String, Object> fieldsMap = new HashMap<>();
            tagsMap.put("pid", String.valueOf(p.getProcessID()));
            tagsMap.put("name", name);
            fieldsMap.put("mem_rss", p.getResidentSetSize());
            fieldsMap.put("mem_vsz", p.getVirtualSize());
            fieldsMap.put("kernel_time", p.getKernelTime());
            fieldsMap.put("user_time", p.getUserTime());
            fieldsMap.put("read_bytes", p.getBytesRead());
            fieldsMap.put("write_bytes", p.getBytesWritten());
            fieldsMap.put("files", p.getOpenFiles());
            fieldsMap.put("threads", p.getThreadCount());
            fieldsMap.put("user", p.getUser());
            fieldsMap.put("group", p.getGroup());
            fieldsMap.put("prio", p.getPriority());
            measurementList.add(new Measurement(tagsMap, fieldsMap));
        }
        return new MetricResult(getName(), measurementList);
    }
}

View file

@ -1,4 +0,0 @@
# IBM i Plugin
TODO. Nothing here yet.

View file

@ -1,7 +0,0 @@
plugins {
}
dependencies {
// https://mvnrepository.com/artifact/net.sf.jt400/jt400
implementation group: 'net.sf.jt400', name: 'jt400', version: '10.6'
}

View file

@ -1,4 +0,0 @@
pluginId=sysmon-ibmi
pluginClass=sysmon.plugins.os_ibmi.IbmIPlugin
pluginDependencies=
pluginDescription=Collects IBM-i OS metrics.

View file

@ -1,18 +0,0 @@
package sysmon.plugins.os_ibmi;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.pf4j.Plugin;
import org.pf4j.PluginWrapper;
/**
 * PF4J plugin entry point for the IBM i metric extensions in this package.
 * The actual metric collection lives in the {@code @Extension} classes.
 */
public class IbmIPlugin extends Plugin {
    private static final Logger log = LoggerFactory.getLogger(IbmIPlugin.class);
    public IbmIPlugin(PluginWrapper wrapper) {
        super(wrapper);
    }
}

View file

@ -1,94 +0,0 @@
package sysmon.plugins.os_ibmi;
import com.ibm.as400.access.*;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import java.io.IOException;
// Disable for now...
//@Extension
public class TestExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(TestExtension.class);

    private AS400 as400;
    private SystemStatus systemStatus;

    /**
     * Attempts to connect to a hard-coded host (experimental code, extension
     * is disabled). Returns false when no connection object could be created.
     */
    @Override
    public boolean isSupported() {
        String osArch = System.getProperty("os.arch").toLowerCase();
        String osName = System.getProperty("os.name").toLowerCase();
        System.err.println("OS Arch: " + osArch);
        System.err.println("OS Name: " + osName);
        try {
            //as400 = new AS400("localhost", "CURRENT");
            as400 = new AS400("10.32.64.142");
            systemStatus = new SystemStatus(as400);
        } catch (Exception exception) {
            log.error(exception.getMessage(), exception);
        }
        // BUG FIX: if the AS400 constructor threw, 'as400' is still null and the
        // original unconditional as400.isLocal() call raised a NullPointerException.
        if (as400 == null) {
            return false;
        }
        if (as400.isLocal()) {
            log.info("as400 isLocal() true");
        } else {
            log.info("as400 isLocal() FALSE");
        }
        return true;
    }

    @Override
    public String getName() {
        return "ibmi-test";
    }

    @Override
    public String getProvides() {
        return "test";
    }

    @Override
    public String getDescription() {
        return "IBM i Test Extension";
    }

    /**
     * Logs a handful of system-status counters; always returns null (this is
     * exploratory code, no Measurement is produced yet).
     */
    @Override
    public MetricResult getMetrics() {
        if (systemStatus == null) {
            log.warn("getMetrics() - no system or status");
            return null;
        }
        try {
            int jobsInSystem = systemStatus.getJobsInSystem();
            log.info("Jobs In System: " + jobsInSystem);
            int batchJobsRunning = systemStatus.getBatchJobsRunning();
            log.info("Batch Jobs Running: " + batchJobsRunning);
            int activeThreads = systemStatus.getActiveThreadsInSystem();
            log.info("Active Threads: " + activeThreads);
            int activeJobs = systemStatus.getActiveJobsInSystem();
            log.info("Active Jobs: " + activeJobs);
            int onlineUsers = systemStatus.getUsersCurrentSignedOn();
            log.info("Online Users: " + onlineUsers);
        } catch (AS400SecurityException | ErrorCompletingRequestException | InterruptedException | IOException | ObjectDoesNotExistException e) {
            // Preserve the stack trace in the log instead of printing to stderr.
            log.error(e.getMessage(), e);
            if (e instanceof InterruptedException) {
                Thread.currentThread().interrupt();  // restore the interrupt flag
            }
        }
        return null;
    }
}

View file

@ -1,7 +0,0 @@
# Linux Plugins
## Components
### Network Sockets
Collects statistics from */proc/net/sockstats*.

View file

@ -1,2 +0,0 @@
plugins {
}

View file

@ -1,5 +0,0 @@
pluginId=sysmon-linux
pluginClass=sysmon.plugins.os_linux.LinuxPlugin
pluginDependencies=
pluginDescription=Linux OS Metrics.

View file

@ -1,75 +0,0 @@
package sysmon.plugins.os_linux;
import org.pf4j.Extension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sysmon.shared.Measurement;
import sysmon.shared.MetricExtension;
import sysmon.shared.MetricResult;
import sysmon.shared.PluginHelper;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
// Disabled
//@Extension
public class LinuxNetstatExtension implements MetricExtension {

    private static final Logger log = LoggerFactory.getLogger(LinuxNetstatExtension.class);

    /** Supported only on Linux hosts where the 'netstat' binary can be executed. */
    @Override
    public boolean isSupported() {
        final String osName = System.getProperty("os.name").toLowerCase();
        if (!osName.contains("linux")) {
            log.warn("Requires Linux.");
            return false;
        }
        if (!PluginHelper.canExecute("netstat")) {
            log.warn("Requires the 'netstat' command.");
            return false;
        }
        return true;
    }

    @Override
    public String getName() {
        return "linux_network_netstat";
    }

    @Override
    public String getProvides() {
        return "network_netstat";
    }

    @Override
    public String getDescription() {
        return "Linux Netstat Metrics";
    }

    /** Runs 'netstat -s' and turns the parsed counters into a metric result. */
    @Override
    public MetricResult getMetrics() throws Exception {
        final HashMap<String, String> tags;
        final HashMap<String, Object> fields;
        try (InputStream inputStream = PluginHelper.executeCommand("netstat -s")) {
            LinuxNetstatParser parser = processCommandOutput(inputStream);
            tags = parser.getTags();
            fields = parser.getFields();
        }
        log.debug(fields.toString());
        return new MetricResult(getName(), new Measurement(tags, fields));
    }

    // Separated out so unit tests can feed canned command output.
    protected LinuxNetstatParser processCommandOutput(InputStream input) throws IOException {
        return new LinuxNetstatParser(input);
    }
}

View file

@ -1,168 +0,0 @@
package sysmon.plugins.os_linux;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.Map;
public class LinuxNetstatParser {

    private static final Logger log = LoggerFactory.getLogger(LinuxNetstatParser.class);

    // Read-ahead limit (in chars) for mark/reset while peeking at section lines.
    // BUG FIX: the previous limit of 64 was shorter than many 'netstat -s' output
    // lines; reading past the mark limit invalidates it, making reset() throw
    // IOException mid-parse.
    private static final int READ_AHEAD_LIMIT = 1024;

    private long ipTotalPacketsReceived;
    private long ipForwarded;
    private long ipIncomingPacketsDiscarded;
    private long ipOutgoingPacketsDropped;
    private long tcpConnectionsEstablished;
    private long tcpSegmentsReceived;
    private long tcpSegmentsSent;
    private long udpPacketsReceived;
    private long udpPacketsSent;

    /**
     * Parses Linux 'netstat -s' output. Only the 'Ip:', 'Tcp:' and 'Udp:'
     * sections are consumed; all other sections are skipped.
     *
     * @param inputStream raw command output; closed when parsing completes
     * @throws IOException on read failure
     */
    public LinuxNetstatParser(InputStream inputStream) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
        while (reader.ready()) {
            String line = reader.readLine();
            if (line == null) {  // defensive: ready() does not guarantee a full line
                break;
            }
            log.debug("LinuxNetstatParser() - Line: " + line);
            if (line.startsWith("Ip:")) {
                parseIp(reader);
            }
            if (line.startsWith("Tcp:")) {
                parseTcp(reader);
            }
            if (line.startsWith("Udp:")) {
                parseUdp(reader);
            }
        }
        inputStream.close();
    }

    /** Consumes indented 'Ip:' section lines; rewinds and returns at the next section header. */
    protected void parseIp(BufferedReader reader) throws IOException {
        while (reader.ready()) {
            reader.mark(READ_AHEAD_LIMIT);
            String line = reader.readLine();
            if (line == null) {
                return;
            }
            if (!line.startsWith(" ")) {
                reader.reset();  // un-read the next section's header line
                return;
            }
            line = line.trim();
            if (line.matches("(\\d+) total packets received")) {
                ipTotalPacketsReceived = getFirstLong(line);
            }
            if (line.matches("(\\d+) forwarded")) {
                ipForwarded = getFirstLong(line);
            }
            if (line.matches("(\\d+) incoming packets discarded")) {
                ipIncomingPacketsDiscarded = getFirstLong(line);
            }
            if (line.matches("(\\d+) outgoing packets dropped")) {
                ipOutgoingPacketsDropped = getFirstLong(line);
            }
        }
    }

    /** Consumes indented 'Tcp:' section lines; rewinds and returns at the next section header. */
    protected void parseTcp(BufferedReader reader) throws IOException {
        while (reader.ready()) {
            reader.mark(READ_AHEAD_LIMIT);
            String line = reader.readLine();
            if (line == null) {
                return;
            }
            if (!line.startsWith(" ")) {
                reader.reset();  // un-read the next section's header line
                return;
            }
            line = line.trim();
            if (line.matches("(\\d+) connections established")) {
                tcpConnectionsEstablished = getFirstLong(line);
            }
            if (line.matches("(\\d+) segments received")) {
                tcpSegmentsReceived = getFirstLong(line);
            }
            if (line.matches("(\\d+) segments sent out")) {
                tcpSegmentsSent = getFirstLong(line);
            }
        }
    }

    /** Consumes indented 'Udp:' section lines; rewinds and returns at the next section header. */
    protected void parseUdp(BufferedReader reader) throws IOException {
        while (reader.ready()) {
            reader.mark(READ_AHEAD_LIMIT);
            String line = reader.readLine();
            if (line == null) {
                return;
            }
            if (!line.startsWith(" ")) {
                reader.reset();  // un-read the next section's header line
                return;
            }
            line = line.trim();
            if (line.matches("(\\d+) packets received")) {
                udpPacketsReceived = getFirstLong(line);
            }
            if (line.matches("(\\d+) packets sent")) {
                udpPacketsSent = getFirstLong(line);
            }
        }
    }

    /** No per-host tags are produced by this parser. */
    public HashMap<String, String> getTags() {
        return new HashMap<>();
    }

    /** Returns the parsed counters keyed by the sysmon field names. */
    public HashMap<String, Object> getFields() {
        HashMap<String, Object> fields = new HashMap<>();
        fields.put("ip_forwarded", ipForwarded);
        fields.put("ip_received", ipTotalPacketsReceived);
        fields.put("ip_dropped", ipOutgoingPacketsDropped);
        fields.put("ip_discarded", ipIncomingPacketsDiscarded);
        fields.put("tcp_connections", tcpConnectionsEstablished);
        fields.put("tcp_pkts_recv", tcpSegmentsReceived);
        fields.put("tcp_pkts_sent", tcpSegmentsSent);
        fields.put("udp_pkts_recv", udpPacketsReceived);
        fields.put("udp_pkts_sent", udpPacketsSent);
        return fields;
    }

    // Extracts the leading counter from a line such as "123 packets received".
    private Long getFirstLong(String line) {
        return Long.parseLong(line.substring(0, line.indexOf(" ")));
    }
}

View file

@ -1,97 +0,0 @@
package sysmon.plugins.os_linux;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Parses the contents of {@code /proc/net/sockstat} (one line per protocol,
 * e.g. "TCP: inuse 52 orphan 0 tw 18 alloc 55 mem 7") into socket counters.
 */
public class LinuxNetworkSockStat {

    // Patterns are compiled once; matching is done per input line.
    private static final Pattern pattern1 = Pattern.compile("^sockets: used (\\d+)");
    private static final Pattern pattern2 = Pattern.compile("^TCP: inuse (\\d+) orphan (\\d+) tw (\\d+) alloc (\\d+) mem (\\d+)");
    private static final Pattern pattern3 = Pattern.compile("^UDP: inuse (\\d+) mem (\\d+)");

    private long sockets;
    private long tcp_inuse;
    private long tcp_orphan;
    private long tcp_tw;
    private long tcp_alloc;
    private long tcp_mem;
    private long udp_inuse;
    private long udp_mem;

    /*
    sockets: used 1238
    TCP: inuse 52 orphan 0 tw 18 alloc 55 mem 7
    UDP: inuse 11 mem 10
    UDPLITE: inuse 0
    RAW: inuse 0
    FRAG: inuse 0 memory 0
     */

    /**
     * @param lines raw lines of /proc/net/sockstat; unknown protocols and
     *              malformed lines are ignored
     */
    LinuxNetworkSockStat(List<String> lines) {
        Matcher matcher;
        for (String line : lines) {
            int colon = line.indexOf(':');
            if (colon < 0) {
                // Guard: a blank or unexpected line used to throw
                // StringIndexOutOfBoundsException from substring(0, -1).
                continue;
            }
            String proto = line.substring(0, colon);
            switch (proto) {
                case "sockets":
                    matcher = pattern1.matcher(line);
                    if (matcher.matches() && matcher.groupCount() == 1) {
                        sockets = Long.parseLong(matcher.group(1));
                    }
                    break;
                case "TCP":
                    matcher = pattern2.matcher(line);
                    if (matcher.matches() && matcher.groupCount() == 5) {
                        tcp_inuse = Long.parseLong(matcher.group(1));
                        tcp_orphan = Long.parseLong(matcher.group(2));
                        tcp_tw = Long.parseLong(matcher.group(3));
                        tcp_alloc = Long.parseLong(matcher.group(4));
                        tcp_mem = Long.parseLong(matcher.group(5));
                    }
                    break;
                case "UDP":
                    matcher = pattern3.matcher(line);
                    if (matcher.matches() && matcher.groupCount() == 2) {
                        udp_inuse = Long.parseLong(matcher.group(1));
                        udp_mem = Long.parseLong(matcher.group(2));
                    }
                    break;
            }
        }
    }

    /** @return tags for this measurement; currently none. */
    public HashMap<String, String> getTags() {
        return new HashMap<>();
    }

    /** @return the parsed socket counters keyed by metric field name. */
    public HashMap<String, Object> getFields() {
        HashMap<String, Object> fields = new HashMap<>();
        fields.put("sockets", sockets);
        fields.put("tcp_inuse", tcp_inuse);
        fields.put("tcp_alloc", tcp_alloc);
        fields.put("tcp_orphan", tcp_orphan);
        fields.put("tcp_mem", tcp_mem);
        fields.put("tcp_tw", tcp_tw);
        fields.put("udp_inuse", udp_inuse);
        fields.put("udp_mem", udp_mem);
        return fields;
    }
}

View file

@ -1,17 +0,0 @@
package sysmon.plugins.os_linux;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.pf4j.Plugin;
import org.pf4j.PluginWrapper;
/**
 * PF4J plugin entry point for the Linux metrics extensions.
 * Contains no behavior of its own; it only hands the wrapper to the
 * framework base class, which manages the plugin lifecycle.
 */
public class LinuxPlugin extends Plugin {
    // NOTE(review): logger is currently unused in this class; kept for
    // consistency with the other plugin classes in this package.
    private static final Logger log = LoggerFactory.getLogger(LinuxPlugin.class);
    public LinuxPlugin(PluginWrapper wrapper) {
        super(wrapper);
    }
}

Some files were not shown because too many files have changed in this diff Show more