Compare commits

...

195 Commits

Author SHA1 Message Date
renovate[bot]
dc24e35786
chore(deps): update dependency eslint-plugin-vue to v9.27.0 2024-07-02 08:00:27 +00:00
Bharath Vignesh J K
6af2340c5c chg: plugin(percpu) - show times based on host OS 2024-07-01 10:27:33 +05:30
RazCrimson
46abd4b4c9
Merge pull request #2860 from nicolargo/2859-glances-411-on-windows-attributeerror-cpupercent-object-has-no-attribute-cpu_percent
fix: cpu_percent - initialization timer bugs
2024-07-01 09:33:57 +05:30
Bharath Vignesh J K
2801add7e2 fix: cpu_percent - initialization timer bugs
fixes #2859
2024-07-01 09:33:05 +05:30
nicolargo
9b853d857b Merge branch 'develop' of github.com:nicolargo/glances into develop 2024-06-30 10:18:29 +02:00
Bharath Vignesh J K
99e65079a0 chg: plugin(docker) - add some typing 2024-06-30 03:52:09 +05:30
Bharath Vignesh J K
0e1d462c69 chg: exports (influxdb) - log warning message if export fails 2024-06-30 01:33:51 +05:30
nicolargo
f4c7ca01fe Correct codespell 2024-06-29 19:19:26 +02:00
nicolargo
60591630df Improve readability of OK_LOG 2024-06-29 19:01:25 +02:00
nicolargo
48103f167e Only test latest Python version on MacOS 2024-06-29 16:43:47 +02:00
nicolargo
154ae8e610 On the road (again) to Glances 4.2.0 2024-06-29 16:36:22 +02:00
nicolargo
86c2cd1d4a Glances 4.1.1 2024-06-29 16:28:50 +02:00
nicolargo
452fd6497e Sensors data is not exported using InfluxDB2 exporter #2856 2024-06-29 16:03:27 +02:00
nicolargo
ccfd8f0aa9 On the road of Glances 4.2.0 2024-06-29 10:01:07 +02:00
nicolargo
d91bfa2f8b Update docs 2024-06-29 09:55:33 +02:00
nicolargo
582ca07716 Glances 4.1.0 2024-06-29 09:49:36 +02:00
Nicolas Hennion
036eb976e9
Merge pull request #2854 from CognitiveDisson/patch-1
Remove duplicate line from the README file
2024-06-27 06:37:58 +02:00
Vadim Smal
2c5a1150bd
Remove duplicate line from the README file 2024-06-26 23:35:37 +01:00
nicolargo
f6066e5d46 PsUtil 6+ no longer check PID reused #2755 2024-06-26 18:56:31 +02:00
nicolargo
cff2e9fc4a Call process_iter.clear_cache() (PsUtil 6+) when Glances user force a refresh (F5 or CTRL-R) #2753 2024-06-26 18:36:10 +02:00
nicolargo
e5d5351d31 Add cpu model for CPU info (Raspberry PI 5) 2024-06-26 18:12:56 +02:00
nicolargo
be89ee0025 Merge remote-tracking branch 'origin/2616-raspberry-pi-cpu-info-is-not-correct' into develop 2024-06-26 18:10:00 +02:00
nicolargo
f4cd350221 Update Security message 2024-06-23 10:50:35 +02:00
nicolargo
3928007169 Merge branch 'issue2849' into develop 2024-06-23 10:23:10 +02:00
nicolargo
eebd769c46 perCPU and CPU consumption display time to time a total of 100% #2849 2024-06-23 10:22:37 +02:00
nicolargo
2ee3c86e2b Graph export is broken if there is no graph section in Glances configuration file #2839 2024-06-22 10:37:00 +02:00
nicolargo
f5ecbf7a1f Merge branch 'clean-fs-plugin-code' into develop 2024-06-22 10:23:55 +02:00
nicolargo
267e7e2ff0 Clean FS Plugin code 2024-06-22 10:23:37 +02:00
Bharath Vignesh J K
1e2e36af23 chg: cpu_percent - support CPU Name for Raspberry Pi
fixes #2616
2024-06-22 02:32:20 +05:30
Continuous Integration
c153e71aa3 Continuous Integration Build Artifacts 2024-06-18 00:13:48 +00:00
RazCrimson
f9915d93e8
Merge pull request #2842 from nicolargo/2841-glances-api-status-check-returns-error-405-method-not-allowed
fix: RESTful API - HEAD request not supported on `{version}/status` as mentioned in docs
2024-06-18 05:38:14 +05:30
Bharath Vignesh J K
69c3a948ba fix: RESTful API - HEAD request not supported on {version}/status as mentioned in docs
fixes #2841
2024-06-18 05:36:18 +05:30
Nicolas Hennion
8c520e1d94
Update network.rst
Correct alias example
2024-06-17 13:40:11 +02:00
Nicolas Hennion
1edaed52de
Update doc-requirements.txt
Add defusedxml
2024-06-17 13:36:17 +02:00
Nicolas Hennion
752bd40a04
Update doc-requirements.txt
Add PsUtil as a requirement to generate doc.
https://readthedocs.org/projects/glances/builds/24710902/
2024-06-17 13:34:13 +02:00
nicolargo
fce3aac34e Add .readthedocs.yaml configuration file 2024-06-16 22:46:36 +02:00
nicolargo
9c482196f3 Add .readthedocs.yaml configuration file 2024-06-16 22:43:55 +02:00
nicolargo
3beb08b66e Merge branch 'issue977' into develop 2024-06-16 17:06:11 +02:00
nicolargo
0874e13f1d Enhance Glances browser color #977 2024-06-16 17:05:53 +02:00
Bharath Vignesh J K
8b4ef8c235 fix: containers (podman) - handle dead infra container with active children 2024-06-16 06:25:46 +05:30
RazCrimson
993ef0107a
Merge pull request #2834 from nicolargo/2827-dont-display-rootless-podman-containers-keyerror
fix: containers (podman) - missing support for rootless mode
2024-06-16 06:11:45 +05:30
Bharath Vignesh J K
0bdda36143 fix: containers (podman) - missing support for rootless mode 2024-06-16 06:04:13 +05:30
nicolargo
7cffdcbff7 Add needs contributor workflow to Github actions 2024-06-15 09:44:05 +02:00
nicolargo
986c865718 Add inactive issues workflow to Github actions 2024-06-15 09:40:14 +02:00
nicolargo
0227d221da [snap] PermissionError: [Errno 13] Permission denied: '/proc/net/wireless' #2829 2024-06-15 08:42:32 +02:00
Continuous Integration
3f39522487 Continuous Integration Build Artifacts 2024-06-15 00:26:00 +00:00
RazCrimson
78b4518547
Merge pull request #2832 from nicolargo/2831-j-hot-key-crashs-glances
fix: plugin (processlist) - failure on programs view
2024-06-15 05:49:58 +05:30
Bharath Vignesh J K
ea4ce83d83 fix: plugin (processlist) - failure on programs view
#2831
2024-06-15 05:42:12 +05:30
nicolargo
051006e12f test test_107_fs_plugin_method fails on aarch64-linux #2819 2024-06-09 17:40:28 +02:00
nicolargo
4862264a2a Update bug report template 2024-06-09 17:27:51 +02:00
nicolargo
0791c7e9da --export-process-filter Filter using complete command #2824 2024-06-09 15:17:01 +02:00
nicolargo
12dafcf856 Exception when Glances is ran with limited plugin list #2822 2024-06-09 10:30:34 +02:00
nicolargo
4b071403da Disable separator option do not work... #2823 2024-06-09 10:08:16 +02:00
nicolargo
a6b7de8a4b API: Network module is disabled but appears in endpoint all (Glances v4 regression) #2815 2024-06-08 19:30:09 +02:00
nicolargo
e371c8f39d API is not compatible with requests containing special/encoding char #2820 2024-06-08 18:52:30 +02:00
nicolargo
810b0ec488 Remove some breaking test 2024-06-08 18:02:53 +02:00
nicolargo
f224cab225 Improve unittest 2024-06-08 17:50:42 +02:00
nicolargo
9b0d0a7ff0 Get back to Glances 4.1.0 2024-06-08 10:29:57 +02:00
nicolargo
8d3b3691e5 Add release note for Glances 4.0.8 2024-06-08 10:19:49 +02:00
nicolargo
13e2f73738 Merge branch 'secureamps' into develop 2024-06-01 18:33:11 +02:00
nicolargo
6077bb4a55 [BREAKING CHANGE in conf file] Secure call to AMPS, use && as command separator instead of ; 2024-06-01 18:32:51 +02:00
nicolargo
5ad9e89658 Optimize 2024-06-01 18:08:46 +02:00
nicolargo
65f84fd1e9 Raspberry PI - CPU info is not correct #2616 2024-06-01 17:49:22 +02:00
nicolargo
b2e5cb58c4 Reduce code complexity #2801 2024-06-01 17:28:52 +02:00
nicolargo
4a60e826b5 Merge branch 'issue2799' into develop 2024-06-01 11:03:24 +02:00
nicolargo
d0de547e41 Add support for automatically hiding network interfaces that are down or that don't have any IP addresses #2799 2024-06-01 11:02:58 +02:00
nicolargo
d00be34255 Merge branch 'issue2812' into develop 2024-06-01 10:39:34 +02:00
nicolargo
6410ccab9b Make CORS option configurable #2812 2024-06-01 10:38:40 +02:00
nicolargo
350318fb6e Put * in default CORS configuration. Related to discussion #2802 2024-05-29 17:44:24 +02:00
nicolargo
5a78e72419 Optimize Glances configuration file path search 2024-05-28 13:49:11 +02:00
nicolargo
2379ec6b55 Merge branch 'develop' of github.com:nicolargo/glances into develop 2024-05-28 13:31:38 +02:00
nicolargo
a55970af83 When Glances is installed via venv, default configuration file is not used #2803 2024-05-28 13:31:22 +02:00
Nicolas Hennion
273a405a10
Update README.rst
Remove py-cpuinfo
2024-05-27 15:04:08 +02:00
nicolargo
c187c14b5f Update NEWS file 2024-05-25 16:18:53 +02:00
nicolargo
a129c1b337 GET /1272f6e9e8f9d6bfd6de.png results in 404 #2781 2024-05-25 15:45:34 +02:00
nicolargo
e5e5cf4c01 cpu_hz_current not available on NetBSD #2792 2024-05-25 12:30:10 +02:00
nicolargo
092ecf5cf8 Correct error with orjson version: use the latest one 2024-05-25 12:15:38 +02:00
nicolargo
4b48304324 Merge branch 'issue2796' into develop 2024-05-25 12:02:40 +02:00
nicolargo
79d9aa742b No GPU info on Web View #2796 2024-05-25 12:01:40 +02:00
nicolargo
8d6fabc510 Retire ujson for being in maintenance mode #2791 2024-05-25 10:03:39 +02:00
nicolargo
23cd99d11e Merge branch 'develop' into issue2791 2024-05-25 10:03:21 +02:00
nicolargo
6d2ecf5cf0 Change client and browser call 2024-05-25 10:02:35 +02:00
nicolargo
c61d10bec7 Unittest OK 2024-05-25 09:42:02 +02:00
nicolargo
60d6be1718 Better 2024-05-20 19:35:56 +02:00
Bharath Vignesh J K
9ab3199e13 fix: build workflow - missing scope on cache-from 2024-05-19 09:37:53 +05:30
Bharath Vignesh J K
f6ddc5cf49 chg: build workflow - use scope to prevent multiple os caches from overwriting each other 2024-05-19 08:40:37 +05:30
Bharath Vignesh J K
c5ea3d292c chg: build workflow - use newer cache exporter - gha
Ref: https://docs.docker.com/build/cache/backends
2024-05-19 07:33:44 +05:30
Bharath Vignesh J K
e07bacf9a3 chg: build workflow - use latest official docker meta action 2024-05-19 07:22:37 +05:30
Bharath Vignesh J K
6743eea32d chg: requirements.txt(s) - remove unnecessary version constraints
- drop duplicated deps already present in base requirements.txt
2024-05-19 07:01:33 +05:30
Bharath Vignesh J K
53621f413b chg: deps - remove unused deps - scandir & wifi 2024-05-19 06:59:03 +05:30
Bharath Vignesh J K
f0fc27f952 chg: pyproject.toml - remove black config 2024-05-19 06:57:11 +05:30
nicolargo
ba88263b85 Replace UJson by ORJson - Client/Server do not work anymore 2024-05-18 14:09:30 +02:00
nicolargo
d2f1fa10d5 Reformat code 2024-05-18 11:24:54 +02:00
nicolargo
15b0a9bbab Update snapcraft 2024-05-18 11:18:01 +02:00
nicolargo
1183b4dabd Update docs 2024-05-18 10:58:39 +02:00
nicolargo
74f9606e65 Merge Ruff formatting and solve some conflicts #2779 2024-05-18 10:37:57 +02:00
Nicolas Hennion
a99bf33b4b
Merge pull request #2779 from nicolargo/ruff-cleanup
chore: ruff cleanup
2024-05-18 10:32:13 +02:00
nicolargo
4f38bf384b Add some comment regarding the minimal Python version and exclude all the venv* folders in the linter configuration 2024-05-18 10:17:47 +02:00
nicolargo
5262524db3 Merge branch 'develop' of github.com:nicolargo/glances into develop 2024-05-18 09:32:32 +02:00
RazCrimson
3306fa0118
Merge pull request #2789 from nicolargo/2788-sensortype-change-in-rest-api-breaks-compatibility-in-404
fix: plugin(sensors) - serialization bug with raw stats
2024-05-18 05:01:09 +05:30
Bharath Vignesh J K
30ec25fa0a fix: plugin(sensors) - serialization bug with raw stats 2024-05-18 04:57:47 +05:30
Bharath Vignesh J K
6a0889851c chg: glances client - avoid logging user creds 2024-05-18 01:21:32 +05:30
Bharath Vignesh J K
f31b767e0e chg: github-actions to use ruff 2024-05-17 04:41:31 +05:30
Bharath Vignesh J K
14efbac82d chore: remove older lint configs 2024-05-17 04:17:33 +05:30
Bharath Vignesh J K
00c662f870 chg: Makefile - use ruff & pre-commit 2024-05-17 04:13:26 +05:30
Bharath Vignesh J K
cd69c4a0e8 chg: ruff - naming
exclude already ignores rules with flake8
2024-05-17 03:58:43 +05:30
Bharath Vignesh J K
28e7e5b74a chg: ruff - flake8-comprehensions 2024-05-17 03:37:24 +05:30
Bharath Vignesh J K
108ffcdfb6 chg: ruff - pyupgrade 2024-05-17 03:37:22 +05:30
Bharath Vignesh J K
f9a06a31f0 chg: ruff - mccabe (CC>21) 2024-05-17 03:36:55 +05:30
Bharath Vignesh J K
1f628642d4 fix: auto-removed imports in globals 2024-05-17 03:36:55 +05:30
Bharath Vignesh J K
b4c2a4c24a chg: ruff - flake-return fixes 2024-05-17 03:36:55 +05:30
Bharath Vignesh J K
d392caf6f6 chg: ruff - isort fixes 2024-05-17 03:36:53 +05:30
Bharath Vignesh J K
9890749d81 add: pre-commit - setup changes 2024-05-17 02:11:23 +05:30
Bharath Vignesh J K
acb5ef585f chore: ruff cleanup 2024-05-17 02:10:09 +05:30
RazCrimson
396e78c415
Merge pull request #2778 from nicolargo/2777-make-pydantic-dep-optional
chg: event_list - enable data validation only if pydantic available
2024-05-16 21:05:18 +05:30
Nicolas Hennion
6583109c73 Update the Grafana dashboard - Sensors is now displayed #2780 2024-05-16 14:14:16 +00:00
Nicolas Hennion
986cd99fbd Update the Grafana dashboard #2780 2024-05-16 13:44:51 +00:00
Bharath Vignesh J K
98f2bb7fbe chg: event_list - enable data validation only if pydantic available 2024-05-16 17:00:20 +05:30
Nicolas Hennion
05bc1449db Merge branch 'issue2776' into develop 2024-05-16 09:00:39 +00:00
Nicolas Hennion
d095e4ae34 4.0.4 - On Glances startup ERROR -- Can not init battery class ([WinError 3] even though sensors plugin disabled #2776 2024-05-16 09:00:17 +00:00
Nicolas Hennion
6acd0da704 Merge branch 'issue2773' into develop 2024-05-16 08:46:46 +00:00
Nicolas Hennion
f6545580db In codeSpace (with Python 3.8), an error occurs in ./unittest-restful.py #2773 2024-05-16 08:46:27 +00:00
Bharath Vignesh J K
d94b0b4e92 fix: plugin(sensors) - typehint for py38 2024-05-15 18:09:28 +05:30
Nicolas Hennion
0447e210e5 Catch exception when init colors in TMUX 2024-05-15 08:12:38 +00:00
RazCrimson
d8c5a72387
Merge pull request #2771 from nicolargo/2770-pydantic-v1-breaks-glances-v4
chg: requirements.txt - use pydantic v2
2024-05-15 10:37:45 +05:30
Bharath Vignesh J K
25ae493517 chg: requirements.txt - use pydantic v2
Co-authored-by: Saketh Raman KS <sakethramansundaram78@gmail.com>
2024-05-15 10:19:07 +05:30
RazCrimson
3aa9225da0
Merge pull request #2768 from nicolargo/2765-sensors-plugin-is-buggy
refactor: plugin(sensors) - cleanup + typing + fixes
2024-05-14 14:01:00 +05:30
Bharath Vignesh J K
7a25f7165e refactor: plugin(sensors) - cleanup + typing + fixes 2024-05-14 13:37:20 +05:30
nicolargo
c66c651205 Merge branch 'develop' of github.com:nicolargo/glances into develop 2024-05-13 23:00:58 +02:00
Nicolas Hennion
127f2e45ed
Merge pull request #2761 from chenrui333/fix-requirements.txt
fix: include requirements.txt and SECURITY.md for pypi dist
2024-05-13 22:49:46 +02:00
nicolargo
c98b7fcf1d Refactor setup.py 2024-05-13 22:48:07 +02:00
RazCrimson
8c1b74993d
Merge pull request #2766 from nicolargo/2765-sensors-plugin-is-buggy
hotfix: plugin(sensors) - race conditions btw fan_speed & temperature…
2024-05-14 01:29:17 +05:30
Bharath Vignesh J K
371885c34c hotfix: plugin(sensors) - race conditions btw fan_speed & temperature methods 2024-05-14 01:14:21 +05:30
Rui Chen
6d4e0884cd
AUTHORS: add Rui for homebrew package
Signed-off-by: Rui Chen <rui@chenrui.dev>
2024-05-12 14:48:51 -04:00
Rui Chen
fc63162515
fix: include requirements.txt and SECURITY.md for pypi dist
Signed-off-by: Rui Chen <rui@chenrui.dev>
2024-05-12 14:45:13 -04:00
nicolargo
516b3ab478 Force build for snap... 2024-05-12 19:27:56 +02:00
nicolargo
b54fbda064 On the road to Glances 4.1.0 2024-05-12 19:05:07 +02:00
nicolargo
7fbc598d5a Correct pydantic miss dep in tox 2024-05-12 18:32:32 +02:00
Nicolas Hennion
ddfa809280
Merge pull request #2760 from chenrui333/add-pydantic
fix(setup): add missing pydantic
2024-05-12 18:23:35 +02:00
Rui Chen
4bfe5f2c81
fix(setup): add missing pydantic
Signed-off-by: Rui Chen <rui@chenrui.dev>
2024-05-12 12:11:30 -04:00
nicolargo
afb06cb230 WebUI not needed to build in CI 2024-05-12 11:13:59 +02:00
nicolargo
4ff7c82559 Update docs and API 2024-05-12 11:10:59 +02:00
nicolargo
4a4ca76505 Do not build WebUI if tags 2024-05-12 11:05:13 +02:00
nicolargo
374bf0944b On the road of Glances 4.1.0 2024-05-12 10:49:45 +02:00
nicolargo
d9056e6027 Glances version 4.0.0 2024-05-12 10:45:03 +02:00
nicolargo
562780f7f0 Glances 4.0.0_RC04 2024-05-12 09:53:47 +02:00
nicolargo
e9abf1c9af ORJson is not mandatory 2024-05-12 09:52:17 +02:00
nicolargo
364a3ef884 Correct README with a correct example regarding the docker + specific config file (revert) example is ok with -C option 2024-05-11 20:10:25 +02:00
nicolargo
35015f7c19 Correct README with a correct example regarding the docker + specific config file 2024-05-11 20:09:22 +02:00
nicolargo
e7a3de5038 Hide password in the Glances browser form #503 2024-05-09 19:43:11 +02:00
nicolargo
b8304d8796 Glances version 4 RC03 2024-05-09 16:21:59 +02:00
nicolargo
893c1452a3 Allow user to define only careful limits 2024-05-09 16:14:01 +02:00
nicolargo
ada7f248ba Improve code 2024-05-09 10:41:24 +02:00
Nicolas Hennion
4ddff48c70
Merge pull request #2759 from nicolargo/issue2757
Improve tests by testing all the plugin/model.py methods - First part of Issue2757
2024-05-09 10:18:29 +02:00
nicolargo
f2d7e13cd8 Improve tests by testing all the plugin/model.py methods #2757 2024-05-09 10:09:05 +02:00
nicolargo
fd3625826b First work done for plugin model (stats only) 2024-05-07 15:26:16 +02:00
nicolargo
af4765db8a Remove unitest shell script - Use Makefile instead 2024-05-07 14:07:08 +02:00
nicolargo
8e8d4ce89b Rename unitest to unittest - Related to #2757 2024-05-07 14:01:36 +02:00
nicolargo
ee826a1e67 version 3.4.0.5 2024-03-23 14:47:29 +01:00
nicolargo
fb10c7f36d Remove Python 2 test from CI because Python 2 is no more available inside Gitaction images 2024-03-23 10:55:43 +01:00
nicolargo
370da44744 version 3.4.0.4 2024-03-23 10:16:34 +01:00
nicolargo
b97606247d Migration to Alpine 3.19 and update documentation regarding default glances.conf file in Docker images 2024-01-13 09:48:50 +01:00
nicolargo
a695e1a70e Update Alpine version to 3.18 (a link to the latest 3.18.x tag) 2024-01-13 09:24:53 +01:00
nicolargo
a3e08acdd2 Remove -C /etc/glances.conf from the Dockerfile CMD 2023-12-13 21:58:54 +01:00
nicolargo
048e1fc6de Upgrade Alpine version 2023-12-10 15:39:03 +01:00
nicolargo
2e6842cc18 Security update 2023-12-09 11:31:01 +01:00
nicolargo
5e777f0f42 Update Github build workflow 2023-12-09 11:25:55 +01:00
Nicolas Hennion
e7b0cf09dd
Merge pull request #2571 from nicolargo/revert-2563-patch-1
Revert "Update docker.rst to remove latest-alpine tag"
2023-10-07 10:42:07 +02:00
Nicolas Hennion
81b93e9f16
Revert "Update docker.rst to remove latest-alpine tag" 2023-10-07 10:41:37 +02:00
Nicolas Hennion
34f0e90130
Merge pull request #2563 from NLZ/patch-1
Update docker.rst to remove latest-alpine tag
2023-10-07 10:37:21 +02:00
Nagy László Zoltán
82e8d7bac7
Update docker.rst to remove latest-alpine tag
latest-alpine tag is deprecated, so updating the compose example to use latest instead
2023-09-30 11:32:57 +02:00
Nicolas Hennion
551c4d686b
Merge pull request #2488 from k26pl/patch-1
Fix typos
2023-07-08 17:33:45 +02:00
k26pl
0cc3dcbaa0
Fix typos 2023-07-05 10:05:39 +02:00
nicolargo
1470cd9f63 Merge branch 'master' into support/glancesv3 2023-05-20 16:47:20 +02:00
nicolargo
f360404a3c version 3.4.0.3 2023-05-20 16:46:56 +02:00
nicolargo
ac4da6fd5b Correct issue with the PR #2419 - Miss a chmod on the glances startup script 2023-05-20 16:36:11 +02:00
nicolargo
40fd7fe7c7 Add glances binary to '/usr/local/bin' + Update ENV PATH to include '/venv/bin' in Dockerfiles - Reported from devel branch #2419 2023-05-20 16:09:27 +02:00
nicolargo
87b8300cc6 Make also the job on memswap - related to #2420 2023-05-20 12:29:55 +02:00
nicolargo
28101e20fa No more threshold display in the WebUI cpu/mem and memswap plugins #2420 2023-05-20 12:23:48 +02:00
nicolargo
5bb1047151 Get back to Core20 because Snap is PISA again 2023-05-19 10:31:20 +02:00
nicolargo
f3cf3aa77b Get back to Core20 because Snap is PISA again 2023-05-19 10:31:05 +02:00
nicolargo
dbed127987 Get back to Core20 because Snap is PISA 2023-05-19 10:15:56 +02:00
nicolargo
ed46b6a3e2 Get back to Core20 because Snap is PISA 2023-05-19 10:15:38 +02:00
nicolargo
1d86888f76 SNAP is PITA 2023-05-19 10:04:43 +02:00
nicolargo
2da2cb714e SNAP is PITA 2023-05-19 10:04:19 +02:00
nicolargo
ef57106fac Snap is a pain in the ass 2023-05-19 09:49:12 +02:00
nicolargo
3874a16e03 Snap is a pain in the ass 2023-05-19 09:48:55 +02:00
nicolargo
3e367235d6 Try to migrate SnapCraft Core 20 to 22 (following guide: https://forum.snapcraft.io/t/micro-howto-migrate-from-core20-to-core22/30188) 2023-05-19 09:34:50 +02:00
nicolargo
c3c0cb6e6c Try to migrate SnapCraft Core 20 to 22 (following guide: https://forum.snapcraft.io/t/micro-howto-migrate-from-core20-to-core22/30188) 2023-05-19 09:21:29 +02:00
nicolargo
5f7e713254 No module named 'influxdb' on the snap version of glances #1738 2023-05-19 09:02:29 +02:00
nicolargo
6d7d3a914d version 3.4.0.2 2023-05-18 17:44:52 +02:00
nicolargo
fef2d86940 Cannot start Glances 3.4.0.1 on Windows 10: SIGHUP not defined #2408 2023-05-18 17:29:40 +02:00
nicolargo
2c5df09983 Influxdb2 export not working #2407 2023-05-18 17:29:24 +02:00
nicolargo
cb9dba3c18 Glances version 3.4.0.1 2023-05-17 11:26:47 +02:00
nicolargo
230c2fe437 3.4.0 crash on startup with minimal deps #2401 2023-05-17 11:18:37 +02:00
178 changed files with 6033 additions and 5598 deletions

View File

@ -1,2 +0,0 @@
[bandit]
exclude: ./docs,./glances/outputs/static/node_modules

View File

@ -1,8 +0,0 @@
[flake8]
exclude = .git,__pycache__,docs/,build,dist
ignore =
W504, B007, B014, B008, B902, Q000,
N801, N802, N803, N806, N807, N811, N812, N813, N814, N815, N816, N817, N818
# lines should not exceed 120 characters
max-line-length = 120

View File

@ -6,6 +6,9 @@ labels: ''
assignees: ''
---
**Check the bug**
Before filling this bug report, please search if a similar issue already exists.
In this case, just add a comment on this existing issue.
**Describe the bug**
A clear and concise description of what the bug is.
@ -26,11 +29,7 @@ If applicable, add screenshots to help explain your problem.
- Operating System (lsb_release -a or OS name/version): `To be completed with result of: lsb_release -a`
- Glances & psutil versions: `To be completed with result of: glances -V`
- How do you install Glances (Pypi package, script, package manager, source): `To be completed`
- Glances test (only available with Glances 3.1.7 or higher):
```
To be completed with result of: glances --issue
```
- Glances test: ` To be completed with result of: glances --issue`
**Additional context**
Add any other context about the problem here.

View File

@ -123,19 +123,12 @@ jobs:
- name: Retrieve Repository Docker metadata
id: docker_meta
uses: crazy-max/ghaction-docker-meta@v5.0.0
uses: docker/metadata-action@v5
with:
images: ${{ env.DEFAULT_DOCKER_IMAGE }}
labels: |
org.opencontainers.image.url=https://nicolargo.github.io/glances/
- name: Cache Docker layers
uses: actions/cache@v4
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ env.NODE_ENV }}-${{ matrix.os }}-${{ matrix.tag.tag }}
restore-keys: ${{ runner.os }}-buildx-${{ env.NODE_ENV }}-${{ matrix.os }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
@ -166,5 +159,7 @@ jobs:
platforms: ${{ matrix.os != 'ubuntu' && env.DOCKER_PLATFORMS || env.DOCKER_PLATFORMS_UBUNTU }}
target: ${{ matrix.tag.target }}
labels: ${{ steps.docker_meta.outputs.labels }}
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache,mode=max
# GHA default behaviour overwrites last build cache. Causes alpine and ubuntu cache to overwrite each other.
# Use `scope` with the os name to prevent that
cache-from: 'type=gha,scope=${{ matrix.os }}'
cache-to: 'type=gha,mode=max,scope=${{ matrix.os }}'

View File

@ -15,13 +15,13 @@ jobs:
uses: ./.github/workflows/test.yml
needs: [quality]
webui:
if: github.event_name != 'pull_request'
if: github.event_name != 'pull_request' && !contains(github.ref, 'refs/tags/')
uses: ./.github/workflows/webui.yml
needs: [quality, test]
cyber:
if: github.event_name != 'pull_request'
uses: ./.github/workflows/cyber.yml
needs: [quality, test, webui]
needs: [quality, test]
build:
if: github.event_name != 'pull_request'
uses: ./.github/workflows/build.yml
@ -30,4 +30,4 @@ jobs:
PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
needs: [quality, test, webui, cyber]
needs: [quality, test]

.github/workflows/inactive_issues.yml (new file, 22 lines)
View File

@ -0,0 +1,22 @@
name: Label inactive issues
on:
schedule:
- cron: "30 1 * * *"
jobs:
close-issues:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- uses: actions/stale@v5
with:
days-before-issue-stale: 90
days-before-issue-close: -1
stale-issue-label: "inactive"
stale-issue-message: "This issue is stale because it has been open for 3 months with no activity."
close-issue-message: "This issue was closed because it has been inactive for 30 days since being marked as stale."
days-before-pr-stale: -1
days-before-pr-close: -1
repo-token: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/needs_contributor.yml (new file, 22 lines)
View File

@ -0,0 +1,22 @@
name: Add a message when needs contributor tag is used
on:
issues:
types:
- labeled
jobs:
add-comment:
if: github.event.label.name == 'needs contributor'
runs-on: ubuntu-latest
permissions:
issues: write
steps:
- name: Add comment
run: gh issue comment "$NUMBER" --body "$BODY"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_REPO: ${{ github.repository }}
NUMBER: ${{ github.event.issue.number }}
BODY: >
This issue is available for anyone to work on.
**Make sure to reference this issue in your pull request.**
:sparkles: Thank you for your contribution ! :sparkles:

View File

@ -7,8 +7,32 @@ on:
jobs:
source-code-checks:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- name: Check formatting with Ruff
uses: chartboost/ruff-action@v1
with:
args: 'format --check'
- name: Check linting with Ruff
uses: chartboost/ruff-action@v1
with:
args: 'check'
- name: Static type check
run: |
echo "Skipping static type check for the moment, too much error...";
# pip install pyright
# pyright glances
test-linux:
needs: source-code-checks
# https://github.com/actions/runner-images?tab=readme-ov-file#available-images
runs-on: ubuntu-22.04
strategy:
@ -17,36 +41,22 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: 'pip'
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: 'pip'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install flake8
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Install dependencies
run: |
python -m pip install --upgrade pip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Lint with flake8
run: |
# Stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics --exclude=.git,./docs,./glances/outputs/static
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics --exclude=.git,./docs,./glances/outputs/static
- name: Static type check
run: |
echo "Skipping static type check for the moment, too much error...";
# pip install pyright
# pyright glances
- name: Unitary tests
run: |
python ./unitest.py
- name: Unitary tests
run: |
python ./unittest-core.py
# Error appear with h11, not related to Glances
# Should be tested if correction is done
@ -80,34 +90,36 @@ jobs:
# - name: Unitary tests
# run: |
# python ./unitest.py
# python ./unittest-core.py
test-macos:
needs: source-code-checks
# https://github.com/actions/runner-images?tab=readme-ov-file#available-images
runs-on: macos-14
strategy:
matrix:
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
# Only test the latest stable version
python-version: ["3.12"]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: 'pip'
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: 'pip'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Install dependencies
run: |
python -m pip install --upgrade pip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Unitary tests
run: |
python ./unitest.py
- name: Unitary tests
run: |
python ./unittest-core.py
# Error when trying to implement #2749
# pkg: No packages available to install matching 'py-pip' have been found in the repositories
@ -128,4 +140,4 @@ jobs:
# run: |
# set -e -x
# python3 -m pip install --user -r requirements.txt
# python ./unitest.py
# python ./unittest-core.py

.pre-commit-config.yaml (new file, 22 lines)
View File

@ -0,0 +1,22 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
- id: check-ast
- id: check-docstring-first
- id: check-json
- id: check-merge-conflict
- id: check-shebang-scripts-are-executable
- id: check-toml
- id: check-yaml
- id: debug-statements
- id: detect-private-key
- id: mixed-line-ending
- id: requirements-txt-fixer
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.4.4
hooks:
- id: ruff-format
- id: ruff
args: [--fix, --exit-non-zero-on-fix]

.readthedocs.yaml (new file, 34 lines)
View File

@ -0,0 +1,34 @@
# Read the Docs configuration file for Glances projects
# Required
version: 2
# Set the OS, Python version and other tools you might need
build:
os: ubuntu-22.04
tools:
python: "3.12"
# You can also specify other tool versions:
# nodejs: "20"
# rust: "1.70"
# golang: "1.20"
# Build documentation in the "docs/" directory with Sphinx
sphinx:
configuration: docs/conf.py
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
# builder: "dirhtml"
# Fail on all warnings to avoid broken references
# fail_on_warning: true
# Optionally build your docs in additional formats such as PDF and ePub
# formats:
# - pdf
# - epub
# Optional but recommended, declare the Python requirements required
# to build your documentation
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
python:
install:
- requirements: doc-requirements.txt

View File

@ -64,3 +64,7 @@ http://www.macports.org/ports.php?by=name&substr=glances
John Kirkham for the conda package (at conda-forge)
https://github.com/conda-forge/glances-feedstock
Rui Chen for the Homebrew package
https://chenrui.dev/
https://formulae.brew.sh/formula/glances

View File

@ -6,7 +6,7 @@ In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
education, socio-economic status, nationality, personal appearance, race,
education, socioeconomic status, nationality, personal appearance, race,
religion, or sexual identity and orientation.
## Our Standards

View File

@ -10,7 +10,6 @@ the developers managing and developing this open source project. In return,
they should reciprocate that respect in addressing your issue or assessing
patches and features.
## Using the issue tracker
The [issue tracker](https://github.com/nicolargo/glances/issues) is
@ -24,7 +23,6 @@ restrictions:
* Please **do not** derail or troll issues. Keep the discussion on topic and
respect the opinions of others.
## Bug reports
A bug is a _demonstrable problem_ that is caused by the code in the repository.
@ -65,19 +63,17 @@ Example:
> causing the bug, and potential solutions (and your opinions on their
> merits).
>
> You can also run Glances in debug mode (-d) and paste/bin the glances.conf file (https://glances.readthedocs.io/en/latest/config.html).
> You can also run Glances in debug mode (-d) and paste/bin the glances.conf file (<https://glances.readthedocs.io/en/latest/config.html>).
>
> Glances 3.2.0 or higher have also a --issue option to run a simple test. Please use it and copy/paste the output.
## Feature requests
Feature requests are welcome. But take a moment to find out whether your idea
fits with the scope and aims of the project. It's up to *you* to make a strong
fits with the scope and aims of the project. It's up to _you* to make a strong
case to convince the project's developers of the merits of this feature. Please
provide as much detail and context as possible.
## Pull requests
Good pull requests—patches, improvements, new features—are a fantastic
@ -133,6 +129,7 @@ included in the project:
5. Test you code using the Makefile:
* make format ==> Format your code thanks to the Ruff linter
* make run ==> Run Glances
* make run-webserver ==> Run a Glances Web Server
* make test ==> Run unit tests

View File

@ -3,7 +3,9 @@ include CONTRIBUTING.md
include COPYING
include NEWS.rst
include README.rst
include SECURITY.md
include conf/glances.conf
include requirements.txt
recursive-include docs *
recursive-include glances *.py
recursive-include glances/outputs/static *

View File

@ -52,6 +52,7 @@ venv-dev-python: ## Install Python 3 venv
venv-dev: venv-python ## Install Python 3 dev dependencies
./venv-dev/bin/pip install -r dev-requirements.txt
./venv-dev/bin/pip install -r doc-requirements.txt
./venv-dev/bin/pre-commit install --hook-type pre-commit
venv-dev-upgrade: ## Upgrade Python 3 dev dependencies
./venv-dev/bin/pip install --upgrade pip
@ -62,44 +63,37 @@ venv-dev-upgrade: ## Upgrade Python 3 dev dependencies
# Tests
# ===================================================================
test: ## Run unit tests
./venv/bin/python ./unitest.py
./venv/bin/python ./unitest-restful.py
./venv/bin/python ./unitest-xmlrpc.py
./venv-dev/bin/python -m black ./glances --check --exclude outputs/static
test-core: ## Run core unit tests
./venv/bin/python ./unittest-core.py
test-with-upgrade: venv-upgrade venv-dev-upgrade ## Upgrade deps and run unit tests
./venv/bin/python ./unitest.py
./venv/bin/python ./unitest-restful.py
./venv/bin/python ./unitest-xmlrpc.py
./venv/bin-dev/python -m black ./glances --check --exclude outputs/static
test-restful: ## Run Restful unit tests
./venv/bin/python ./unittest-restful.py
test-min: ## Run unit tests in minimal environment
./venv-min/bin/python ./unitest.py
test-xmlrpc: ## Run XMLRPC unit tests
./venv/bin/python ./unittest-xmlrpc.py
test: test-core test-restful test-xmlrpc ## Run unit tests
test-with-upgrade: venv-upgrade venv-dev-upgrade test ## Upgrade deps and run unit tests
test-min: ## Run core unit tests in minimal environment
./venv-min/bin/python ./unittest-core.py
test-min-with-upgrade: venv-min-upgrade ## Upgrade deps and run unit tests in minimal environment
./venv-min/bin/python ./unitest.py
test-restful-api: ## Run unit tests of the RESTful API
./venv/bin/python ./unitest-restful.py
./venv-min/bin/python ./unittest-core.py
# ===================================================================
# Linters, profilers and cyber security
# ===================================================================
format: ## Format the code
@git ls-files 'glances/*.py' | xargs ./venv-dev/bin/python -m autopep8 --in-place --jobs 0 --global-config=.flake8
@git ls-files 'glances/*.py' | xargs ./venv-dev/bin/python -m autoflake --in-place --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys --exclude="compat.py,globals.py"
./venv-dev/bin/python -m black ./glances --exclude outputs/static
./venv-dev/bin/python -m ruff format .
flake8: ## Run flake8 linter.
@git ls-files 'glances/ *.py' | xargs ./venv-dev/bin/python -m flake8 --config=.flake8
ruff: ## Run Ruff (fastest) linter.
./venv-dev/bin/python -m ruff check . --config=./pyproject.toml
lint: ## Lint the code.
./venv-dev/bin/python -m ruff check . --fix
codespell: ## Run codespell to fix common misspellings in text files
./venv-dev/bin/codespell -S .git,./docs/_build,./Glances.egg-info,./venv*,./glances/outputs,*.svg -L hart,bu,te,statics
./venv-dev/bin/codespell -S .git,./docs/_build,./Glances.egg-info,./venv*,./glances/outputs,*.svg -L hart,bu,te,statics -w
semgrep: ## Run semgrep to find bugs and enforce code standards
./venv-dev/bin/semgrep scan --config=auto
@ -191,7 +185,7 @@ flatpak: venv-dev-upgrade ## Generate FlatPack JSON file
rm -rf ./flatpak-builder-tools
@echo "Now follow: https://github.com/flathub/flathub/wiki/App-Submission"
# Snap package is automaticaly build on the Snapcraft.io platform
# Snap package is automatically build on the Snapcraft.io platform
# https://snapcraft.io/glances
# But you can try an offline build with the following command
snapcraft:

NEWS.rst (150 changed lines)
View File

@ -2,11 +2,124 @@
Glances ChangeLog
==============================================================================
===============
Version 4.2.0
===============
Under development, see roadmap here: https://github.com/nicolargo/glances/milestone/73
Contributors are welcome !
===============
Version 4.1.1
===============
Bug corrected:
* Sensors data is not exported using InfluxDB2 exporter #2856
===============
Version 4.1.0
===============
Enhancements:
* Call process_iter.clear_cache() (PsUtil 6+) when Glances user force a refresh (F5 or CTRL-R) #2753
* PsUtil 6+ no longer check PID reused #2755
* Add support for automatically hiding network interfaces that are down or that don't have any IP addresses #2799
Bug corrected:
* API: Network module is disabled but appears in endpoint "all" #2815
* API is not compatible with requests containing special/encoding char #2820
* 'j' hot key crashes Glances #2831
* Raspberry PI - CPU info is not correct #2616
* Graph export is broken if there is no graph section in Glances configuration file #2839
* Glances API status check returns Error 405 - Method Not Allowed #2841
* Rootless podman containers cause glances to fail with KeyError #2827
* --export-process-filter Filter using complete command #2824
* Exception when Glances is ran with limited plugin list #2822
* Disable separator option do not work #2823
Continuous integration and documentation:
* test test_107_fs_plugin_method fails on aarch64-linux #2819
Thanks to all contributors and bug reporters !
Special thanks to:
* Bharath Vignesh J K
* RazCrimson
* Vadim Small
===============
Version 4.0.8
===============
* Make CORS option configurable security webui #2812
* When Glances is installed via venv, default configuration file is not used documentation packaging #2803
* GET /1272f6e9e8f9d6bfd6de.png results in 404 bug webui #2781 by Emporea was closed May 25, 2024
* Screen frequently flickers when outputting to local display bug needs test #2490
* Retire ujson for being in maintenance mode dependencies enhancement #2791
===============
Version 4.0.7
===============
* cpu_hz_current not available on NetBSD #2792
* SensorType change in REST API breaks compatibility in 4.0.4 #2788
===============
Version 4.0.6
===============
* No GPU info on Web View #2796
===============
Version 4.0.5
===============
* SensorType change in REST API breaks compatibility in 4.0.4 #2788
* Please make pydantic optional dependency, not required one #2777
* Update the Grafana dashboard #2780
* 4.0.4 - On Glances startup "ERROR -- Can not init battery class #2776
* In codeSpace (with Python 3.8), an error occurs in ./unittest-restful.py #2773
Use Ruff as default Linter.
===============
Version 4.0.4
===============
Hostfix release for support sensors plugin on python 3.8
===============
Version 4.0.3
===============
Additional fixes for Sensor plugin
===============
Version 4.0.2
===============
* hotfix: plugin(sensors) - race conditions btw fan_speed & temperature… #2766
* fix: include requirements.txt and SECURITY.md for pypi dist #2761
Thanks to RazCrimson for the sensors patch !
===============
Version 4.0.1
===============
Correct issue with CI (miss pydantic dep).
===============
Version 4.0.0
===============
See release note in Wiki format here: https://github.com/nicolargo/glances/wiki/Glances-4.0-Release-Note
See release note in Wiki format: https://github.com/nicolargo/glances/wiki/Glances-4.0-Release-Note
**BREAKING CHANGES:**
@ -27,6 +140,26 @@ Glances version 4.x and higher:
* Alert data model change from a list of list to a list of dict #2633
* Docker memory usage uses the same algorithm than docker stats #2637
Special notes for package maintainers:
Minimal requirements for Glances version 4 are:
* psutil
* defusedxml
* packaging
* ujson
* pydantic
* fastapi (for WebUI / RestFull API)
* uvicorn (for WebUI / RestFull API)
* jinja2 (for WebUI / RestFull API)
Majors changes between Glances version 3 and version 4:
* Bottle has been replaced by FastAPI and Uvicorn
* CouchDB has been replaced by PyCouchDB
* nvidia-ml-py has been replaced by py3nvml
* pysnmp has been replaced by pysnmp-lextudio
Enhancements:
* Export individual processes stats #794
@ -48,6 +181,7 @@ Enhancements:
* [WebUI] Added smart plugin support #2435
* No more threshold display in the WebUI cpu/mem and memswap plugins #2420
* Refactor Glances curses code #2580
* Hide password in the Glances browser form #503
* Replace Bottle by FastAPI #2181
* Replace py3nvml with nvidia-ml-py #2688
@ -92,7 +226,7 @@ Many thinks to the contributors:
* Christoph Zimmermann
* RazCrimson
* Robin Candau
* Github GPG acces
* Github GPG access
* Continuous Integration
* Georgiy Timchenko
* turbocrime
@ -266,7 +400,7 @@ Documentation and CI:
* Update Makefile with comments
* Update Python minimal requirement for py3nvlm
* Update security policy (user can open private issue directly in Github)
* Add a simple run script. Entry point for IDE debuger
* Add a simple run script. Entry point for IDE debugger
Cyber security update:
@ -295,7 +429,7 @@ And also a big thanks to @RazCrimson (https://github.com/RazCrimson) for the sup
Version 3.3.0.4
===============
Refactor the Docker images factory, from now, only Alpine image wll be provided.
Refactor the Docker images factory, from now, only Alpine image will be provided.
The following Docker images (nicolargo/glances) are availables:
@ -343,9 +477,9 @@ Bug corrected:
* Correct issue with the regexp filter (use fullmatch instead of match)
* Errors when running Glances as web service #1702
* Apply alias to Duplicate sensor name #1686
* Make the hide function in sensors section compliant with lower/upercase #1590
* Make the hide function in sensors section compliant with lower/uppercase #1590
* Web UI truncates the days part of CPU time counter of the process list #2108
* Correct alignement issue with the diskio plugin (Console UI)
* Correct alignment issue with the diskio plugin (Console UI)
Documentation and CI:
@ -612,7 +746,7 @@ Bugs corrected:
* Docker containers information missing with Docker 20.10.x #1878
* Get system sensors temperatures thresholds #1864
Contibutors for this version:
Contributors for this version:
* Nicolargo
* Markus Pöschl
@ -1219,7 +1353,7 @@ Enhancements and new features:
* Add ZeroMQ exporter (issue #939)
* Add CouchDB exporter (issue #928)
* Add hotspot Wifi information (issue #937)
* Add default interface speed and automatic rate thresolds (issue #718)
* Add default interface speed and automatic rate thresholds (issue #718)
* Highlight max stats in the processes list (issue #878)
* Docker alerts and actions (issue #875)
* Glances API returns the processes PPID (issue #926)

View File

@ -2,7 +2,7 @@
Glances - An eye on your system
===============================
| |pypi| |test| |contibutors| |quality|
| |pypi| |test| |contributors| |quality|
| |starts| |docker| |pypistat|
| |sponsors| |twitter|
@ -25,9 +25,9 @@ Glances - An eye on your system
:target: https://github.com/nicolargo/glances/actions
:alt: Linux tests (GitHub Actions)
.. |contibutors| image:: https://img.shields.io/github/contributors/nicolargo/glances
.. |contributors| image:: https://img.shields.io/github/contributors/nicolargo/glances
:target: https://github.com/nicolargo/glances/issues?q=is%3Aissue+is%3Aopen+label%3A%22needs+contributor%22
:alt: Contibutors
:alt: Contributors
.. |quality| image:: https://scrutinizer-ci.com/g/nicolargo/glances/badges/quality-score.png?b=develop
:target: https://scrutinizer-ci.com/g/nicolargo/glances/?branch=develop
@ -86,8 +86,7 @@ Requirements
- ``psutil`` (better with latest version)
- ``defusedxml`` (in order to monkey patch xmlrpc)
- ``packaging`` (for the version comparison)
- ``ujson`` (an optimized alternative to the standard json module)
- ``pydantic`` (for the data validation support)
- ``orjson`` (an optimized alternative to the standard json module)
*Note for Python 2 users*
@ -110,14 +109,12 @@ Optional dependencies:
- ``jinja2`` (for templating, used under the hood by FastAPI)
- ``kafka-python`` (for the Kafka export module)
- ``netifaces`` (for the IP plugin)
- ``orjson`` (fast JSON library, used under the hood by FastAPI)
- ``nvidia-ml-py`` (for the GPU plugin)
- ``pycouchdb`` (for the CouchDB export module)
- ``pika`` (for the RabbitMQ/ActiveMQ export module)
- ``podman`` (for the Containers Podman monitoring support)
- ``potsdb`` (for the OpenTSDB export module)
- ``prometheus_client`` (for the Prometheus export module)
- ``py-cpuinfo`` (for the Quicklook CPU info module)
- ``pygal`` (for the graph export module)
- ``pymdstat`` (for RAID support) [Linux-only]
- ``pymongo`` (for the MongoDB export module)
@ -144,7 +141,7 @@ stable version.
To install Glances, simply use the ``pip`` command line.
Warning: on modern Linux operating systems, you may have an externally-managed-environment
error message when you try to use ``pip``. In this case, go to the the PipX section bellow.
error message when you try to use ``pip``. In this case, go to the the PipX section below.
.. code-block:: console
@ -325,7 +322,7 @@ Start Termux on your device and enter:
$ apt update
$ apt upgrade
$ apt install clang python
$ pip install fastapi uvicorn orjson jinja2
$ pip install fastapi uvicorn jinja2
$ pip install glances
And start Glances:

View File

@ -2,13 +2,10 @@
## Supported Versions
Use this section to tell people about which versions of your project are
currently being supported with security updates.
| Version | Support security updates |
| ------- | ------------------------ |
| 3.x | :white_check_mark: |
| < 3.0 | :x: |
| 4.x | :white_check_mark: |
| < 4.0 | :x: |
## Reporting a Vulnerability
@ -31,4 +28,3 @@ If there are any vulnerabilities in {{cookiecutter.project_name}}, don't hesitat
4. Please do not disclose the vulnerability publicly until a fix is released!
Once we have either a) published a fix, or b) declined to address the vulnerability for whatever reason, you are free to publicly disclose it.

File diff suppressed because it is too large

View File

@ -23,12 +23,16 @@ history_size=1200
##############################################################################
[outputs]
# Options for all UIs
#--------------------
# Separator in the Curses and WebUI interface (between top and others plugins)
separator=True
#separator=True
# Set the the Curses and WebUI interface left menu plugin list (comma-separated)
#left_menu=network,wifi,connections,ports,diskio,fs,irq,folders,raid,smart,sensors,now
# Limit the number of processes to display (in the WebUI)
max_processes_display=25
# Options for WebUI
#------------------
# Set URL prefix for the WebUI and the API
# Example: url_prefix=/glances/ => http://localhost/glances/
# Note: The final / is mandatory
@ -41,9 +45,22 @@ max_processes_display=25
# then configure this folder with the webui_root_path key
# Default is folder where glances_restfull_api.py is hosted
#webui_root_path=
# CORS options
# Comma separated list of origins that should be permitted to make cross-origin requests.
# Default is *
#cors_origins=*
# Indicate that cookies should be supported for cross-origin requests.
# Default is True
#cors_credentials=True
# Comma separated list of HTTP methods that should be allowed for cross-origin requests.
# Default is *
#cors_methods=*
# Comma separated list of HTTP request headers that should be supported for cross-origin requests.
# Default is *
#cors_headers=*
##############################################################################
# plugins
# Plugins
##############################################################################
[quicklook]
@ -199,6 +216,10 @@ tx_critical=90
hide=docker.*,lo
# Define the list of wireless network interfaces to be show (comma-separated)
#show=docker.*
# Automatically hide interface not up (default is False)
hide_no_up=True
# Automatically hide interface with no IP address (default is False)
hide_no_ip=True
# It is possible to overwrite the bitrate thresholds per interface
# WLAN 0 Default limits (in bits per second aka bps) for interface bitrate
#wlan0_rx_careful=4000000
@ -317,7 +338,7 @@ disable=True
[raid]
# Documentation: https://glances.readthedocs.io/en/latest/aoa/raid.html
# This plugin is disabled by default
disable=False
disable=True
[smart]
# Documentation: https://glances.readthedocs.io/en/latest/aoa/smart.html
@ -354,6 +375,8 @@ temperature_hdd_critical=60
battery_careful=80
battery_warning=90
battery_critical=95
# Fan speed threshold in RPM
#fan_speed_careful=100
# Sensors alias
#alias=core 0:CPU Core 0,core 1:CPU Core 1
@ -763,13 +786,13 @@ refresh=3
countmax=20
[amp_conntrack]
# Use comma separated for multiple commands (no space around the comma)
# Use && separator for multiple commands
# If the regex key is not defined, the AMP will be executed every refresh second
# and the process count will not be displayed (countmin and countmax will be ignore)
enable=false
refresh=30
one_line=false
command=sysctl net.netfilter.nf_conntrack_count;sysctl net.netfilter.nf_conntrack_max
command=sysctl net.netfilter.nf_conntrack_count && sysctl net.netfilter.nf_conntrack_max
[amp_nginx]
# Use the NGinx AMP

View File

@ -1,17 +1,14 @@
py-spy
gprof2dot
black
pyright
requirements-parser
flake8
autopep8
autoflake
ruff
codespell
memory-profiler
fonttools>=4.43.0 # not directly required, pinned by Snyk to avoid a vulnerability
gprof2dot
matplotlib
semgrep
setuptools>=65.5.1 # not directly required, pinned by Snyk to avoid a vulnerability
memory-profiler
numpy>=1.22.2 # not directly required, pinned by Snyk to avoid a vulnerability
pillow>=10.0.1 # not directly required, pinned by Snyk to avoid a vulnerability
fonttools>=4.43.0 # not directly required, pinned by Snyk to avoid a vulnerability
pre-commit
py-spy
pyright
requirements-parser
ruff
semgrep
setuptools>=65.5.1 # not directly required, pinned by Snyk to avoid a vulnerability

View File

@ -1,5 +1,7 @@
psutil
defusedxml
orjson
reuse
setuptools>=65.5.1 # not directly required, pinned by Snyk to avoid a vulnerability
sphinx
sphinx_rtd_theme
ujson
reuse
setuptools>=65.5.1 # not directly required, pinned by Snyk to avoid a vulnerability

View File

@ -27,7 +27,7 @@ services:
environment:
- TZ=${TZ}
- "GLANCES_OPT=-C /glances/conf/glances.conf -w"
# Uncomment for GPU compatibilty (Nvidia) inside the container
# Uncomment for GPU compatibility (Nvidia) inside the container
# deploy:
# resources:
# reservations:

View File

@ -15,7 +15,7 @@ services:
environment:
- TZ=${TZ}
- "GLANCES_OPT=-C /glances/conf/glances.conf -w"
# Uncomment for GPU compatibilty (Nvidia) inside the container
# Uncomment for GPU compatibility (Nvidia) inside the container
# deploy:
# resources:
# reservations:

View File

@ -23,12 +23,16 @@ history_size=1200
##############################################################################
[outputs]
# Options for all UIs
#--------------------
# Separator in the Curses and WebUI interface (between top and others plugins)
separator=True
#separator=True
# Set the the Curses and WebUI interface left menu plugin list (comma-separated)
#left_menu=network,wifi,connections,ports,diskio,fs,irq,folders,raid,smart,sensors,now
# Limit the number of processes to display (in the WebUI)
max_processes_display=25
# Options for WebUI
#------------------
# Set URL prefix for the WebUI and the API
# Example: url_prefix=/glances/ => http://localhost/glances/
# Note: The final / is mandatory
@ -41,9 +45,22 @@ max_processes_display=25
# then configure this folder with the webui_root_path key
# Default is folder where glances_restfull_api.py is hosted
#webui_root_path=
# CORS options
# Comma separated list of origins that should be permitted to make cross-origin requests.
# Default is *
#cors_origins=*
# Indicate that cookies should be supported for cross-origin requests.
# Default is True
#cors_credentials=True
# Comma separated list of HTTP methods that should be allowed for cross-origin requests.
# Default is *
#cors_methods=*
# Comma separated list of HTTP request headers that should be supported for cross-origin requests.
# Default is *
#cors_headers=*
##############################################################################
# plugins
# Plugins
##############################################################################
[quicklook]
@ -199,6 +216,10 @@ tx_critical=90
#hide=docker.*,lo
# Define the list of wireless network interfaces to be show (comma-separated)
#show=docker.*
# Automatically hide interface not up (default is False)
hide_no_up=True
# Automatically hide interface with no IP address (default is False)
hide_no_ip=True
# It is possible to overwrite the bitrate thresholds per interface
# WLAN 0 Default limits (in bits per second aka bps) for interface bitrate
#wlan0_rx_careful=4000000
@ -354,6 +375,8 @@ temperature_hdd_critical=60
battery_careful=80
battery_warning=90
battery_critical=95
# Fan speed threshold in RPM
#fan_speed_careful=100
# Sensors alias
#alias=core 0:CPU Core 0,core 1:CPU Core 1
@ -763,13 +786,13 @@ refresh=3
countmax=20
[amp_conntrack]
# Use comma separated for multiple commands (no space around the comma)
# Use && separator for multiple commands
# If the regex key is not defined, the AMP will be executed every refresh second
# and the process count will not be displayed (countmin and countmax will be ignore)
enable=false
refresh=30
one_line=false
command=sysctl net.netfilter.nf_conntrack_count;sysctl net.netfilter.nf_conntrack_max
command=sysctl net.netfilter.nf_conntrack_count && sysctl net.netfilter.nf_conntrack_max
[amp_nginx]
# Use the NGinx AMP

View File

@ -61,6 +61,9 @@ RUN apk add --no-cache \
RUN python${PYTHON_VERSION} -m venv venv-build
RUN /venv-build/bin/python${PYTHON_VERSION} -m pip install --upgrade pip
RUN python${PYTHON_VERSION} -m venv venv-build
RUN /venv-build/bin/python${PYTHON_VERSION} -m pip install --upgrade pip
RUN python${PYTHON_VERSION} -m venv --without-pip venv
COPY requirements.txt docker-requirements.txt webui-requirements.txt optional-requirements.txt ./
@ -105,6 +108,11 @@ COPY docker-bin.sh /usr/local/bin/glances
RUN chmod a+x /usr/local/bin/glances
ENV PATH="/venv/bin:$PATH"
# Copy binary and update PATH
COPY docker-bin.sh /usr/local/bin/glances
RUN chmod a+x /usr/local/bin/glances
ENV PATH="/venv/bin:$PATH"
# EXPOSE PORT (XMLRPC / WebUI)
EXPOSE 61209 61208

View File

@ -92,6 +92,11 @@ COPY docker-bin.sh /usr/local/bin/glances
RUN chmod a+x /usr/local/bin/glances
ENV PATH="/venv/bin:$PATH"
# Copy binary and update PATH
COPY docker-bin.sh /usr/local/bin/glances
RUN chmod a+x /usr/local/bin/glances
ENV PATH="/venv/bin:$PATH"
# EXPOSE PORT (XMLRPC / WebUI)
EXPOSE 61209 61208

View File

@ -1,10 +1,9 @@
# install with base requirements file
-r requirements.txt
docker>=6.1.1; python_version >= "3.7"
podman; python_version >= "3.6"
packaging; python_version >= "3.7"
docker>=6.1.1
podman
python-dateutil
requests
six
urllib3
requests

File diff suppressed because it is too large

Binary image changed (before: 87 KiB, after: 91 KiB)

File diff suppressed because one or more lines are too long

View File

@ -61,9 +61,11 @@ For example:
enable=false
refresh=30
one_line=false
command=sysctl net.netfilter.nf_conntrack_count;sysctl net.netfilter.nf_conntrack_max
command=sysctl net.netfilter.nf_conntrack_count && sysctl net.netfilter.nf_conntrack_max
For security reason, pipe is not directly allowed in a AMP command but you create a sheel
Note: for multiple command, please use the '&&'' separator.
For security reason, pipe is not directly allowed in a AMP command but you create a shell
script with your command:
.. code-block:: ini

View File

@ -3,7 +3,7 @@
CLOUD
=====
This plugin diplays information about the cloud provider if your host is running on OpenStack.
This plugin displays information about the cloud provider if your host is running on OpenStack.
The plugin use the standard OpenStack `metadata`_ service to retrieve the information.

View File

@ -53,7 +53,7 @@ To switch to per-CPU stats, just hit the ``1`` key:
.. image:: ../_static/per-cpu.png
In this case, Glances will show on line per logical CPU on the system.
If you have multiple core, it is possible to define the maximun number
If you have multiple cores, it is possible to define the maximum number
of CPUs to display. The top 'max_cpu_display' CPUs will be displayed and an
extra line with the mean of all other CPUs will be added.

View File

@ -57,7 +57,7 @@ Example:
**NOTE:** Setting low values for `public_refresh_interval` will result in frequent
HTTP requests to the online service defined in public_api. Recommended range: 120-600 seconds.
Glances uses online services in order to get the IP addresses and the additional informations.
Glances uses online services in order to get the IP addresses and the additional information.
Your IP address could be blocked if too many requests are done.

View File

@ -39,7 +39,7 @@ Trend Status
======== ==============================================================
``-`` Mean 15 lasts values equal mean 15 previous values
```` Mean 15 lasts values is lower mean 15 previous values
```` Mean 15 lasts values is higher mean 15 previous valuess
```` Mean 15 lasts values is higher mean 15 previous values
======== ==============================================================
Legend:

View File

@ -49,7 +49,7 @@ Trend Status
======== ==============================================================
``-`` Mean 15 lasts values equal mean 15 previous values
```` Mean 15 lasts values is lower mean 15 previous values
```` Mean 15 lasts values is higher mean 15 previous valuess
```` Mean 15 lasts values is higher mean 15 previous values
======== ==============================================================
Alerts are only set for used memory and used swap.
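
Both trend tables describe the same rule: the mean of the last 15 samples is compared with the mean of the 15 samples before them. A small illustrative sketch of that comparison (symbols simplified to text, not taken from the Glances code)::

    from statistics import mean

    def trend(history, window=15):
        """Compare the mean of the last `window` values with the mean
        of the `window` values that precede them."""
        if len(history) < 2 * window:
            return '-'  # not enough samples yet
        previous = mean(history[-2 * window:-window])
        last = mean(history[-window:])
        if last == previous:
            return '-'
        return 'down' if last < previous else 'up'

    # A steadily increasing metric is reported as an upward trend
    print(trend(list(range(30))))  # 'up'
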

View File

In this case, threshold values are defined in bps.
Additionally, you can define:
- a list of network interfaces to hide
- automatically hide interfaces not up
- automatically hide interfaces without IP address
- per-interface limit values
- aliases for interface name
@ -41,8 +43,12 @@ virtual docker interface (docker0, docker1, ...):
hide=docker.*,lo
# Define the list of network interfaces to show (comma-separated regexp)
#show=eth0,eth1
# Automatically hide interface not up (default is False)
hide_no_up=True
# Automatically hide interface with no IP address (default is False)
hide_no_ip=True
# WLAN 0 alias
wlan0_alias=Wireless IF
alias=wlan0:Wireless IF
# It is possible to overwrite the bitrate thresholds per interface
# WLAN 0 Default limits (in bits per second aka bps) for interface bitrate
wlan0_rx_careful=4000000
@ -58,4 +64,4 @@ Filtering is based on regular expression. Please be sure that your regular
expression works as expected. You can use an online tool like `regex101`_ in
order to test your regular expression.
.. _regex101: https://regex101.com/
.. _regex101: https://regex101.com/
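
A quick way to check a hide pattern such as ``hide=docker.*,lo`` before putting it into the configuration is to replay it in Python (illustrative sketch only, not the filtering code Glances itself runs)::

    import re

    hide_patterns = "docker.*,lo".split(",")

    def is_hidden(interface, patterns=hide_patterns):
        # Hidden if any pattern matches the full interface name
        return any(re.fullmatch(p, interface) for p in patterns)

    for name in ("docker0", "lo", "wlp0s20f3", "eth0"):
        print(name, "hidden" if is_hidden(name) else "shown")
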

View File

@ -8,7 +8,14 @@ RAID
*Dependency: this plugin uses the optional pymdstat Python lib*
This plugin is disabled by default; please use the --enable-plugin raid option
to enable it.
to enable it or enable it in the glances.conf file:
.. code-block:: ini
[raid]
# Documentation: https://glances.readthedocs.io/en/latest/aoa/raid.html
# This plugin is disabled by default
disable=False
In the terminal interface, hit the ``R`` key to enable/disable it.

View File

@ -27,8 +27,7 @@ There is no alert on this information.
.. note 3::
If a sensor has temperature and fan speed with the same unit name,
it is possible to alias it using:
unitname_temperature_core_alias=Alias for temp
unitname_fan_speed_alias=Alias for fan speed
alias=unitname_temperature_core_alias:Alias for temp,unitname_fan_speed_alias:Alias for fan speed
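
The new syntax packs every alias into a single comma-separated list of ``key:value`` pairs (the network plugin above uses the same format). A minimal parsing sketch, assuming well-formed input::

    def parse_aliases(raw):
        """Turn 'key1:Alias 1,key2:Alias 2' into a dict."""
        aliases = {}
        for pair in raw.split(","):
            key, _, alias = pair.partition(":")
            aliases[key.strip()] = alias.strip()
        return aliases

    print(parse_aliases("core 0:CPU Core 0,core 1:CPU Core 1"))
    # {'core 0': 'CPU Core 0', 'core 1': 'CPU Core 1'}
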
.. note 4::
If a sensor has multiple identical feature names (see #2280), then

View File

@ -141,7 +141,7 @@ Get plugin stats::
"refresh": 3.0,
"regex": True,
"result": None,
"timer": 0.3574647903442383},
"timer": 0.2482151985168457},
{"count": 0,
"countmax": 20.0,
"countmin": None,
@ -150,7 +150,7 @@ Get plugin stats::
"refresh": 3.0,
"regex": True,
"result": None,
"timer": 0.35736703872680664}]
"timer": 0.24815130233764648}]
Fields descriptions:
@ -178,7 +178,7 @@ Get a specific item when field matches the given value::
"refresh": 3.0,
"regex": True,
"result": None,
"timer": 0.3574647903442383}]}
"timer": 0.2482151985168457}]}
GET cloud
---------
@ -219,23 +219,7 @@ GET containers
Get plugin stats::
# curl http://localhost:61208/api/4/containers
[{"command": "tail -f /dev/null",
"cpu": {"total": 0.0},
"cpu_percent": 0.0,
"created": "2024-05-06T08:20:31.859934699Z",
"engine": "docker",
"id": "f8d78b334f789955ab6dd0739c0bbb7e26ae3f24ed9e42c4a0d218a30377d2c8",
"image": ["catthehacker/ubuntu:act-22.04"],
"io": {"cumulative_ior": 100855808, "cumulative_iow": 0},
"key": "name",
"memory": {"inactive_file": 1704656896,
"limit": 16422473728,
"usage": 1984933888},
"memory_usage": 1984933888,
"name": "act-test-test-freebsd-700362a4fd49fe930f7ab89909c5ca853cd3a832c27b6ac4e363947b0dd29bef",
"network": {},
"status": "running",
"uptime": "yesterday"}]
[]
Fields descriptions:
@ -256,35 +240,6 @@ Fields descriptions:
* **pod_name**: Pod name (only with Podman) (unit is *None*)
* **pod_id**: Pod ID (only with Podman) (unit is *None*)
Get a specific field::
# curl http://localhost:61208/api/4/containers/name
{"name": ["act-test-test-freebsd-700362a4fd49fe930f7ab89909c5ca853cd3a832c27b6ac4e363947b0dd29bef"]}
Get a specific item when field matches the given value::
# curl http://localhost:61208/api/4/containers/name/act-test-test-freebsd-700362a4fd49fe930f7ab89909c5ca853cd3a832c27b6ac4e363947b0dd29bef
{"act-test-test-freebsd-700362a4fd49fe930f7ab89909c5ca853cd3a832c27b6ac4e363947b0dd29bef": [{"command": "tail "
"-f "
"/dev/null",
"cpu": {"total": 0.0},
"cpu_percent": 0.0,
"created": "2024-05-06T08:20:31.859934699Z",
"engine": "docker",
"id": "f8d78b334f789955ab6dd0739c0bbb7e26ae3f24ed9e42c4a0d218a30377d2c8",
"image": ["catthehacker/ubuntu:act-22.04"],
"io": {"cumulative_ior": 100855808,
"cumulative_iow": 0},
"key": "name",
"memory": {"inactive_file": 1704656896,
"limit": 16422473728,
"usage": 1984933888},
"memory_usage": 1984933888,
"name": "act-test-test-freebsd-700362a4fd49fe930f7ab89909c5ca853cd3a832c27b6ac4e363947b0dd29bef",
"network": {},
"status": "running",
"uptime": "yesterday"}]}
GET core
--------
@ -310,19 +265,19 @@ Get plugin stats::
# curl http://localhost:61208/api/4/cpu
{"cpucore": 16,
"ctx_switches": 100084028,
"ctx_switches": 493080568,
"guest": 0.0,
"idle": 3.0,
"interrupts": 99754405,
"iowait": 0.0,
"idle": 86.5,
"interrupts": 420997918,
"iowait": 0.5,
"irq": 0.0,
"nice": 0.0,
"soft_interrupts": 30928307,
"soft_interrupts": 155707720,
"steal": 0.0,
"syscalls": 0,
"system": 1.0,
"total": 25.0,
"user": 0.0}
"system": 3.5,
"total": 12.9,
"user": 9.4}
Fields descriptions:
@ -355,7 +310,7 @@ Fields descriptions:
Get a specific field::
# curl http://localhost:61208/api/4/cpu/total
{"total": 25.0}
{"total": 12.9}
GET diskio
----------
@ -365,14 +320,14 @@ Get plugin stats::
# curl http://localhost:61208/api/4/diskio
[{"disk_name": "nvme0n1",
"key": "disk_name",
"read_bytes": 5494942208,
"read_count": 276248,
"write_bytes": 12631118848,
"write_count": 434825},
"read_bytes": 10167291392,
"read_count": 391026,
"write_bytes": 31230641152,
"write_count": 1527146},
{"disk_name": "nvme0n1p1",
"key": "disk_name",
"read_bytes": 7484416,
"read_count": 592,
"read_bytes": 7558144,
"read_count": 605,
"write_bytes": 1024,
"write_count": 2}]
@ -408,10 +363,10 @@ Get a specific item when field matches the given value::
# curl http://localhost:61208/api/4/diskio/disk_name/nvme0n1
{"nvme0n1": [{"disk_name": "nvme0n1",
"key": "disk_name",
"read_bytes": 5494942208,
"read_count": 276248,
"write_bytes": 12631118848,
"write_count": 434825}]}
"read_bytes": 10167291392,
"read_count": 391026,
"write_bytes": 31230641152,
"write_count": 1527146}]}
GET folders
-----------
@ -438,13 +393,13 @@ Get plugin stats::
# curl http://localhost:61208/api/4/fs
[{"device_name": "/dev/mapper/ubuntu--vg-ubuntu--lv",
"free": 906483245056,
"free": 897378041856,
"fs_type": "ext4",
"key": "mnt_point",
"mnt_point": "/",
"percent": 4.8,
"percent": 5.8,
"size": 1003736440832,
"used": 46190690304}]
"used": 55295893504}]
Fields descriptions:
@ -465,13 +420,13 @@ Get a specific item when field matches the given value::
# curl http://localhost:61208/api/4/fs/mnt_point//
{"/": [{"device_name": "/dev/mapper/ubuntu--vg-ubuntu--lv",
"free": 906483245056,
"free": 897378041856,
"fs_type": "ext4",
"key": "mnt_point",
"mnt_point": "/",
"percent": 4.8,
"percent": 5.8,
"size": 1003736440832,
"used": 46190690304}]}
"used": 55295893504}]}
GET gpu
-------
@ -544,7 +499,10 @@ GET load
Get plugin stats::
# curl http://localhost:61208/api/4/load
{"cpucore": 16, "min1": 0.52685546875, "min15": 1.25390625, "min5": 1.544921875}
{"cpucore": 16,
"min1": 0.40185546875,
"min15": 0.587890625,
"min5": 0.638671875}
Fields descriptions:
@ -556,7 +514,7 @@ Fields descriptions:
Get a specific field::
# curl http://localhost:61208/api/4/load/min1
{"min1": 0.52685546875}
{"min1": 0.40185546875}
GET mem
-------
@ -564,16 +522,16 @@ GET mem
Get plugin stats::
# curl http://localhost:61208/api/4/mem
{"active": 7115952128,
"available": 8999600128,
"buffers": 268685312,
"cached": 9109741568,
"free": 8999600128,
"inactive": 5429415936,
"percent": 45.2,
"shared": 1431875584,
"total": 16422473728,
"used": 7422873600}
{"active": 5094199296,
"available": 10908983296,
"buffers": 180162560,
"cached": 5800796160,
"free": 10908983296,
"inactive": 3735175168,
"percent": 33.6,
"shared": 622718976,
"total": 16422486016,
"used": 5513502720}
Fields descriptions:
@ -592,7 +550,7 @@ Fields descriptions:
Get a specific field::
# curl http://localhost:61208/api/4/mem/total
{"total": 16422473728}
{"total": 16422486016}
GET memswap
-----------
@ -600,13 +558,13 @@ GET memswap
Get plugin stats::
# curl http://localhost:61208/api/4/memswap
{"free": 4293652480,
"percent": 0.0,
"sin": 0,
"sout": 131072,
{"free": 3836997632,
"percent": 10.7,
"sin": 186925056,
"sout": 1518604288,
"time_since_update": 1,
"total": 4294963200,
"used": 1310720}
"used": 457965568}
Fields descriptions:
@ -631,15 +589,15 @@ Get plugin stats::
# curl http://localhost:61208/api/4/network
[{"alias": None,
"bytes_all": 0,
"bytes_all_gauge": 2153845053,
"bytes_all_gauge": 6286191015,
"bytes_recv": 0,
"bytes_recv_gauge": 2054902380,
"bytes_recv_gauge": 5977645732,
"bytes_sent": 0,
"bytes_sent_gauge": 98942673,
"bytes_sent_gauge": 308545283,
"interface_name": "wlp0s20f3",
"key": "interface_name",
"speed": 0,
"time_since_update": 0.37114882469177246}]
"time_since_update": 0.2501566410064697}]
Fields descriptions:
@ -668,15 +626,15 @@ Get a specific item when field matches the given value::
# curl http://localhost:61208/api/4/network/interface_name/wlp0s20f3
{"wlp0s20f3": [{"alias": None,
"bytes_all": 0,
"bytes_all_gauge": 2153845053,
"bytes_all_gauge": 6286191015,
"bytes_recv": 0,
"bytes_recv_gauge": 2054902380,
"bytes_recv_gauge": 5977645732,
"bytes_sent": 0,
"bytes_sent_gauge": 98942673,
"bytes_sent_gauge": 308545283,
"interface_name": "wlp0s20f3",
"key": "interface_name",
"speed": 0,
"time_since_update": 0.37114882469177246}]}
"time_since_update": 0.2501566410064697}]}
GET now
-------
@ -684,7 +642,7 @@ GET now
Get plugin stats::
# curl http://localhost:61208/api/4/now
{"custom": "2024-05-07 11:46:44 CEST", "iso": "2024-05-07T11:46:44+02:00"}
{"custom": "2024-06-29 19:17:41 CEST", "iso": "2024-06-29T19:17:41+02:00"}
Fields descriptions:
@ -694,7 +652,7 @@ Fields descriptions:
Get a specific field::
# curl http://localhost:61208/api/4/now/iso
{"iso": "2024-05-07T11:46:44+02:00"}
{"iso": "2024-06-29T19:17:41+02:00"}
GET percpu
----------
@ -705,7 +663,7 @@ Get plugin stats::
[{"cpu_number": 0,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 0.0,
"idle": 21.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
@ -713,12 +671,12 @@ Get plugin stats::
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 100.0,
"user": 0.0},
"total": 79.0,
"user": 1.0},
{"cpu_number": 1,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 1.0,
"idle": 23.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
@ -726,7 +684,7 @@ Get plugin stats::
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 99.0,
"total": 77.0,
"user": 0.0}]
Fields descriptions:
@ -761,7 +719,7 @@ Get plugin stats::
"port": 0,
"refresh": 30,
"rtt_warning": None,
"status": 0.008626,
"status": 0.006275,
"timeout": 3}]
Fields descriptions:
@ -789,7 +747,7 @@ Get a specific item when field matches the given value::
"port": 0,
"refresh": 30,
"rtt_warning": None,
"status": 0.008626,
"status": 0.006275,
"timeout": 3}]}
GET processcount
@ -798,7 +756,7 @@ GET processcount
Get plugin stats::
# curl http://localhost:61208/api/4/processcount
{"pid_max": 0, "running": 1, "sleeping": 287, "thread": 1710, "total": 432}
{"pid_max": 0, "running": 2, "sleeping": 277, "thread": 1508, "total": 412}
Fields descriptions:
@ -811,7 +769,7 @@ Fields descriptions:
Get a specific field::
# curl http://localhost:61208/api/4/processcount/total
{"total": 432}
{"total": 412}
GET processlist
---------------
@ -819,7 +777,78 @@ GET processlist
Get plugin stats::
# curl http://localhost:61208/api/4/processlist
[]
[{"cmdline": ["/snap/firefox/4336/usr/lib/firefox/firefox"],
"cpu_percent": 0.0,
"cpu_times": {"children_system": 3.08,
"children_user": 3.33,
"iowait": 0.0,
"system": 13.75,
"user": 34.7},
"gids": {"effective": 1000, "real": 1000, "saved": 1000},
"io_counters": [393136128, 195969024, 0, 0, 0],
"key": "pid",
"memory_info": {"data": 681967616,
"dirty": 0,
"lib": 0,
"rss": 443969536,
"shared": 222199808,
"text": 987136,
"vms": 3721211904},
"memory_percent": 2.7034246554842674,
"name": "firefox",
"nice": 0,
"num_threads": 120,
"pid": 793506,
"status": "S",
"time_since_update": 1,
"username": "nicolargo"},
{"cmdline": ["/snap/firefox/4336/usr/lib/firefox/firefox",
"-contentproc",
"-childID",
"2",
"-isForBrowser",
"-prefsLen",
"28218",
"-prefMapSize",
"244440",
"-jsInitLen",
"231800",
"-parentBuildID",
"20240527194810",
"-greomni",
"/snap/firefox/4336/usr/lib/firefox/omni.ja",
"-appomni",
"/snap/firefox/4336/usr/lib/firefox/browser/omni.ja",
"-appDir",
"/snap/firefox/4336/usr/lib/firefox/browser",
"{bc853380-6b8f-46ad-afe0-9da5ba832e62}",
"793506",
"true",
"tab"],
"cpu_percent": 0.0,
"cpu_times": {"children_system": 0.0,
"children_user": 0.0,
"iowait": 0.0,
"system": 2.54,
"user": 19.25},
"gids": {"effective": 1000, "real": 1000, "saved": 1000},
"io_counters": [1827840, 0, 0, 0, 0],
"key": "pid",
"memory_info": {"data": 412688384,
"dirty": 0,
"lib": 0,
"rss": 440922112,
"shared": 111878144,
"text": 987136,
"vms": 2946224128},
"memory_percent": 2.684868244493684,
"name": "Isolated Web Co",
"nice": 0,
"num_threads": 28,
"pid": 793778,
"status": "S",
"time_since_update": 1,
"username": "nicolargo"}]
Fields descriptions:
@ -843,7 +872,7 @@ GET psutilversion
Get plugin stats::
# curl http://localhost:61208/api/4/psutilversion
"5.9.8"
"6.0.0"
GET quicklook
-------------
@ -851,18 +880,18 @@ GET quicklook
Get plugin stats::
# curl http://localhost:61208/api/4/quicklook
{"cpu": 25.0,
{"cpu": 12.9,
"cpu_hz": 4475000000.0,
"cpu_hz_current": 1527240625.0000005,
"cpu_hz_current": 1666410624.9999998,
"cpu_log_core": 16,
"cpu_name": "13th Gen Intel(R) Core(TM) i7-13620H",
"cpu_phys_core": 10,
"load": 7.8,
"mem": 45.2,
"load": 3.7,
"mem": 33.6,
"percpu": [{"cpu_number": 0,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 0.0,
"idle": 21.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
@ -870,12 +899,12 @@ Get plugin stats::
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 100.0,
"user": 0.0},
"total": 79.0,
"user": 1.0},
{"cpu_number": 1,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 1.0,
"idle": 23.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
@ -883,25 +912,25 @@ Get plugin stats::
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 99.0,
"total": 77.0,
"user": 0.0},
{"cpu_number": 2,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 0.0,
"idle": 18.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
"nice": 0.0,
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 100.0,
"user": 0.0},
"system": 1.0,
"total": 82.0,
"user": 5.0},
{"cpu_number": 3,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 0.0,
"idle": 22.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
@ -909,25 +938,25 @@ Get plugin stats::
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 100.0,
"total": 78.0,
"user": 0.0},
{"cpu_number": 4,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 0.0,
"idle": 8.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
"nice": 0.0,
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 100.0,
"user": 0.0},
"system": 1.0,
"total": 92.0,
"user": 14.0},
{"cpu_number": 5,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 0.0,
"idle": 23.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
@ -935,64 +964,64 @@ Get plugin stats::
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 100.0,
"total": 77.0,
"user": 0.0},
{"cpu_number": 6,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 0.0,
"iowait": 0.0,
"idle": 3.0,
"iowait": 1.0,
"irq": 0.0,
"key": "cpu_number",
"nice": 0.0,
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 100.0,
"user": 0.0},
"system": 4.0,
"total": 97.0,
"user": 15.0},
{"cpu_number": 7,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 0.0,
"idle": 23.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
"nice": 0.0,
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 100.0,
"system": 1.0,
"total": 77.0,
"user": 0.0},
{"cpu_number": 8,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 0.0,
"idle": 19.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
"nice": 0.0,
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 100.0,
"user": 0.0},
"system": 1.0,
"total": 81.0,
"user": 3.0},
{"cpu_number": 9,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 1.0,
"iowait": 0.0,
"idle": 22.0,
"iowait": 1.0,
"irq": 0.0,
"key": "cpu_number",
"nice": 0.0,
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 99.0,
"total": 78.0,
"user": 0.0},
{"cpu_number": 10,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 0.0,
"idle": 22.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
@ -1000,12 +1029,12 @@ Get plugin stats::
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 100.0,
"total": 78.0,
"user": 0.0},
{"cpu_number": 11,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 0.0,
"idle": 23.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
@ -1013,25 +1042,25 @@ Get plugin stats::
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 100.0,
"total": 77.0,
"user": 0.0},
{"cpu_number": 12,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 0.0,
"idle": 21.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
"nice": 0.0,
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 100.0,
"user": 0.0},
"system": 1.0,
"total": 79.0,
"user": 1.0},
{"cpu_number": 13,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 0.0,
"idle": 23.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
@ -1039,12 +1068,12 @@ Get plugin stats::
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 100.0,
"total": 77.0,
"user": 0.0},
{"cpu_number": 14,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 0.0,
"idle": 22.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
@ -1052,12 +1081,12 @@ Get plugin stats::
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 100.0,
"total": 78.0,
"user": 0.0},
{"cpu_number": 15,
"guest": 0.0,
"guest_nice": 0.0,
"idle": 1.0,
"idle": 22.0,
"iowait": 0.0,
"irq": 0.0,
"key": "cpu_number",
@ -1065,9 +1094,9 @@ Get plugin stats::
"softirq": 0.0,
"steal": 0.0,
"system": 0.0,
"total": 99.0,
"total": 78.0,
"user": 0.0}],
"swap": 0.0}
"swap": 10.7}
Fields descriptions:
@ -1105,14 +1134,14 @@ Get plugin stats::
"label": "Ambient",
"type": "temperature_core",
"unit": "C",
"value": 35,
"value": 39,
"warning": 0},
{"critical": None,
"key": "label",
"label": "Ambient 3",
"type": "temperature_core",
"unit": "C",
"value": 29,
"value": 32,
"warning": 0}]
Fields descriptions:
@ -1173,7 +1202,7 @@ Get a specific item when field matches the given value::
"label": "Ambient",
"type": "temperature_core",
"unit": "C",
"value": 35,
"value": 39,
"warning": 0}]}
GET smart
@ -1204,7 +1233,7 @@ Fields descriptions:
* **platform**: Platform (32 or 64 bits) (unit is *None*)
* **linux_distro**: Linux distribution (unit is *None*)
* **os_version**: Operating system version (unit is *None*)
* **hr_name**: Human readable operating sytem name (unit is *None*)
* **hr_name**: Human readable operating system name (unit is *None*)
Get a specific field::
@ -1217,7 +1246,7 @@ GET uptime
Get plugin stats::
# curl http://localhost:61208/api/4/uptime
"1 day, 2:23:04"
"20 days, 2:20:27"
GET version
-----------
@ -1225,7 +1254,7 @@ GET version
Get plugin stats::
# curl http://localhost:61208/api/4/version
"4.0.0_rc02"
"4.2.0_beta01"
GET wifi
--------
@ -1234,8 +1263,8 @@ Get plugin stats::
# curl http://localhost:61208/api/4/wifi
[{"key": "ssid",
"quality_level": -61.0,
"quality_link": 49.0,
"quality_level": -59.0,
"quality_link": 51.0,
"ssid": "wlp0s20f3"}]
Get a specific field::
@ -1247,8 +1276,8 @@ Get a specific item when field matches the given value::
# curl http://localhost:61208/api/4/wifi/ssid/wlp0s20f3
{"wlp0s20f3": [{"key": "ssid",
"quality_level": -61.0,
"quality_link": 49.0,
"quality_level": -59.0,
"quality_link": 51.0,
"ssid": "wlp0s20f3"}]}
GET all stats
@ -1293,34 +1322,34 @@ GET stats history
History of a plugin::
# curl http://localhost:61208/api/4/cpu/history
{"system": [["2024-05-07T11:46:45.322880", 1.0],
["2024-05-07T11:46:46.376121", 0.0],
["2024-05-07T11:46:47.453787", 0.0]],
"user": [["2024-05-07T11:46:45.322868", 0.0],
["2024-05-07T11:46:46.376117", 0.0],
["2024-05-07T11:46:47.453775", 0.0]]}
{"system": [["2024-06-29T19:17:42.396270", 3.5],
["2024-06-29T19:17:43.447397", 0.7],
["2024-06-29T19:17:44.455830", 0.7]],
"user": [["2024-06-29T19:17:42.396268", 9.4],
["2024-06-29T19:17:43.447396", 4.0],
["2024-06-29T19:17:44.455825", 4.0]]}
Limit history to last 2 values::
# curl http://localhost:61208/api/4/cpu/history/2
{"system": [["2024-05-07T11:46:46.376121", 0.0],
["2024-05-07T11:46:47.453787", 0.0]],
"user": [["2024-05-07T11:46:46.376117", 0.0],
["2024-05-07T11:46:47.453775", 0.0]]}
{"system": [["2024-06-29T19:17:43.447397", 0.7],
["2024-06-29T19:17:44.455830", 0.7]],
"user": [["2024-06-29T19:17:43.447396", 4.0],
["2024-06-29T19:17:44.455825", 4.0]]}
History for a specific field::
# curl http://localhost:61208/api/4/cpu/system/history
{"system": [["2024-05-07T11:46:44.106241", 1.0],
["2024-05-07T11:46:45.322880", 1.0],
["2024-05-07T11:46:46.376121", 0.0],
["2024-05-07T11:46:47.453787", 0.0]]}
{"system": [["2024-06-29T19:17:41.318736", 3.5],
["2024-06-29T19:17:42.396270", 3.5],
["2024-06-29T19:17:43.447397", 0.7],
["2024-06-29T19:17:44.455830", 0.7]]}
Limit history for a specific field to last 2 values::
# curl http://localhost:61208/api/4/cpu/system/history/2
{"system": [["2024-05-07T11:46:46.376121", 0.0],
["2024-05-07T11:46:47.453787", 0.0]]}
{"system": [["2024-06-29T19:17:43.447397", 0.7],
["2024-06-29T19:17:44.455830", 0.7]]}
GET limits (used for thresholds)
--------------------------------
@ -1405,6 +1434,8 @@ All limits/thresholds::
"network": {"history_size": 1200.0,
"network_disable": ["False"],
"network_hide": ["docker.*", "lo"],
"network_hide_no_ip": ["True"],
"network_hide_no_up": ["True"],
"network_rx_careful": 70.0,
"network_rx_critical": 90.0,
"network_rx_warning": 80.0,
@ -1494,7 +1525,6 @@ All limits/thresholds::
"quicklook_swap_careful": 50.0,
"quicklook_swap_critical": 90.0,
"quicklook_swap_warning": 70.0},
"raid": {"history_size": 1200.0, "raid_disable": ["False"]},
"sensors": {"history_size": 1200.0,
"sensors_battery_careful": 80.0,
"sensors_battery_critical": 95.0,

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# Glances documentation build configuration file, created by
# sphinx-quickstart on Tue Mar 1 10:53:59 2016.
@ -12,8 +11,8 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import sys
from datetime import datetime
# If extensions (or modules to document with autodoc) are in another directory,
@ -27,7 +26,6 @@ sys.path.insert(0, os.path.abspath('..'))
# WARNING: Do not move this import before the sys.path.insert() call.
from glances import __version__
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
@ -125,8 +123,7 @@ html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
}
html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
@ -166,14 +163,7 @@ html_static_path = ['_static']
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'links.html',
'searchbox.html'
]
}
html_sidebars = {'**': ['about.html', 'navigation.html', 'links.html', 'searchbox.html']}
# Additional templates that should be rendered to pages, maps page names to
# template names.
@ -227,13 +217,10 @@ htmlhelp_basename = 'Glancesdoc'
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
# Latex figure (float) alignment
# 'figure_align': 'htbp',
}
@ -242,8 +229,7 @@ latex_elements = {
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Glances.tex', 'Glances Documentation',
'Nicolas Hennion', 'manual'),
(master_doc, 'Glances.tex', 'Glances Documentation', 'Nicolas Hennion', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
@ -271,10 +257,7 @@ latex_documents = [
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('glances', 'glances', 'An eye on your system',
'', 1)
]
man_pages = [('glances', 'glances', 'An eye on your system', '', 1)]
# If true, show URL addresses after external links.
# man_show_urls = False
@ -286,9 +269,15 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Glances', 'Glances Documentation',
author, 'Glances', 'One line description of project.',
'Miscellaneous'),
(
master_doc,
'Glances',
'Glances Documentation',
author,
'Glances',
'One line description of project.',
'Miscellaneous',
),
]
# Documents to append as an appendix to all manuals.

View File

@ -21,6 +21,7 @@ You can place your ``glances.conf`` file in the following locations:
``*BSD`` ~/.config/glances/, /usr/local/etc/glances/, /usr/share/docs/glances/
``macOS`` ~/.config/glances/, ~/Library/Application Support/glances/, /usr/local/etc/glances/, /usr/share/docs/glances/
``Windows`` %APPDATA%\\glances\\glances.conf
``All`` + <venv_root_folder>/share/doc/glances/
==================== =============================================================
- On Windows XP, ``%APPDATA%`` is: ``C:\Documents and Settings\<USERNAME>\Application Data``.
@ -59,17 +60,41 @@ than a second one concerning the user interface:
.. code-block:: ini
[outputs]
# Options for all UIs
#--------------------
# Separator in the Curses and WebUI interface (between top and others plugins)
separator=True
# Set the Curses and WebUI interface left menu plugin list (comma-separated)
#left_menu=network,wifi,connections,ports,diskio,fs,irq,folders,raid,smart,sensors,now
# Limit the number of processes to display (for the WebUI)
# Limit the number of processes to display (in the WebUI)
max_processes_display=25
# Set the URL prefix (for the WebUI and the API)
# Options for WebUI
#------------------
# Set URL prefix for the WebUI and the API
# Example: url_prefix=/glances/ => http://localhost/glances/
# The final / is mandatory
# Note: The final / is mandatory
# Default is no prefix (/)
#url_prefix=/glances/
# Set root path for WebUI statics files
# Why ? On Debian system, WebUI statics files are not provided.
# You can download it in a specific folder
# thanks to https://github.com/nicolargo/glances/issues/2021
# then configure this folder with the webui_root_path key
# Default is folder where glances_restfull_api.py is hosted
#webui_root_path=
# CORS options
# Comma separated list of origins that should be permitted to make cross-origin requests.
# Default is *
#cors_origins=*
# Indicate that cookies should be supported for cross-origin requests.
# Default is True
#cors_credentials=True
# Comma separated list of HTTP methods that should be allowed for cross-origin requests.
# Default is *
#cors_methods=*
# Comma separated list of HTTP request headers that should be supported for cross-origin requests.
# Default is *
#cors_headers=*
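
With ``url_prefix=/glances/`` set, the WebUI and the REST API both move under that prefix. A hedged illustration of a status check from Python (the /api/4/status route is assumed from the API documentation; adjust host and port as needed)::

    import requests

    # Default layout (no prefix) versus url_prefix=/glances/
    default_url = "http://localhost:61208/api/4/status"
    prefixed_url = "http://localhost:61208/glances/api/4/status"

    for url in (default_url, prefixed_url):
        try:
            print(url, requests.get(url, timeout=3).status_code)
        except requests.RequestException as exc:
            print(url, "unreachable:", exc)
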
Each plugin, export module, and application monitoring process (AMP) can
have a section. Below is an example for the CPU plugin:

View File

@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "GLANCES" "1" "May 07, 2024" "4.0.0_rc02" "Glances"
.TH "GLANCES" "1" "Jun 29, 2024" "4.2.0_beta01" "Glances"
.SH NAME
glances \- An eye on your system
.SH SYNOPSIS
@ -585,6 +585,15 @@ T} T{
%APPDATA%\eglances\eglances.conf
T}
_
T{
\fBAll\fP
T} T{
.INDENT 0.0
.IP \(bu 2
<venv_root_folder>/share/doc/glances/
.UNINDENT
T}
_
.TE
.INDENT 0.0
.IP \(bu 2
@ -632,17 +641,41 @@ than a second one concerning the user interface:
.nf
.ft C
[outputs]
# Options for all UIs
#\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-
# Separator in the Curses and WebUI interface (between top and others plugins)
separator=True
# Set the Curses and WebUI interface left menu plugin list (comma\-separated)
#left_menu=network,wifi,connections,ports,diskio,fs,irq,folders,raid,smart,sensors,now
# Limit the number of processes to display (for the WebUI)
# Limit the number of processes to display (in the WebUI)
max_processes_display=25
# Set the URL prefix (for the WebUI and the API)
# Options for WebUI
#\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-
# Set URL prefix for the WebUI and the API
# Example: url_prefix=/glances/ => http://localhost/glances/
# The final / is mandatory
# Note: The final / is mandatory
# Default is no prefix (/)
#url_prefix=/glances/
# Set root path for WebUI statics files
# Why ? On Debian system, WebUI statics files are not provided.
# You can download it in a specific folder
# thanks to https://github.com/nicolargo/glances/issues/2021
# then configure this folder with the webui_root_path key
# Default is folder where glances_restfull_api.py is hosted
#webui_root_path=
# CORS options
# Comma separated list of origins that should be permitted to make cross\-origin requests.
# Default is *
#cors_origins=*
# Indicate that cookies should be supported for cross\-origin requests.
# Default is True
#cors_credentials=True
# Comma separated list of HTTP methods that should be allowed for cross\-origin requests.
# Default is *
#cors_methods=*
# Comma separated list of HTTP request headers that should be supported for cross\-origin requests.
# Default is *
#cors_headers=*
.ft P
.fi
.UNINDENT

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -11,16 +10,16 @@
"""Init the Glances software."""
# Import system libs
import tracemalloc
import locale
import platform
import signal
import sys
import tracemalloc
# Global name
# Version should start and end with a numerical char
# See https://packaging.python.org/specifications/core-metadata/#version
__version__ = '4.0.0_rc02'
__version__ = '4.2.0_beta01'
__apiversion__ = '4'
__author__ = 'Nicolas Hennion <nicolas@nicolargo.com>'
__license__ = 'LGPLv3'
@ -44,11 +43,6 @@ try:
except locale.Error:
print("Warning: Unable to set locale. Expect encoding problems.")
# Check Python version
if sys.version_info < (3, 4):
print('Glances requires at least Python 3.4 to run.')
sys.exit(1)
# Check psutil version
psutil_min_version = (5, 3, 0)
psutil_version_info = tuple([int(num) for num in psutil_version.split('.')])
@ -56,11 +50,12 @@ if psutil_version_info < psutil_min_version:
print('psutil 5.3.0 or higher is needed. Glances cannot start.')
sys.exit(1)
# Trac malloc is only available on Python 3.4 or higher
def __signal_handler(signal, frame):
logger.debug("Signal {} catched".format(signal))
logger.debug(f"Signal {signal} caught")
end()
@ -103,20 +98,16 @@ def start(config, args):
from glances.webserver import GlancesWebServer as GlancesMode
# Init the mode
logger.info("Start {} mode".format(GlancesMode.__name__))
logger.info(f"Start {GlancesMode.__name__} mode")
mode = GlancesMode(config=config, args=args)
# Start the main loop
logger.debug("Glances started in {} seconds".format(start_duration.get()))
logger.debug(f"Glances started in {start_duration.get()} seconds")
if args.stop_after:
logger.info('Glances will be stopped in ~{} seconds'.format(args.stop_after * args.time))
logger.info(f'Glances will be stopped in ~{args.stop_after * args.time} seconds')
if args.memory_leak:
print(
'Memory leak detection, please wait ~{} seconds...'.format(
args.stop_after * args.time * args.memory_leak * 2
)
)
print(f'Memory leak detection, please wait ~{args.stop_after * args.time * args.memory_leak * 2} seconds...')
# First run without dump to fill the memory
mode.serve_n(args.stop_after)
# Then start the memory-leak loop
@ -133,7 +124,7 @@ def start(config, args):
snapshot_end = tracemalloc.take_snapshot()
snapshot_diff = snapshot_end.compare_to(snapshot_begin, 'filename')
memory_leak = sum([s.size_diff for s in snapshot_diff])
print("Memory consumption: {0:.1f}KB (see log for details)".format(memory_leak / 1000))
print(f"Memory consumption: {memory_leak / 1000:.1f}KB (see log for details)")
logger.info("Memory consumption (top 5):")
for stat in snapshot_diff[:5]:
logger.info(stat)
@ -165,12 +156,10 @@ def main():
signal.signal(sig, __signal_handler)
# Log Glances and psutil version
logger.info('Start Glances {}'.format(__version__))
logger.info(
'{} {} ({}) and psutil {} detected'.format(
platform.python_implementation(), platform.python_version(), sys.executable, psutil_version
)
)
logger.info(f'Start Glances {__version__}')
python_impl = platform.python_implementation()
python_ver = platform.python_version()
logger.info(f'{python_impl} {python_ver} ({sys.executable}) and psutil {psutil_version} detected')
# Share global var
global core

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# Glances - An eye on your system
#

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -10,8 +9,8 @@
"""Manage on alert actions."""
from glances.logger import logger
from glances.timer import Timer
from glances.secure import secure_popen
from glances.timer import Timer
try:
import chevron
@ -22,7 +21,7 @@ else:
chevron_tag = True
class GlancesActions(object):
class GlancesActions:
"""This class manage action if an alert is reached."""
def __init__(self, args=None):
@ -80,13 +79,13 @@ class GlancesActions(object):
else:
cmd_full = cmd
# Execute the action
logger.info("Action triggered for {} ({}): {}".format(stat_name, criticality, cmd_full))
logger.info(f"Action triggered for {stat_name} ({criticality}): {cmd_full}")
try:
ret = secure_popen(cmd_full)
except OSError as e:
logger.error("Action error for {} ({}): {}".format(stat_name, criticality, e))
logger.error(f"Action error for {stat_name} ({criticality}): {e}")
else:
logger.debug("Action result for {} ({}): {}".format(stat_name, criticality, ret))
logger.debug(f"Action result for {stat_name} ({criticality}): {ret}")
self.set(stat_name, criticality)

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -23,11 +22,11 @@ If the *one_line* var is true then the AMP will be displayed in one line.
"""
from glances.globals import u
from glances.timer import Timer
from glances.logger import logger
from glances.timer import Timer
class GlancesAmp(object):
class GlancesAmp:
"""Main class for Glances AMP."""
NAME = '?'
@ -38,7 +37,7 @@ class GlancesAmp(object):
def __init__(self, name=None, args=None):
"""Init AMP class."""
logger.debug("AMP - Init {} version {}".format(self.NAME, self.VERSION))
logger.debug(f"AMP - Init {self.NAME} version {self.VERSION}")
# AMP name (= module name without glances_)
if name is None:
@ -74,7 +73,7 @@ class GlancesAmp(object):
amp_section = 'amp_' + self.amp_name
if hasattr(config, 'has_section') and config.has_section(amp_section):
logger.debug("AMP - {}: Load configuration".format(self.NAME))
logger.debug(f"AMP - {self.NAME}: Load configuration")
for param, _ in config.items(amp_section):
try:
self.configs[param] = config.get_float_value(amp_section, param)
@ -82,9 +81,9 @@ class GlancesAmp(object):
self.configs[param] = config.get_value(amp_section, param).split(',')
if len(self.configs[param]) == 1:
self.configs[param] = self.configs[param][0]
logger.debug("AMP - {}: Load parameter: {} = {}".format(self.NAME, param, self.configs[param]))
logger.debug(f"AMP - {self.NAME}: Load parameter: {param} = {self.configs[param]}")
else:
logger.debug("AMP - {}: Can not find section {} in the configuration file".format(self.NAME, self.amp_name))
logger.debug(f"AMP - {self.NAME}: Can not find section {self.amp_name} in the configuration file")
return False
if self.enable():
@ -92,13 +91,12 @@ class GlancesAmp(object):
for k in ['refresh']:
if k not in self.configs:
logger.warning(
"AMP - {}: Can not find configuration key {} in section {} (the AMP will be disabled)".format(
self.NAME, k, self.amp_name
)
f"AMP - {self.NAME}: Can not find configuration key {k} in section {self.amp_name} "
f"(the AMP will be disabled)"
)
self.configs['enable'] = 'false'
else:
logger.debug("AMP - {} is disabled".format(self.NAME))
logger.debug(f"AMP - {self.NAME} is disabled")
# Init the count to 0
self.configs['count'] = 0
@ -109,16 +107,14 @@ class GlancesAmp(object):
"""Generic method to get the item in the AMP configuration"""
if key in self.configs:
return self.configs[key]
else:
return None
return None
def enable(self):
"""Return True|False if the AMP is enabled in the configuration file (enable=true|false)."""
ret = self.get('enable')
if ret is None:
return False
else:
return ret.lower().startswith('true')
return ret.lower().startswith('true')
def regex(self):
"""Return regular expression used to identified the current application."""
@ -133,8 +129,7 @@ class GlancesAmp(object):
ret = self.get('one_line')
if ret is None:
return False
else:
return ret.lower().startswith('true')
return ret.lower().startswith('true')
def time_until_refresh(self):
"""Return time in seconds until refresh."""
@ -193,5 +188,4 @@ class GlancesAmp(object):
# Call the children update method
if self.should_update():
return self.update(process_list)
else:
return self.result()
return self.result()

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -25,11 +24,9 @@ one_line=false
command=foo status
"""
from subprocess import check_output, STDOUT, CalledProcessError
from glances.globals import u, to_ascii
from glances.logger import logger
from glances.amps.amp import GlancesAmp
from glances.logger import logger
from glances.secure import secure_popen
class Amp(GlancesAmp):
@ -44,7 +41,7 @@ class Amp(GlancesAmp):
def __init__(self, name=None, args=None):
"""Init the AMP."""
self.NAME = name.capitalize()
super(Amp, self).__init__(name=name, args=args)
super().__init__(name=name, args=args)
def update(self, process_list):
"""Update the AMP"""
@ -54,7 +51,7 @@ class Amp(GlancesAmp):
try:
res = self.get('command')
except OSError as e:
logger.debug('{}: Error while executing command ({})'.format(self.NAME, e))
logger.debug(f'{self.NAME}: Error while executing command ({e})')
return self.result()
# No command found, use default message
if res is None:
@ -69,10 +66,7 @@ class Amp(GlancesAmp):
# Run command(s)
# Comma separated commands can be executed
try:
msg = ''
for cmd in res.split(';'):
msg += u(check_output(cmd.split(), stderr=STDOUT))
self.set_result(to_ascii(msg.rstrip()))
except CalledProcessError as e:
self.set_result(secure_popen(res).rstrip())
except Exception as e:
self.set_result(e.output)
return self.result()

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -46,8 +45,8 @@ status_url=http://localhost/nginx_status
import requests
from glances.logger import logger
from glances.amps.amp import GlancesAmp
from glances.logger import logger
class Amp(GlancesAmp):

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -35,11 +34,11 @@ one_line=true
systemctl_cmd=/usr/bin/systemctl --plain
"""
from subprocess import check_output, CalledProcessError
from subprocess import CalledProcessError, check_output
from glances.logger import logger
from glances.globals import iteritems, to_ascii
from glances.amps.amp import GlancesAmp
from glances.globals import iteritems, to_ascii
from glances.logger import logger
class Amp(GlancesAmp):
@ -62,7 +61,7 @@ class Amp(GlancesAmp):
try:
res = check_output(self.get('systemctl_cmd').split())
except (OSError, CalledProcessError) as e:
logger.debug('{}: Error while executing systemctl ({})'.format(self.NAME, e))
logger.debug(f'{self.NAME}: Error while executing systemctl ({e})')
else:
status = {}
# For each line
@ -79,7 +78,7 @@ class Amp(GlancesAmp):
# Build the output (string) message
output = 'Services\n'
for k, v in iteritems(status):
output += '{}: {}\n'.format(k, v)
output += f'{k}: {v}\n'
self.set_result(output, separator=' ')
return self.result()

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -34,11 +33,10 @@ one_line=true
service_cmd=/usr/bin/service --status-all
"""
from subprocess import check_output, STDOUT
from glances.logger import logger
from glances.globals import iteritems
from glances.amps.amp import GlancesAmp
from glances.globals import iteritems
from glances.logger import logger
from glances.secure import secure_popen
class Amp(GlancesAmp):
@ -59,9 +57,10 @@ class Amp(GlancesAmp):
# Get the systemctl status
logger.debug('{}: Update stats using service {}'.format(self.NAME, self.get('service_cmd')))
try:
res = check_output(self.get('service_cmd').split(), stderr=STDOUT).decode('utf-8')
except OSError as e:
logger.debug('{}: Error while executing service ({})'.format(self.NAME, e))
# res = check_output(self.get('service_cmd').split(), stderr=STDOUT).decode('utf-8')
res = secure_popen(self.get('service_cmd'))
except Exception as e:
logger.debug(f'{self.NAME}: Error while executing service ({e})')
else:
status = {'running': 0, 'stopped': 0, 'upstart': 0}
# For each line
@ -79,7 +78,7 @@ class Amp(GlancesAmp):
# Build the output (string) message
output = 'Services\n'
for k, v in iteritems(status):
output += '{}: {}\n'.format(k, v)
output += f'{k}: {v}\n'
self.set_result(output, separator=' ')
return self.result()

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -13,12 +12,12 @@ import os
import re
import threading
from glances.globals import listkeys, iteritems, amps_path
from glances.globals import amps_path, iteritems, listkeys
from glances.logger import logger
from glances.processes import glances_processes
class AmpsList(object):
class AmpsList:
"""This class describes the optional application monitoring process list.
The AMP list is a list of processes with a specific monitoring action.
@ -57,9 +56,9 @@ class AmpsList(object):
try:
amp = __import__(os.path.basename(amp_module))
except ImportError as e:
logger.warning("Missing Python Lib ({}), cannot load AMP {}".format(e, amp_name))
logger.warning(f"Missing Python Lib ({e}), cannot load AMP {amp_name}")
except Exception as e:
logger.warning("Cannot load AMP {} ({})".format(amp_name, e))
logger.warning(f"Cannot load AMP {amp_name} ({e})")
else:
# Add the AMP to the dictionary
# The key is the AMP name
@ -69,7 +68,7 @@ class AmpsList(object):
# Load the AMP configuration
self.__amps_dict[amp_name].load_config(self.config)
# Log AMPs list
logger.debug("AMPs list: {}".format(self.getList()))
logger.debug(f"AMPs list: {self.getList()}")
return True
@ -108,7 +107,7 @@ class AmpsList(object):
if len(amps_list) > 0:
# At least one process is matching the regex
logger.debug("AMPS: {} processes {} detected ({})".format(len(amps_list), k, amps_list))
logger.debug(f"AMPS: {len(amps_list)} processes {k} detected ({amps_list})")
# Call the AMP update method
thread = threading.Thread(target=v.update_wrapper, args=[amps_list])
thread.start()
@ -140,7 +139,7 @@ class AmpsList(object):
)
except (TypeError, KeyError) as e:
logger.debug("Can not build AMPS list ({})".format(e))
logger.debug(f"Can not build AMPS list ({e})")
return ret

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -12,7 +11,7 @@
from datetime import datetime
class GlancesAttribute(object):
class GlancesAttribute:
def __init__(self, name, description='', history_max_size=None):
"""Init the attribute
@ -66,8 +65,7 @@ class GlancesAttribute(object):
def value(self):
if self.history_len() > 0:
return (self._value[1] - self.history_value()[1]) / (self._value[0] - self.history_value()[0])
else:
return None
return None
@value.setter
def value(self, new_value):

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -16,7 +15,8 @@ from glances.globals import BSD
from glances.logger import logger
try:
from zeroconf import __version__ as __zeroconf_version, ServiceBrowser, ServiceInfo, Zeroconf
from zeroconf import ServiceBrowser, ServiceInfo, Zeroconf
from zeroconf import __version__ as __zeroconf_version
zeroconf_tag = True
except ImportError:
@ -26,7 +26,7 @@ except ImportError:
if zeroconf_tag:
zeroconf_min_version = (0, 17, 0)
zeroconf_version = tuple([int(num) for num in __zeroconf_version.split('.')])
logger.debug("Zeroconf version {} detected.".format(__zeroconf_version))
logger.debug(f"Zeroconf version {__zeroconf_version} detected.")
if zeroconf_version < zeroconf_min_version:
logger.critical("Please install zeroconf 0.17 or higher.")
sys.exit(1)
@ -34,10 +34,10 @@ if zeroconf_tag:
# Global var
# Recent versions of the zeroconf python package doesn't like a zeroconf type that ends with '._tcp.'.
# Correct issue: zeroconf problem with zeroconf_type = "_%s._tcp." % 'glances' #888
zeroconf_type = "_%s._tcp.local." % 'glances'
zeroconf_type = "_{}._tcp.local.".format('glances')
class AutoDiscovered(object):
class AutoDiscovered:
"""Class to manage the auto discovered servers dict."""
def __init__(self):
@ -66,7 +66,7 @@ class AutoDiscovered(object):
'type': 'DYNAMIC',
} # Server type: 'STATIC' or 'DYNAMIC'
self._server_list.append(new_server)
logger.debug("Updated servers list (%s servers): %s" % (len(self._server_list), self._server_list))
logger.debug(f"Updated servers list ({len(self._server_list)} servers): {self._server_list}")
def remove_server(self, name):
"""Remove a server from the dict."""
@ -74,13 +74,13 @@ class AutoDiscovered(object):
if i['key'] == name:
try:
self._server_list.remove(i)
logger.debug("Remove server %s from the list" % name)
logger.debug("Updated servers list (%s servers): %s" % (len(self._server_list), self._server_list))
logger.debug(f"Remove server {name} from the list")
logger.debug(f"Updated servers list ({len(self._server_list)} servers): {self._server_list}")
except ValueError:
logger.error("Cannot remove server %s from the list" % name)
logger.error(f"Cannot remove server {name} from the list")
class GlancesAutoDiscoverListener(object):
class GlancesAutoDiscoverListener:
"""Zeroconf listener for Glances server."""
def __init__(self):
@ -104,7 +104,7 @@ class GlancesAutoDiscoverListener(object):
"""
if srv_type != zeroconf_type:
return False
logger.debug("Check new Zeroconf server: %s / %s" % (srv_type, srv_name))
logger.debug(f"Check new Zeroconf server: {srv_type} / {srv_name}")
info = zeroconf.get_service_info(srv_type, srv_name)
if info and (info.addresses or info.parsed_addresses):
address = info.addresses[0] if info.addresses else info.parsed_addresses[0]
@ -113,7 +113,7 @@ class GlancesAutoDiscoverListener(object):
# Add server to the global dict
self.servers.add_server(srv_name, new_server_ip, new_server_port)
logger.info("New Glances server detected (%s from %s:%s)" % (srv_name, new_server_ip, new_server_port))
logger.info(f"New Glances server detected ({srv_name} from {new_server_ip}:{new_server_port})")
else:
logger.warning("New Glances server detected, but failed to be get Zeroconf ServiceInfo ")
return True
@ -121,10 +121,10 @@ class GlancesAutoDiscoverListener(object):
def remove_service(self, zeroconf, srv_type, srv_name):
"""Remove the server from the list."""
self.servers.remove_server(srv_name)
logger.info("Glances server %s removed from the autodetect list" % srv_name)
logger.info(f"Glances server {srv_name} removed from the autodetect list")
class GlancesAutoDiscoverServer(object):
class GlancesAutoDiscoverServer:
"""Implementation of the Zeroconf protocol (server side for the Glances client)."""
def __init__(self, args=None):
@ -132,8 +132,8 @@ class GlancesAutoDiscoverServer(object):
logger.info("Init autodiscover mode (Zeroconf protocol)")
try:
self.zeroconf = Zeroconf()
except socket.error as e:
logger.error("Cannot start Zeroconf (%s)" % e)
except OSError as e:
logger.error(f"Cannot start Zeroconf ({e})")
self.zeroconf_enable_tag = False
else:
self.listener = GlancesAutoDiscoverListener()
@ -147,8 +147,7 @@ class GlancesAutoDiscoverServer(object):
"""Return the current server list (dict of dict)."""
if zeroconf_tag and self.zeroconf_enable_tag:
return self.listener.get_servers_list()
else:
return []
return []
def set_server(self, server_pos, key, value):
"""Set the key to the value for the server_pos (position in the list)."""
@ -160,7 +159,7 @@ class GlancesAutoDiscoverServer(object):
self.zeroconf.close()
class GlancesAutoDiscoverClient(object):
class GlancesAutoDiscoverClient:
"""Implementation of the zeroconf protocol (client side for the Glances server)."""
def __init__(self, hostname, args=None):
@ -168,8 +167,8 @@ class GlancesAutoDiscoverClient(object):
zeroconf_bind_address = args.bind_address
try:
self.zeroconf = Zeroconf()
except socket.error as e:
logger.error("Cannot start zeroconf: {}".format(e))
except OSError as e:
logger.error(f"Cannot start zeroconf: {e}")
# XXX *BSDs: Segmentation fault (core dumped)
# -- https://bitbucket.org/al45tair/netifaces/issues/15
@ -192,7 +191,7 @@ class GlancesAutoDiscoverClient(object):
try:
self.info = ServiceInfo(
zeroconf_type,
'{}:{}.{}'.format(hostname, args.port, zeroconf_type),
f'{hostname}:{args.port}.{zeroconf_type}',
address=socket.inet_pton(address_family, zeroconf_bind_address),
port=args.port,
weight=0,
@ -205,7 +204,7 @@ class GlancesAutoDiscoverClient(object):
# address (only one address) is replaced by addresses (list of addresses)
self.info = ServiceInfo(
zeroconf_type,
name='{}:{}.{}'.format(hostname, args.port, zeroconf_type),
name=f'{hostname}:{args.port}.{zeroconf_type}',
addresses=[socket.inet_pton(address_family, zeroconf_bind_address)],
port=args.port,
weight=0,
@ -216,9 +215,9 @@ class GlancesAutoDiscoverClient(object):
try:
self.zeroconf.register_service(self.info)
except Exception as e:
logger.error("Error while announcing Glances server: {}".format(e))
logger.error(f"Error while announcing Glances server: {e}")
else:
print("Announce the Glances server on the LAN (using {} IP address)".format(zeroconf_bind_address))
print(f"Announce the Glances server on the LAN (using {zeroconf_bind_address} IP address)")
else:
logger.error("Cannot announce Glances server on the network: zeroconf library not found.")

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -9,16 +8,16 @@
"""Manage the Glances client."""
import ujson
import socket
import sys
import time
import orjson
from glances import __version__
from glances.globals import Fault, ProtocolError, ServerProxy, Transport
from glances.logger import logger
from glances.stats_client import GlancesStatsClient
from glances.outputs.glances_curses import GlancesCursesClient
from glances.stats_client import GlancesStatsClient
from glances.timer import Counter
@ -29,7 +28,7 @@ class GlancesClientTransport(Transport):
self.timeout = timeout
class GlancesClient(object):
class GlancesClient:
"""This class creates and manages the TCP client."""
def __init__(self, config=None, args=None, timeout=7, return_to_browser=False):
@ -48,10 +47,12 @@ class GlancesClient(object):
# Build the URI
if args.password != "":
self.uri = 'http://{}:{}@{}:{}'.format(args.username, args.password, args.client, args.port)
self.uri = f'http://{args.username}:{args.password}@{args.client}:{args.port}'
else:
self.uri = 'http://{}:{}'.format(args.client, args.port)
logger.debug("Try to connect to {}".format(self.uri))
self.uri = f'http://{args.client}:{args.port}'
# Avoid logging user credentials
logger.debug(f"Try to connect to 'http://{args.client}:{args.port}'")
# Try to connect to the URI
transport = GlancesClientTransport()
@ -60,7 +61,7 @@ class GlancesClient(object):
try:
self.client = ServerProxy(self.uri, transport=transport)
except Exception as e:
self.log_and_exit("Client couldn't create socket {}: {}".format(self.uri, e))
self.log_and_exit(f"Client couldn't create socket {self.uri}: {e}")
@property
def quiet(self):
@ -93,10 +94,10 @@ class GlancesClient(object):
client_version = None
try:
client_version = self.client.init()
except socket.error as err:
except OSError as err:
# Fallback to SNMP
self.client_mode = 'snmp'
logger.error("Connection to Glances server failed ({} {})".format(err.errno, err.strerror))
logger.error(f"Connection to Glances server failed ({err.errno} {err.strerror})")
fall_back_msg = 'No Glances server found. Trying fallback to SNMP...'
if not self.return_to_browser:
print(fall_back_msg)
@ -104,11 +105,11 @@ class GlancesClient(object):
logger.info(fall_back_msg)
except ProtocolError as err:
# Other errors
msg = "Connection to server {} failed".format(self.uri)
msg = f"Connection to server {self.uri} failed"
if err.errcode == 401:
msg += " (Bad username/password)"
else:
msg += " ({} {})".format(err.errcode, err.errmsg)
msg += f" ({err.errcode} {err.errmsg})"
self.log_and_exit(msg)
return False
@ -117,14 +118,12 @@ class GlancesClient(object):
if __version__.split('.')[0] == client_version.split('.')[0]:
# Init stats
self.stats = GlancesStatsClient(config=self.config, args=self.args)
self.stats.set_plugins(ujson.loads(self.client.getAllPlugins()))
logger.debug("Client version: {} / Server version: {}".format(__version__, client_version))
self.stats.set_plugins(orjson.loads(self.client.getAllPlugins()))
logger.debug(f"Client version: {__version__} / Server version: {client_version}")
else:
self.log_and_exit(
(
'Client and server not compatible: '
'Client version: {} / Server version: {}'.format(__version__, client_version)
)
'Client and server not compatible: '
f'Client version: {__version__} / Server version: {client_version}'
)
return False
@ -180,12 +179,12 @@ class GlancesClient(object):
"""Update stats from Glances/SNMP server."""
if self.client_mode == 'glances':
return self.update_glances()
elif self.client_mode == 'snmp':
if self.client_mode == 'snmp':
return self.update_snmp()
else:
self.end()
logger.critical("Unknown server mode: {}".format(self.client_mode))
sys.exit(2)
self.end()
logger.critical(f"Unknown server mode: {self.client_mode}")
sys.exit(2)
def update_glances(self):
"""Get stats from Glances server.
@ -196,8 +195,8 @@ class GlancesClient(object):
"""
# Update the stats
try:
server_stats = ujson.loads(self.client.getAll())
except socket.error:
server_stats = orjson.loads(self.client.getAll())
except OSError:
# Client cannot get server stats
return "Disconnected"
except Fault:
@ -240,12 +239,12 @@ class GlancesClient(object):
# Update the stats
counter = Counter()
cs_status = self.update()
logger.debug('Stats updated duration: {} seconds'.format(counter.get()))
logger.debug(f'Stats updated duration: {counter.get()} seconds')
# Export stats using export modules
counter_export = Counter()
self.stats.export(self.stats)
logger.debug('Stats exported duration: {} seconds'.format(counter_export.get()))
logger.debug(f'Stats exported duration: {counter_export.get()} seconds')
# Patch for issue1326 to avoid < 0 refresh
adapted_refresh = self.refresh_time - counter.get()

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -9,20 +8,20 @@
"""Manage the Glances client browser (list of Glances server)."""
import ujson
import socket
import threading
from glances.globals import Fault, ProtocolError, ServerProxy
import orjson
from glances.autodiscover import GlancesAutoDiscoverServer
from glances.client import GlancesClient, GlancesClientTransport
from glances.logger import logger, LOG_FILENAME
from glances.globals import Fault, ProtocolError, ServerProxy
from glances.logger import LOG_FILENAME, logger
from glances.outputs.glances_curses_browser import GlancesCursesBrowser
from glances.password_list import GlancesPasswordList as GlancesPassword
from glances.static_list import GlancesStaticServer
from glances.autodiscover import GlancesAutoDiscoverServer
from glances.outputs.glances_curses_browser import GlancesCursesBrowser
class GlancesClientBrowser(object):
class GlancesClientBrowser:
"""This class creates and manages the TCP client browser (servers list)."""
def __init__(self, config=None, args=None):
@ -76,8 +75,7 @@ class GlancesClientBrowser(object):
if clear_password is not None:
server['password'] = self.password.get_hash(clear_password)
return 'http://{}:{}@{}:{}'.format(server['username'], server['password'], server['ip'], server['port'])
else:
return 'http://{}:{}'.format(server['ip'], server['port'])
return 'http://{}:{}'.format(server['ip'], server['port'])
def __update_stats(self, server):
"""Update stats for the given server (picked from the server list)"""
@ -92,19 +90,23 @@ class GlancesClientBrowser(object):
try:
s = ServerProxy(uri, transport=t)
except Exception as e:
logger.warning("Client browser couldn't create socket ({})".format(e))
logger.warning(f"Client browser couldn't create socket ({e})")
else:
# Mandatory stats
try:
# CPU%
cpu_percent = 100 - ujson.loads(s.getCpu())['idle']
server['cpu_percent'] = '{:.1f}'.format(cpu_percent)
# logger.info(f"CPU stats {s.getPlugin('cpu')}")
# logger.info(f"CPU views {s.getPluginView('cpu')}")
server['cpu_percent'] = orjson.loads(s.getPlugin('cpu'))['total']
server['cpu_percent_decoration'] = orjson.loads(s.getPluginView('cpu'))['total']['decoration']
# MEM%
server['mem_percent'] = ujson.loads(s.getMem())['percent']
server['mem_percent'] = orjson.loads(s.getPlugin('mem'))['percent']
server['mem_percent_decoration'] = orjson.loads(s.getPluginView('mem'))['percent']['decoration']
# OS (Human Readable name)
server['hr_name'] = ujson.loads(s.getSystem())['hr_name']
except (socket.error, Fault, KeyError) as e:
logger.debug("Error while grabbing stats form server ({})".format(e))
server['hr_name'] = orjson.loads(s.getPlugin('system'))['hr_name']
server['hr_name_decoration'] = 'DEFAULT'
except (OSError, Fault, KeyError) as e:
logger.debug(f"Error while grabbing stats form server ({e})")
server['status'] = 'OFFLINE'
except ProtocolError as e:
if e.errcode == 401:
@ -114,7 +116,7 @@ class GlancesClientBrowser(object):
server['status'] = 'PROTECTED'
else:
server['status'] = 'OFFLINE'
logger.debug("Cannot grab stats from server ({} {})".format(e.errcode, e.errmsg))
logger.debug(f"Cannot grab stats from server ({e.errcode} {e.errmsg})")
else:
# Status
server['status'] = 'ONLINE'
@ -122,17 +124,17 @@ class GlancesClientBrowser(object):
# Optional stats (load is not available on Windows OS)
try:
# LOAD
load_min5 = ujson.loads(s.getLoad())['min5']
server['load_min5'] = '{:.2f}'.format(load_min5)
server['load_min5'] = round(orjson.loads(s.getPlugin('load'))['min5'], 1)
server['load_min5_decoration'] = orjson.loads(s.getPluginView('load'))['min5']['decoration']
except Exception as e:
logger.warning("Error while grabbing stats form server ({})".format(e))
logger.warning(f"Error while grabbing stats form server ({e})")
return server
def __display_server(self, server):
"""Connect and display the given server"""
# Display the Glances client for the selected server
logger.debug("Selected server {}".format(server))
logger.debug(f"Selected server {server}")
# Connection can take time
# Display a popup
@ -146,7 +148,7 @@ class GlancesClientBrowser(object):
# Else, the password should be entered by the user
# Display a popup to enter password
clear_password = self.screen.display_popup(
'Password needed for {}: '.format(server['name']), is_input=True
'Password needed for {}: '.format(server['name']), popup_type='input', is_password=True
)
# Store the password for the selected server
if clear_password is not None:
@ -201,7 +203,7 @@ class GlancesClientBrowser(object):
# For each server in the list, grab elementary stats (CPU, LOAD, MEM, OS...)
thread_list = {}
while not self.screen.is_end:
logger.debug("Iter through the following server list: {}".format(self.get_servers_list()))
logger.debug(f"Iter through the following server list: {self.get_servers_list()}")
for v in self.get_servers_list():
key = v["key"]
thread = thread_list.get(key, None)

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -9,13 +8,13 @@
"""Manage the configuration file."""
import os
import sys
import builtins
import multiprocessing
from io import open
import os
import re
import sys
from glances.globals import ConfigParser, NoOptionError, NoSectionError, system_exec, BSD, LINUX, MACOS, SUNOS, WINDOWS
from glances.globals import BSD, LINUX, MACOS, SUNOS, WINDOWS, ConfigParser, NoOptionError, NoSectionError, system_exec
from glances.logger import logger
@ -82,19 +81,32 @@ def default_config_dir():
- Linux, SunOS, *BSD, macOS: /usr/share/doc (as defined in the setup.py files)
- Windows: %APPDATA%\glances
"""
path = []
# Add venv path (solve issue #2803)
if in_virtualenv():
path.append(os.path.join(sys.prefix, 'share', 'doc', 'glances'))
# Add other system paths
if LINUX or SUNOS or BSD or MACOS:
path = '/usr/share/doc'
path.append('/usr/share/doc')
else:
path = os.environ.get('APPDATA')
if path is None:
path = ''
else:
path = os.path.join(path, 'glances')
path.append(os.environ.get('APPDATA'))
return [path]
return path
class Config(object):
def in_virtualenv():
# Source: https://stackoverflow.com/questions/1871549/how-to-determine-if-python-is-running-inside-a-virtualenv/1883251#1883251
return sys.prefix != get_base_prefix_compat()
def get_base_prefix_compat():
"""Get base/real prefix, or sys.prefix if there is none."""
# Source: https://stackoverflow.com/questions/1871549/how-to-determine-if-python-is-running-inside-a-virtualenv/1883251#1883251
return getattr(sys, "base_prefix", None) or getattr(sys, "real_prefix", None) or sys.prefix
class Config:
"""This class is used to access/read config file, if it exists.
:param config_dir: the path to search for config file
@ -105,6 +117,7 @@ class Config(object):
self.config_dir = config_dir
self.config_filename = 'glances.conf'
self._loaded_config_file = None
self._config_file_paths = self.config_file_paths()
# Re pattern to optimize the search of `foo` occurrences
self.re_pattern = re.compile(r'(\`.+?\`)')
@ -152,16 +165,16 @@ class Config(object):
def read(self):
"""Read the config file, if it exists. Using defaults otherwise."""
for config_file in self.config_file_paths():
logger.debug('Search glances.conf file in {}'.format(config_file))
for config_file in self._config_file_paths:
logger.debug(f'Search glances.conf file in {config_file}')
if os.path.exists(config_file):
try:
with open(config_file, encoding='utf-8') as f:
with builtins.open(config_file, encoding='utf-8') as f:
self.parser.read_file(f)
self.parser.read(f)
logger.info("Read configuration file '{}'".format(config_file))
logger.info(f"Read configuration file '{config_file}'")
except UnicodeDecodeError as err:
logger.error("Can not read configuration file '{}': {}".format(config_file, err))
logger.error(f"Can not read configuration file '{config_file}': {err}")
sys.exit(1)
# Save the loaded configuration file path (issue #374)
self._loaded_config_file = config_file
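The virtualenv detection added above (for issue #2803) is self-contained; below is a minimal standalone sketch of the same logic, runnable without Glances, that also prints the extra venv documentation path appended to the search list. The final prints are illustrative only.

import os
import sys

def get_base_prefix_compat():
    """Return the base/real prefix, or sys.prefix if neither exists."""
    return getattr(sys, "base_prefix", None) or getattr(sys, "real_prefix", None) or sys.prefix

def in_virtualenv():
    # Inside a venv, sys.prefix points at the venv while base_prefix points at the system Python
    return sys.prefix != get_base_prefix_compat()

if in_virtualenv():
    # Same path that default_config_dir() now prepends to the search list
    print(os.path.join(sys.prefix, 'share', 'doc', 'glances'))
else:
    print('system interpreter, no extra venv path')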

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -9,47 +8,68 @@
"""CPU percent stats shared between CPU and Quicklook plugins."""
from glances.logger import logger
from glances.timer import Timer
from typing import List, Optional, TypedDict
import psutil
from glances.logger import logger
from glances.timer import Timer
class CpuPercent(object):
__all__ = ["cpu_percent"]
class CpuInfo(TypedDict):
cpu_name: str
cpu_hz: Optional[float]
cpu_hz_current: Optional[float]
class PerCpuPercentInfo(TypedDict):
key: str
cpu_number: int
total: float
user: float
system: float
idle: float
nice: Optional[float]
iowait: Optional[float]
irq: Optional[float]
softirq: Optional[float]
steal: Optional[float]
guest: Optional[float]
guest_nice: Optional[float]
dpc: Optional[float]
interrupt: Optional[float]
class CpuPercent:
"""Get and store the CPU percent."""
def __init__(self, cached_timer_cpu=3):
self.cpu_info = {'cpu_name': None, 'cpu_hz_current': None, 'cpu_hz': None}
self.cpu_percent = 0
self.percpu_percent = []
# Get CPU name
self.__get_cpu_name()
def __init__(self, cached_timer_cpu: int = 2):
# cached_timer_cpu is the minimum time interval between two stats updates:
# if it has not elapsed since the last update, the cached value is returned instead
self.cached_timer_cpu = cached_timer_cpu
self.timer_cpu = Timer(0)
self.timer_percpu = Timer(0)
# psutil.cpu_freq() consumes lots of CPU
# So refresh the stats every refresh*2 (6 seconds)
# So refresh CPU frequency stats every refresh * 2
self.cached_timer_cpu_info = cached_timer_cpu * 2
# Get CPU name
self.timer_cpu_info = Timer(0)
self.cpu_info: CpuInfo = {'cpu_name': self.__get_cpu_name(), 'cpu_hz_current': None, 'cpu_hz': None}
# Warning from PsUtil documentation
# The first time this function is called with interval = 0.0 or None
# it will return a meaningless 0.0 value which you are supposed to ignore.
self.timer_cpu = Timer(0)
self.cpu_percent = self._compute_cpu()
self.timer_percpu = Timer(0)
self.percpu_percent = self._compute_percpu()
def get_key(self):
"""Return the key of the per CPU list."""
return 'cpu_number'
def get(self, percpu=False):
"""Update and/or return the CPU using the psutil library.
If percpu, return the percpu stats"""
if percpu:
return self.__get_percpu()
else:
return self.__get_cpu()
def get_info(self):
def get_info(self) -> CpuInfo:
"""Get additional information about the CPU"""
# Never update more than 1 time per cached_timer_cpu_info
if self.timer_cpu_info.finished() and hasattr(psutil, 'cpu_freq'):
@ -57,7 +77,7 @@ class CpuPercent(object):
try:
cpu_freq = psutil.cpu_freq()
except Exception as e:
logger.debug('Can not grab CPU information ({})'.format(e))
logger.debug(f'Can not grab CPU information ({e})')
else:
if hasattr(cpu_freq, 'current'):
self.cpu_info['cpu_hz_current'] = cpu_freq.current
@ -71,59 +91,69 @@ class CpuPercent(object):
self.timer_cpu_info.reset(duration=self.cached_timer_cpu_info)
return self.cpu_info
def __get_cpu_name(self):
@staticmethod
def __get_cpu_name() -> str:
# Get the CPU name once from the /proc/cpuinfo file
# TODO: Multisystem...
# Look for the line starting with "model name" ("Model" on Raspberry Pi, "cpu model" on some systems)
try:
self.cpu_info['cpu_name'] = open('/proc/cpuinfo', 'r').readlines()[4].split(':')[1].strip()
except (FileNotFoundError, PermissionError, IndexError, KeyError, AttributeError):
self.cpu_info['cpu_name'] = 'CPU'
return self.cpu_info['cpu_name']
cpuinfo_lines = open('/proc/cpuinfo').readlines()
except (FileNotFoundError, PermissionError):
logger.debug("No permission to read '/proc/cpuinfo'")
return 'CPU'
def __get_cpu(self):
for line in cpuinfo_lines:
if line.startswith('model name') or line.startswith('Model') or line.startswith('cpu model'):
return line.split(':')[1].strip()
return 'CPU'
def get_cpu(self) -> float:
"""Update and/or return the CPU using the psutil library."""
# Never update more than 1 time per cached_timer_cpu
if self.timer_cpu.finished():
self.cpu_percent = psutil.cpu_percent(interval=0.0)
# Reset timer for cache
self.timer_cpu.reset(duration=self.cached_timer_cpu)
# Update the stats
self.cpu_percent = self._compute_cpu()
return self.cpu_percent
def __get_percpu(self):
@staticmethod
def _compute_cpu() -> float:
return psutil.cpu_percent(interval=0.0)
def get_percpu(self) -> List[PerCpuPercentInfo]:
"""Update and/or return the per CPU list using the psutil library."""
# Never update more than 1 time per cached_timer_cpu
if self.timer_percpu.finished():
self.percpu_percent = []
for cpu_number, cputimes in enumerate(psutil.cpu_times_percent(interval=0.0, percpu=True)):
cpu = {
'key': self.get_key(),
'cpu_number': cpu_number,
'total': round(100 - cputimes.idle, 1),
'user': cputimes.user,
'system': cputimes.system,
'idle': cputimes.idle,
}
# The following stats are for API purposes only
if hasattr(cputimes, 'nice'):
cpu['nice'] = cputimes.nice
if hasattr(cputimes, 'iowait'):
cpu['iowait'] = cputimes.iowait
if hasattr(cputimes, 'irq'):
cpu['irq'] = cputimes.irq
if hasattr(cputimes, 'softirq'):
cpu['softirq'] = cputimes.softirq
if hasattr(cputimes, 'steal'):
cpu['steal'] = cputimes.steal
if hasattr(cputimes, 'guest'):
cpu['guest'] = cputimes.guest
if hasattr(cputimes, 'guest_nice'):
cpu['guest_nice'] = cputimes.guest_nice
# Append new CPU to the list
self.percpu_percent.append(cpu)
# Reset timer for cache
self.timer_percpu.reset(duration=self.cached_timer_cpu)
# Reset timer for cache
self.timer_percpu.reset(duration=self.cached_timer_cpu)
# Update stats
self.percpu_percent = self._compute_percpu()
return self.percpu_percent
def _compute_percpu(self) -> List[PerCpuPercentInfo]:
psutil_percpu = enumerate(psutil.cpu_times_percent(interval=0.0, percpu=True))
return [
{
'key': self.get_key(),
'cpu_number': cpu_number,
'total': round(100 - cpu_times.idle, 1),
'user': cpu_times.user,
'system': cpu_times.system,
'idle': cpu_times.idle,
'nice': cpu_times.nice if hasattr(cpu_times, 'nice') else None,
'iowait': cpu_times.iowait if hasattr(cpu_times, 'iowait') else None,
'irq': cpu_times.irq if hasattr(cpu_times, 'irq') else None,
'softirq': cpu_times.softirq if hasattr(cpu_times, 'softirq') else None,
'steal': cpu_times.steal if hasattr(cpu_times, 'steal') else None,
'guest': cpu_times.guest if hasattr(cpu_times, 'guest') else None,
'guest_nice': cpu_times.guest_nice if hasattr(cpu_times, 'guest_nice') else None,
'dpc': cpu_times.dpc if hasattr(cpu_times, 'dpc') else None,
'interrupt': cpu_times.interrupt if hasattr(cpu_times, 'interrupt') else None,
}
for cpu_number, cpu_times in psutil_percpu
]
# CpuPercent instance shared between plugins
cpu_percent = CpuPercent()
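A minimal usage sketch of the refactored class, assuming Glances 4.x is installed and that the shared instance above is importable as glances.cpu_percent; the printed fields all come from the TypedDicts defined in this diff.

from glances.cpu_percent import cpu_percent  # shared CpuPercent instance

total = cpu_percent.get_cpu()        # cached for cached_timer_cpu seconds
per_cpu = cpu_percent.get_percpu()   # list of PerCpuPercentInfo dicts
info = cpu_percent.get_info()        # CpuInfo: cpu_name, cpu_hz, cpu_hz_current

print(f"{info['cpu_name']}: {total}%")
for cpu in per_cpu:
    print(f"cpu{cpu['cpu_number']}: total={cpu['total']} user={cpu['user']} system={cpu['system']}")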

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -8,7 +7,8 @@
#
"""Manage Glances event class
This class is a Pydantic data class for the Glances event.
This class is a data class for the Glances event.
event_state = "OK|CAREFUL|WARNING|CRITICAL"
event_type = "CPU*|LOAD|MEM|MON"
@ -32,7 +32,13 @@ Item (or event) is defined by:
}
"""
from pydantic.dataclasses import dataclass
from glances.logger import logger
try:
from pydantic.dataclasses import dataclass
except ImportError as e:
logger.warning(f"Missing Python Lib ({e}), EventList will be skipping data validation")
from dataclasses import dataclass
from glances.processes import sort_stats
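A standalone sketch of the optional-dependency pattern introduced above: prefer pydantic's validating dataclass and fall back to the stdlib one when pydantic is missing. The DemoEvent fields are hypothetical, not the real GlancesEvent schema.

import logging

logger = logging.getLogger(__name__)

try:
    from pydantic.dataclasses import dataclass  # validates field types at construction
except ImportError as err:
    logger.warning("Missing Python Lib (%s), falling back to the stdlib dataclass", err)
    from dataclasses import dataclass

@dataclass
class DemoEvent:
    state: str = "OK"
    type: str = "CPU"

print(DemoEvent(state="WARNING", type="MEM"))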

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -10,12 +9,12 @@
"""Manage Glances events list (previously Glances logs in Glances < 3.1)."""
import time
from dataclasses import asdict
from datetime import datetime
from pydantic import RootModel
from glances.event import GlancesEvent
from glances.processes import glances_processes
from glances.thresholds import glances_thresholds
from glances.event import GlancesEvent
# Static decision tree for the global alert message
# - msg: Message to be displayed (result of the decision tree)
@ -158,11 +157,10 @@ def build_global_message():
if themax['weight'] >= themax['thresholds_min']:
# Check if the weight is > to the minimal threshold value
return themax['msg']
else:
return tree[0]['msg']
return tree[0]['msg']
class GlancesEventsList(object):
class GlancesEventsList:
"""This class manages events inside the Glances software.
GlancesEventsList is a list of GlancesEvent.
GlancesEvent is defined in the event.py file
@ -201,7 +199,7 @@ class GlancesEventsList(object):
def get(self):
"""Return the RAW events list."""
return [RootModel[GlancesEvent](e).model_dump() for e in self.events_list]
return [asdict(e) for e in self.events_list]
def len(self):
"""Return the number of events in the logs list."""

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -13,13 +12,12 @@ I am your father...
...for all Glances exports IF.
"""
from glances.globals import json_dumps
from glances.globals import NoOptionError, NoSectionError, iteritems, iterkeys
from glances.timer import Counter
from glances.globals import NoOptionError, NoSectionError, iteritems, iterkeys, json_dumps
from glances.logger import logger
from glances.timer import Counter
class GlancesExport(object):
class GlancesExport:
"""Main class for Glances export IF."""
# List of non exportable plugins
@ -40,7 +38,7 @@ class GlancesExport(object):
"""Init the export class."""
# Export name
self.export_name = self.__class__.__module__
logger.debug("Init export module %s" % self.export_name)
logger.debug(f"Init export module {self.export_name}")
# Init the config & args
self.config = config
@ -64,18 +62,16 @@ class GlancesExport(object):
counter = Counter()
ret = fct(*args, **kw)
duration = counter.get()
logger.debug(
"{} {} {} return {} in {} seconds".format(
args[0].__class__.__name__, args[0].__class__.__module__, fct.__name__, ret, duration
)
)
class_name = args[0].__class__.__name__
class_module = args[0].__class__.__module__
logger.debug(f"{class_name} {class_module} {fct.__name__} return {ret} in {duration} seconds")
return ret
return wrapper
def exit(self):
"""Close the export module."""
logger.debug("Finalise export interface %s" % self.export_name)
logger.debug(f"Finalise export interface {self.export_name}")
def load_conf(self, section, mandatories=['host', 'port'], options=None):
"""Load the export <section> configuration in the Glances configuration file.
@ -96,10 +92,10 @@ class GlancesExport(object):
for opt in mandatories:
setattr(self, opt, self.config.get_value(section, opt))
except NoSectionError:
logger.error("No {} configuration found".format(section))
logger.error(f"No {section} configuration found")
return False
except NoOptionError as e:
logger.error("Error in the {} configuration ({})".format(section, e))
logger.error(f"Error in the {section} configuration ({e})")
return False
# Load options
@ -109,8 +105,8 @@ class GlancesExport(object):
except NoOptionError:
pass
logger.debug("Load {} from the Glances configuration file".format(section))
logger.debug("{} parameters: {}".format(section, {opt: getattr(self, opt) for opt in mandatories + options}))
logger.debug(f"Load {section} from the Glances configuration file")
logger.debug(f"{section} parameters: {({opt: getattr(self, opt) for opt in mandatories + options})}")
return True
@ -120,11 +116,10 @@ class GlancesExport(object):
try:
ret = item[item['key']]
except KeyError:
logger.error("No 'key' available in {}".format(item))
logger.error(f"No 'key' available in {item}")
if isinstance(ret, list):
return ret[0]
else:
return ret
return ret
def parse_tags(self, tags):
"""Parse tags into a dict.
@ -201,11 +196,10 @@ class GlancesExport(object):
for key, value in sorted(iteritems(stats)):
if isinstance(value, bool):
value = json_dumps(value)
if isinstance(value, list):
try:
value = value[0]
except IndexError:
value = ''
value = ' '.join([str(v) for v in value])
if isinstance(value, dict):
item_names, item_values = self.build_export(value)
item_names = [pre_key + key.lower() + str(i) for i in item_names]
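A tiny illustration of the build_export() change above: list values are now flattened to a space-joined string instead of keeping only their first element (the load triplet is just an example).

value = [0.4, 0.6, 0.8]                        # e.g. a load min1/min5/min15 triplet
old_style = value[0] if value else ''          # previous behaviour (first element or '')
new_style = ' '.join([str(v) for v in value])  # new behaviour
print(old_style)   # 0.4
print(new_style)   # 0.4 0.6 0.8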

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -13,13 +12,13 @@ import sys
from datetime import datetime
from numbers import Number
from glances.logger import logger
from glances.exports.export import GlancesExport
from cassandra import InvalidRequest
from cassandra.auth import PlainTextAuthProvider
from cassandra.cluster import Cluster
from cassandra.util import uuid_from_time
from cassandra import InvalidRequest
from glances.exports.export import GlancesExport
from glances.logger import logger
class Export(GlancesExport):
@ -27,7 +26,7 @@ class Export(GlancesExport):
def __init__(self, config=None, args=None):
"""Init the Cassandra export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Mandatory configuration keys (additional to host and port)
self.keyspace = None
@ -69,53 +68,52 @@ class Export(GlancesExport):
)
session = cluster.connect()
except Exception as e:
logger.critical("Cannot connect to Cassandra cluster '%s:%s' (%s)" % (self.host, self.port, e))
logger.critical(f"Cannot connect to Cassandra cluster '{self.host}:{self.port}' ({e})")
sys.exit(2)
# Keyspace
try:
session.set_keyspace(self.keyspace)
except InvalidRequest:
logger.info("Create keyspace {} on the Cassandra cluster".format(self.keyspace))
c = "CREATE KEYSPACE %s WITH replication = { 'class': 'SimpleStrategy', 'replication_factor': '%s' }" % (
self.keyspace,
self.replication_factor,
logger.info(f"Create keyspace {self.keyspace} on the Cassandra cluster")
c = (
f"CREATE KEYSPACE {self.keyspace} WITH "
f"replication = {{ 'class': 'SimpleStrategy', 'replication_factor': '{self.replication_factor}' }}"
)
session.execute(c)
session.set_keyspace(self.keyspace)
logger.info(
"Stats will be exported to Cassandra cluster {} ({}) in keyspace {}".format(
cluster.metadata.cluster_name, cluster.metadata.all_hosts(), self.keyspace
)
f"Stats will be exported to Cassandra cluster {cluster.metadata.cluster_name} "
f"({cluster.metadata.all_hosts()}) in keyspace {self.keyspace}"
)
# Table
try:
session.execute(
"CREATE TABLE %s (plugin text, time timeuuid, stat map<text,float>, PRIMARY KEY (plugin, time)) \
WITH CLUSTERING ORDER BY (time DESC)"
% self.table
f"CREATE TABLE {self.table} "
f"(plugin text, time timeuuid, stat map<text,float>, PRIMARY KEY (plugin, time)) "
f"WITH CLUSTERING ORDER BY (time DESC)"
)
except Exception:
logger.debug("Cassandra table %s already exist" % self.table)
logger.debug(f"Cassandra table {self.table} already exist")
return cluster, session
def export(self, name, columns, points):
"""Write the points to the Cassandra cluster."""
logger.debug("Export {} stats to Cassandra".format(name))
logger.debug(f"Export {name} stats to Cassandra")
# Remove non number stats and convert all to float (for Boolean)
data = {k: float(v) for (k, v) in dict(zip(columns, points)).items() if isinstance(v, Number)}
# Write input to the Cassandra table
try:
stmt = "INSERT INTO {} (plugin, time, stat) VALUES (?, ?, ?)".format(self.table)
stmt = f"INSERT INTO {self.table} (plugin, time, stat) VALUES (?, ?, ?)"
query = self.session.prepare(stmt)
self.session.execute(query, (name, uuid_from_time(datetime.now()), data))
except Exception as e:
logger.error("Cannot export {} stats to Cassandra ({})".format(name, e))
logger.error(f"Cannot export {name} stats to Cassandra ({e})")
def exit(self):
"""Close the Cassandra export module."""
@ -123,4 +121,4 @@ class Export(GlancesExport):
self.session.shutdown()
self.cluster.shutdown()
# Call the father method
super(Export, self).exit()
super().exit()
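The point conversion above keeps only numeric stats and casts them to float so they fit the map<text,float> column; with the .items() call it behaves like this (column names are illustrative):

from numbers import Number

columns = ['total', 'hr_name', 'on_battery']
points = [42.0, 'myhost', True]
data = {k: float(v) for (k, v) in dict(zip(columns, points)).items() if isinstance(v, Number)}
print(data)   # {'total': 42.0, 'on_battery': 1.0} -- the string is dropped, the bool becomes 1.0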

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -20,18 +19,18 @@
import sys
from datetime import datetime
from glances.logger import logger
from glances.exports.export import GlancesExport
import pycouchdb
from glances.exports.export import GlancesExport
from glances.logger import logger
class Export(GlancesExport):
"""This class manages the CouchDB export module."""
def __init__(self, config=None, args=None):
"""Init the CouchDB export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Load the CouchDB configuration file section
# User and Password are mandatory with CouchDB 3.0 and higher
@ -48,15 +47,15 @@ class Export(GlancesExport):
return None
# @TODO: https
server_uri = 'http://{}:{}@{}:{}/'.format(self.user, self.password, self.host, self.port)
server_uri = f'http://{self.user}:{self.password}@{self.host}:{self.port}/'
try:
s = pycouchdb.Server(server_uri)
except Exception as e:
logger.critical("Cannot connect to CouchDB server (%s)" % e)
logger.critical(f"Cannot connect to CouchDB server ({e})")
sys.exit(2)
else:
logger.info("Connected to the CouchDB server version %s" % s.info()['version'])
logger.info("Connected to the CouchDB server version {}".format(s.info()['version']))
try:
s.database(self.db)
@ -64,15 +63,15 @@ class Export(GlancesExport):
# Database did not exist
# Create it...
s.create(self.db)
logger.info("Create CouchDB database %s" % self.db)
logger.info(f"Create CouchDB database {self.db}")
else:
logger.info("CouchDB database %s already exist" % self.db)
logger.info(f"CouchDB database {self.db} already exist")
return s.database(self.db)
def export(self, name, columns, points):
"""Write the points to the CouchDB server."""
logger.debug("Export {} stats to CouchDB".format(name))
logger.debug(f"Export {name} stats to CouchDB")
# Create DB input
data = dict(zip(columns, points))
@ -85,4 +84,4 @@ class Export(GlancesExport):
try:
self.client.save(data)
except Exception as e:
logger.error("Cannot export {} stats to CouchDB ({})".format(name, e))
logger.error(f"Cannot export {name} stats to CouchDB ({e})")

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -9,13 +8,13 @@
"""CSV interface class."""
import os.path
import csv
import os.path
import sys
import time
from glances.logger import logger
from glances.exports.export import GlancesExport
from glances.logger import logger
class Export(GlancesExport):
@ -23,7 +22,7 @@ class Export(GlancesExport):
def __init__(self, config=None, args=None):
"""Init the CSV export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# CSV file name
self.csv_filename = args.export_csv_file
@ -42,8 +41,8 @@ class Export(GlancesExport):
try:
self.csv_file = open_csv_file(self.csv_filename, 'r')
reader = csv.reader(self.csv_file)
except IOError as e:
logger.critical("Cannot open existing CSV file: {}".format(e))
except OSError as e:
logger.critical(f"Cannot open existing CSV file: {e}")
sys.exit(2)
self.old_header = next(reader, None)
self.csv_file.close()
@ -51,11 +50,11 @@ class Export(GlancesExport):
try:
self.csv_file = open_csv_file(self.csv_filename, file_mode)
self.writer = csv.writer(self.csv_file)
except IOError as e:
logger.critical("Cannot create the CSV file: {}".format(e))
except OSError as e:
logger.critical(f"Cannot create the CSV file: {e}")
sys.exit(2)
logger.info("Stats exported to CSV file: {}".format(self.csv_filename))
logger.info(f"Stats exported to CSV file: {self.csv_filename}")
self.export_enable = True
@ -63,7 +62,7 @@ class Export(GlancesExport):
def exit(self):
"""Close the CSV file."""
logger.debug("Finalise export interface %s" % self.export_name)
logger.debug(f"Finalise export interface {self.export_name}")
self.csv_file.close()
def update(self, stats):
@ -95,8 +94,8 @@ class Export(GlancesExport):
if self.old_header != csv_header and self.old_header is not None:
# Headers are different, log an error and do not write data
logger.error("Cannot append data to existing CSV file. Headers are different.")
logger.debug("Old header: {}".format(self.old_header))
logger.debug("New header: {}".format(csv_header))
logger.debug(f"Old header: {self.old_header}")
logger.debug(f"New header: {csv_header}")
else:
# Headers are equal, ready to write data
self.old_header = None

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -12,18 +11,18 @@
import sys
from datetime import datetime
from glances.logger import logger
from glances.exports.export import GlancesExport
from elasticsearch import Elasticsearch, helpers
from glances.exports.export import GlancesExport
from glances.logger import logger
class Export(GlancesExport):
"""This class manages the ElasticSearch (ES) export module."""
def __init__(self, config=None, args=None):
"""Init the ES export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Mandatory configuration keys (additional to host and port)
self.index = None
@ -44,24 +43,22 @@ class Export(GlancesExport):
return None
try:
es = Elasticsearch(hosts=['{}://{}:{}'.format(self.scheme, self.host, self.port)])
es = Elasticsearch(hosts=[f'{self.scheme}://{self.host}:{self.port}'])
except Exception as e:
logger.critical(
"Cannot connect to ElasticSearch server %s://%s:%s (%s)" % (self.scheme, self.host, self.port, e)
)
logger.critical(f"Cannot connect to ElasticSearch server {self.scheme}://{self.host}:{self.port} ({e})")
sys.exit(2)
if not es.ping():
logger.critical("Cannot ping the ElasticSearch server %s://%s:%s" % (self.scheme, self.host, self.port))
logger.critical(f"Cannot ping the ElasticSearch server {self.scheme}://{self.host}:{self.port}")
sys.exit(2)
else:
logger.info("Connected to the ElasticSearch server %s://%s:%s" % (self.scheme, self.host, self.port))
logger.info(f"Connected to the ElasticSearch server {self.scheme}://{self.host}:{self.port}")
return es
def export(self, name, columns, points):
"""Write the points to the ES server."""
logger.debug("Export {} stats to ElasticSearch".format(name))
logger.debug(f"Export {name} stats to ElasticSearch")
# Generate index name with the index field + current day
index = '{}-{}'.format(self.index, datetime.utcnow().strftime("%Y.%m.%d"))
@ -72,17 +69,17 @@ class Export(GlancesExport):
dt_now = datetime.utcnow().isoformat('T')
action = {
"_index": index,
"_id": '{}.{}'.format(name, dt_now),
"_type": 'glances-{}'.format(name),
"_id": f'{name}.{dt_now}',
"_type": f'glances-{name}',
"_source": {"plugin": name, "timestamp": dt_now},
}
action['_source'].update(zip(columns, [str(p) for p in points]))
actions.append(action)
logger.debug("Exporting the following object to elasticsearch: {}".format(action))
logger.debug(f"Exporting the following object to elasticsearch: {action}")
# Write input to the ES index
try:
helpers.bulk(self.client, actions)
except Exception as e:
logger.error("Cannot export {} stats to ElasticSearch ({})".format(name, e))
logger.error(f"Cannot export {name} stats to ElasticSearch ({e})")

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -9,17 +8,18 @@
"""Graph exporter interface class."""
from pygal import DateTimeLine
import pygal.style
import sys
import os
import tempfile
import errno
import os
import sys
import tempfile
import pygal.style
from pygal import DateTimeLine
from glances.exports.export import GlancesExport
from glances.globals import iteritems, time_serie_subsample
from glances.logger import logger
from glances.timer import Timer
from glances.globals import iteritems, time_serie_subsample
from glances.exports.export import GlancesExport
class Export(GlancesExport):
@ -27,36 +27,38 @@ class Export(GlancesExport):
def __init__(self, config=None, args=None):
"""Init the export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Load the Graph configuration file section (if it exists)
self.export_enable = self.load_conf('graph', options=['path', 'generate_every', 'width', 'height', 'style'])
# Manage options (command line arguments overwrite configuration file)
self.path = args.export_graph_path or self.path
self.generate_every = int(getattr(self, 'generate_every', 0))
self.width = int(getattr(self, 'width', 800))
self.height = int(getattr(self, 'height', 600))
self.style = getattr(pygal.style, getattr(self, 'style', 'DarkStyle'), pygal.style.DarkStyle)
self.generate_every = int(getattr(self, 'generate_every', 0) or 0)
self.width = int(getattr(self, 'width', 800) or 800)
self.height = int(getattr(self, 'height', 600) or 600)
self.style = (
getattr(pygal.style, getattr(self, 'style', 'DarkStyle'), pygal.style.DarkStyle) or pygal.style.DarkStyle
)
# Create export folder
try:
os.makedirs(self.path)
except OSError as e:
if e.errno != errno.EEXIST:
logger.critical("Cannot create the Graph output folder {} ({})".format(self.path, e))
logger.critical(f"Cannot create the Graph output folder {self.path} ({e})")
sys.exit(2)
# Check if output folder is writeable
try:
tempfile.TemporaryFile(dir=self.path)
except OSError:
logger.critical("Graph output folder {} is not writeable".format(self.path))
logger.critical(f"Graph output folder {self.path} is not writeable")
sys.exit(2)
logger.info("Graphs will be created in the {} folder".format(self.path))
logger.info(f"Graphs will be created in the {self.path} folder")
if self.generate_every != 0:
logger.info("Graphs will be created automatically every {} seconds".format(self.generate_every))
logger.info(f"Graphs will be created automatically every {self.generate_every} seconds")
logger.info("or when 'g' key is pressed (only through the CLI interface)")
# Start the timer
self._timer = Timer(self.generate_every)
@ -66,7 +68,7 @@ class Export(GlancesExport):
def exit(self):
"""Close the files."""
logger.debug("Finalise export interface %s" % self.export_name)
logger.debug(f"Finalise export interface {self.export_name}")
def update(self, stats):
"""Generate Graph file in the output folder."""
@ -84,7 +86,7 @@ class Export(GlancesExport):
if plugin_name in self.plugins_to_export(stats):
self.export(plugin_name, plugin.get_export_history())
logger.info("Graphs created in {}".format(self.path))
logger.info(f"Graphs created in {self.path}")
self.args.generate_graph = False
def export(self, title, data):

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -12,18 +11,18 @@
import sys
from numbers import Number
from glances.logger import logger
from glances.exports.export import GlancesExport
from graphitesend import GraphiteClient
from glances.exports.export import GlancesExport
from glances.logger import logger
class Export(GlancesExport):
"""This class manages the Graphite export module."""
def __init__(self, config=None, args=None):
"""Init the Graphite export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Mandatory configuration keys (additional to host and port)
# N/A
@ -74,25 +73,25 @@ class Export(GlancesExport):
debug=self.debug,
)
except Exception as e:
logger.error("Can not write data to Graphite server: {}:{} ({})".format(self.host, self.port, e))
logger.error(f"Can not write data to Graphite server: {self.host}:{self.port} ({e})")
client = None
else:
logger.info("Stats will be exported to Graphite server: {}:{}".format(self.host, self.port))
logger.info(f"Stats will be exported to Graphite server: {self.host}:{self.port}")
return client
def export(self, name, columns, points):
"""Export the stats to the Graphite server."""
if self.client is None:
return False
before_filtering_dict = dict(zip([normalize('{}.{}'.format(name, i)) for i in columns], points))
before_filtering_dict = dict(zip([normalize(f'{name}.{i}') for i in columns], points))
after_filtering_dict = dict(filter(lambda i: isinstance(i[1], Number), before_filtering_dict.items()))
try:
self.client.send_dict(after_filtering_dict)
except Exception as e:
logger.error("Can not export stats to Graphite (%s)" % e)
logger.error(f"Can not export stats to Graphite ({e})")
return False
else:
logger.debug("Export {} stats to Graphite".format(name))
logger.debug(f"Export {name} stats to Graphite")
return True
@ -100,6 +99,4 @@ def normalize(name):
"""Normalize name for the Graphite convention"""
# Name should not contain space
ret = name.replace(' ', '_')
return ret
return name.replace(' ', '_')
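A quick illustration of the metric naming used above: '<plugin>.<column>' with spaces normalized to underscores, and non-numeric points filtered out before send_dict() (column names are illustrative).

from numbers import Number

def normalize(name):
    # Graphite convention: no spaces in metric names
    return name.replace(' ', '_')

name, columns, points = 'cpu', ['total', 'ctx switches'], [12.5, 'n/a']
before_filtering = dict(zip([normalize(f'{name}.{i}') for i in columns], points))
after_filtering = dict(filter(lambda i: isinstance(i[1], Number), before_filtering.items()))
print(after_filtering)   # {'cpu.total': 12.5}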

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -12,13 +11,13 @@
import sys
from platform import node
from glances.logger import logger
from glances.exports.export import GlancesExport
from influxdb import InfluxDBClient
from influxdb.client import InfluxDBClientError
FIELD_TO_TAG = ['name', 'cmdline']
from glances.exports.export import GlancesExport
from glances.logger import logger
FIELD_TO_TAG = ['name', 'cmdline', 'type']
class Export(GlancesExport):
@ -26,7 +25,7 @@ class Export(GlancesExport):
def __init__(self, config=None, args=None):
"""Init the InfluxDB export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Mandatory configuration keys (additional to host and port)
self.user = None
@ -75,13 +74,13 @@ class Export(GlancesExport):
)
get_all_db = [i['name'] for i in db.get_list_database()]
except InfluxDBClientError as e:
logger.critical("Cannot connect to InfluxDB database '%s' (%s)" % (self.db, e))
logger.critical(f"Cannot connect to InfluxDB database '{self.db}' ({e})")
sys.exit(2)
if self.db in get_all_db:
logger.info("Stats will be exported to InfluxDB server: {}".format(db._baseurl))
logger.info(f"Stats will be exported to InfluxDB server: {db._baseurl}")
else:
logger.critical("InfluxDB database '%s' did not exist. Please create it" % self.db)
logger.critical(f"InfluxDB database '{self.db}' did not exist. Please create it")
sys.exit(2)
return db
@ -106,9 +105,7 @@ class Export(GlancesExport):
# Manage field
if measurement is not None:
fields = {
k.replace('{}.'.format(measurement), ''): data_dict[k]
for k in data_dict
if k.startswith('{}.'.format(measurement))
k.replace(f'{measurement}.', ''): data_dict[k] for k in data_dict if k.startswith(f'{measurement}.')
}
else:
fields = data_dict
@ -142,8 +139,7 @@ class Export(GlancesExport):
if k in fields:
tags[k] = str(fields[k])
# Remove it from the field list (can not be a field and a tag)
if k in fields:
fields.pop(fields[k])
fields.pop(k)
# Add the measurement to the list
ret.append({'measurement': name, 'tags': tags, 'fields': fields})
return ret
@ -155,12 +151,12 @@ class Export(GlancesExport):
name = self.prefix + '.' + name
# Write input to the InfluxDB database
if len(points) == 0:
logger.debug("Cannot export empty {} stats to InfluxDB".format(name))
logger.debug(f"Cannot export empty {name} stats to InfluxDB")
else:
try:
self.client.write_points(self._normalize(name, columns, points), time_precision="s")
except Exception as e:
# Log level set to debug instead of error (see: issue #1561)
logger.debug("Cannot export {} stats to InfluxDB ({})".format(name, e))
# Log level set to warning instead of error (see: issue #1561)
logger.warning(f"Cannot export {name} stats to InfluxDB ({e})")
else:
logger.debug("Export {} stats to InfluxDB".format(name))
logger.debug(f"Export {name} stats to InfluxDB")

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -12,12 +11,12 @@
import sys
from platform import node
from glances.logger import logger
from glances.exports.export import GlancesExport
from influxdb_client import InfluxDBClient, WriteOptions
FIELD_TO_TAG = ['name', 'cmdline']
from glances.exports.export import GlancesExport
from glances.logger import logger
FIELD_TO_TAG = ['name', 'cmdline', 'type']
class Export(GlancesExport):
@ -25,7 +24,7 @@ class Export(GlancesExport):
def __init__(self, config=None, args=None):
"""Init the InfluxDB export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Mandatory configuration keys (additional to host and port)
self.org = None
@ -58,7 +57,7 @@ class Export(GlancesExport):
self.interval = 0
# and should be set to the Glances refresh time if the value is 0
self.interval = self.interval if self.interval > 0 else self.args.time
logger.debug("InfluxDB export interval is set to {} seconds".format(self.interval))
logger.debug(f"InfluxDB export interval is set to {self.interval} seconds")
# The hostname is always added as a tag
self.hostname = node().split('.')[0]
@ -71,20 +70,18 @@ class Export(GlancesExport):
if not self.export_enable:
return None
url = '{}://{}:{}'.format(self.protocol, self.host, self.port)
url = f'{self.protocol}://{self.host}:{self.port}'
try:
# See docs: https://influxdb-client.readthedocs.io/en/stable/api.html#influxdbclient
client = InfluxDBClient(url=url, enable_gzip=False, verify_ssl=False, org=self.org, token=self.token)
except Exception as e:
logger.critical("Cannot connect to InfluxDB server '%s' (%s)" % (url, e))
logger.critical(f"Cannot connect to InfluxDB server '{url}' ({e})")
sys.exit(2)
else:
logger.info(
"Connected to InfluxDB server version {} ({})".format(client.health().version, client.health().message)
)
logger.info(f"Connected to InfluxDB server version {client.health().version} ({client.health().message})")
# Create the write client
write_client = client.write_api(
return client.write_api(
write_options=WriteOptions(
batch_size=500,
flush_interval=self.interval * 1000,
@ -95,7 +92,6 @@ class Export(GlancesExport):
exponential_base=2,
)
)
return write_client
def _normalize(self, name, columns, points):
"""Normalize data for the InfluxDB's data model.
@ -117,9 +113,7 @@ class Export(GlancesExport):
# Manage field
if measurement is not None:
fields = {
k.replace('{}.'.format(measurement), ''): data_dict[k]
for k in data_dict
if k.startswith('{}.'.format(measurement))
k.replace(f'{measurement}.', ''): data_dict[k] for k in data_dict if k.startswith(f'{measurement}.')
}
else:
fields = data_dict
@ -153,8 +147,7 @@ class Export(GlancesExport):
if k in fields:
tags[k] = str(fields[k])
# Remove it from the field list (can not be a field and a tag)
if k in fields:
fields.pop(fields[k])
fields.pop(k)
# Add the measurement to the list
ret.append({'measurement': name, 'tags': tags, 'fields': fields})
return ret
@ -166,12 +159,12 @@ class Export(GlancesExport):
name = self.prefix + '.' + name
# Write input to the InfluxDB database
if len(points) == 0:
logger.debug("Cannot export empty {} stats to InfluxDB".format(name))
logger.debug(f"Cannot export empty {name} stats to InfluxDB")
else:
try:
self.client.write(self.bucket, self.org, self._normalize(name, columns, points), time_precision="s")
except Exception as e:
# Log level set to debug instead of error (see: issue #1561)
logger.debug("Cannot export {} stats to InfluxDB ({})".format(name, e))
# Log level set to warning instead of error (see: issue #1561)
logger.warning(f"Cannot export {name} stats to InfluxDB ({e})")
else:
logger.debug("Export {} stats to InfluxDB".format(name))
logger.debug(f"Export {name} stats to InfluxDB")

View File

@ -2,9 +2,9 @@
import sys
from glances.globals import listkeys, json_dumps
from glances.logger import logger
from glances.exports.export import GlancesExport
from glances.globals import json_dumps, listkeys
from glances.logger import logger
class Export(GlancesExport):
@ -12,7 +12,7 @@ class Export(GlancesExport):
def __init__(self, config=None, args=None):
"""Init the JSON export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# JSON file name
self.json_filename = args.export_json_file
@ -21,11 +21,11 @@ class Export(GlancesExport):
try:
self.json_file = open(self.json_filename, 'w')
self.json_file.close()
except IOError as e:
logger.critical("Cannot create the JSON file: {}".format(e))
except OSError as e:
logger.critical(f"Cannot create the JSON file: {e}")
sys.exit(2)
logger.info("Exporting stats to file: {}".format(self.json_filename))
logger.info(f"Exporting stats to file: {self.json_filename}")
self.export_enable = True
@ -34,7 +34,7 @@ class Export(GlancesExport):
def exit(self):
"""Close the JSON file."""
logger.debug("Finalise export interface %s" % self.export_name)
logger.debug(f"Finalise export interface {self.export_name}")
self.json_file.close()
def export(self, name, columns, points):
@ -44,11 +44,11 @@ class Export(GlancesExport):
if name == self.last_exported_list()[0] and self.buffer != {}:
# One whole loop has been completed
# Flush stats to file
logger.debug("Exporting stats ({}) to JSON file ({})".format(listkeys(self.buffer), self.json_filename))
logger.debug(f"Exporting stats ({listkeys(self.buffer)}) to JSON file ({self.json_filename})")
# Export stats to JSON file
with open(self.json_filename, "w") as self.json_file:
self.json_file.write("{}\n".format(json_dumps(self.buffer)))
self.json_file.write(f"{json_dumps(self.buffer)}\n")
# Reset buffer
self.buffer = {}

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -11,19 +10,19 @@
import sys
from glances.logger import logger
from glances.globals import json_dumps
from glances.exports.export import GlancesExport
from kafka import KafkaProducer
from glances.exports.export import GlancesExport
from glances.globals import json_dumps
from glances.logger import logger
class Export(GlancesExport):
"""This class manages the Kafka export module."""
def __init__(self, config=None, args=None):
"""Init the Kafka export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Mandatory configuration keys (additional to host and port)
self.topic = None
@ -48,7 +47,7 @@ class Export(GlancesExport):
return None
# Build the server URI with host and port
server_uri = '{}:{}'.format(self.host, self.port)
server_uri = f'{self.host}:{self.port}'
try:
s = KafkaProducer(
@ -57,16 +56,16 @@ class Export(GlancesExport):
compression_type=self.compression,
)
except Exception as e:
logger.critical("Cannot connect to Kafka server %s (%s)" % (server_uri, e))
logger.critical(f"Cannot connect to Kafka server {server_uri} ({e})")
sys.exit(2)
else:
logger.info("Connected to the Kafka server %s" % server_uri)
logger.info(f"Connected to the Kafka server {server_uri}")
return s
def export(self, name, columns, points):
"""Write the points to the kafka server."""
logger.debug("Export {} stats to Kafka".format(name))
logger.debug(f"Export {name} stats to Kafka")
# Create DB input
data = dict(zip(columns, points))
@ -84,7 +83,7 @@ class Export(GlancesExport):
value=data,
)
except Exception as e:
logger.error("Cannot export {} stats to Kafka ({})".format(name, e))
logger.error(f"Cannot export {name} stats to Kafka ({e})")
def exit(self):
"""Close the Kafka export module."""
@ -92,4 +91,4 @@ class Export(GlancesExport):
self.client.flush()
self.client.close()
# Call the father method
super(Export, self).exit()
super().exit()

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -10,12 +9,12 @@
"""MongoDB interface class."""
import sys
from glances.logger import logger
from glances.exports.export import GlancesExport
from urllib.parse import quote_plus
import pymongo
from urllib.parse import quote_plus
from glances.exports.export import GlancesExport
from glances.logger import logger
class Export(GlancesExport):
@ -23,7 +22,7 @@ class Export(GlancesExport):
def __init__(self, config=None, args=None):
"""Init the MongoDB export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Mandatory configuration keys (additional to host and port)
self.db = None
@ -45,13 +44,13 @@ class Export(GlancesExport):
if not self.export_enable:
return None
server_uri = 'mongodb://%s:%s@%s:%s' % (quote_plus(self.user), quote_plus(self.password), self.host, self.port)
server_uri = f'mongodb://{quote_plus(self.user)}:{quote_plus(self.password)}@{self.host}:{self.port}'
try:
client = pymongo.MongoClient(server_uri)
client.admin.command('ping')
except Exception as e:
logger.critical("Cannot connect to MongoDB server %s:%s (%s)" % (self.host, self.port, e))
logger.critical(f"Cannot connect to MongoDB server {self.host}:{self.port} ({e})")
sys.exit(2)
else:
logger.info("Connected to the MongoDB server")
@ -64,7 +63,7 @@ class Export(GlancesExport):
def export(self, name, columns, points):
"""Write the points to the MongoDB server."""
logger.debug("Export {} stats to MongoDB".format(name))
logger.debug(f"Export {name} stats to MongoDB")
# Create DB input
data = dict(zip(columns, points))
@ -73,4 +72,4 @@ class Export(GlancesExport):
try:
self.database()[name].insert_one(data)
except Exception as e:
logger.error("Cannot export {} stats to MongoDB ({})".format(name, e))
logger.error(f"Cannot export {name} stats to MongoDB ({e})")

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -13,21 +12,21 @@ import socket
import string
import sys
from glances.logger import logger
from glances.exports.export import GlancesExport
from glances.globals import json_dumps
# Import paho for MQTT
import certifi
import paho.mqtt.client as paho
from glances.exports.export import GlancesExport
from glances.globals import json_dumps
from glances.logger import logger
class Export(GlancesExport):
"""This class manages the MQTT export module."""
def __init__(self, config=None, args=None):
"""Init the MQTT export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Mandatory configuration keys (additional to host and port)
self.user = None
@ -87,7 +86,7 @@ class Export(GlancesExport):
client.loop_start()
return client
except Exception as e:
logger.critical("Connection to MQTT server %s:%s failed with error: %s " % (self.host, self.port, e))
logger.critical(f"Connection to MQTT server {self.host}:{self.port} failed with error: {e} ")
return None
def export(self, name, columns, points):
@ -109,14 +108,14 @@ class Export(GlancesExport):
self.client.publish(topic, value)
except Exception as e:
logger.error("Can not export stats to MQTT server (%s)" % e)
logger.error(f"Can not export stats to MQTT server ({e})")
elif self.topic_structure == 'per-plugin':
try:
topic = '/'.join([self.topic, self.devicename, name])
sensor_values = dict(zip(columns, points))
# Build the value to output
output_value = dict()
output_value = {}
for key in sensor_values:
split_key = key.split('.')
@ -124,7 +123,7 @@ class Export(GlancesExport):
current_level = output_value
for depth in range(len(split_key) - 1):
if split_key[depth] not in current_level:
current_level[split_key[depth]] = dict()
current_level[split_key[depth]] = {}
current_level = current_level[split_key[depth]]
# Add the value
@ -133,4 +132,4 @@ class Export(GlancesExport):
json_value = json_dumps(output_value)
self.client.publish(topic, json_value)
except Exception as e:
logger.error("Can not export stats to MQTT server (%s)" % e)
logger.error(f"Can not export stats to MQTT server ({e})")

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -12,18 +11,18 @@
import sys
from numbers import Number
from glances.logger import logger
from glances.exports.export import GlancesExport
import potsdb
from glances.exports.export import GlancesExport
from glances.logger import logger
class Export(GlancesExport):
"""This class manages the OpenTSDB export module."""
def __init__(self, config=None, args=None):
"""Init the OpenTSDB export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Mandatory configuration keys (additional to host and port)
# N/A
@ -52,7 +51,7 @@ class Export(GlancesExport):
try:
db = potsdb.Client(self.host, port=int(self.port), check_host=True)
except Exception as e:
logger.critical("Cannot connect to OpenTSDB server %s:%s (%s)" % (self.host, self.port, e))
logger.critical(f"Cannot connect to OpenTSDB server {self.host}:{self.port} ({e})")
sys.exit(2)
return db
@ -62,18 +61,18 @@ class Export(GlancesExport):
for i in range(len(columns)):
if not isinstance(points[i], Number):
continue
stat_name = '{}.{}.{}'.format(self.prefix, name, columns[i])
stat_name = f'{self.prefix}.{name}.{columns[i]}'
stat_value = points[i]
tags = self.parse_tags(self.tags)
try:
self.client.send(stat_name, stat_value, **tags)
except Exception as e:
logger.error("Can not export stats %s to OpenTSDB (%s)" % (name, e))
logger.debug("Export {} stats to OpenTSDB".format(name))
logger.error(f"Can not export stats {name} to OpenTSDB ({e})")
logger.debug(f"Export {name} stats to OpenTSDB")
def exit(self):
"""Close the OpenTSDB export module."""
# Wait for all outstanding metrics to be sent and for the background thread to close
self.client.wait()
# Call the father method
super(Export, self).exit()
super().exit()

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -12,11 +11,11 @@
import sys
from numbers import Number
from glances.logger import logger
from prometheus_client import Gauge, start_http_server
from glances.exports.export import GlancesExport
from glances.globals import iteritems, listkeys
from prometheus_client import start_http_server, Gauge
from glances.logger import logger
class Export(GlancesExport):
@ -26,7 +25,7 @@ class Export(GlancesExport):
def __init__(self, config=None, args=None):
"""Init the Prometheus export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Load the Prometheus configuration file section
self.export_enable = self.load_conf('prometheus', mandatories=['host', 'port', 'labels'], options=['prefix'])
@ -52,14 +51,14 @@ class Export(GlancesExport):
try:
start_http_server(port=int(self.port), addr=self.host)
except Exception as e:
logger.critical("Can not start Prometheus exporter on {}:{} ({})".format(self.host, self.port, e))
logger.critical(f"Can not start Prometheus exporter on {self.host}:{self.port} ({e})")
sys.exit(2)
else:
logger.info("Start Prometheus exporter on {}:{}".format(self.host, self.port))
logger.info(f"Start Prometheus exporter on {self.host}:{self.port}")
def export(self, name, columns, points):
"""Write the points to the Prometheus exporter using Gauge."""
logger.debug("Export {} stats to Prometheus exporter".format(name))
logger.debug(f"Export {name} stats to Prometheus exporter")
# Remove non number stats and convert all to float (for Boolean)
data = {k: float(v) for (k, v) in iteritems(dict(zip(columns, points))) if isinstance(v, Number)}

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -14,19 +13,19 @@ import socket
import sys
from numbers import Number
from glances.logger import logger
from glances.exports.export import GlancesExport
# Import pika for RabbitMQ
import pika
from glances.exports.export import GlancesExport
from glances.logger import logger
class Export(GlancesExport):
"""This class manages the rabbitMQ export module."""
def __init__(self, config=None, args=None):
"""Init the RabbitMQ export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Mandatory configuration keys (additional to host and port)
self.user = None
@ -67,10 +66,9 @@ class Export(GlancesExport):
self.protocol + '://' + self.user + ':' + self.password + '@' + self.host + ':' + self.port + '/'
)
connection = pika.BlockingConnection(parameters)
channel = connection.channel()
return channel
return connection.channel()
except Exception as e:
logger.critical("Connection to rabbitMQ server %s:%s failed. %s" % (self.host, self.port, e))
logger.critical(f"Connection to rabbitMQ server {self.host}:{self.port} failed. {e}")
sys.exit(2)
def export(self, name, columns, points):
@ -79,10 +77,10 @@ class Export(GlancesExport):
for i in range(len(columns)):
if not isinstance(points[i], Number):
continue
else:
data += ", " + columns[i] + "=" + str(points[i])
data += ", " + columns[i] + "=" + str(points[i])
logger.debug(data)
try:
self.client.basic_publish(exchange='', routing_key=self.queue, body=data)
except Exception as e:
logger.error("Can not export stats to RabbitMQ (%s)" % e)
logger.error(f"Can not export stats to RabbitMQ ({e})")

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -9,12 +8,11 @@
"""RESTful interface class."""
from requests import post
from glances.exports.export import GlancesExport
from glances.globals import listkeys
from glances.logger import logger
from glances.exports.export import GlancesExport
from requests import post
class Export(GlancesExport):
@ -23,7 +21,7 @@ class Export(GlancesExport):
def __init__(self, config=None, args=None):
"""Init the RESTful export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Mandatory configuration keys (additional to host and port)
self.protocol = None
@ -46,15 +44,15 @@ class Export(GlancesExport):
if not self.export_enable:
return None
# Build the RESTful URL where the stats will be posted
url = '{}://{}:{}{}'.format(self.protocol, self.host, self.port, self.path)
logger.info("Stats will be exported to the RESTful endpoint {}".format(url))
url = f'{self.protocol}://{self.host}:{self.port}{self.path}'
logger.info(f"Stats will be exported to the RESTful endpoint {url}")
return url
def export(self, name, columns, points):
"""Export the stats to the Statsd server."""
if name == self.last_exported_list()[0] and self.buffer != {}:
# One complete loop has been done
logger.debug("Export stats ({}) to RESTful endpoint ({})".format(listkeys(self.buffer), self.client))
logger.debug(f"Export stats ({listkeys(self.buffer)}) to RESTful endpoint ({self.client})")
# Export stats
post(self.client, json=self.buffer, allow_redirects=True)
# Reset buffer

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -12,19 +11,19 @@
import socket
from numbers import Number
from glances.logger import logger
from glances.exports.export import GlancesExport
# Import bernhard for Riemann
import bernhard
from glances.exports.export import GlancesExport
from glances.logger import logger
class Export(GlancesExport):
"""This class manages the Riemann export module."""
def __init__(self, config=None, args=None):
"""Init the Riemann export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Mandatory configuration keys (additional to host and port)
# N/A
@ -48,10 +47,9 @@ class Export(GlancesExport):
if not self.export_enable:
return None
try:
client = bernhard.Client(host=self.host, port=self.port)
return client
return bernhard.Client(host=self.host, port=self.port)
except Exception as e:
logger.critical("Connection to Riemann failed : %s " % e)
logger.critical(f"Connection to Riemann failed : {e} ")
return None
def export(self, name, columns, points):
@ -59,10 +57,10 @@ class Export(GlancesExport):
for i in range(len(columns)):
if not isinstance(points[i], Number):
continue
else:
data = {'host': self.hostname, 'service': name + " " + columns[i], 'metric': points[i]}
logger.debug(data)
try:
self.client.send(data)
except Exception as e:
logger.error("Cannot export stats to Riemann (%s)" % e)
data = {'host': self.hostname, 'service': name + " " + columns[i], 'metric': points[i]}
logger.debug(data)
try:
self.client.send(data)
except Exception as e:
logger.error(f"Cannot export stats to Riemann ({e})")

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -11,18 +10,18 @@
from numbers import Number
from glances.logger import logger
from glances.exports.export import GlancesExport
from statsd import StatsClient
from glances.exports.export import GlancesExport
from glances.logger import logger
class Export(GlancesExport):
"""This class manages the Statsd export module."""
def __init__(self, config=None, args=None):
"""Init the Statsd export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Mandatory configuration keys (additional to host and port)
# N/A
@ -46,7 +45,7 @@ class Export(GlancesExport):
"""Init the connection to the Statsd server."""
if not self.export_enable:
return None
logger.info("Stats will be exported to StatsD server: {}:{}".format(self.host, self.port))
logger.info(f"Stats will be exported to StatsD server: {self.host}:{self.port}")
return StatsClient(self.host, int(self.port), prefix=self.prefix)
def export(self, name, columns, points):
@ -54,13 +53,13 @@ class Export(GlancesExport):
for i in range(len(columns)):
if not isinstance(points[i], Number):
continue
stat_name = '{}.{}'.format(name, columns[i])
stat_name = f'{name}.{columns[i]}'
stat_value = points[i]
try:
self.client.gauge(normalize(stat_name), stat_value)
except Exception as e:
logger.error("Can not export stats to Statsd (%s)" % e)
logger.debug("Export {} stats to Statsd".format(name))
logger.error(f"Can not export stats to Statsd ({e})")
logger.debug(f"Export {name} stats to Statsd")
def normalize(name):
@ -69,6 +68,4 @@ def normalize(name):
# Name should not contain some specials chars (issue #1068)
ret = name.replace(':', '')
ret = ret.replace('%', '')
ret = ret.replace(' ', '_')
return ret
return ret.replace(' ', '_')
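As a quick illustration of the naming rules enforced by `normalize()` (the sample stat names are invented):

```python
def normalize(name):
    # Same sanitization as above (issue #1068): drop ':' and '%', spaces become '_'
    return name.replace(':', '').replace('%', '').replace(' ', '_')

print(normalize('disk C: read'))  # disk_C_read
print(normalize('mem used %'))    # mem_used_
```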

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -11,21 +10,20 @@
import sys
from glances.globals import b
from glances.logger import logger
from glances.exports.export import GlancesExport
from glances.globals import json_dumps
import zmq
from zmq.utils.strtypes import asbytes
from glances.exports.export import GlancesExport
from glances.globals import b, json_dumps
from glances.logger import logger
class Export(GlancesExport):
"""This class manages the ZeroMQ export module."""
def __init__(self, config=None, args=None):
"""Init the ZeroMQ export IF."""
super(Export, self).__init__(config=config, args=args)
super().__init__(config=config, args=args)
# Mandatory configuration keys (additional to host and port)
self.prefix = None
@ -47,17 +45,17 @@ class Export(GlancesExport):
if not self.export_enable:
return None
server_uri = 'tcp://{}:{}'.format(self.host, self.port)
server_uri = f'tcp://{self.host}:{self.port}'
try:
self.context = zmq.Context()
publisher = self.context.socket(zmq.PUB)
publisher.bind(server_uri)
except Exception as e:
logger.critical("Cannot connect to ZeroMQ server %s (%s)" % (server_uri, e))
logger.critical(f"Cannot connect to ZeroMQ server {server_uri} ({e})")
sys.exit(2)
else:
logger.info("Connected to the ZeroMQ server %s" % server_uri)
logger.info(f"Connected to the ZeroMQ server {server_uri}")
return publisher
@ -70,7 +68,7 @@ class Export(GlancesExport):
def export(self, name, columns, points):
"""Write the points to the ZeroMQ server."""
logger.debug("Export {} stats to ZeroMQ".format(name))
logger.debug(f"Export {name} stats to ZeroMQ")
# Create DB input
data = dict(zip(columns, points))
@ -90,6 +88,6 @@ class Export(GlancesExport):
try:
self.client.send_multipart(message)
except Exception as e:
logger.error("Cannot export {} stats to ZeroMQ ({})".format(name, e))
logger.error(f"Cannot export {name} stats to ZeroMQ ({e})")
return True

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -12,7 +11,7 @@ import re
from glances.logger import logger
class GlancesFilterList(object):
class GlancesFilterList:
"""Manage a lis of GlancesFilter objects
>>> fl = GlancesFilterList()
@ -55,7 +54,7 @@ class GlancesFilterList(object):
return False
class GlancesFilter(object):
class GlancesFilter:
"""Allow Glances to filter processes
>>> f = GlancesFilter()
@ -127,9 +126,9 @@ class GlancesFilter(object):
# Compute the regular expression
try:
self._filter_re = re.compile(self.filter)
logger.debug("Filter regex compilation OK: {}".format(self.filter))
logger.debug(f"Filter regex compilation OK: {self.filter}")
except Exception as e:
logger.error("Cannot compile filter regex: {} ({})".format(self.filter, e))
logger.error(f"Cannot compile filter regex: {self.filter} ({e})")
self._filter = None
self._filter_re = None
self._filter_key = None
@ -156,9 +155,9 @@ class GlancesFilter(object):
if self.filter_key is None:
# Apply filter on command line and process name
return self._is_process_filtered(process, key='name') or self._is_process_filtered(process, key='cmdline')
else:
# Apply filter on <key>
return self._is_process_filtered(process)
# Apply filter on <key>
return self._is_process_filtered(process)
def _is_process_filtered(self, process, key=None):
"""Return True if the process[key] should be filtered according to the current filter"""

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -8,15 +7,13 @@
#
"""Manage the folder list."""
from __future__ import unicode_literals
from glances.timer import Timer
from glances.globals import nativestr, folder_size
from glances.globals import folder_size, nativestr
from glances.logger import logger
from glances.timer import Timer
class FolderList(object):
class FolderList:
"""This class describes the optional monitored folder list.
The folder list is a list of 'important' folders to monitor.
@ -67,8 +64,7 @@ class FolderList(object):
value['path'] = self.config.get_value(section, key + 'path')
if value['path'] is None:
continue
else:
value['path'] = nativestr(value['path'])
value['path'] = nativestr(value['path'])
# Optional conf keys
# Refresh time

View File

@ -1,4 +1,4 @@
# -*- coding: utf-8 -*-
# ruff: noqa: F401
#
# This file is part of Glances.
#
@ -13,29 +13,27 @@
# GLOBAL IMPORTS
################
import errno
import os
import sys
import platform
import ujson
from operator import itemgetter, methodcaller
import unicodedata
import types
import subprocess
from datetime import datetime
import re
import base64
import errno
import functools
import weakref
import os
import platform
import queue
import re
import subprocess
import sys
import weakref
from configparser import ConfigParser, NoOptionError, NoSectionError
from datetime import datetime
from operator import itemgetter, methodcaller
from statistics import mean
from xmlrpc.client import Fault, ProtocolError, ServerProxy, Transport, Server
from xmlrpc.server import SimpleXMLRPCRequestHandler, SimpleXMLRPCServer
from urllib.request import urlopen, Request
from urllib.error import HTTPError, URLError
from urllib.parse import urlparse
from urllib.request import Request, urlopen
from xmlrpc.client import Fault, ProtocolError, Server, ServerProxy, Transport
from xmlrpc.server import SimpleXMLRPCRequestHandler, SimpleXMLRPCServer
import orjson
# Correct issue #1025 by monkey patching the xmlrpc lib

from defusedxml.xmlrpc import monkey_patch
@ -91,7 +89,7 @@ def printandflush(string):
def to_ascii(s):
"""Convert the bytes string to a ASCII string
Usefull to remove accent (diacritics)"""
Useful to remove accent (diacritics)"""
if isinstance(s, binary_type):
return s.decode()
return s.encode('ascii', 'ignore').decode()
@ -136,10 +134,9 @@ def b(s, errors='replace'):
def nativestr(s, errors='replace'):
if isinstance(s, text_type):
return s
elif isinstance(s, (int, float)):
if isinstance(s, (int, float)):
return s.__str__()
else:
return s.decode('utf-8', errors=errors)
return s.decode('utf-8', errors=errors)
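For context, the guard-clause version of `nativestr()` behaves as follows (a standalone sketch; `text_type` is assumed to be the `str` alias defined elsewhere in globals.py):

```python
text_type = str  # assumption: Glances aliases text_type to str in globals.py

def nativestr(s, errors='replace'):
    # str passes through, numbers are stringified, bytes are decoded
    if isinstance(s, text_type):
        return s
    if isinstance(s, (int, float)):
        return s.__str__()
    return s.decode('utf-8', errors=errors)

print(nativestr('ok'), nativestr(42), nativestr(b'caf\xc3\xa9'))  # ok 42 café
```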
def system_exec(command):
@ -147,7 +144,7 @@ def system_exec(command):
try:
res = subprocess.run(command.split(' '), stdout=subprocess.PIPE).stdout.decode('utf-8')
except Exception as e:
res = 'ERROR: {}'.format(e)
res = f'ERROR: {e}'
return res.rstrip()
@ -156,7 +153,7 @@ def subsample(data, sampling):
Data should be a list of numerical itervalues
Return a subsampled list of sampling lenght
Return a subsampled list of sampling length
"""
if len(data) <= sampling:
return data
@ -204,7 +201,7 @@ def is_admin():
try:
return ctypes.windll.shell32.IsUserAnAdmin()
except Exception as e:
print("Admin check failed with error: %s" % e)
print(f"Admin check failed with error: {e}")
traceback.print_exc()
return False
else:
@ -219,13 +216,13 @@ def key_exist_value_not_none(k, d):
return k in d and d[k] is not None
def key_exist_value_not_none_not_v(k, d, value='', lengh=None):
def key_exist_value_not_none_not_v(k, d, value='', length=None):
# Return True if:
# - key k exists
# - d[k] is not None
# - d[k] != value
# - if lengh is not None and len(d[k]) >= lengh
return k in d and d[k] is not None and d[k] != value and (lengh is None or len(d[k]) >= lengh)
# - if length is not None and len(d[k]) >= length
return k in d and d[k] is not None and d[k] != value and (length is None or len(d[k]) >= length)
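Since only the parameter name changed, the helper's contract is easy to miss in the diff; a small usage sketch (dictionary contents invented):

```python
def key_exist_value_not_none_not_v(k, d, value='', length=None):
    # Verbatim copy of the helper above, for illustration only
    return k in d and d[k] is not None and d[k] != value and (length is None or len(d[k]) >= length)

d = {'name': 'glances', 'alias': '', 'tag': 'ab'}
print(key_exist_value_not_none_not_v('name', d))             # True
print(key_exist_value_not_none_not_v('alias', d))            # False: equals the default ''
print(key_exist_value_not_none_not_v('tag', d, length=3))    # False: shorter than length
print(key_exist_value_not_none_not_v('missing', d))          # False: key not present
```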
def disable(class_name, var):
@ -301,7 +298,7 @@ def urlopen_auth(url, username, password):
return urlopen(
Request(
url,
headers={'Authorization': 'Basic ' + base64.b64encode(('%s:%s' % (username, password)).encode()).decode()},
headers={'Authorization': 'Basic ' + base64.b64encode((f'{username}:{password}').encode()).decode()},
)
)
@ -312,9 +309,9 @@ def json_dumps(data):
Manage the issue #815 for Windows OS with UnicodeDecodeError catching.
"""
try:
return ujson.dumps(data)
return orjson.dumps(data)
except UnicodeDecodeError:
return ujson.dumps(data, ensure_ascii=False)
return orjson.dumps(data, ensure_ascii=False)
def dictlist(data, item):
@ -339,8 +336,7 @@ def json_dumps_dictlist(data, item):
dl = dictlist(data, item)
if dl is None:
return None
else:
return json_dumps(dl)
return json_dumps(dl)
def string_value_to_float(s):
@ -429,12 +425,12 @@ def weak_lru_cache(maxsize=128, typed=False):
def namedtuple_to_dict(data):
"""Convert a namedtuple to a dict, using the _asdict() method embeded in PsUtil stats."""
"""Convert a namedtuple to a dict, using the _asdict() method embedded in PsUtil stats."""
return {k: (v._asdict() if hasattr(v, '_asdict') else v) for k, v in data.items()}
def list_of_namedtuple_to_list_of_dict(data):
"""Convert a list of namedtuples to a dict, using the _asdict() method embeded in PsUtil stats."""
"""Convert a list of namedtuples to a dict, using the _asdict() method embedded in PsUtil stats."""
return [namedtuple_to_dict(d) for d in data]

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -12,7 +11,7 @@
from glances.attribute import GlancesAttribute
class GlancesHistory(object):
class GlancesHistory:
"""This class manage a dict of GlancesAttribute
- key: stats name
- value: GlancesAttribute"""

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -9,13 +8,12 @@
"""Custom logger class."""
import os
import json
import getpass
import tempfile
import json
import logging
import logging.config
import os
import tempfile
from glances.globals import safe_makedirs
@ -37,7 +35,7 @@ elif os.path.isdir(_XDG_CACHE_HOME) and os.access(_XDG_CACHE_HOME, os.W_OK):
safe_makedirs(os.path.join(_XDG_CACHE_HOME, 'glances'))
LOG_FILENAME = os.path.join(_XDG_CACHE_HOME, 'glances', 'glances.log')
else:
LOG_FILENAME = os.path.join(tempfile.gettempdir(), 'glances-{}.log'.format(getpass.getuser()))
LOG_FILENAME = os.path.join(tempfile.gettempdir(), f'glances-{getpass.getuser()}.log')
# Define the logging configuration
LOGGING_CFG = {
@ -89,7 +87,7 @@ def glances_logger(env_key='LOG_CFG'):
user_file = os.getenv(env_key, None)
if user_file and os.path.exists(user_file):
# A user file as been defined. Use it...
with open(user_file, 'rt') as f:
with open(user_file) as f:
config = json.load(f)
# Load the configuration

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -15,14 +14,14 @@ import tempfile
from logging import DEBUG
from warnings import simplefilter
from glances import __version__, psutil_version, __apiversion__
from glances.globals import WINDOWS, disable, enable
from glances import __apiversion__, __version__, psutil_version
from glances.config import Config
from glances.globals import WINDOWS, disable, enable
from glances.logger import LOG_FILENAME, logger
from glances.processes import sort_processes_key_list
from glances.logger import logger, LOG_FILENAME
class GlancesMain(object):
class GlancesMain:
"""Main class to manage Glances instance."""
# Default stats' minimum refresh time is 2 seconds
@ -101,10 +100,10 @@ Examples of use:
def version_msg(self):
"""Return the version message."""
version = 'Glances version:\t{}\n'.format(__version__)
version += 'Glances API version:\t{}\n'.format(__apiversion__)
version += 'PsUtil version:\t\t{}\n'.format(psutil_version)
version += 'Log file:\t\t{}\n'.format(LOG_FILENAME)
version = f'Glances version:\t{__version__}\n'
version += f'Glances API version:\t{__apiversion__}\n'
version += f'PsUtil version:\t\t{psutil_version}\n'
version += f'Log file:\t\t{LOG_FILENAME}\n'
return version
def init_args(self):
@ -241,7 +240,7 @@ Examples of use:
)
parser.add_argument(
'--enable-irq', action='store_true', default=False, dest='enable_irq', help='enable IRQ module'
),
)
parser.add_argument(
'--enable-process-extended',
action='store_true',
@ -255,14 +254,14 @@ Examples of use:
default=True,
dest='enable_separator',
help='disable separator in the UI (between top and others modules)',
),
)
parser.add_argument(
'--disable-cursor',
action='store_true',
default=False,
dest='disable_cursor',
help='disable cursor (process selection) in the UI',
),
)
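The trailing commas removed here were harmless but misleading: a comma after an expression statement wraps its value in a one-element tuple, so each `parser.add_argument(...)` still ran, it just discarded its return value inside a tuple. For example:

```python
value = len('glances'),   # note the trailing comma
print(value)              # (7,) -- a 1-tuple, not the int 7
```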
# Sort processes list
parser.add_argument(
'--sort-processes',
@ -334,7 +333,7 @@ Examples of use:
default=None,
type=int,
dest='port',
help='define the client/server TCP port [default: {}]'.format(self.server_port),
help=f'define the client/server TCP port [default: {self.server_port}]',
)
parser.add_argument(
'-B',
@ -374,7 +373,7 @@ Examples of use:
default=self.DEFAULT_REFRESH_TIME,
type=float,
dest='time',
help='set minimum refresh rate in seconds [default: {} sec]'.format(self.DEFAULT_REFRESH_TIME),
help=f'set minimum refresh rate in seconds [default: {self.DEFAULT_REFRESH_TIME} sec]',
)
parser.add_argument(
'-w',
@ -382,14 +381,14 @@ Examples of use:
action='store_true',
default=False,
dest='webserver',
help='run Glances in web server mode (FastAPI, Uvicorn, Jinja2 and OrJsonLib needed)',
help='run Glances in web server mode (FastAPI, Uvicorn, Jinja2 libs needed)',
)
parser.add_argument(
'--cached-time',
default=self.cached_time,
type=int,
dest='cached_time',
help='set the server cache time [default: {} sec]'.format(self.cached_time),
help=f'set the server cache time [default: {self.cached_time} sec]',
)
parser.add_argument(
'--stop-after',
@ -577,7 +576,7 @@ Examples of use:
if args.time == self.DEFAULT_REFRESH_TIME:
args.time = global_refresh
logger.debug('Global refresh rate is set to {} seconds'.format(args.time))
logger.debug(f'Global refresh rate is set to {args.time} seconds')
def init_plugins(self, args):
"""Init Glances plugins"""
@ -585,7 +584,7 @@ Examples of use:
for s in self.config.sections():
if self.config.has_section(s) and (self.config.get_bool_value(s, 'disable', False)):
disable(args, s)
logger.debug('{} disabled by the configuration file'.format(s))
logger.debug(f'{s} disabled by the configuration file')
# The configuration key can be overwrite from the command line
if args and args.disable_plugin and 'all' in args.disable_plugin.split(','):
if not args.enable_plugin:
@ -664,19 +663,19 @@ Examples of use:
# Interactive or file password
if args.server:
args.password = self.__get_password(
description='Define the Glances server password ({} username): '.format(args.username),
description=f'Define the Glances server password ({args.username} username): ',
confirm=True,
username=args.username,
)
elif args.webserver:
args.password = self.__get_password(
description='Define the Glances webserver password ({} username): '.format(args.username),
description=f'Define the Glances webserver password ({args.username} username): ',
confirm=True,
username=args.username,
)
elif args.client:
args.password = self.__get_password(
description='Enter the Glances server password ({} username): '.format(args.username),
description=f'Enter the Glances server password ({args.username} username): ',
clear=True,
username=args.username,
)

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -9,16 +8,16 @@
"""Manage Glances update."""
from datetime import datetime, timedelta
import threading
import json
import pickle
import os
import pickle
import threading
from datetime import datetime, timedelta
from ssl import CertificateError
from glances import __version__
from glances.globals import nativestr, urlopen, HTTPError, URLError, safe_makedirs
from glances.config import user_cache_dir
from glances.globals import HTTPError, URLError, nativestr, safe_makedirs, urlopen
from glances.logger import logger
try:
@ -26,13 +25,13 @@ try:
PACKAGING_IMPORT = True
except Exception as e:
logger.warning("Unable to import 'packaging' module ({}). Glances cannot check for updates.".format(e))
logger.warning(f"Unable to import 'packaging' module ({e}). Glances cannot check for updates.")
PACKAGING_IMPORT = False
PYPI_API_URL = 'https://pypi.python.org/pypi/Glances/json'
class Outdated(object):
class Outdated:
"""
This class aims at providing methods to warn the user when a new Glances
version is available on the PyPI repository (https://pypi.python.org/pypi/Glances/).
@ -46,7 +45,7 @@ class Outdated(object):
self.cache_file = os.path.join(self.cache_dir, 'glances-version.db')
# Set default value...
self.data = {u'installed_version': __version__, u'latest_version': '0.0', u'refresh_date': datetime.now()}
self.data = {'installed_version': __version__, 'latest_version': '0.0', 'refresh_date': datetime.now()}
# Disable update check if `packaging` is not installed
if not PACKAGING_IMPORT:
@ -56,7 +55,7 @@ class Outdated(object):
if not self.args.disable_check_update:
self.load_config(config)
logger.debug("Check Glances version up-to-date: {}".format(not self.args.disable_check_update))
logger.debug(f"Check Glances version up-to-date: {not self.args.disable_check_update}")
# And update !
self.get_pypi_version()
@ -68,7 +67,7 @@ class Outdated(object):
if hasattr(config, 'has_section') and config.has_section(global_section):
self.args.disable_check_update = config.get_value(global_section, 'check_update').lower() == 'false'
else:
logger.debug("Cannot find section {} in the configuration file".format(global_section))
logger.debug(f"Cannot find section {global_section} in the configuration file")
return False
return True
@ -110,9 +109,7 @@ class Outdated(object):
# Check is disabled by configuration
return False
logger.debug(
"Check Glances version (installed: {} / latest: {})".format(self.installed_version(), self.latest_version())
)
logger.debug(f"Check Glances version (installed: {self.installed_version()} / latest: {self.latest_version()})")
return Version(self.latest_version()) > Version(self.installed_version())
def _load_cache(self):
@ -124,7 +121,7 @@ class Outdated(object):
with open(self.cache_file, 'rb') as f:
cached_data = pickle.load(f)
except Exception as e:
logger.debug("Cannot read version from cache file: {} ({})".format(self.cache_file, e))
logger.debug(f"Cannot read version from cache file: {self.cache_file} ({e})")
else:
logger.debug("Read version from cache file")
if (
@ -147,21 +144,21 @@ class Outdated(object):
with open(self.cache_file, 'wb') as f:
pickle.dump(self.data, f)
except Exception as e:
logger.error("Cannot write version to cache file {} ({})".format(self.cache_file, e))
logger.error(f"Cannot write version to cache file {self.cache_file} ({e})")
def _update_pypi_version(self):
"""Get the latest PyPI version (as a string) via the RESTful JSON API"""
logger.debug("Get latest Glances version from the PyPI RESTful API ({})".format(PYPI_API_URL))
logger.debug(f"Get latest Glances version from the PyPI RESTful API ({PYPI_API_URL})")
# Update the current time
self.data[u'refresh_date'] = datetime.now()
self.data['refresh_date'] = datetime.now()
try:
res = urlopen(PYPI_API_URL, timeout=3).read()
except (HTTPError, URLError, CertificateError) as e:
logger.debug("Cannot get Glances version from the PyPI RESTful API ({})".format(e))
logger.debug(f"Cannot get Glances version from the PyPI RESTful API ({e})")
else:
self.data[u'latest_version'] = json.loads(nativestr(res))['info']['version']
self.data['latest_version'] = json.loads(nativestr(res))['info']['version']
logger.debug("Save Glances version to the cache file")
# Save result to the cache file

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -9,12 +8,10 @@
"""Manage bars for Glances output."""
from __future__ import division
from math import modf
class Bar(object):
class Bar:
"""Manage bar (progression or status).
import sys
@ -76,6 +73,7 @@ class Bar(object):
return self.__size
if self.__display_value:
return self.__size - 6
return None
@property
def percent(self):
@ -114,7 +112,7 @@ class Bar(object):
ret, '>' if self.percent > self.max_value else ' ', self.max_value, self.__unit_char
)
else:
ret = '{}{:5.1f}{}'.format(ret, self.percent, self.__unit_char)
ret = f'{ret}{self.percent:5.1f}{self.__unit_char}'
# Add overlay
if overlay and len(overlay) < len(ret) - 6:

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -8,15 +7,15 @@
#
"""Curses interface class."""
from __future__ import unicode_literals
import getpass
import sys
from glances.globals import MACOS, WINDOWS, nativestr, u, itervalues, enable, disable
from glances.logger import logger
from glances.events_list import glances_events
from glances.processes import glances_processes, sort_processes_key_list
from glances.globals import MACOS, WINDOWS, disable, enable, itervalues, nativestr, u
from glances.logger import logger
from glances.outputs.glances_unicode import unicode_message
from glances.processes import glances_processes, sort_processes_key_list
from glances.timer import Timer
# Import curses library for "normal" operating system
@ -31,7 +30,7 @@ except ImportError:
sys.exit(1)
class _GlancesCurses(object):
class _GlancesCurses:
"""This class manages the curses display (and key pressed).
Note: It is a private class, use GlancesCursesClient or GlancesCursesBrowser.
@ -146,15 +145,15 @@ class _GlancesCurses(object):
logger.critical("Cannot init the curses library.\n")
sys.exit(1)
else:
logger.debug("Curses library initialized with term: {}".format(curses.longname()))
logger.debug(f"Curses library initialized with term: {curses.longname()}")
except Exception as e:
if args.export:
logger.info("Cannot init the curses library, quiet mode on and export.")
args.quiet = True
return
else:
logger.critical("Cannot init the curses library ({})".format(e))
sys.exit(1)
logger.critical(f"Cannot init the curses library ({e})")
sys.exit(1)
# Load configuration file
self.load_config(config)
@ -163,7 +162,7 @@ class _GlancesCurses(object):
self._init_cursor()
# Init the colors
self._init_colors()
self.colors_list = build_colors_list(args)
# Init main window
self.term_window = self.screen.subwin(0, 0)
@ -196,8 +195,10 @@ class _GlancesCurses(object):
"""Load the outputs section of the configuration file."""
if config is not None and config.has_section('outputs'):
logger.debug('Read the outputs section in the configuration file')
# Separator ?
self.args.enable_separator = config.get_bool_value('outputs', 'separator', default=True)
# Separator
self.args.enable_separator = config.get_bool_value(
'outputs', 'separator', default=self.args.enable_separator
)
# Set the left sidebar list
self._left_sidebar = config.get_list_value('outputs', 'left_menu', default=self._left_sidebar)
@ -215,130 +216,6 @@ class _GlancesCurses(object):
curses.cbreak()
self.set_cursor(0)
def _init_colors(self):
"""Init the Curses color layout."""
# Set curses options
try:
if hasattr(curses, 'start_color'):
curses.start_color()
logger.debug('Curses interface compatible with {} colors'.format(curses.COLORS))
if hasattr(curses, 'use_default_colors'):
curses.use_default_colors()
except Exception as e:
logger.warning('Error initializing terminal color ({})'.format(e))
# Init colors
if self.args.disable_bold:
A_BOLD = 0
self.args.disable_bg = True
else:
A_BOLD = curses.A_BOLD
self.title_color = A_BOLD
self.title_underline_color = A_BOLD | curses.A_UNDERLINE
self.help_color = A_BOLD
if curses.has_colors():
# The screen is compatible with a colored design
# ex: export TERM=xterm-256color
# export TERM=xterm-color
curses.init_pair(1, -1, -1)
if self.args.disable_bg:
curses.init_pair(2, curses.COLOR_RED, -1)
curses.init_pair(3, curses.COLOR_GREEN, -1)
curses.init_pair(5, curses.COLOR_MAGENTA, -1)
else:
curses.init_pair(2, -1, curses.COLOR_RED)
curses.init_pair(3, -1, curses.COLOR_GREEN)
curses.init_pair(5, -1, curses.COLOR_MAGENTA)
curses.init_pair(4, curses.COLOR_BLUE, -1)
curses.init_pair(6, curses.COLOR_RED, -1)
curses.init_pair(7, curses.COLOR_GREEN, -1)
curses.init_pair(8, curses.COLOR_MAGENTA, -1)
# Colors text styles
self.no_color = curses.color_pair(1)
self.default_color = curses.color_pair(3) | A_BOLD
self.nice_color = curses.color_pair(8)
self.cpu_time_color = curses.color_pair(8)
self.ifCAREFUL_color = curses.color_pair(4) | A_BOLD
self.ifWARNING_color = curses.color_pair(5) | A_BOLD
self.ifCRITICAL_color = curses.color_pair(2) | A_BOLD
self.default_color2 = curses.color_pair(7)
self.ifCAREFUL_color2 = curses.color_pair(4)
self.ifWARNING_color2 = curses.color_pair(8) | A_BOLD
self.ifCRITICAL_color2 = curses.color_pair(6) | A_BOLD
self.ifINFO_color = curses.color_pair(4)
self.filter_color = A_BOLD
self.selected_color = A_BOLD
self.separator = curses.color_pair(1)
if curses.COLORS > 8:
# ex: export TERM=xterm-256color
colors_list = [curses.COLOR_CYAN, curses.COLOR_YELLOW]
for i in range(0, 3):
try:
curses.init_pair(i + 9, colors_list[i], -1)
except Exception:
curses.init_pair(i + 9, -1, -1)
self.filter_color = curses.color_pair(9) | A_BOLD
self.selected_color = curses.color_pair(10) | A_BOLD
# Define separator line style
curses.init_color(11, 500, 500, 500)
curses.init_pair(11, curses.COLOR_BLACK, -1)
self.separator = curses.color_pair(11)
else:
# The screen is NOT compatible with a colored design
# switch to B&W text styles
# ex: export TERM=xterm-mono
self.no_color = -1
self.default_color = -1
self.nice_color = A_BOLD
self.cpu_time_color = A_BOLD
self.ifCAREFUL_color = A_BOLD
self.ifWARNING_color = curses.A_UNDERLINE
self.ifCRITICAL_color = curses.A_REVERSE
self.default_color2 = -1
self.ifCAREFUL_color2 = A_BOLD
self.ifWARNING_color2 = curses.A_UNDERLINE
self.ifCRITICAL_color2 = curses.A_REVERSE
self.ifINFO_color = A_BOLD
self.filter_color = A_BOLD
self.selected_color = A_BOLD
self.separator = -1
# Define the colors list (hash table) for stats
self.colors_list = {
'DEFAULT': self.no_color,
'UNDERLINE': curses.A_UNDERLINE,
'BOLD': A_BOLD,
'SORT': curses.A_UNDERLINE | A_BOLD,
'OK': self.default_color2,
'MAX': self.default_color2 | A_BOLD,
'FILTER': self.filter_color,
'TITLE': self.title_color,
'PROCESS': self.default_color2,
'PROCESS_SELECTED': self.default_color2 | curses.A_UNDERLINE,
'STATUS': self.default_color2,
'NICE': self.nice_color,
'CPU_TIME': self.cpu_time_color,
'CAREFUL': self.ifCAREFUL_color2,
'WARNING': self.ifWARNING_color2,
'CRITICAL': self.ifCRITICAL_color2,
'OK_LOG': self.default_color,
'CAREFUL_LOG': self.ifCAREFUL_color,
'WARNING_LOG': self.ifWARNING_color,
'CRITICAL_LOG': self.ifCRITICAL_color,
'PASSWORD': curses.A_PROTECT,
'SELECTED': self.selected_color,
'INFO': self.ifINFO_color,
'ERROR': self.selected_color,
'SEPARATOR': self.separator,
}
def set_cursor(self, value):
"""Configure the curse cursor appearance.
@ -354,8 +231,7 @@ class _GlancesCurses(object):
def get_key(self, window):
# TODO: Check issue #163
ret = window.getch()
return ret
return window.getch()
def __catch_key(self, return_to_browser=False):
# Catch the pressed key
@ -364,7 +240,7 @@ class _GlancesCurses(object):
return -1
# Actions (available in the global hotkey dict)...
logger.debug("Keypressed (code: {})".format(self.pressedkey))
logger.debug(f"Keypressed (code: {self.pressedkey})")
for hotkey in self._hotkeys:
if self.pressedkey == ord(hotkey) and 'switch' in self._hotkeys[hotkey]:
self._handle_switch(hotkey)
@ -491,10 +367,10 @@ class _GlancesCurses(object):
if return_to_browser:
logger.info("Stop Glances client and return to the browser")
else:
logger.info("Stop Glances (keypressed: {})".format(self.pressedkey))
logger.info(f"Stop Glances (keypressed: {self.pressedkey})")
def _handle_refresh(self):
pass
glances_processes.reset_internal_cache()
def loop_position(self):
"""Return the current sort in the loop"""
@ -569,13 +445,14 @@ class _GlancesCurses(object):
self.new_line()
self.line -= 1
line_width = self.term_window.getmaxyx()[1] - self.column
self.term_window.addnstr(
self.line,
self.column,
unicode_message('MEDIUM_LINE', self.args) * line_width,
line_width,
self.colors_list[color],
)
if self.line >= 0:
self.term_window.addnstr(
self.line,
self.column,
unicode_message('MEDIUM_LINE', self.args) * line_width,
line_width,
self.colors_list[color],
)
def __get_stat_display(self, stats, layer):
"""Return a dict of dict with all the stats display.
@ -712,7 +589,7 @@ class _GlancesCurses(object):
# Display graph generation popup
if self.args.generate_graph:
if 'graph' in stats.getExportsList():
self.display_popup('Generate graph in {}'.format(self.args.export_graph_path))
self.display_popup(f'Generate graph in {self.args.export_graph_path}')
else:
logger.warning('Graph export module is disabled. Run Glances with --export graph to enable it.')
self.args.generate_graph = False
@ -731,7 +608,7 @@ class _GlancesCurses(object):
:param process
:return: None
"""
logger.debug("Selected process to kill: {}".format(process))
logger.debug(f"Selected process to kill: {process}")
if 'childrens' in process:
pid_to_kill = process['childrens']
@ -751,9 +628,9 @@ class _GlancesCurses(object):
try:
ret_kill = glances_processes.kill(pid)
except Exception as e:
logger.error('Can not kill process {} ({})'.format(pid, e))
logger.error(f'Can not kill process {pid} ({e})')
else:
logger.info('Kill signal has been sent to process {} (return code: {})'.format(pid, ret_kill))
logger.info(f'Kill signal has been sent to process {pid} (return code: {ret_kill})')
def __display_header(self, stat_display):
"""Display the firsts lines (header) in the Curses interface.
@ -825,7 +702,7 @@ class _GlancesCurses(object):
max_width=quicklook_width, args=self.args
)
except AttributeError as e:
logger.debug("Quicklook plugin not available (%s)" % e)
logger.debug(f"Quicklook plugin not available ({e})")
else:
plugin_widths['quicklook'] = self.get_stats_display_width(stat_display["quicklook"])
stats_width = sum(itervalues(plugin_widths)) + 1
@ -920,12 +797,20 @@ class _GlancesCurses(object):
self.display_plugin(stat_display[p])
def display_popup(
self, message, size_x=None, size_y=None, duration=3, popup_type='info', input_size=30, input_value=None
self,
message,
size_x=None,
size_y=None,
duration=3,
popup_type='info',
input_size=30,
input_value=None,
is_password=False,
):
"""
Display a centered popup.
popup_type: ='info'
popup_type: ='info'
Just an information popup, no user interaction
Display a centered popup with the given message during duration seconds
If size_x and size_y: set the popup size
@ -977,7 +862,10 @@ class _GlancesCurses(object):
popup.refresh()
self.wait(duration * 1000)
return True
elif popup_type == 'input':
if popup_type == 'input':
logger.info(popup_type)
logger.info(is_password)
# Create a sub-window for the text field
sub_pop = popup.derwin(1, input_size, 2, 2 + len(m))
sub_pop.attron(self.colors_list['FILTER'])
@ -990,17 +878,22 @@ class _GlancesCurses(object):
# Create the textbox inside the sub-windows
self.set_cursor(2)
self.term_window.keypad(1)
if is_password:
textbox = getpass.getpass('')
self.set_cursor(0)
if textbox != '':
return textbox
return None
# No password
textbox = GlancesTextbox(sub_pop, insert_mode=True)
textbox.edit()
self.set_cursor(0)
# self.term_window.keypad(0)
if textbox.gather() != '':
logger.debug("User enters the following string: %s" % textbox.gather())
return textbox.gather()[:-1]
else:
logger.debug("User enters an empty string")
return None
elif popup_type == 'yesno':
return None
if popup_type == 'yesno':
# # Create a sub-window for the text field
sub_pop = popup.derwin(1, 2, len(sentence_list) + 1, len(m) + 2)
sub_pop.attron(self.colors_list['FILTER'])
@ -1018,6 +911,8 @@ class _GlancesCurses(object):
# self.term_window.keypad(0)
return textbox.gather()
return None
def display_plugin(self, plugin_stats, display_optional=True, display_additional=True, max_y=65535, add_space=0):
"""Display the plugin_stats on the screen.
@ -1112,6 +1007,7 @@ class _GlancesCurses(object):
# Have empty lines after the plugins
self.next_line += add_space
return None
def clear(self):
"""Erase the content of the screen.
@ -1194,8 +1090,7 @@ class _GlancesCurses(object):
if isexitkey and self.args.help_tag:
# Quit from help should return to main screen, not exit #1874
self.args.help_tag = not self.args.help_tag
isexitkey = False
return isexitkey
return False
if not isexitkey and pressedkey > -1:
# Redraw display
@ -1236,7 +1131,7 @@ class _GlancesCurses(object):
)
)
except Exception as e:
logger.debug('ERROR: Can not compute plugin width ({})'.format(e))
logger.debug(f'ERROR: Can not compute plugin width ({e})')
return 0
else:
return c
@ -1249,7 +1144,7 @@ class _GlancesCurses(object):
try:
c = [i['msg'] for i in curse_msg['msgdict']].count('\n')
except Exception as e:
logger.debug('ERROR: Can not compute plugin height ({})'.format(e))
logger.debug(f'ERROR: Can not compute plugin height ({e})')
return 0
else:
return c + 1
@ -1263,21 +1158,146 @@ class GlancesCursesClient(_GlancesCurses):
"""Class for the Glances curse client."""
class GlancesTextbox(Textbox, object):
class GlancesTextbox(Textbox):
def __init__(self, *args, **kwargs):
super(GlancesTextbox, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
def do_command(self, ch):
if ch == 10: # Enter
return 0
if ch == 127: # Back
return 8
return super(GlancesTextbox, self).do_command(ch)
return super().do_command(ch)
class GlancesTextboxYesNo(Textbox, object):
class GlancesTextboxYesNo(Textbox):
def __init__(self, *args, **kwargs):
super(GlancesTextboxYesNo, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
def do_command(self, ch):
return super(GlancesTextboxYesNo, self).do_command(ch)
return super().do_command(ch)
def build_colors_list(args):
"""Init the Curses color layout."""
# Set curses options
try:
if hasattr(curses, 'start_color'):
curses.start_color()
logger.debug(f'Curses interface compatible with {curses.COLORS} colors')
if hasattr(curses, 'use_default_colors'):
curses.use_default_colors()
except Exception as e:
logger.warning(f'Error initializing terminal color ({e})')
# Init colors
if args.disable_bold:
A_BOLD = 0
args.disable_bg = True
else:
A_BOLD = curses.A_BOLD
title_color = A_BOLD
if curses.has_colors():
# The screen is compatible with a colored design
# ex: export TERM=xterm-256color
# export TERM=xterm-color
curses.init_pair(1, -1, -1)
if args.disable_bg:
curses.init_pair(2, curses.COLOR_RED, -1)
curses.init_pair(3, curses.COLOR_GREEN, -1)
curses.init_pair(5, curses.COLOR_MAGENTA, -1)
else:
curses.init_pair(2, -1, curses.COLOR_RED)
curses.init_pair(3, 0, curses.COLOR_GREEN)
curses.init_pair(5, -1, curses.COLOR_MAGENTA)
curses.init_pair(4, curses.COLOR_BLUE, -1)
curses.init_pair(6, curses.COLOR_RED, -1)
curses.init_pair(7, curses.COLOR_GREEN, -1)
curses.init_pair(8, curses.COLOR_MAGENTA, -1)
# Colors text styles
no_color = curses.color_pair(1)
default_color = curses.color_pair(3) | A_BOLD
nice_color = curses.color_pair(8)
cpu_time_color = curses.color_pair(8)
ifCAREFUL_color = curses.color_pair(4) | A_BOLD
ifWARNING_color = curses.color_pair(5) | A_BOLD
ifCRITICAL_color = curses.color_pair(2) | A_BOLD
default_color2 = curses.color_pair(7)
ifCAREFUL_color2 = curses.color_pair(4)
ifWARNING_color2 = curses.color_pair(8) | A_BOLD
ifCRITICAL_color2 = curses.color_pair(6) | A_BOLD
ifINFO_color = curses.color_pair(4)
filter_color = A_BOLD
selected_color = A_BOLD
separator = curses.color_pair(1)
if curses.COLORS > 8:
# ex: export TERM=xterm-256color
colors_list = [curses.COLOR_CYAN, curses.COLOR_YELLOW]
for i in range(0, 3):
try:
curses.init_pair(i + 9, colors_list[i], -1)
except Exception:
curses.init_pair(i + 9, -1, -1)
filter_color = curses.color_pair(9) | A_BOLD
selected_color = curses.color_pair(10) | A_BOLD
# Define separator line style
try:
curses.init_color(11, 500, 500, 500)
curses.init_pair(11, curses.COLOR_BLACK, -1)
separator = curses.color_pair(11)
except Exception:
# Catch exception in TMUX
pass
else:
# The screen is NOT compatible with a colored design
# switch to B&W text styles
# ex: export TERM=xterm-mono
no_color = -1
default_color = -1
nice_color = A_BOLD
cpu_time_color = A_BOLD
ifCAREFUL_color = A_BOLD
ifWARNING_color = curses.A_UNDERLINE
ifCRITICAL_color = curses.A_REVERSE
default_color2 = -1
ifCAREFUL_color2 = A_BOLD
ifWARNING_color2 = curses.A_UNDERLINE
ifCRITICAL_color2 = curses.A_REVERSE
ifINFO_color = A_BOLD
filter_color = A_BOLD
selected_color = A_BOLD
separator = -1
# Define the colors list (hash table) for stats
return {
'DEFAULT': no_color,
'UNDERLINE': curses.A_UNDERLINE,
'BOLD': A_BOLD,
'SORT': curses.A_UNDERLINE | A_BOLD,
'OK': default_color2,
'MAX': default_color2 | A_BOLD,
'FILTER': filter_color,
'TITLE': title_color,
'PROCESS': default_color2,
'PROCESS_SELECTED': default_color2 | curses.A_UNDERLINE,
'STATUS': default_color2,
'NICE': nice_color,
'CPU_TIME': cpu_time_color,
'CAREFUL': ifCAREFUL_color2,
'WARNING': ifWARNING_color2,
'CRITICAL': ifCRITICAL_color2,
'OK_LOG': default_color,
'CAREFUL_LOG': ifCAREFUL_color,
'WARNING_LOG': ifWARNING_color,
'CRITICAL_LOG': ifCRITICAL_color,
'PASSWORD': curses.A_PROTECT,
'SELECTED': selected_color,
'INFO': ifINFO_color,
'ERROR': selected_color,
'SEPARATOR': separator,
}

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -9,11 +8,11 @@
"""Curses browser interface class ."""
import math
import curses
from glances.outputs.glances_curses import _GlancesCurses
import math
from glances.logger import logger
from glances.outputs.glances_curses import _GlancesCurses
from glances.timer import Timer
@ -22,14 +21,14 @@ class GlancesCursesBrowser(_GlancesCurses):
def __init__(self, args=None):
"""Init the father class."""
super(GlancesCursesBrowser, self).__init__(args=args)
super().__init__(args=args)
_colors_list = {
'UNKNOWN': self.no_color,
'SNMP': self.default_color2,
'ONLINE': self.default_color2,
'OFFLINE': self.ifCRITICAL_color2,
'PROTECTED': self.ifWARNING_color2,
'UNKNOWN': self.colors_list['DEFAULT'],
'SNMP': self.colors_list['OK'],
'ONLINE': self.colors_list['OK'],
'OFFLINE': self.colors_list['CRITICAL'],
'PROTECTED': self.colors_list['WARNING'],
}
self.colors_list.update(_colors_list)
@ -151,7 +150,7 @@ class GlancesCursesBrowser(_GlancesCurses):
self.pressedkey = self.get_key(self.term_window)
refresh = False
if self.pressedkey != -1:
logger.debug("Key pressed. Code=%s" % self.pressedkey)
logger.debug(f"Key pressed. Code={self.pressedkey}")
# Actions...
if self.pressedkey == ord('\x1b') or self.pressedkey == ord('q'):
@ -163,23 +162,23 @@ class GlancesCursesBrowser(_GlancesCurses):
elif self.pressedkey == 10:
# 'ENTER' > Run Glances on the selected server
self.active_server = self._current_page * self._page_max_lines + self.cursor_position
logger.debug("Server {}/{} selected".format(self.active_server, len(stats)))
logger.debug(f"Server {self.active_server}/{len(stats)} selected")
elif self.pressedkey == curses.KEY_UP or self.pressedkey == 65:
# 'UP' > Up in the server list
self.cursor_up(stats)
logger.debug("Server {}/{} selected".format(self.cursor + 1, len(stats)))
logger.debug(f"Server {self.cursor + 1}/{len(stats)} selected")
elif self.pressedkey == curses.KEY_DOWN or self.pressedkey == 66:
# 'DOWN' > Down in the server list
self.cursor_down(stats)
logger.debug("Server {}/{} selected".format(self.cursor + 1, len(stats)))
logger.debug(f"Server {self.cursor + 1}/{len(stats)} selected")
elif self.pressedkey == curses.KEY_PPAGE:
# 'Page UP' > Prev page in the server list
self.cursor_pageup(stats)
logger.debug("PageUP: Server ({}/{}) pages.".format(self._current_page + 1, self._page_max))
logger.debug(f"PageUP: Server ({self._current_page + 1}/{self._page_max}) pages.")
elif self.pressedkey == curses.KEY_NPAGE:
# 'Page Down' > Next page in the server list
self.cursor_pagedown(stats)
logger.debug("PageDown: Server {}/{} pages".format(self._current_page + 1, self._page_max))
logger.debug(f"PageDown: Server {self._current_page + 1}/{self._page_max} pages")
elif self.pressedkey == ord('1'):
self._stats_list = None
refresh = True
@ -211,7 +210,7 @@ class GlancesCursesBrowser(_GlancesCurses):
:param return_to_browser:
"""
# Flush display
logger.debug('Servers list: {}'.format(stats))
logger.debug(f'Servers list: {stats}')
self.flush(stats)
# Wait
@ -250,15 +249,19 @@ class GlancesCursesBrowser(_GlancesCurses):
screen_x = self.screen.getmaxyx()[1]
screen_y = self.screen.getmaxyx()[0]
stats_max = screen_y - 3
stats_len = len(stats)
self._page_max_lines = stats_max
self._page_max = int(math.ceil(stats_len / stats_max))
# Init position
x = 0
y = 0
self._page_max = int(math.ceil(len(stats) / stats_max))
# Display top header
# Display header
x, y = self.__display_header(stats, 0, 0, screen_x, screen_y)
# Display Glances server list
# ================================
return self.__display_server_list(stats, x, y, screen_x, screen_y)
def __display_header(self, stats, x, y, screen_x, screen_y):
stats_len = len(stats)
stats_max = screen_y - 3
if stats_len == 0:
if self.first_scan and not self.args.disable_autodiscover:
msg = 'Glances is scanning your network. Please wait...'
@ -268,38 +271,39 @@ class GlancesCursesBrowser(_GlancesCurses):
elif len(stats) == 1:
msg = 'One Glances server available'
else:
msg = '{} Glances servers available'.format(stats_len)
msg = f'{stats_len} Glances servers available'
if self.args.disable_autodiscover:
msg += ' (auto discover is disabled)'
if screen_y > 1:
self.term_window.addnstr(y, x, msg, screen_x - x, self.colors_list['TITLE'])
msg = '{}'.format(self._get_status_count(stats))
msg = f'{self._get_status_count(stats)}'
self.term_window.addnstr(y + 1, x, msg, screen_x - x)
if stats_len > stats_max and screen_y > 2:
msg = '{} servers displayed.({}/{}) {}'.format(
self.get_pagelines(stats), self._current_page + 1, self._page_max, self._get_status_count(stats)
)
page_lines = self.get_pagelines(stats)
status_count = self._get_status_count(stats)
msg = f'{page_lines} servers displayed.({self._current_page + 1}/{self._page_max}) {status_count}'
self.term_window.addnstr(y + 1, x, msg, screen_x - x)
if stats_len == 0:
return x, y
def __display_server_list(self, stats, x, y, screen_x, screen_y):
if len(stats) == 0:
# No server to display
return False
# Display the Glances server list
# ================================
stats_max = screen_y - 3
# Table of table
# Item description: [stats_id, column name, column size]
column_def = [
['name', 'Name', 16],
['alias', None, None],
['load_min5', 'LOAD', 6],
['cpu_percent', 'CPU%', 5],
['mem_percent', 'MEM%', 5],
['status', 'STATUS', 9],
['ip', 'IP', 15],
# ['port', 'PORT', 5],
['hr_name', 'OS', 16],
]
y = 2
@ -325,24 +329,10 @@ class GlancesCursesBrowser(_GlancesCurses):
# Display table
line = 0
for v in current_page:
for server_stat in current_page:
# Limit the number of displayed server (see issue #1256)
if line >= stats_max:
continue
# Get server stats
server_stat = {}
for c in column_def:
try:
server_stat[c[0]] = v[c[0]]
except KeyError as e:
logger.debug("Cannot grab stats {} from server (KeyError: {})".format(c[0], e))
server_stat[c[0]] = '?'
# Display alias instead of name
try:
if c[0] == 'alias' and v[c[0]] is not None:
server_stat['name'] = v[c[0]]
except KeyError:
pass
# Display line for server stats
cpt = 0
@ -356,9 +346,20 @@ class GlancesCursesBrowser(_GlancesCurses):
# Display the line
xc += 2
for c in column_def:
if xc < screen_x and y < screen_y and c[1] is not None:
if xc < screen_x and y < screen_y:
# Display server stats
self.term_window.addnstr(y, xc, format(server_stat[c[0]]), c[2], self.colors_list[v['status']])
value = format(server_stat.get(c[0], '?'))
if c[0] == 'name' and 'alias' in server_stat:
value = server_stat['alias']
decoration = self.colors_list.get(
server_stat[c[0] + '_decoration'].replace('_LOG', '')
if c[0] + '_decoration' in server_stat
else self.colors_list[server_stat['status']],
self.colors_list['DEFAULT'],
)
if c[0] == 'status':
decoration = self.colors_list[server_stat['status']]
self.term_window.addnstr(y, xc, value, c[2], decoration)
xc += c[2] + self.space_between_column
cpt += 1
# Next line, next server...

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -12,27 +11,30 @@
import os
import sys
import tempfile
from io import open
import webbrowser
from urllib.parse import urljoin
# Replace typing_extensions by typing when Python 3.8 support will be dropped
from typing import Annotated
try:
from typing import Annotated
except ImportError:
# Only for Python 3.8
# To be removed when Python 3.8 support will be dropped
from typing_extensions import Annotated
from glances import __version__, __apiversion__
from glances import __apiversion__, __version__
from glances.logger import logger
from glances.password import GlancesPassword
from glances.timer import Timer
from glances.logger import logger
# FastAPI import
try:
from fastapi import FastAPI, Depends, HTTPException, status, APIRouter, Request
from fastapi.security import HTTPBasic, HTTPBasicCredentials
from fastapi import APIRouter, Depends, FastAPI, HTTPException, Request, status
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.gzip import GZipMiddleware
from fastapi.responses import HTMLResponse, ORJSONResponse
from fastapi.templating import Jinja2Templates
from fastapi.security import HTTPBasic, HTTPBasicCredentials
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
except ImportError:
logger.critical('FastAPI import error. Glances cannot start in web server mode.')
sys.exit(2)
@ -42,6 +44,7 @@ try:
except ImportError:
logger.critical('Uvicorn import error. Glances cannot start in web server mode.')
sys.exit(2)
import builtins
import contextlib
import threading
import time
@ -73,7 +76,7 @@ class GlancesUvicornServer(uvicorn.Server):
thread.join()
class GlancesRestfulApi(object):
class GlancesRestfulApi:
"""This class manages the Restful API server."""
API_VERSION = __apiversion__
@ -98,7 +101,7 @@ class GlancesRestfulApi(object):
self.load_config(config)
# Set the bind URL
self.bind_url = urljoin('http://{}:{}/'.format(self.args.bind_address, self.args.port), self.url_prefix)
self.bind_url = urljoin(f'http://{self.args.bind_address}:{self.args.port}/', self.url_prefix)
# FastAPI Init
if self.args.password:
@ -127,11 +130,11 @@ class GlancesRestfulApi(object):
# https://fastapi.tiangolo.com/tutorial/cors/
self._app.add_middleware(
CORSMiddleware,
# allow_origins=["*"],
allow_origins=[self.bind_url],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
# Related to https://github.com/nicolargo/glances/issues/2812
allow_origins=config.get_list_value('outputs', 'cors_origins', default=["*"]),
allow_credentials=config.get_bool_value('outputs', 'cors_credentials', default=True),
allow_methods=config.get_list_value('outputs', 'cors_methods', default=["*"]),
allow_headers=config.get_list_value('outputs', 'cors_headers', default=["*"]),
)
# FastAPI Enable GZIP compression
@ -147,9 +150,9 @@ class GlancesRestfulApi(object):
self.url_prefix = '/'
if config is not None and config.has_section('outputs'):
n = config.get_value('outputs', 'max_processes_display', default=None)
logger.debug('Number of processes to display in the WebUI: {}'.format(n))
logger.debug(f'Number of processes to display in the WebUI: {n}')
self.url_prefix = config.get_value('outputs', 'url_prefix', default='/')
logger.debug('URL prefix: {}'.format(self.url_prefix))
logger.debug(f'URL prefix: {self.url_prefix}')
def __update__(self):
# Never update more than 1 time per cached_time
@ -180,92 +183,93 @@ class GlancesRestfulApi(object):
# REST API
router.add_api_route(
'/api/%s/status' % self.API_VERSION,
f'/api/{self.API_VERSION}/status',
status_code=status.HTTP_200_OK,
methods=['HEAD', 'GET'],
response_class=ORJSONResponse,
endpoint=self._api_status,
)
router.add_api_route(
'/api/%s/config' % self.API_VERSION, response_class=ORJSONResponse, endpoint=self._api_config
f'/api/{self.API_VERSION}/config', response_class=ORJSONResponse, endpoint=self._api_config
)
router.add_api_route(
'/api/%s/config/{section}' % self.API_VERSION,
f'/api/{self.API_VERSION}/config/{{section}}',
response_class=ORJSONResponse,
endpoint=self._api_config_section,
)
router.add_api_route(
'/api/%s/config/{section}/{item}' % self.API_VERSION,
f'/api/{self.API_VERSION}/config/{{section}}/{{item}}',
response_class=ORJSONResponse,
endpoint=self._api_config_section_item,
)
router.add_api_route('/api/%s/args' % self.API_VERSION, response_class=ORJSONResponse, endpoint=self._api_args)
router.add_api_route(f'/api/{self.API_VERSION}/args', response_class=ORJSONResponse, endpoint=self._api_args)
router.add_api_route(
'/api/%s/args/{item}' % self.API_VERSION, response_class=ORJSONResponse, endpoint=self._api_args_item
f'/api/{self.API_VERSION}/args/{{item}}', response_class=ORJSONResponse, endpoint=self._api_args_item
)
router.add_api_route(
'/api/%s/pluginslist' % self.API_VERSION, response_class=ORJSONResponse, endpoint=self._api_plugins
f'/api/{self.API_VERSION}/pluginslist', response_class=ORJSONResponse, endpoint=self._api_plugins
)
router.add_api_route('/api/%s/all' % self.API_VERSION, response_class=ORJSONResponse, endpoint=self._api_all)
router.add_api_route(f'/api/{self.API_VERSION}/all', response_class=ORJSONResponse, endpoint=self._api_all)
router.add_api_route(
'/api/%s/all/limits' % self.API_VERSION, response_class=ORJSONResponse, endpoint=self._api_all_limits
f'/api/{self.API_VERSION}/all/limits', response_class=ORJSONResponse, endpoint=self._api_all_limits
)
router.add_api_route(
'/api/%s/all/views' % self.API_VERSION, response_class=ORJSONResponse, endpoint=self._api_all_views
f'/api/{self.API_VERSION}/all/views', response_class=ORJSONResponse, endpoint=self._api_all_views
)
router.add_api_route('/api/%s/help' % self.API_VERSION, response_class=ORJSONResponse, endpoint=self._api_help)
router.add_api_route('/api/%s/{plugin}' % self.API_VERSION, response_class=ORJSONResponse, endpoint=self._api)
router.add_api_route(f'/api/{self.API_VERSION}/help', response_class=ORJSONResponse, endpoint=self._api_help)
router.add_api_route(f'/api/{self.API_VERSION}/{{plugin}}', response_class=ORJSONResponse, endpoint=self._api)
router.add_api_route(
'/api/%s/{plugin}/history' % self.API_VERSION, response_class=ORJSONResponse, endpoint=self._api_history
f'/api/{self.API_VERSION}/{{plugin}}/history', response_class=ORJSONResponse, endpoint=self._api_history
)
router.add_api_route(
'/api/%s/{plugin}/history/{nb}' % self.API_VERSION,
f'/api/{self.API_VERSION}/{{plugin}}/history/{{nb}}',
response_class=ORJSONResponse,
endpoint=self._api_history,
)
router.add_api_route(
'/api/%s/{plugin}/top/{nb}' % self.API_VERSION, response_class=ORJSONResponse, endpoint=self._api_top
f'/api/{self.API_VERSION}/{{plugin}}/top/{{nb}}', response_class=ORJSONResponse, endpoint=self._api_top
)
router.add_api_route(
'/api/%s/{plugin}/limits' % self.API_VERSION, response_class=ORJSONResponse, endpoint=self._api_limits
f'/api/{self.API_VERSION}/{{plugin}}/limits', response_class=ORJSONResponse, endpoint=self._api_limits
)
router.add_api_route(
'/api/%s/{plugin}/views' % self.API_VERSION, response_class=ORJSONResponse, endpoint=self._api_views
f'/api/{self.API_VERSION}/{{plugin}}/views', response_class=ORJSONResponse, endpoint=self._api_views
)
router.add_api_route(
'/api/%s/{plugin}/{item}' % self.API_VERSION, response_class=ORJSONResponse, endpoint=self._api_item
f'/api/{self.API_VERSION}/{{plugin}}/{{item}}', response_class=ORJSONResponse, endpoint=self._api_item
)
router.add_api_route(
'/api/%s/{plugin}/{item}/history' % self.API_VERSION,
f'/api/{self.API_VERSION}/{{plugin}}/{{item}}/history',
response_class=ORJSONResponse,
endpoint=self._api_item_history,
)
router.add_api_route(
'/api/%s/{plugin}/{item}/history/{nb}' % self.API_VERSION,
f'/api/{self.API_VERSION}/{{plugin}}/{{item}}/history/{{nb}}',
response_class=ORJSONResponse,
endpoint=self._api_item_history,
)
router.add_api_route(
'/api/%s/{plugin}/{item}/description' % self.API_VERSION,
f'/api/{self.API_VERSION}/{{plugin}}/{{item}}/description',
response_class=ORJSONResponse,
endpoint=self._api_item_description,
)
router.add_api_route(
'/api/%s/{plugin}/{item}/unit' % self.API_VERSION,
f'/api/{self.API_VERSION}/{{plugin}}/{{item}}/unit',
response_class=ORJSONResponse,
endpoint=self._api_item_unit,
)
router.add_api_route(
'/api/%s/{plugin}/{item}/{value}' % self.API_VERSION,
f'/api/{self.API_VERSION}/{{plugin}}/{{item}}/{{value:path}}',
response_class=ORJSONResponse,
endpoint=self._api_value,
)
# Restful API
bindmsg = 'Glances RESTful API Server started on {}api/{}'.format(self.bind_url, self.API_VERSION)
bindmsg = f'Glances RESTful API Server started on {self.bind_url}api/{self.API_VERSION}'
logger.info(bindmsg)
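One detail worth spelling out in the f-string conversion of these routes: doubled braces render as literal braces, which is exactly what FastAPI needs for path parameters. A standalone sketch:

```python
API_VERSION = 4  # illustrative value; the real value comes from glances.__apiversion__
route = f'/api/{API_VERSION}/{{plugin}}/history/{{nb}}'
print(route)  # /api/4/{plugin}/history/{nb} -- '{plugin}' and '{nb}' stay literal for FastAPI
```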
# WEB UI
@ -276,9 +280,9 @@ class GlancesRestfulApi(object):
# Statics files
self._app.mount("/static", StaticFiles(directory=self.STATIC_PATH), name="static")
logger.info("Get WebUI in {}".format(self.STATIC_PATH))
logger.info(f"Get WebUI in {self.STATIC_PATH}")
bindmsg = 'Glances Web User Interface started on {}'.format(self.bind_url)
bindmsg = f'Glances Web User Interface started on {self.bind_url}'
else:
bindmsg = 'The WebUI is disabled (--disable-webui)'
@ -313,7 +317,7 @@ class GlancesRestfulApi(object):
try:
self.uvicorn_server = GlancesUvicornServer(config=uvicorn_config)
except Exception as e:
logger.critical('Error: Can not run Glances Web server ({})'.format(e))
logger.critical(f'Error: Can not run Glances Web server ({e})')
self.uvicorn_server = None
else:
with self.uvicorn_server.run_in_thread():
@ -365,7 +369,7 @@ class GlancesRestfulApi(object):
try:
plist = self.stats.get_plugin("help").get_view_data()
except Exception as e:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get help view data (%s)" % str(e))
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get help view data ({str(e)})")
return ORJSONResponse(plist)
@ -401,7 +405,7 @@ class GlancesRestfulApi(object):
try:
plist = self.plugins_list
except Exception as e:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get plugin list (%s)" % str(e))
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get plugin list ({str(e)})")
return ORJSONResponse(plist)
@ -416,10 +420,10 @@ class GlancesRestfulApi(object):
if self.args.debug:
fname = os.path.join(tempfile.gettempdir(), 'glances-debug.json')
try:
with open(fname) as f:
with builtins.open(fname) as f:
return f.read()
except IOError:
logger.debug("Debug file (%s) not found" % fname)
except OSError:
logger.debug(f"Debug file ({fname}) not found")
# Update the stat
self.__update__()
@ -428,7 +432,7 @@ class GlancesRestfulApi(object):
# Get the RAW value of the stat ID
statval = self.stats.getAllAsDict()
except Exception as e:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get stats (%s)" % str(e))
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get stats ({str(e)})")
return ORJSONResponse(statval)
@ -444,7 +448,7 @@ class GlancesRestfulApi(object):
# Get the RAW value of the stat limits
limits = self.stats.getAllLimitsAsDict()
except Exception as e:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get limits (%s)" % str(e))
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get limits ({str(e)})")
return ORJSONResponse(limits)
@ -460,7 +464,7 @@ class GlancesRestfulApi(object):
# Get the RAW value of the stat view
limits = self.stats.getAllViewsAsDict()
except Exception as e:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get views (%s)" % str(e))
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get views ({str(e)})")
return ORJSONResponse(limits)
@ -475,7 +479,7 @@ class GlancesRestfulApi(object):
if plugin not in self.plugins_list:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Unknown plugin %s (available plugins: %s)" % (plugin, self.plugins_list),
detail=f"Unknown plugin {plugin} (available plugins: {self.plugins_list})",
)
# Update the stat
@ -485,9 +489,7 @@ class GlancesRestfulApi(object):
# Get the RAW value of the stat ID
statval = self.stats.get_plugin(plugin).get_raw()
except Exception as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get plugin %s (%s)" % (plugin, str(e))
)
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get plugin {plugin} ({str(e)})")
return ORJSONResponse(statval)
@ -504,7 +506,7 @@ class GlancesRestfulApi(object):
if plugin not in self.plugins_list:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Unknown plugin %s (available plugins: %s)" % (plugin, self.plugins_list),
detail=f"Unknown plugin {plugin} (available plugins: {self.plugins_list})",
)
# Update the stat
@ -514,9 +516,7 @@ class GlancesRestfulApi(object):
# Get the RAW value of the stat ID
statval = self.stats.get_plugin(plugin).get_raw()
except Exception as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get plugin %s (%s)" % (plugin, str(e))
)
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get plugin {plugin} ({str(e)})")
print(statval)
@ -537,7 +537,7 @@ class GlancesRestfulApi(object):
if plugin not in self.plugins_list:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Unknown plugin %s (available plugins: %s)" % (plugin, self.plugins_list),
detail=f"Unknown plugin {plugin} (available plugins: {self.plugins_list})",
)
# Update the stat
@ -548,7 +548,7 @@ class GlancesRestfulApi(object):
statval = self.stats.get_plugin(plugin).get_raw_history(nb=int(nb))
except Exception as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get plugin history %s (%s)" % (plugin, str(e))
status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get plugin history {plugin} ({str(e)})"
)
return statval
@ -564,7 +564,7 @@ class GlancesRestfulApi(object):
if plugin not in self.plugins_list:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Unknown plugin %s (available plugins: %s)" % (plugin, self.plugins_list),
detail=f"Unknown plugin {plugin} (available plugins: {self.plugins_list})",
)
try:
@ -572,7 +572,7 @@ class GlancesRestfulApi(object):
ret = self.stats.get_plugin(plugin).limits
except Exception as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get limits for plugin %s (%s)" % (plugin, str(e))
status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get limits for plugin {plugin} ({str(e)})"
)
return ORJSONResponse(ret)
@ -588,7 +588,7 @@ class GlancesRestfulApi(object):
if plugin not in self.plugins_list:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Unknown plugin %s (available plugins: %s)" % (plugin, self.plugins_list),
detail=f"Unknown plugin {plugin} (available plugins: {self.plugins_list})",
)
try:
@ -596,7 +596,7 @@ class GlancesRestfulApi(object):
ret = self.stats.get_plugin(plugin).get_views()
except Exception as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get views for plugin %s (%s)" % (plugin, str(e))
status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get views for plugin {plugin} ({str(e)})"
)
return ORJSONResponse(ret)
@ -612,7 +612,7 @@ class GlancesRestfulApi(object):
if plugin not in self.plugins_list:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Unknown plugin %s (available plugins: %s)" % (plugin, self.plugins_list),
detail=f"Unknown plugin {plugin} (available plugins: {self.plugins_list})",
)
# Update the stat
@ -624,7 +624,7 @@ class GlancesRestfulApi(object):
except Exception as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Cannot get item %s in plugin %s (%s)" % (item, plugin, str(e)),
detail=f"Cannot get item {item} in plugin {plugin} ({str(e)})",
)
return ORJSONResponse(ret)
@ -641,7 +641,7 @@ class GlancesRestfulApi(object):
if plugin not in self.plugins_list:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Unknown plugin %s (available plugins: %s)" % (plugin, self.plugins_list),
detail=f"Unknown plugin {plugin} (available plugins: {self.plugins_list})",
)
# Update the stat
@ -652,7 +652,7 @@ class GlancesRestfulApi(object):
ret = self.stats.get_plugin(plugin).get_raw_history(item, nb=nb)
except Exception as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get history for plugin %s (%s)" % (plugin, str(e))
status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get history for plugin {plugin} ({str(e)})"
)
else:
return ORJSONResponse(ret)
@ -668,7 +668,7 @@ class GlancesRestfulApi(object):
if plugin not in self.plugins_list:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Unknown plugin %s (available plugins: %s)" % (plugin, self.plugins_list),
detail=f"Unknown plugin {plugin} (available plugins: {self.plugins_list})",
)
try:
@ -677,7 +677,7 @@ class GlancesRestfulApi(object):
except Exception as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Cannot get %s description for plugin %s (%s)" % (item, plugin, str(e)),
detail=f"Cannot get {item} description for plugin {plugin} ({str(e)})",
)
else:
return ORJSONResponse(ret)
@ -693,7 +693,7 @@ class GlancesRestfulApi(object):
if plugin not in self.plugins_list:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Unknown plugin %s (available plugins: %s)" % (plugin, self.plugins_list),
detail=f"Unknown plugin {plugin} (available plugins: {self.plugins_list})",
)
try:
@ -702,7 +702,7 @@ class GlancesRestfulApi(object):
except Exception as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Cannot get %s unit for plugin %s (%s)" % (item, plugin, str(e)),
detail=f"Cannot get {item} unit for plugin {plugin} ({str(e)})",
)
else:
return ORJSONResponse(ret)
@ -718,7 +718,7 @@ class GlancesRestfulApi(object):
if plugin not in self.plugins_list:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Unknown plugin %s (available plugins: %s)" % (plugin, self.plugins_list),
detail=f"Unknown plugin {plugin} (available plugins: {self.plugins_list})",
)
# Update the stat
@ -730,7 +730,7 @@ class GlancesRestfulApi(object):
except Exception as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Cannot get %s = %s for plugin %s (%s)" % (item, value, plugin, str(e)),
detail=f"Cannot get {item} = {value} for plugin {plugin} ({str(e)})",
)
else:
return ORJSONResponse(ret)
@ -746,7 +746,7 @@ class GlancesRestfulApi(object):
# Get the RAW value of the config' dict
args_json = self.config.as_dict()
except Exception as e:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get config (%s)" % str(e))
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get config ({str(e)})")
else:
return ORJSONResponse(args_json)
@ -760,16 +760,14 @@ class GlancesRestfulApi(object):
"""
config_dict = self.config.as_dict()
if section not in config_dict:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, detail="Unknown configuration item %s" % section
)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"Unknown configuration item {section}")
try:
# Get the RAW value of the config' dict
ret_section = config_dict[section]
except Exception as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get config section %s (%s)" % (section, str(e))
status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get config section {section} ({str(e)})"
)
return ORJSONResponse(ret_section)
@ -784,16 +782,14 @@ class GlancesRestfulApi(object):
"""
config_dict = self.config.as_dict()
if section not in config_dict:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, detail="Unknown configuration item %s" % section
)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"Unknown configuration item {section}")
try:
# Get the RAW value of the config' dict section
ret_section = config_dict[section]
except Exception as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get config section %s (%s)" % (section, str(e))
status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get config section {section} ({str(e)})"
)
try:
@ -802,7 +798,7 @@ class GlancesRestfulApi(object):
except Exception as e:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Cannot get item %s in config section %s (%s)" % (item, section, str(e)),
detail=f"Cannot get item {item} in config section {section} ({str(e)})",
)
return ORJSONResponse(ret_item)
@ -820,7 +816,7 @@ class GlancesRestfulApi(object):
# Source: https://docs.python.org/%s/library/functions.html#vars
args_json = vars(self.args)
except Exception as e:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get args (%s)" % str(e))
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get args ({str(e)})")
return ORJSONResponse(args_json)
@ -833,7 +829,7 @@ class GlancesRestfulApi(object):
HTTP/404 if others error
"""
if item not in self.args:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Unknown argument item %s" % item)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"Unknown argument item {item}")
try:
# Get the RAW value of the args' dict
@ -841,6 +837,6 @@ class GlancesRestfulApi(object):
# Source: https://docs.python.org/%s/library/functions.html#vars
args_json = vars(self.args)[item]
except Exception as e:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Cannot get args item (%s)" % str(e))
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Cannot get args item ({str(e)})")
return ORJSONResponse(args_json)
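
Note: in the route registrations near the top of this file, the switch from %-formatting to f-strings doubles the braces so that FastAPI still receives a literal {plugin} path parameter. A minimal standalone sketch, assuming an API version string of '4' (the value is illustrative, not taken from this diff):

# Minimal sketch: both forms build the same literal route, because doubled
# braces inside an f-string escape to single braces.
API_VERSION = '4'  # assumed value for illustration
old_style = '/api/%s/{plugin}/history' % API_VERSION
new_style = f'/api/{API_VERSION}/{{plugin}}/history'
assert old_style == new_style == '/api/4/{plugin}/history'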


@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -9,28 +8,27 @@
"""Manage sparklines for Glances output."""
from __future__ import unicode_literals
from __future__ import division
import sys
from glances.logger import logger
from glances.globals import nativestr
from glances.logger import logger
sparklines_module = True
try:
from sparklines import sparklines
except ImportError as e:
logger.warning("Sparklines module not found ({})".format(e))
logger.warning(f"Sparklines module not found ({e})")
sparklines_module = False
try:
'┌┬┐╔╦╗╒╤╕╓╥╖│║─═├┼┤╠╬╣╞╪╡╟╫╢└┴┘╚╩╝╘╧╛╙╨╜'.encode(sys.stdout.encoding)
except (UnicodeEncodeError, TypeError) as e:
logger.warning("UTF-8 is mandatory for sparklines ({})".format(e))
logger.warning(f"UTF-8 is mandatory for sparklines ({e})")
sparklines_module = False
class Sparkline(object):
class Sparkline:
"""Manage sparklines (see https://pypi.org/project/sparklines/)."""
def __init__(self, size, pre_char='[', post_char=']', unit_char='%', display_value=True):
@ -58,6 +56,7 @@ class Sparkline(object):
return self.__size
if self.__display_value:
return self.__size - 6
return None
@property
def percents(self):
@ -81,7 +80,7 @@ class Sparkline(object):
if self.__display_value:
percents_without_none = [x for x in self.percents if x is not None]
if len(percents_without_none) > 0:
ret = '{}{:5.1f}{}'.format(ret, percents_without_none[-1], self.__unit_char)
ret = f'{ret}{percents_without_none[-1]:5.1f}{self.__unit_char}'
ret = nativestr(ret)
if overwrite and len(overwrite) < len(ret) - 6:
ret = overwrite + ret[len(overwrite) :]
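
Note: the optional dependency guarded above is the sparklines package (https://pypi.org/project/sparklines/). A hedged usage sketch, with made-up values, of what the library returns when it is available:

# Illustrative only: sparklines() takes a sequence of numbers and returns a
# list of strings built from Unicode block characters.
from sparklines import sparklines

for line in sparklines([10, 30, 50, 80, 100]):
    print(line)  # something like '▁▃▄▆█'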


@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -11,11 +10,11 @@
import time
from glances.logger import logger
from glances.globals import printandflush
from glances.logger import logger
class GlancesStdout(object):
class GlancesStdout:
"""This class manages the Stdout display."""
def __init__(self, config=None, args=None):
@ -65,9 +64,9 @@ class GlancesStdout(object):
# With attribute
if isinstance(stat, dict):
try:
printandflush("{}.{}: {}".format(plugin, attribute, stat[attribute]))
printandflush(f"{plugin}.{attribute}: {stat[attribute]}")
except KeyError as err:
logger.error("Can not display stat {}.{} ({})".format(plugin, attribute, err))
logger.error(f"Can not display stat {plugin}.{attribute} ({err})")
elif isinstance(stat, list):
for i in stat:
if key is None:
@ -77,12 +76,12 @@ class GlancesStdout(object):
else:
continue
try:
printandflush("{}.{}.{}: {}".format(plugin, i_key, attribute, i[attribute]))
printandflush(f"{plugin}.{i_key}.{attribute}: {i[attribute]}")
except KeyError as err:
logger.error("Can not display stat {}.{} ({})".format(plugin, attribute, err))
logger.error(f"Can not display stat {plugin}.{attribute} ({err})")
else:
# Without attribute
printandflush("{}: {}".format(plugin, stat))
printandflush(f"{plugin}: {stat}")
# Wait until next refresh
if duration > 0:


@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -9,24 +8,23 @@
"""Fields description interface class."""
from pprint import pformat
import json
import time
from pprint import pformat
from glances import __apiversion__
from glances.logger import logger
from glances.globals import iteritems
from glances.logger import logger
API_URL = f"http://localhost:61208/api/{__apiversion__}"
API_URL = "http://localhost:61208/api/{api_version}".format(api_version=__apiversion__)
APIDOC_HEADER = """\
APIDOC_HEADER = f"""\
.. _api:
API (Restfull/JSON) documentation
=================================
This documentation describes the Glances API version {api_version} (Restfull/JSON) interface.
This documentation describes the Glances API version {__apiversion__} (Restfull/JSON) interface.
For Glances version 3, please have a look on:
``https://github.com/nicolargo/glances/blob/support/glancesv3/docs/api.rst``
@ -45,7 +43,7 @@ It is also ran automatically when Glances is started in Web server mode (-w).
API URL
-------
The default root API URL is ``http://localhost:61208/api/{api_version}``.
The default root API URL is ``http://localhost:61208/api/{__apiversion__}``.
The bind address and port could be changed using the ``--bind`` and ``--port`` command line options.
@ -60,7 +58,7 @@ For example:
[outputs]
url_prefix = /glances/
will change the root API URL to ``http://localhost:61208/glances/api/{api_version}`` and the Web UI URL to
will change the root API URL to ``http://localhost:61208/glances/api/{__apiversion__}`` and the Web UI URL to
``http://localhost:61208/glances/``
API documentation URL
@ -75,9 +73,7 @@ WebUI refresh
It is possible to change the Web UI refresh rate (default is 2 seconds) using the following option in the URL:
``http://localhost:61208/glances/?refresh=5``
""".format(
api_version=__apiversion__
)
"""
def indent_stat(stat, indent=' '):
@ -85,8 +81,7 @@ def indent_stat(stat, indent=' '):
if isinstance(stat, list) and len(stat) > 1 and isinstance(stat[0], dict):
# Only display two first items
return indent + pformat(stat[0:2]).replace('\n', '\n' + indent).replace("'", '"')
else:
return indent + pformat(stat).replace('\n', '\n' + indent).replace("'", '"')
return indent + pformat(stat).replace('\n', '\n' + indent).replace("'", '"')
def print_api_status():
@ -99,7 +94,7 @@ def print_api_status():
print('')
print('Get the Rest API status::')
print('')
print(' # curl -I {}/status'.format(API_URL))
print(f' # curl -I {API_URL}/status')
print(indent_stat('HTTP/1.0 200 OK'))
print('')
@ -111,20 +106,20 @@ def print_plugins_list(stat):
print('')
print('Get the plugins list::')
print('')
print(' # curl {}/pluginslist'.format(API_URL))
print(f' # curl {API_URL}/pluginslist')
print(indent_stat(stat))
print('')
def print_plugin_stats(plugin, stat):
sub_title = 'GET {}'.format(plugin)
sub_title = f'GET {plugin}'
print(sub_title)
print('-' * len(sub_title))
print('')
print('Get plugin stats::')
print('')
print(' # curl {}/{}'.format(API_URL, plugin))
print(f' # curl {API_URL}/{plugin}')
print(indent_stat(json.loads(stat.get_stats())))
print('')
@ -183,7 +178,7 @@ def print_plugin_description(plugin, stat):
print('')
else:
logger.error('No fields_description variable defined for plugin {}'.format(plugin))
logger.error(f'No fields_description variable defined for plugin {plugin}')
def print_plugin_item_value(plugin, stat, stat_export):
@ -205,13 +200,13 @@ def print_plugin_item_value(plugin, stat, stat_export):
value = stat_item[item]
print('Get a specific field::')
print('')
print(' # curl {}/{}/{}'.format(API_URL, plugin, item))
print(f' # curl {API_URL}/{plugin}/{item}')
print(indent_stat(stat_item))
print('')
if item and value and stat.get_stats_value(item, value):
print('Get a specific item when field matches the given value::')
print('')
print(' # curl {}/{}/{}/{}'.format(API_URL, plugin, item, value))
print(f' # curl {API_URL}/{plugin}/{item}/{value}')
print(indent_stat(json.loads(stat.get_stats_value(item, value))))
print('')
@ -223,7 +218,7 @@ def print_all():
print('')
print('Get all Glances stats::')
print('')
print(' # curl {}/all'.format(API_URL))
print(f' # curl {API_URL}/all')
print(' Return a very big dictionary (avoid using this request, performances will be poor)...')
print('')
@ -237,7 +232,7 @@ def print_top(stats):
print('')
print('Get top 2 processes of the processlist plugin::')
print('')
print(' # curl {}/processlist/top/2'.format(API_URL))
print(f' # curl {API_URL}/processlist/top/2')
print(indent_stat(stats.get_plugin('processlist').get_export()[:2]))
print('')
print('Note: Only work for plugin with a list of items')
@ -250,7 +245,7 @@ def print_fields_info(stats):
print('-' * len(sub_title))
print('Get item description (human readable) for a specific plugin/item::')
print('')
print(' # curl {}/diskio/read_bytes/description'.format(API_URL))
print(f' # curl {API_URL}/diskio/read_bytes/description')
print(indent_stat(stats.get_plugin('diskio').get_item_info('read_bytes', 'description')))
print('')
print('Note: the description is defined in the fields_description variable of the plugin.')
@ -260,7 +255,7 @@ def print_fields_info(stats):
print('-' * len(sub_title))
print('Get item unit for a specific plugin/item::')
print('')
print(' # curl {}/diskio/read_bytes/unit'.format(API_URL))
print(f' # curl {API_URL}/diskio/read_bytes/unit')
print(indent_stat(stats.get_plugin('diskio').get_item_info('read_bytes', 'unit')))
print('')
print('Note: the description is defined in the fields_description variable of the plugin.')
@ -278,22 +273,22 @@ def print_history(stats):
print('')
print('History of a plugin::')
print('')
print(' # curl {}/cpu/history'.format(API_URL))
print(f' # curl {API_URL}/cpu/history')
print(indent_stat(json.loads(stats.get_plugin('cpu').get_stats_history(nb=3))))
print('')
print('Limit history to last 2 values::')
print('')
print(' # curl {}/cpu/history/2'.format(API_URL))
print(f' # curl {API_URL}/cpu/history/2')
print(indent_stat(json.loads(stats.get_plugin('cpu').get_stats_history(nb=2))))
print('')
print('History for a specific field::')
print('')
print(' # curl {}/cpu/system/history'.format(API_URL))
print(f' # curl {API_URL}/cpu/system/history')
print(indent_stat(json.loads(stats.get_plugin('cpu').get_stats_history('system'))))
print('')
print('Limit history for a specific field to last 2 values::')
print('')
print(' # curl {}/cpu/system/history'.format(API_URL))
print(f' # curl {API_URL}/cpu/system/history')
print(indent_stat(json.loads(stats.get_plugin('cpu').get_stats_history('system', nb=2))))
print('')
@ -305,17 +300,17 @@ def print_limits(stats):
print('')
print('All limits/thresholds::')
print('')
print(' # curl {}/all/limits'.format(API_URL))
print(f' # curl {API_URL}/all/limits')
print(indent_stat(stats.getAllLimitsAsDict()))
print('')
print('Limits/thresholds for the cpu plugin::')
print('')
print(' # curl {}/cpu/limits'.format(API_URL))
print(f' # curl {API_URL}/cpu/limits')
print(indent_stat(stats.get_plugin('cpu').limits))
print('')
class GlancesStdoutApiDoc(object):
class GlancesStdoutApiDoc:
"""This class manages the fields description display."""
def __init__(self, config=None, args=None):


@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -14,7 +13,7 @@ import time
from glances.globals import printandflush
class GlancesStdoutCsv(object):
class GlancesStdoutCsv:
"""This class manages the StdoutCsv display."""
separator = ','
@ -53,18 +52,18 @@ class GlancesStdoutCsv(object):
line = ''
if attribute is not None:
line += '{}.{}{}'.format(plugin, attribute, self.separator)
line += f'{plugin}.{attribute}{self.separator}'
else:
if isinstance(stat, dict):
for k in stat.keys():
line += '{}.{}{}'.format(plugin, str(k), self.separator)
line += f'{plugin}.{str(k)}{self.separator}'
elif isinstance(stat, list):
for i in stat:
if isinstance(i, dict) and 'key' in i:
for k in i.keys():
line += '{}.{}.{}{}'.format(plugin, str(i[i['key']]), str(k), self.separator)
else:
line += '{}{}'.format(plugin, self.separator)
line += f'{plugin}{self.separator}'
return line
@ -73,18 +72,18 @@ class GlancesStdoutCsv(object):
line = ''
if attribute is not None:
line += '{}{}'.format(str(stat.get(attribute, self.na)), self.separator)
line += f'{str(stat.get(attribute, self.na))}{self.separator}'
else:
if isinstance(stat, dict):
for v in stat.values():
line += '{}{}'.format(str(v), self.separator)
line += f'{str(v)}{self.separator}'
elif isinstance(stat, list):
for i in stat:
if isinstance(i, dict) and 'key' in i:
for v in i.values():
line += '{}{}'.format(str(v), self.separator)
line += f'{str(v)}{self.separator}'
else:
line += '{}{}'.format(str(stat), self.separator)
line += f'{str(stat)}{self.separator}'
return line
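
Note: to make the header and data builders above concrete, here is a self-contained sketch of the dict branch with made-up stats (plugin name and values are illustrative, not from the diff):

# Illustrative sketch: one column per key, each column followed by the separator.
separator = ','
plugin = 'mem'
stat = {'total': 8000000000, 'used': 4200000000}
header = ''.join(f'{plugin}.{k}{separator}' for k in stat)
data = ''.join(f'{str(v)}{separator}' for v in stat.values())
print(header)  # mem.total,mem.used,
print(data)    # 8000000000,4200000000,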


@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -10,16 +9,16 @@
"""Issue interface class."""
import os
import sys
import platform
import time
import pprint
from glances.timer import Counter
from glances import __version__, psutil_version
import sys
import time
import psutil
import glances
from glances import __version__, psutil_version
from glances.timer import Counter
TERMINAL_WIDTH = 79
@ -39,7 +38,7 @@ class colors:
self.NO = ''
class GlancesStdoutIssue(object):
class GlancesStdoutIssue:
"""This class manages the Issue display."""
def __init__(self, config=None, args=None):
@ -52,18 +51,14 @@ class GlancesStdoutIssue(object):
def print_version(self):
sys.stdout.write('=' * TERMINAL_WIDTH + '\n')
sys.stdout.write(
'Glances {} ({})\n'.format(colors.BLUE + __version__ + colors.NO, os.path.realpath(glances.__file__))
)
sys.stdout.write('Python {} ({})\n'.format(colors.BLUE + platform.python_version() + colors.NO, sys.executable))
sys.stdout.write(
'PsUtil {} ({})\n'.format(colors.BLUE + psutil_version + colors.NO, os.path.realpath(psutil.__file__))
)
sys.stdout.write(f'Glances {colors.BLUE + __version__ + colors.NO} ({os.path.realpath(glances.__file__)})\n')
sys.stdout.write(f'Python {colors.BLUE + platform.python_version() + colors.NO} ({sys.executable})\n')
sys.stdout.write(f'PsUtil {colors.BLUE + psutil_version + colors.NO} ({os.path.realpath(psutil.__file__)})\n')
sys.stdout.write('=' * TERMINAL_WIDTH + '\n')
sys.stdout.flush()
def print_issue(self, plugin, result, message):
sys.stdout.write('{}{}{}'.format(colors.BLUE + plugin, result, message))
sys.stdout.write(f'{colors.BLUE + plugin}{result}{message}')
sys.stdout.write(colors.NO + '\n')
sys.stdout.flush()
@ -108,9 +103,7 @@ class GlancesStdoutIssue(object):
except Exception as e:
stat_error = e
if stat_error is None:
result = (colors.GREEN + '[OK] ' + colors.BLUE + ' {:.5f}s '.format(counter.get())).rjust(
41 - len(plugin)
)
result = (colors.GREEN + '[OK] ' + colors.BLUE + f' {counter.get():.5f}s ').rjust(41 - len(plugin))
if isinstance(stat, list) and len(stat) > 0 and 'key' in stat[0]:
key = 'key={} '.format(stat[0]['key'])
stat_output = pprint.pformat([stat[0]], compact=True, width=120, depth=3)
@ -118,9 +111,7 @@ class GlancesStdoutIssue(object):
else:
message = '\n' + colors.NO + pprint.pformat(stat, compact=True, width=120, depth=2)
else:
result = (colors.RED + '[ERROR]' + colors.BLUE + ' {:.5f}s '.format(counter.get())).rjust(
41 - len(plugin)
)
result = (colors.RED + '[ERROR]' + colors.BLUE + f' {counter.get():.5f}s ').rjust(41 - len(plugin))
message = colors.NO + str(stat_error)[0 : TERMINAL_WIDTH - 41]
# Display the result
@ -128,7 +119,7 @@ class GlancesStdoutIssue(object):
# Display total time need to update all plugins
sys.stdout.write('=' * TERMINAL_WIDTH + '\n')
print("Total time to update all stats: {}{:.5f}s{}".format(colors.BLUE, counter_total.get(), colors.NO))
print(f"Total time to update all stats: {colors.BLUE}{counter_total.get():.5f}s{colors.NO}")
sys.stdout.write('=' * TERMINAL_WIDTH + '\n')
# Return True to exit directly (no refresh)
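
Note: the [OK]/[ERROR] column above is right-justified to a width of 41 - len(plugin) so that the status and timing line up in the same column whatever the plugin name length. A minimal sketch with assumed ANSI colour values (the real constants live in the colors class defined earlier in this file):

# Assumed colour codes for illustration only.
GREEN, BLUE, NO = '\033[92m', '\033[94m', '\033[0m'
plugin = 'cpu'
elapsed = 0.01234
result = (GREEN + '[OK] ' + BLUE + f' {elapsed:.5f}s ').rjust(41 - len(plugin))
print(BLUE + plugin + result + NO)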


@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -14,7 +13,7 @@ import time
from glances.globals import printandflush
class GlancesStdoutJson(object):
class GlancesStdoutJson:
"""This class manages the Stdout JSON display."""
def __init__(self, config=None, args=None):
@ -47,7 +46,7 @@ class GlancesStdoutJson(object):
else:
continue
# Display stats
printandflush('{}: {}'.format(plugin, stat))
printandflush(f'{plugin}: {stat}')
# Wait until next refresh
if duration > 0:


@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
@ -10,14 +9,14 @@
"""Manage unicode message for Glances output."""
_unicode_message = {
'ARROW_LEFT': [u'\u2190', u'<'],
'ARROW_RIGHT': [u'\u2192', u'>'],
'ARROW_UP': [u'\u2191', u'^'],
'ARROW_DOWN': [u'\u2193', u'v'],
'CHECK': [u'\u2713', u''],
'PROCESS_SELECTOR': [u'>', u'>'],
'MEDIUM_LINE': [u'\u23AF', u'-'],
'LOW_LINE': [u'\u2581', u'_'],
'ARROW_LEFT': ['\u2190', '<'],
'ARROW_RIGHT': ['\u2192', '>'],
'ARROW_UP': ['\u2191', '^'],
'ARROW_DOWN': ['\u2193', 'v'],
'CHECK': ['\u2713', ''],
'PROCESS_SELECTOR': ['>', '>'],
'MEDIUM_LINE': ['\u23af', '-'],
'LOW_LINE': ['\u2581', '_'],
}
@ -25,5 +24,4 @@ def unicode_message(key, args=None):
"""Return the unicode message for the given key."""
if args and hasattr(args, 'disable_unicode') and args.disable_unicode:
return _unicode_message[key][1]
else:
return _unicode_message[key][0]
return _unicode_message[key][0]
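
Note: a self-contained sketch of the fallback behaviour above; the Namespace stands in for the real argparse args object, and only one key is reproduced here for brevity:

from argparse import Namespace

_unicode_message = {'ARROW_UP': ['\u2191', '^']}  # [unicode glyph, ASCII fallback]

def unicode_message(key, args=None):
    # Equivalent to the logic above: honour a disable_unicode flag when present.
    if args and getattr(args, 'disable_unicode', False):
        return _unicode_message[key][1]
    return _unicode_message[key][0]

print(unicode_message('ARROW_UP'))                                   # '↑'
print(unicode_message('ARROW_UP', Namespace(disable_unicode=True)))  # '^'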


@ -218,23 +218,23 @@ body {
padding-left: 10px;
}
/* Loading page */
// /* Loading page */
#loading-page .glances-logo {
background: url('../images/glances.png') no-repeat center center;
background-size: contain;
}
// #loading-page .glances-logo {
// background: url('../images/glances.png') no-repeat center center;
// background-size: contain;
// }
@media (max-width: 750px) {
#loading-page .glances-logo {
height: 400px;
}
}
@media (min-width: 750px) {
#loading-page .glances-logo {
height: 500px;
}
}
// @media (max-width: 750px) {
// #loading-page .glances-logo {
// height: 400px;
// }
// }
// @media (min-width: 750px) {
// #loading-page .glances-logo {
// height: 500px;
// }
// }
/*


@ -1,7 +1,6 @@
<template>
<div v-if="!dataLoaded" class="container-fluid" id="loading-page">
<div class="glances-logo"></div>
<div class="loader">Loading...</div>
<div class="loader">Glances is loading...</div>
</div>
<glances-help v-else-if="args.help_tag"></glances-help>
<main v-else>

Some files were not shown because too many files have changed in this diff.