refactor: Unify disk conversion to string step (#446)

Refactor to unify the process disk usage -> string conversion into one function.
Clement Tsang 2021-04-07 20:52:29 -04:00 committed by GitHub
parent 5bd9e4f6ae
commit e63a0b4082
6 changed files with 53 additions and 36 deletions


@@ -24,7 +24,7 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Sleep for a few seconds.
+      - name: Sleep for a few seconds to prevent timing issues between the deletion and creation.
         run: sleep 10
       - name: Create nightly GitHub release
@@ -207,6 +207,8 @@ jobs:
           tar -czvf bottom_${{ matrix.triple.target }}.tar.gz btm completion
           echo "ASSET=bottom_${{ matrix.triple.target }}.tar.gz" >> $GITHUB_ENV
+      # TODO: Move this elsewhere; do this all at once, and do not continue if any fails. Store artifacts. Do the same for deployment.
       - name: Upload main release
         uses: actions/upload-release-asset@v1.0.1
         id: upload

.gitignore

@@ -25,3 +25,6 @@ sample_configs/testing.toml
 # Wix
 /wix/
+# Cargo-deny
+deny.toml


@@ -73,6 +73,7 @@
     "denylist",
     "doctest",
     "dont",
+    "drprasad",
     "eselect",
     "fedoracentos",
     "fpath",


@@ -110,3 +110,4 @@ output = "bottom_x86_64_installer.msi"
 version = "1"
 default-features = false
 features = ["user-hooks"]


@@ -575,6 +575,27 @@ pub enum ProcessNamingType {
     Path,
 }
 
+/// Given read/s, write/s, total read, and total write values, return 4 strings that represent read/s, write/s, total read, and total write
+fn get_disk_io_strings(
+    rps: u64, wps: u64, total_read: u64, total_write: u64,
+) -> (String, String, String, String) {
+    // Note we always use bytes for total read/write here (for now).
+    let converted_rps = get_decimal_bytes(rps);
+    let converted_wps = get_decimal_bytes(wps);
+    let converted_total_read = get_decimal_bytes(total_read);
+    let converted_total_write = get_decimal_bytes(total_write);
+
+    (
+        format!("{:.*}{}/s", 0, converted_rps.0, converted_rps.1),
+        format!("{:.*}{}/s", 0, converted_wps.0, converted_wps.1),
+        format!("{:.*}{}", 0, converted_total_read.0, converted_total_read.1),
+        format!(
+            "{:.*}{}",
+            0, converted_total_write.0, converted_total_write.1
+        ),
+    )
+}
+
 /// Because we needed to UPDATE data entries rather than REPLACING entries, we instead update
 /// the existing vector.
 pub fn convert_process_data(
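
For readers skimming the diff, the following is a minimal, self-contained sketch of how the new get_disk_io_strings helper behaves. The get_decimal_bytes stub is only a stand-in for bottom's own utility (which is not part of this diff), so its exact return type and unit handling are assumptions; the {:.0} specifier here is just shorthand for the explicit zero precision ({:.*} with 0) used in the actual code.

// Minimal, self-contained sketch; get_decimal_bytes here is a stand-in
// stub for bottom's utility: it scales a byte count by decimal prefixes
// and returns a (value, unit) pair, which is the shape the format! calls
// in the diff rely on.
fn get_decimal_bytes(bytes: u64) -> (f64, String) {
    const UNITS: [&str; 5] = ["B", "KB", "MB", "GB", "TB"];
    let mut value = bytes as f64;
    let mut unit = 0;
    while value >= 1000.0 && unit < UNITS.len() - 1 {
        value /= 1000.0;
        unit += 1;
    }
    (value, UNITS[unit].to_string())
}

// Same shape as the helper added above: four counters in, four strings out.
fn get_disk_io_strings(
    rps: u64, wps: u64, total_read: u64, total_write: u64,
) -> (String, String, String, String) {
    let rps = get_decimal_bytes(rps);
    let wps = get_decimal_bytes(wps);
    let tr = get_decimal_bytes(total_read);
    let tw = get_decimal_bytes(total_write);
    (
        format!("{:.0}{}/s", rps.0, rps.1),
        format!("{:.0}{}/s", wps.0, wps.1),
        format!("{:.0}{}", tr.0, tr.1),
        format!("{:.0}{}", tw.0, tw.1),
    )
}

fn main() {
    // A process reading ~1.2 MB/s and writing ~3 KB/s, with 250 MB read
    // and 4 GB written in total.
    let (r, w, tr, tw) = get_disk_io_strings(1_200_000, 3_000, 250_000_000, 4_000_000_000);
    println!("{} {} {} {}", r, w, tr, tw); // prints: 1MB/s 3KB/s 250MB 4GB
}

Returning a plain 4-tuple keeps the helper dependency-free and lets each of the three call sites either destructure it into named bindings or index into it, as the hunks below show.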
@@ -589,17 +610,11 @@ pub fn convert_process_data(
         existing_converted_process_data.keys().copied().collect();
 
     for process in &current_data.process_harvest {
-        let converted_rps = get_decimal_bytes(process.read_bytes_per_sec);
-        let converted_wps = get_decimal_bytes(process.write_bytes_per_sec);
-        let converted_total_read = get_decimal_bytes(process.total_read_bytes);
-        let converted_total_write = get_decimal_bytes(process.total_write_bytes);
-
-        let read_per_sec = format!("{:.*}{}/s", 0, converted_rps.0, converted_rps.1);
-        let write_per_sec = format!("{:.*}{}/s", 0, converted_wps.0, converted_wps.1);
-        let total_read = format!("{:.*}{}", 0, converted_total_read.0, converted_total_read.1);
-        let total_write = format!(
-            "{:.*}{}",
-            0, converted_total_write.0, converted_total_write.1
-        );
+        let (read_per_sec, write_per_sec, total_read, total_write) = get_disk_io_strings(
+            process.read_bytes_per_sec,
+            process.write_bytes_per_sec,
+            process.total_read_bytes,
+            process.total_write_bytes,
+        );
 
         let mem_usage_str = get_decimal_bytes(process.mem_usage_bytes);
@@ -1161,19 +1176,17 @@ pub fn tree_process_data(
                     p.tw_f64 += child_total_write;
                 }
 
-                let converted_rps = get_decimal_bytes(p.rps_f64 as u64);
-                let converted_wps = get_decimal_bytes(p.wps_f64 as u64);
-                let converted_total_read = get_decimal_bytes(p.tr_f64 as u64);
-                let converted_total_write = get_decimal_bytes(p.tw_f64 as u64);
-
-                p.read_per_sec = format!("{:.*}{}/s", 0, converted_rps.0, converted_rps.1);
-                p.write_per_sec = format!("{:.*}{}/s", 0, converted_wps.0, converted_wps.1);
-                p.total_read =
-                    format!("{:.*}{}", 0, converted_total_read.0, converted_total_read.1);
-                p.total_write = format!(
-                    "{:.*}{}",
-                    0, converted_total_write.0, converted_total_write.1
-                );
+                let disk_io_strings = get_disk_io_strings(
+                    p.rps_f64 as u64,
+                    p.wps_f64 as u64,
+                    p.tr_f64 as u64,
+                    p.tw_f64 as u64,
+                );
+
+                p.read_per_sec = disk_io_strings.0;
+                p.write_per_sec = disk_io_strings.1;
+                p.total_read = disk_io_strings.2;
+                p.total_write = disk_io_strings.3;
             }
         }
@@ -1253,6 +1266,9 @@ pub fn stringify_process_data(
         .collect()
 }
 
+/// Takes a set of converted process data and groups it together.
+///
+/// To be honest, I really don't like how this is done, even though I've rewritten this like 3 times.
 pub fn group_process_data(
     single_process_data: &[ConvertedProcessData], is_using_command: bool,
 ) -> Vec<ConvertedProcessData> {
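
For orientation, here is a rough, assumption-laden sketch of what "grouping" means in this context: entries are keyed by process name (or by command when is_using_command is set) and their per-process I/O rates are summed, after which the real code turns the summed rates back into display strings via get_disk_io_strings. The types and function below are illustrative only and are not bottom's actual implementation.

use std::collections::HashMap;

// Illustrative-only type; the real ConvertedProcessData has many more fields.
struct ProcEntry {
    name: String,
    command: String,
    read_per_sec: f64,
    write_per_sec: f64,
}

// Sketch of the grouping idea: accumulate per-key I/O totals, which the
// real code then re-formats through get_disk_io_strings.
fn group_by_key(entries: &[ProcEntry], is_using_command: bool) -> HashMap<String, (f64, f64)> {
    let mut grouped: HashMap<String, (f64, f64)> = HashMap::new();
    for e in entries {
        let key = if is_using_command { e.command.clone() } else { e.name.clone() };
        let totals = grouped.entry(key).or_insert((0.0, 0.0));
        totals.0 += e.read_per_sec;
        totals.1 += e.write_per_sec;
    }
    grouped
}

fn main() {
    let entries = vec![
        ProcEntry { name: "firefox".into(), command: "/usr/bin/firefox".into(), read_per_sec: 1.0e6, write_per_sec: 2.0e5 },
        ProcEntry { name: "firefox".into(), command: "/usr/bin/firefox -P work".into(), read_per_sec: 5.0e5, write_per_sec: 1.0e5 },
    ];
    let grouped = group_by_key(&entries, false);
    // Both entries share the name "firefox", so their rates are summed.
    assert_eq!(grouped["firefox"], (1.5e6, 3.0e5));
}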
@@ -1299,18 +1315,11 @@ pub fn group_process_data(
         .map(|(identifier, process_details)| {
             let p = process_details.clone();
 
-            // FIXME: Unify this step in the three locations it is used to one function.
-            let converted_rps = get_decimal_bytes(p.read_per_sec as u64);
-            let converted_wps = get_decimal_bytes(p.write_per_sec as u64);
-            let converted_total_read = get_decimal_bytes(p.total_read as u64);
-            let converted_total_write = get_decimal_bytes(p.total_write as u64);
-
-            let read_per_sec = format!("{:.*}{}/s", 0, converted_rps.0, converted_rps.1);
-            let write_per_sec = format!("{:.*}{}/s", 0, converted_wps.0, converted_wps.1);
-            let total_read = format!("{:.*}{}", 0, converted_total_read.0, converted_total_read.1);
-            let total_write = format!(
-                "{:.*}{}",
-                0, converted_total_write.0, converted_total_write.1
-            );
+            let (read_per_sec, write_per_sec, total_read, total_write) = get_disk_io_strings(
+                p.read_per_sec as u64,
+                p.write_per_sec as u64,
+                p.total_read as u64,
+                p.total_write as u64,
+            );
 
             ConvertedProcessData {


@@ -320,6 +320,7 @@ pub fn handle_force_redraws(app: &mut App) {
         app.cpu_state.force_update = None;
     }
 
+    // FIXME: [OPT] Prefer reassignment over new vecs?
    if app.mem_state.force_update.is_some() {
        app.canvas_data.mem_data = convert_mem_data_points(&app.data_collection, app.is_frozen);
        app.canvas_data.swap_data = convert_swap_data_points(&app.data_collection, app.is_frozen);
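
As context for the new FIXME above: the optimization it hints at is reusing an existing allocation instead of building a fresh Vec on every redraw. A minimal sketch of that idea, with hypothetical names (this is not bottom's code):

// Hypothetical illustration of the FIXME's suggestion: instead of
// replacing a Vec wholesale (dropping the old buffer and allocating a
// new one), clear it and refill it in place so its capacity is reused.
fn refill_in_place(dest: &mut Vec<(f64, f64)>, fresh_points: &[(f64, f64)]) {
    dest.clear();                         // keeps the existing capacity
    dest.extend_from_slice(fresh_points); // reuses it when large enough
}

fn main() {
    let mut mem_data: Vec<(f64, f64)> = Vec::with_capacity(1024);
    let new_points = vec![(0.0, 12.5), (1.0, 13.0)];
    refill_in_place(&mut mem_data, &new_points);
    assert_eq!(mem_data.len(), 2);
}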