Port before_v0.60/data_extraction before_v0.60/examples before_v0.60/duplicates (#847)

This PR is part of porting all old scripts #221 and includes a set of
modules:

- `data_extraction`
- `examples` 
- `duplicates`

## 7 changed files:

### `data_extraction`
- `data_extraction/ultimate_extractor.nu`: removed. Has already been
ported to `modules/data_extraction/ultimate_extractor.nu`

### `duplicates`
- `duplicates/duplicates.nu` -> `modules/duplicates/duplicates.nu`
- `duplicates/example.nu` -> `modules/duplicates/example.nu`
- `duplicates/README.md` -> `modules/duplicates/README.md`: unchanged

### `examples`
- `examples/netstat.nu` -> `modules/examples/netstat.nu`
- `examples/date_in_local_timezones.nu` ->
`modules/examples/date_in_local_timezones.nu`
- `before_v0.60/assets/core_team.nu`: removed. This table has been
embedded into `date_in_local_timezones.nu`
This commit is contained in:
Igor 2024-05-26 21:37:29 +04:00 committed by GitHub
parent 9d399d8902
commit 275a0f8f26
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
9 changed files with 81 additions and 93 deletions

View File

@ -1,12 +0,0 @@
# Table of Nushell core-team members and their IANA/tz-database timezone names.
def "nu core-team" [] {
    [
        ['name' 'tz'];
        ['andres' 'America/Guayaquil']
        ['fdncred' 'US/Central']
        ['gedge' 'US/Eastern']
        ['jt' 'NZ']
        ['wycats' 'US/Pacific']
        ['kubouch' 'Europe/Helsinki']
        ['elferherrera' 'Europe/London']
    ]
}

View File

@ -1,27 +0,0 @@
# Extract an archive by dispatching on its file-name extension.
# NOTE(review): pre-0.60 Nushell — `$it` inside `where`, `empty?`,
# `build-string`, and the two-block `if cond {a} {b}` form no longer
# parse in modern Nushell; this is a historical script, kept byte-identical.
#Function to extract archives with different extensions
def extract [name:string #name of the archive to extract
] {
# Lookup table mapping an extension pattern (`ex`) to the external unpack
# command (`com`). Row order matters: compound extensions (.tar.gz, .tar.bz2)
# precede their bare suffixes (.gz, .bz2) so the more specific command wins.
let exten = [ [ex com];
['.tar.bz2' 'tar xjf']
['.tar.gz' 'tar xzf']
['.bz2' 'bunzip2']
['.rar' 'unrar x']
['.tbz2' 'tar xjf']
['.tgz' 'tar xzf']
['.zip' 'unzip']
['.7z' '/usr/bin/7z x']
['.deb' 'ar x']
['.tar.xz' 'tar xvf']
['.tar.zst' 'tar xvf']
['.tar' 'tar xvf']
['.gz' 'gunzip']
['.Z' 'uncompress']
]
# First row whose `ex` matches the file name (`=~` is a regex match, so the
# dots in the patterns match any character — TODO confirm intended).
let command = ($exten|where $name =~ $it.ex|first)
# Old-style two-block `if`: the second block is the else branch.
if ($command|empty?) {
echo 'Error! Unsupported file extension'
} {
# Build "<command> <name>" and run it in a child `nu` shell.
nu -c (build-string $command.com ' ' $name)
}
}

View File

@ -1,25 +0,0 @@
# Prints each core-team member's timezone, then their local time for a fixed
# upcoming call.
# NOTE(review): pre-0.60 Nushell — `str to-datetime`, `str collect`,
# `date format`, implicit `each` input, and implicit display of bare values
# are all legacy idioms; kept byte-identical as a historical script.
# `date local` presumably comes from the sourced lib — verify against repo.
source lib/scripts.nu
# Provides the `nu core-team` table command.
source assets/core_team.nu
let next_call = ("2021-08-31 15:00:21.290597200 -05:00" | str to-datetime);
# Core-team table extended with each member's local `time` for the call.
let people = (nu core-team | date local $next_call);
# Applies `block` to every piped-in row (with `name` capitalized via the old
# table-`merge` idiom) and joins the resulting lines with newlines.
def say [block] {
each {|person|
do $block (
$person | merge {
[[name]; [($person.name | str capitalize)]]
}
)
} | str collect (char newline)
}
$people | say {|person| $"($person.name)'s timezone is ($person.tz)"}
$"
for the next call happening on ($next_call | date format '%c').. in their timezones they would be:
"
$people | say {|person| $"($person.name)'s: ($person.time)"}

View File

@ -1,17 +0,0 @@
# Parse `netstat` output into two tables, split at the second "Active ..."
# header line (internet connections vs. unix sockets).
# NOTE(review): pre-0.60 Nushell — `keep until`, bare `$it` conditions,
# `str collect`, `str find-replace`, and the two-block `if` are legacy
# idioms; kept byte-identical as a historical script.
let ns = (netstat | lines | skip 1)
# Everything before the "Active" separator, re-parsed as space-separated values.
let first-batch = ($ns | keep until $it =~ Active | str collect (char nl) | from ssv -m 1)
let second-batch = ($ns |
skip until $it =~ Active |
skip 1 |
str collect (char nl) |
# Collapse "[ ]" cells so the ssv columns stay aligned.
str find-replace "\[ \]" "[]" --all |
from ssv -m 1 |
default I-Node "" |
default Path "" |
# DGRAM rows print no State field, so their values land one column early;
# shift State -> I-Node -> Path back into place.
each {|row| if $row.Type == DGRAM { $row | update Path { get I-Node } | update I-Node { get State } | update State "" } { $row } }
)
$first-batch
$second-batch

View File

@ -1,13 +1,15 @@
# NOTE(review): this span renders a unified-diff hunk — removed (pre-0.60)
# lines and their added replacements are interleaved without +/- markers,
# so it is not a runnable script as-is.
# duplicates example
# old form: relied on implicit display of the pipeline result
echo $info | from json
# new form: import the duplicates commands from the module
# NOTE(review): imports `mod.nu`, but the module file in this PR is
# `duplicates.nu` — verify the module filename against the repo.
use mod.nu *
# NOTE(review): nested double quotes inside a double-quoted literal would not
# parse; the real file presumably single-quotes this JSON string — confirm.
let info = "[{name: "John", lastname: "Doe"}, {name: "John", lastname: "Roe"}, {name: "Jane", lastname: "Soe"}]"
echo $info | from json | duplicates name
# new form: explicit `print` replaces implicit echo output
print ($info | from json)
print ($info | from json | duplicates name)
#duplicates files example
# Create two same-size files (duplicates by the size heuristic) and one different.
echo A | save A.txt
echo A | save B.txt
# note that if I used "echo B | save B.txt" the function will give a false positive
echo ABC | save C.txt
ls
duplicates files
rm A.txt B.txt C.txt --permanent
print (ls)
print (duplicates files)
rm A.txt B.txt C.txt --permanent

View File

@ -1,21 +1,21 @@
# duplicates returns the rows that correspond to duplicates of the given column.
# NOTE(review): this span is a diff hunk — each old (pre-0.60) line is
# immediately followed by its post-0.60 replacement (`pivot`->`transpose`,
# `$it`->`$in`, `empty?`->`is-empty`, two-block `if`->`if/else`, and the
# definitions gained `export`); it is not runnable as-is.
def duplicates [
export def duplicates [
column: string # Column to look duplicates at
--count(-c) # set it to display the number of times the value is repeated.
] {
# Group rows by the column's value (new form stringifies the key first).
group-by $column |
pivot |
insert count { $it.Column1 | flatten | length } |
group-by {get $column | into string} |
transpose |
# Count how many rows share each value.
insert count { $in.column1 | flatten | length } |
# Keep only values that occur more than once.
where count > 1 |
reject Column0 |
# Unless -c was given, drop the helper count column.
if ($count | empty?) { reject count } { each { $it } } |
reject column0 |
if ($count | is-empty) { reject count } else { each { $in } } |
# Un-nest the grouped rows back into a flat table.
flatten |
flatten
}
# duplicates files recursively finds duplicate files in the current working folder.
# It uses a heuristic based on duplicate files having the same size.
def "duplicates files" [] {
export def "duplicates files" [] {
# -i ignores permission errors while globbing; same size => flagged duplicate.
do -i {ls **/*} | duplicates size
}

View File

@ -0,0 +1,42 @@
# Table of Nushell core-team members and their IANA/tz-database timezone names.
def "nu core-team" [] {
    [
        ['name' 'tz'];
        ['andres' 'America/Guayaquil']
        ['fdncred' 'US/Central']
        ['gedge' 'US/Eastern']
        ['jt' 'NZ']
        ['wycats' 'US/Pacific']
        ['kubouch' 'Europe/Helsinki']
        ['elferherrera' 'Europe/London']
    ]
}
# For every row of the piped-in table, add a `time` column holding the given
# datetime rendered in that row's `tz` timezone with the '%c' format.
def "date local" [now] {
    insert time {|row|
        $now | date to-timezone $row.tz | format date '%c'
    }
}
# Next scheduled call, parsed from a timestamp with an explicit UTC offset.
let next_call = ("2021-08-31 15:00:21.290597200 -05:00" | into datetime);
# Core-team table extended with each member's local `time` for that call.
let people = (nu core-team | date local $next_call);
# Render every piped-in row through `fmt` (with its `name` capitalized)
# and join the resulting lines with newlines.
def say [fmt] {
    $in | each {|member|
        let pretty = ($member | update name {|r| $r.name | str capitalize})
        do $fmt $pretty
    } | str join (char newline)
}
print ($people | say {|person| $"($person.name)'s timezone is ($person.tz)"})
print ($"
for the next call happening on ($next_call | format date '%c').. in their timezones they would be:
")
print ($people | say {|person| $"($person.name)'s: ($person.time)"})

View File

@ -0,0 +1,25 @@
# Parse `netstat` output into two tables, split at the second "Active ..."
# header line (internet connections vs. unix sockets).
let ns = (netstat | lines | skip 1)
# Everything before the separator, re-parsed as space-separated values.
let first_batch = ($ns
    | take until {|line| $line =~ Active }
    | str join (char nl)
    | from ssv -m 1)
# Everything after the separator. "[ ]" cells are collapsed so the ssv
# columns stay aligned, and DGRAM rows — which print no State field, so
# their values land one column early — are shifted back into place.
# NOTE(review): `default I-Node ""` argument order and regex-by-default
# `str replace` match the 0.60-era commands this script targets.
let second_batch = ($ns
    | skip until {|line| $line =~ Active }
    | skip 1
    | str join (char nl)
    | str replace '\[ \]' "[]" --all
    | from ssv -m 1
    | default I-Node ""
    | default Path ""
    | each {|rec|
        if $rec.Type == DGRAM {
            $rec | update Path { get I-Node } | update I-Node { get State } | update State ""
        } else {
            $rec
        }
    })
print $first_batch
print $second_batch