-
Notifications
You must be signed in to change notification settings - Fork 0
UNIX (and command line tools)
⏣SUPERIOR ULTRABEST™⏣ Bash config. Provides fancy-pants aliases/prompt, sane default settings, and other useful miscellanea:
https://github.com/mitochondrion/dotfiles/blob/master/.bash_profile
Useful if, for example, ports are not exposed on the remote host.
Tunnels http://localhost:LOCAL_PORT
to http://TARGET_HOST:HOST_PORT
via ssh to USERNAME@SSH_HOST
ssh -L LOCAL_PORT:TARGET_HOST:HOST_PORT USERNAME@SSH_HOST -N
Listen on localhost:port and dump incoming requests to stdout
. Point anything making requests to localhost:port to see what it's trying to send.
nc -l [PORT] -v
host domain.com 8.8.8.8 # Use Google's DNS server because default DNS could be abetting domain frontrunning
~/.ssh/config
Host hostnameprefix*
IdentityFile ~/.ssh/private_key_for_hosts
User username_for_hosts
Host someotherhostnameprefix*
IdentityFile ~/.ssh/private_key_for_other_hosts
User username_for_other_hosts
More: https://programminghistorian.org/en/lessons/json-and-jq.
Prettify json:
jq '.' /path/to/input/ugly.json > /path/to/output/pretty.json
Find objects in array without key:
jq '.[] | select(has("some_key") | not)' some.json
Extract and concat specific fields from list of objects:
# Input
[
{
"crap": 123,
"first_name": "Bobson",
"last_name": "Dugnutt"
},
{
"crap": 456,
"first_name": "Onson",
"last_name": "Sweemey"
}
]
# Filter
jq '[ .[] | "\(.first_name) \(.last_name)" ]' json
# Output
[ "Bobson Dugnutt", "Onson Sweemey" ]
Reshape keys in nested arrays/objects and filter out nulls
# Input
[
{
"parent": "p1",
"inner": "i1",
"data": [
{
"stuff": {"foo": 1, "bar": [{"a": 2, "b": 3, "c": 4}, {"a": 5, "b": 6}]}
},
{
"stuff": {"foo": 7, "bar": [{"a": 8, "c": 9}]}
}
]
},
{
"parent": "p2",
"inner": "i2",
"data": [
{
"stuff": {"foo": 7, "bar": [{"c": 9}, {"b": 10}]}
}
]
}
]
# Filter:
jq '[ .[] | .inner as $i | { parent, "bars": [ .data[].stuff.bar[] | {$i, a, c} | with_entries(select(.value != null)) ] } ]' json
# Result:
[
{
"parent": "p1",
"bars": [
{"i": "i1", "a": 2, "c": 4},
{"i": "i1", "a": 5},
{"i": "i1", "a": 8, "c": 9}
]
},
{
"parent": "p2",
"bars": [
{"i": "i2", "c": 9},
{"i": "i2"}
]
}
]
Convert to CSV:
# Input:
{
"things": [
{
"field1": 123,
"field2": "456",
"doodad": {
"field3": 789
}
},
{
"field1": 321,
"field2": "654",
"doodad": {
"field3": 987
}
}
]
}
# Filter:
jq -r '.things[] | [.field1, .field2, .doodad.field3] | @csv'
# Result:
123,"456",789
321,"654",987
# Conditionally build a flag string and splice it into a command.
SET_FLAG="true"

# Use an explicit `if`: the original `FLAG="" && [[ ... ]] && FLAG=...` chain
# leaves a non-zero exit status whenever SET_FLAG != "true", which aborts any
# script running under `set -e`.
FLAG=""
if [[ "$SET_FLAG" == "true" ]]; then
  FLAG="--flag"
fi

MY_COMMAND="my_special_command $FLAG"
echo "My special command: $MY_COMMAND"
$MY_COMMAND   # deliberately unquoted so the embedded flag word-splits into a separate argument
Processes:
sysctl -a | grep maxproc
sudo sysctl -w kern.maxproc=2500
sudo sysctl -w kern.maxprocperuid=2048
Files Handles:
sysctl -a | grep files
sudo sysctl -w kern.maxfiles=12288
sudo sysctl -w kern.maxfilesperproc=10240
Sockets:
sysctl -a | grep somax
sudo sysctl -w kern.ipc.somaxconn=2048
ulimit -n 10240
Rank | Speed (~200MB input) | Collision Avoidance
---|---|---
1. | crc32 1x | shasum -a 512 512 bit
2. | sum 2.5x | shasum -a 256 256 bit
3. | md5 3.5x | shasum 160 bit
4. | cksum 4x | md5 128 bit
5. | shasum 4.5x | cksum 64 bit
5. | shasum -a 512 6.5x | crc32 32 bit
6. | shasum -a 256 9x | sum 16 bit
lsof | awk '{print $1}' | uniq -c | sort -rn | head`
aka "Who usin' muh port(s)?"
lsof -nPi tcp[:PORT]
# If you don't care about file size differences
sdiff <(printf '%s\n' "`tree -pu /path/to/dir/a`") <(printf '%s\n' "`tree -pu /path/to/dir/b`") -w 200 | less
# If you care about file size differences
sdiff <(printf '%s\n' "`tree -pus /path/to/dir/a`") <(printf '%s\n' "`tree -pus /path/to/dir/b`") -w 200 | less
nmap -v -A --fuzzy [IP OR HOST] # IP can be a range e.g. 192.168.0.0-255
Send/receive
# Receiver
gpg --gen-key
gpg --list-public-keys
gpg --armor --export [KEY ID] > /tmp/some-public-key-name.asc
# Sender
gpg --import /tmp/some-public-key-name.asc
gpg --output encrypted.gpg --encrypt MY_SECRET_FILE.txt
# Receiver
gpg --output decrypted.txt --decrypt encrypted.gpg
Export/import
# Export
gpg --list-secret-keys user@example.com
gpg --export-secret-keys KEY_ID_GOES_HERE > private.key
# Import
gpg --import private.key
# Self-referencing Makefile demo: recursive $(MAKE) invocation, .PHONY targets,
# ignoring a failing command, and (mis)using ifeq inside a recipe.
# NOTE(review): the recipe lines below appear to have lost their leading tabs
# in this copy; GNU make requires every recipe line to start with a tab.
THIS_FILE := $(lastword $(MAKEFILE_LIST))
SOME_VAR="my special var"
# First rule in the file is the default goal.
default: foo
.PHONY: all-the-things
all-the-things: foo bar baz missing-var
.PHONY: foo
foo: bar
# `@` suppresses echoing of the command itself; the second line IS echoed.
@echo "DON'T PRINT MY COMMAND"
echo "PRINT MY COMMAND"
# Recursively invoke this same Makefile for the `baz` target.
$(MAKE) -f $(THIS_FILE) baz
.PHONY: bar
bar:
echo "DOING BAR!"
ls /asdf || : # don't exit on failure
.PHONY: baz
baz:
echo "DOING BAZ!"
.PHONY: missing-var
missing-var:
echo "CHECKING FOR SOME_VAR..."
# NOTE(review): ifeq is evaluated at Makefile parse time, not when the recipe
# runs — so this gates which echo/exit lines become part of the recipe.
ifeq ($(SOME_VAR), )
echo "SOME_VAR NOT FOUND! KTHXBYE."
# NOTE(review): shell exit codes are 0-255; `exit -42` is not portable.
exit -42
endif
echo "SOME_VAR FOUND: ${SOME_VAR}"
echo "CHECKING FOR MISSING_VAR..."
ifeq ($(MISSING_VAR), )
echo "MISSING_VAR NOT FOUND! KTHXBYE."
exit -42
endif
echo "MISSING_VAR FOUND: ${MISSING_VAR}"
type -a function_name
Send directory:
rsync -rhavHEP --stats /path/source_dir user@remote:/path/destination_dir
tr -s ' ' | cut -d ' ' -f [column #s]
# Ex: Extract size and name of files in current directory
ls -l | tr -s ' ' | cut -d ' ' -f "5 9"
find . -name '*media*.mp3' -delete
find . -type f -exec mv '{}' '{}'.jpg \;
# OR
for file in file_prefix*.csv; do mv -- "$file" "${file%.csv}.txt"; done
YYYY/MM/DD hh:mm:ss
date "+%Y/%m/%d %H:%M:%S"
UNIX -> HUMAN:
date -u -r 1498780800
HUMAN -> UNIX:
date -u -j -f "%Y%m%d%H%M%S" "20170630000000" "+%s"
List everything
systemctl
List only running services
systemctl list-units --type=service --state=running
See what port a service is listening on
netstat -ltup | grep service_name
Service configs live here:
/etc/systemd/system
Service status with wildcard
systemctl status prefix*
Start/stop/restart service
systemctl start service_name
systemctl restart service_name
systemctl stop service_name
# OR for backwards compatibility with init.d
service service_name start
service service_name restart
service service_name stop
tree -pufi /some/path
printf "%'d" 123456
if [ -z "$SOME_VAR" ]; then echo "LOL EMPTY"; else echo "SOME_VAR is $SOME_VAR"; fi
basename $SOME_PATH
# Ex: Single list of values in a file
awk '{s+=$1} END {printf "%.0f", s}' [file path]
# Ex: Sum csv file column
cut -f3 -d, stuff.csv | awk '{s+=$1} END {printf "%.0f\n", s}'
Get the SMTP server:
nslookup -q=mx [EMAIL DOMAIN]
Connect via telnet and initialize SMTP connection:
telnet [SMTP SERVER] 25
helo hi
MAIL FROM:<test@example.com>
Check for target email:
RCPT TO:<[TARGET ACCOUNT]@[EMAIL DOMAIN]>
Invalid email response:
550-5.1.1 The email account that you tried to reach does not exist.
Valid email response:
250 2.1.5 OK b15-v6si115561qvd.78 - gsmtp
Quit:
quit
This will hammer an endpoint and log the status and request duration to /tmp/hammer_status
.
To monitor the current curl count (start this in separate terminal before starting The Hammer):
watch -n .2 -d "ps aux | grep curl | grep -v 'watch\|grep' | wc -l"
The Hammer (x10,000):
time for i in {1..10000}; do curl -NSso /dev/null -w "%{http_code}, %{time_total}\n" [ENDPOINT] >> /tmp/hammer_status & done
Infinite Hammer:
while true; do curl -NSso /dev/null -w "%{http_code}, %{time_total}\n" [ENDPOINT] >> /tmp/hammer_status & done
ps aux | awk '"[Zz]" ~ $8 { printf("%s, PID = %d\n", $8, $2); }'
seq [# runs] | xargs -P[max threads] -I"asdf" [command]
# Ex:
seq 100 | xargs -P20 -I"asdf" echo curl www.google.com
find $PWD -name ".*" -prune -o -print
ls -1q some_path* | wc -l
Convert gif
to mp4
:
# (brew|apt) install imagemagick
convert -coalesce SOME_FILE.gif frames%04d.png
ffmpeg -r 10 -i frames%04d.png -vcodec mjpeg -y SOME_FILE.mp4
Concat .ts files and transcode to .mp4:
find . -name "*.ts" -prune -print | xargs cat > all.ts
ffmpeg -i all.ts -acodec copy -vcodec copy all.mp4
Concat any media files:
- Make a file
whatever.txt
:
file '/path/to/file1.mp4'
file '/path/to/file2.mp4'
- Then run:
ffmpeg -f concat -safe 0 -i whatever.txt -c copy output.mp4
Merge two audio files into panned channels:
ffmpeg -i left_source.mp3 -i ~/Downloads/right.mp3 -filter_complex "[0:a]pan=1c|c0=c0[left];[1:a]pan=1c|c0=c0[right];[left][right]amerge=inputs=2[aout]" -map "[aout]" -ac 2 panned.wav
Raw:
openssl s_client -showcerts -servername ems.tradingticket.com -connect ems.tradingticket.com:443
Decoded:
echo | \
openssl s_client -servername ems.tradingticket.com -connect ems.tradingticket.com:443 2>/dev/null | \
openssl x509 -text
- Append carriage return "\r" without a newline
- Wipe out the entire line with "\x1b[2K\r"
END = '\033[0m'
GREY = '\033[90m'
RED = '\033[91m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
BLUE = '\033[94m'
MAGENTA = '\033[95m'
CYAN = '\033[96m'
BLACK = '\033[30m'
D_RED = '\033[31m'
D_GREEN = '\033[32m'
D_YELLOW = '\033[33m'
D_BLUE = '\033[34m'
D_MAGENTA = '\033[35m'
D_CYAN = '\033[36m'
B_GREY = '\033[100m'
B_RED = '\033[101m'
B_GREEN = '\033[102m'
B_YELLOW = '\033[103m'
B_BLUE = '\033[104m'
B_MAGENTA = '\033[105m'
B_CYAN = '\033[106m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
ifconfig | grep "inet " | tail -1 | cut -d " " -f2'
curl 'https://api.ipify.org'
curl 'https://ifconfig.co'
* * * * * command to execute
│ │ │ │ │
│ │ │ │ └─── day of week (0 - 6) (0 to 6 are Sunday to Saturday, or use names; 7 is Sunday, the same as 0)
│ │ │ └──────── month (1 - 12)
│ │ └───────────── day of month (1 - 31)
│ └────────────────── hour (0 - 23)
└─────────────────────── min (0 - 59)
diff -qr dir1/ dir2/
Using git
git diff --no-index dir1/ dir2/
Uses block size; works best when filtering for larger files (in this example > 1MB):
find . -type f -size +1000000c -exec ls -s {} \; | sort -g | tee /tmp/files_sorted_by_size.out | cut -f 1 -d ' ' | uniq -d > /tmp/dupe_sizes.out; grep -ihf /tmp/dupe_sizes.out /tmp/files_sorted_by_size.out > /tmp/dupes.out
Uses checksums:
find . -type f -exec cksum {} \; | sort -g | tee /tmp/files_sorted_by_checksum.out | cut -f 1,2 -d ' ' | uniq -d > /tmp/dupe_checksums.out; grep -ihf /tmp/dupe_checksums.out /tmp/files_sorted_by_checksum.out > /tmp/dupes.out
Repeatedly run command and display output with specific interval (e.g. 1 second):
#!/bin/bash
set -euo pipefail
watch -n 1 -d "SOME_COMMAND"
Anywhere in entire script including unset env vars:
set -euo pipefail
Specific command:
my_special_command arg1 arg2 || { echo "Some error message!"; exit 1; }
ssh-keygen -l -E md5 -f ~/.ssh/[PUBLIC KEY FILE]
echo 'SOME BASE 64' | base64 --decode
Restart
wg-quick down /etc/wireguard/wg0.conf && wg-quick up /etc/wireguard/wg0.conf
Server Config:
[Interface]
Address = 172.16.0.1/24 # This is the virtual IP address, with the subnet mask we will use for the VPN
PostUp = iptables -A FORWARD -i %i -j ACCEPT; iptables -A FORWARD -o %i -j ACCEPT; iptables -t nat -A POSTROUTING -o ens4 -j MASQUERADE
PostDown = iptables -D FORWARD -i %i -j ACCEPT; iptables -D FORWARD -o %i -j ACCEPT; iptables -t nat -D POSTROUTING -o ens4 -j MASQUERADE
ListenPort = 51820
PrivateKey = XXX
[Peer]
PublicKey = XXX
AllowedIPs = 172.16.0.1/32
...
Client Config:
[Interface]
PrivateKey = XXX
Address = 172.16.0.1/24
MTU = 1360
[Peer]
PublicKey = XXX
AllowedIPs = 172.16.0.0/24, 10.0.0.0/16, 10.150.0.0/20
Endpoint = VPN_HOST:51820
PersistentKeepalive = 25
man [command] - manual page
type [command] - inspect an alias or function
lsof - list open files and the owning process
finger [username] - get user info
crontab -l - list cron jobs
crontab -e - edit crontab(le)
crontab ~/.crontab - load crontab(le) from file
chmod -Rv [permissions] - changes permissions recursively
read: 4, write: 2, exec: 1
user class, group class, "others"
777: anarchy
700: lockdown
755: read-only
du -hcs ./* - disk usage (human readable, silent, include total)
df -lk - number of free disk blocks (in kB, locally-mounted only)
find [path] -name [pattern] - find a file(s)
alias [mnemonic]='command' - assign alias shortcut to a command
alias - print all aliases
xterm -e [command] & - run command in new xterm
top - monitor running processes
ps -eaf - print running processes (all users, all tasks, verbose)
less -iMN [file] - open read only buffer with vim-style search (case insensitive,
file status, line numbers)
cat [file] - print file (to stdout)
echo - write shell args to stdout
time [command] - time a command
wc [file] - counts the number of lines, words, letters in the file
command | wc -l - count lines of output
ls -laFht [path] - list path contents (verbose format, list hidden files,
indicate file type, formatted file size, sort by timestamp)
head -[#] [path] - display the first # lines of a file (default 10 lines)
tail -[#] [path] - display the last # lines of a file (default 10 lines)
tail -f [path] - monitor file to stdout
pwd - print working directory (current path)
clear - clear screen
cd - - go back to last working directory
od -cx [file] - examine a file char by char as hex
od -x [file] - dump raw hex of file
kill -9 [pid] -[process group id] - KILL IT WITH FIRE
set -o emacs - set command line to emacs style
[command] | tee [file] - split stdin to a file and std-out
sum [file] - checksum (old algo)
cksum [file] - checksum
man -k time - timing
mkfifo [file] - create FIFO pipe
which [executable] - path to executable
whereis [executable] - instances of executable in path
whoami - current user
hostname - hostname
xset b [volume(%)] [pitch(hz)] [duration(ms)]
- set terminal bell
xset -b - kill bell
Ctrl+z - background current process
[command] & - run in background
fg - bring background job to foreground
fg %[job number] - bring background job # to foreground
jobs - list background jobs
[command] > [file] - output stdout to new file
[command] >> [file] - append stdout to file
pstack [pid] - stack trace on process
. [command | path] - run in current shell
strings [binary] | grep [string] - search a binary for ascii string
scp [user@host:/path] [user@host:/path]
- secure transfer file from host to host
rcp [user@host:/path] [user@host:/path]
- insecure transfer file from host to host (legacy, unencrypted — prefer scp)
ssh [user@host] - ssh
gzip [files] - zip files
gunzip [files] - unzip files
tar xzvf [tarfile] [files] - decompress and untar a tarball
tar czvf [tarfile] - create a compressed tarball from files
tree -shCL [depth level] - display directory tree to a certain depth
(human readable size, colors)
tree -dshCL [depth level] - display directory tree with directories only to a certain depth
(human readable size, colors)
cut -f # -d ' ' - print column # delineated by space
-------------
NETWORK
-------------
ifconfig - network interface settings
whois [domain] - whois info for a domain
dig - DNS lookup utility
ping [domain|host] - latency to a host
traceroute [domain|host] - network hops to host
netstat - network status (initiated connections)
arp -a - all visible hosts on local network
lsof -i - open network sockets
nslookup [domain] - DNS lookup for domain
tcpdump -i en0 -w [file] -s0 - capture all network traffic on the en0 interface, write to file
tcpdump -i en0 -s0 - dump all network traffic on the en0 interface to stdout
netcat
wget
curl
----------------
SORT/UNIQ
----------------
+[col#] sort by col# (start at 0)
-k [col#] sort by col# (start at 1)
-n sort numerically
-t'[delimiting char]' set column delimiter
-r sort in reverse order
-u sort and filter dupes
Print unique records sorted by key:
sort -u [inputfile] +[begin field].[end char position] -[end field].[end char position]
List dupe lines by frequency:
sort [file] | uniq -c | sort -n
Count unique lines in file:
sort -u [file] | wc
Print repeated lines, matching starting 3 characters after the first space delineated field:
uniq -d -f 1 -s 3
----------------
GREP
----------------
grep [pattern] [file/text] - find text in file(s)/text
-s - suppress error messages
-v - invert matching, (return all non-matching lines)
-n - print line numbers of occurrences
-i - ignore case
-c - display only count per file
-l - display files that contain pattern
-# - print # preceding and trailing lines
-A/B # - print # preceding/trailing lines
-w - match whole words only
-o - output only matching substring
-h - suppress filename
-f [file] - patterns from file
-e pattern - match multiple patterns
# Print each YYYYMMDD expiry date alongside its midnight-UTC epoch timestamp.
# NOTE: -j/-f are BSD (macOS) date flags; GNU date would use `-d ... +%s`.
for date in 20170721 20170818 20170915 20171215 20180119 20180316 20180615 20180720 20180921 20181221 20190118; do
  echo "$date" "->" $(date -u -j -f "%Y%m%d%H%M%S" "${date}000000" "+%s")
done
# Same date -> epoch mapping, driven from an array instead of an inline list.
dates=(20170721 20170818 20170915 20171215 20180119 20180316 20180615 20180720 20180921 20181221 20190118)
for date in "${dates[@]}"; do
  echo "$date" "->" $(date -u -j -f "%Y%m%d%H%M%S" "${date}000000" "+%s")
done
=========
#!/bin/bash
# Scrape SPY option-chain JSON from Yahoo Finance for a fixed list of expiries.
# crontab: every five minutes from five minutes before market open to five minutes after market close on weekdays
# 25/5 9 * * 1-5 /usr/local/bin/options >> /tmp/cron.out 2>&1
# */5 10-15 * * 1-5 /usr/local/bin/options >> /tmp/cron.out 2>&1
# 0-5/5 16 * * 1-5 /usr/local/bin/options >> /tmp/cron.out 2>&1

# NOTE: `date -u -j -f` is BSD (macOS) syntax; GNU date would need `-d`.
EXPIRY_DATES=(20170721 20170818 20170915 20171215 20180119 20180316 20180615 20180720 20180921 20181221 20190118)
OUT_DIR="$HOME/Documents/options"
NOW=$(date +%Y%m%d.%H%M)

mkdir -p "$OUT_DIR"   # robustness: original assumed the output dir existed
echo -e "\n=====> Scraping options at ${NOW}\n"

for EXPIRY_DATE in "${EXPIRY_DATES[@]}"; do
  EPOCH=$(date -u -j -f "%Y%m%d%H%M%S" "${EXPIRY_DATE}000000" "+%s")
  # Quote the URL: the unquoted `?` in the original could glob against files
  # in the working directory.
  URL="https://query2.finance.yahoo.com/v7/finance/options/SPY?date=${EPOCH}"
  echo "$URL"
  curl "$URL" > "${OUT_DIR}/SPY${EXPIRY_DATE}.${NOW}"
  sleep 1   # be polite to the API between requests
done
# NOW=`date +\%Y\%m\%d.%H%M`; for EXPIRY_DATE in 20170721 20170818 20170915 20171215 20180119 20180316 20180615 20180720 20180921 20181221 20190118; do echo "curl https://query2.finance.yahoo.com/v7/finance/options/SPY?date="`date -u -j -f "%Y%m%d%H%M%S" "${EXPIRY_DATE}000000" "+%s"`" > ~/Documents/options/SPY${EXPIRY_DATE}.${NOW}; sleep .5"; done
==============
NOW=`date +\%Y\%m\%d.%H%M`; for EXPIRY_DATE in 20170721 20170818 20170915 20171215 20180119 20180316 20180615 20180720 20180921 20181221 20190118; do echo "curl https://query2.finance.yahoo.com/v7/finance/options/SPY?date="`date -u -j -f "%Y%m%d%H%M%S" "${EXPIRY_DATE}000000" "+%s"`" > ~/Documents/options/SPY${EXPIRY_DATE}.${NOW}; sleep .5"; done
==============
find . | grep -i "\(jpg\|mov\|gif\|png\)" | gxargs -d '\n' shasum > /tmp/old_icloud.txt
==============
for f in *.kepub; do mv "$f" "$f.epub"; done;