#!/bin/bash

set -euo pipefail

# On any error, print the command that failed before exiting.
trap 'echo "$BASH_COMMAND"' ERR

# The src/**/* globs below need recursive globbing; without globstar,
# bash treats ** as a single-level *.
shopt -s globstar
# Each helper truncates its input at the first delimiter that commonly
# trails a URL embedded in source code.

remove_after_space () {
  sed 's/ .*//'
}

remove_after_backtick () {
  sed 's/`.*//'
}

remove_after_end_paren () {
  sed 's/).*//'
}

remove_after_double_quote () {
  sed 's/".*//'
}

remove_after_gt () {
  sed 's/>.*//'
}

remove_after_comma () {
  sed 's/,.*//'
}

# Extract every https URL from the .ts sources, then trim trailing
# punctuation with the helpers above (-E extended regex, -o print only
# the match, -h omit filenames).
val1=$(grep -Eoh "https://[^']+" src/**/*.ts | remove_after_space | remove_after_backtick | remove_after_end_paren | remove_after_double_quote | remove_after_gt | remove_after_comma)

# Do the same for the .tsx sources.
val2=$(grep -Eoh "https://[^']+" src/**/*.tsx | remove_after_space | remove_after_backtick | remove_after_end_paren | remove_after_double_quote | remove_after_gt | remove_after_comma)

# Join the two result sets with an explicit newline, since command
# substitution strips the trailing newline from val1.
combined="$val1"$'\n'"$val2"

# Sort and de-duplicate the merged URL list.
uniqued=$(echo "$combined" | sort -u)
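
# At this point $uniqued holds one bare URL per line, e.g. (hypothetical):
#   https://example.com/api
#   https://example.com/docs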

# Every URL with its status code (accumulated for completeness; only the
# problematic list is printed below).
all="STATUS\tURL\n"

# Only the URLs that returned something other than 200.
problematic="STATUS\tURL\n"

while read -r line; do
  # || true keeps a failed request from killing the script, since
  # set -euo pipefail would otherwise abort on the first bad URL.
  # -o /dev/null discards the body, -s silences progress output, and
  # -w "%{http_code}" prints just the numeric status.
  status=$(curl -o /dev/null -s -w "%{http_code}" "$line" || true)

  all+="$status\t$line\n"

  if [[ "$status" -ne 200 ]]; then
    # Status comes before the URL because its fixed width keeps the
    # columns aligned, which makes diffs of this output less noisy.
    problematic+="$status\t$line\n"
  fi
done <<< "$uniqued"

# Render the report of problem URLs as an aligned table.
echo -e "$problematic" | column -t
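
# A possible extension, not part of the current behavior: exit nonzero when
# $problematic contains anything beyond its header, so a CI job running this
# script fails the build:
#   [[ "$problematic" == "STATUS\tURL\n" ]] || exit 1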