More Shell Posts
#Leif Messinger
#For when you want to search a lot of words in a file fast
#Arg 1 is the file containing the list of words you want to search for
#Arg 2 is the file you want to search
#-z means that it looks at the file as a whole, just treating newlines as characters.
#-r is extended regex. Needed for $, even though the documentation says you don't need it. They are liars.
#First command replaces all . with \. and all - with \-
#Second command takes all newlines and replaces them with )|(
#Third command takes the trailing |( and deletes it
#Fourth command puts a /( at the start
#Fifth command puts /!d at the end. This tells it to not delete any lines that match the pattern.
#The second sed takes the output of the first sed as a command that searches for any of the combined words
#-f - takes the script from standard input
sed -z -r -e 's/\./\\\./g ; s/\-/\\\-/g' -e 's/\n/\)\|\(/g' -e 's/\|\($//' -e 'i/\(' -e 'a/!d' $1 | sed -r -f - $2
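A quick usage sketch, assuming you save the one-liner as wordsearch.sh (the script name and both file names here are made up for illustration): the first sed turns the word list into a single /(word1)|(word2)|.../!d script, and the second sed applies that script to the target file, printing only the matching lines.

# Hypothetical example files, not part of the original post:
#   words.txt - one search term per line, e.g. "error", "time-out", "v1.2"
#   log.txt   - the file you want to filter
./wordsearch.sh words.txt log.txt
# Prints only the lines of log.txt that contain any of the listed words.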
#!/bin/bash
# Set the directory to search
DIRECTORY="src"
# Set the output file
OUTPUT_FILE="testids.txt"
# Clear the output file
> "$OUTPUT_FILE"
# Find all .tsx files in the specified directory and its subdirectories
find "$DIRECTORY" -type f -name "*.tsx" | while read -r FILE
do
    # Search for instances of 'data-testid="testid"' and append them to the output file
    grep -o 'data-testid="[^"]*"' "$FILE" >> "$OUTPUT_FILE"
    # Search for instances of "'data-testid': 'testid'" and append them to the output file
    grep -o "'data-testid': '[^']*'" "$FILE" >> "$OUTPUT_FILE"
done
echo "Search complete. Test IDs written to $OUTPUT_FILE."
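To illustrate what the two grep -o patterns pull out, here is a made-up .tsx fragment (not from the original post) and the match each pattern would print:

# Hypothetical input line in a .tsx file:
#   <button data-testid="submit-button" onClick={submit}>Send</button>
# grep -o 'data-testid="[^"]*"' prints only the matched attribute:
#   data-testid="submit-button"
# Likewise, for an object-style prop such as { 'data-testid': 'nav-link' },
# grep -o "'data-testid': '[^']*'" prints:
#   'data-testid': 'nav-link'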
#!/bin/bash
#Changes the remote url from https to ssh.
#Only works for github, because I'd have to store a dictionary of every https to ssh url otherwise.
#Made using Bing Chat
# Get the remote URL from the console
REPO_URL=$(git config --get remote.origin.url)
# Check that REPO_URL contains https://github.com
if [[ $REPO_URL == *"https://github.com"* ]]; then
    # Replace https with ssh in the URL
    REPO_URL="${REPO_URL/https:\/\/github.com\//git@github.com:}"
    # Change the remote URL to the SSH version
    git remote set-url origin "$REPO_URL"
else
    echo "Error: REPO_URL does not contain https://github.com" >&2
    exit 1
fi
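A quick before/after with a made-up repository URL (not from the original post), showing what the substitution does to the remote:

# Hypothetical example:
#   before: https://github.com/someuser/somerepo.git
#   after:  git@github.com:someuser/somerepo.git
# You can confirm the change afterwards with:
git remote -v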
#!/bin/bash
# Recursively find all .svelte, .html, and .htm files in the current directory and its subdirectories
find . -type f \( -name "*.svelte" -o -name "*.html" -o -name "*.htm" \) | while read -r file; do
    # Replace all h1 tags with the specified format
    sed -i 's/<h1>\(.*\)<\/h1>/<h1 id="\1">\1<\/h1>/g' "$file"
    # Replace all h2 tags with the specified format
    sed -i 's/<h2>\(.*\)<\/h2>/<h2 id="\1">\1<\/h2>/g' "$file"
    # Remove whitespace from the id attribute value
    for i in {0..10} ; do
        sed -i 's/\(id="[^"]*\)\W\([^"]*"\)/\1\2/g' "$file"
    done
done
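For a sense of what the substitutions do, here is a made-up heading (not from the original post) and the result after the id attribute is added and the repeated passes squeeze the whitespace out of it; each pass of the inner loop removes one non-word character per id, so the 11 iterations cover headings with up to 11 such characters.

# Hypothetical input line:
#   <h1>Getting Started Guide</h1>
# After the h1 substitution:
#   <h1 id="Getting Started Guide">Getting Started Guide</h1>
# After the whitespace-removal passes on the id value:
#   <h1 id="GettingStartedGuide">Getting Started Guide</h1>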
#!/bin/bash
for branch in $(git branch | cut -c 3-); do
    read -p "Delete local branch $branch? (y/n) " -n 1 -r
    echo ""
    if [[ $REPLY =~ ^[Yy]$ ]]; then
        git branch -D $branch
    fi
done
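A note on the cut -c 3- part: git branch prefixes every line with two characters ("* " for the checked-out branch, two spaces otherwise), so cutting from column 3 leaves just the branch names. A made-up example (branch names invented for illustration):

# Hypothetical `git branch` output:
#   * main
#     feature/login
#     old-experiment
# After cut -c 3-:
#   main
#   feature/login
#   old-experiment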
#!/bin/bash
#Makes a directory ./monkeys and puts every single Bored Ape Yacht Club monkey in there
#Leif Messinger
let OFFSET=0
let BATCHSIZE=50
let LIMIT=100
mkdir monkeys
function parseResults(){
    sed 'y/,/\n/' | sed -e '/storage.opensea/d' -e '/https:\/\/lh3.googleusercontent.com\/Ju9CkWtV-1Okvf45wo8UctR-M9He2PjILP0oOvxE89AyiPPGtrR3gysu1Zgy0hjd2xKIgjJJtWIc0ybj4Vd7wv8t3pxDGHoJBzDB=s120/d' | egrep '"image_url":"(.*)"' | tr -d '\"' | sed 's/image_url://'
}
function downloadMonkeys(){
    while read -r line; do
        name=`echo "$line" | sed 's/https:\/\/lh3.googleusercontent.com\///'`
        wget -q -O "./monkeys/$name.png" "$line" &
    done
}
function queryMonkeys(){
    let "progress=($OFFSET*100)/$LIMIT"
    echo "Progress: $progress%"
    result=`curl -s --request GET --url "https://api.opensea.io/api/v1/assets?order_direction=desc&offset=$OFFSET&limit=$BATCHSIZE&collection=boredapeyachtclub"`
    if [[ "$result" =~ "Request was throttled" ]] || [ "$result" == "" ]; then
        #Retry download
        sleep 10
    else
        #Download Monkeys
        echo "$result" | parseResults | downloadMonkeys
        let OFFSET+=$BATCHSIZE
    fi
    #If not out of bounds, recurse
    if [ "$OFFSET" -lt "$LIMIT" ] || [[ "$result" =~ '"assets":[]' ]]; then
        queryMonkeys
    fi
}
echo "Downloading your monkeys into ./monkeys asynchronously."
queryMonkeys
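To see what parseResults is doing, here is a made-up fragment of the kind of JSON the OpenSea assets endpoint returned and what falls out of the pipeline (the image URL below is invented for illustration):

# Hypothetical input fragment, not real API output:
#   {"token_id":"1","image_url":"https://lh3.googleusercontent.com/abc123","name":null},...
# sed 'y/,/\n/' splits on commas so each field sits on its own line, egrep keeps
# only the image_url field, tr strips the quotes, and the final sed drops the
# image_url: prefix, leaving one bare URL per line:
#   https://lh3.googleusercontent.com/abc123
# downloadMonkeys then names the saved file after the path component (abc123.png).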