More Shell Posts
#!/usr/bin/env bash
#Splits a command across a number of CELL machines

user=$(whoami)
if [[ -z $user ]]; then
    echo "whoami failed. Exiting..."
    exit 1
fi

command="$1"
if [[ -z $command ]]; then
    echo "Need to put in a command."
    exit 1
fi
shift
array=("$@")

let start=8
let stop=18
for ((i = $start; i <= $stop; i++)); do
    extraZero=$(if [[ "$i" -lt 10 ]]; then echo "0"; fi)
    domain="CELL${extraZero}${i}-CSE.ENG.UNT.EDU"
    let "index = i - start"
    echo ${#array[@]}
    if [[ ${#array[@]} != 0 ]] && [[ $index -ge ${#array[@]} ]]; then
        echo "$index > ${#array[@]}"
        break
    fi
    ssh -o StrictHostKeyChecking=accept-new "${user}@${domain}" -t "$command ${array[$index]}" &
done
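For example (the script filename below is hypothetical), the first argument is the command to run and each remaining argument is paired with one host in order, starting at CELL08:

# Hypothetical invocation: CELL08 runs "md5sum file08.dat",
# CELL09 runs "md5sum file09.dat", and so on, one host per argument.
./split-command.sh "md5sum" file08.dat file09.dat file10.dat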
# Run "test" script on all packagesnpm run test --workspaces# Tip - this also works:npm run test -ws----------------------------------------------------# Runs "test" only on package-anpm run test --workspace package-a# Tip - this also works:npm run test -w package-a----------------------------------------------------# Install `lodash` on `package-a`npm install lodash --workspace package-a# Install `tap` on `package-b` as a dev dependencynpm install tap --workspace package-b --save-dev# Install `package-a` on `package-b`npm install package-a --workspace package-b# Install `eslint` in all packagesnpm install eslint --workspaces
#!/bin/bash
#Makes a directory ./monkeys and puts every single bored ape yacht club monkey in there
#Leif Messinger

let OFFSET=0
let BATCHSIZE=50
let LIMIT=100

mkdir monkeys

function parseResults(){
    sed 'y/,/\n/' | sed -e '/storage.opensea/d' -e '/https:\/\/lh3.googleusercontent.com\/Ju9CkWtV-1Okvf45wo8UctR-M9He2PjILP0oOvxE89AyiPPGtrR3gysu1Zgy0hjd2xKIgjJJtWIc0ybj4Vd7wv8t3pxDGHoJBzDB=s120/d' | egrep '"image_url":"(.*)"' | tr -d '\"' | sed 's/image_url://'
}

function downloadMonkeys(){
    while read -r line; do
        name=`echo "$line" | sed 's/https:\/\/lh3.googleusercontent.com\///'`
        wget -q -O "./monkeys/$name.png" "$line" &
    done
}

function queryMonkeys(){
    let progress=($OFFSET*100)/$LIMIT
    echo "Progress: $progress%"
    result=`curl -s --request GET --url "https://api.opensea.io/api/v1/assets?order_direction=desc&offset=$OFFSET&limit=$BATCHSIZE&collection=boredapeyachtclub"`
    if [[ "$result" =~ "Request was throttled" ]] || [ "$result" == "" ]; then
        #Retry download
        sleep 10
    else
        #Download Monkeys
        echo "$result" | parseResults | downloadMonkeys
        let OFFSET+=$BATCHSIZE
    fi
    #If not out of bounds, recurse
    if [ "$OFFSET" -lt "$LIMIT" ] || [[ "$result" =~ '"assets":[]' ]]; then
        queryMonkeys
    fi
}

echo "Downloading your monkeys into ./monkeys asynchronously."
queryMonkeys
#!/bin/bash

# Set the directory to search
DIRECTORY="src"

# Set the output file
OUTPUT_FILE="testids.txt"

# Clear the output file
> "$OUTPUT_FILE"

# Find all .tsx files in the specified directory and its subdirectories
find "$DIRECTORY" -type f -name "*.tsx" | while read -r FILE
do
    # Search for instances of 'data-testid="testid"' and append them to the output file
    grep -o 'data-testid="[^"]*"' "$FILE" >> "$OUTPUT_FILE"

    # Search for instances of "'data-testid': 'testid'" and append them to the output file
    grep -o "'data-testid': '[^']*'" "$FILE" >> "$OUTPUT_FILE"
done

echo "Search complete. Test IDs written to $OUTPUT_FILE."
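Because matches are appended in file order, testids.txt can contain the same test ID more than once. An optional cleanup step, assuming the script above has already produced the file:

# Optional: sort the collected test IDs and drop duplicates in place
sort -u testids.txt -o testids.txt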
#!/bin/bash
#Changes the remote url from https to ssh.
#Only works for github, because I'd have to store a dictionary of every https to ssh url otherwise.
#Made using Bing Chat

# Get the remote URL from the console
REPO_URL=$(git config --get remote.origin.url)

# Check that REPO_URL contains https://github.com
if [[ $REPO_URL == *"https://github.com"* ]]; then
    # Replace https with ssh in the URL
    SSH_URL=${REPO_URL/https:\/\/github.com\//git@github.com:}
    # Change the remote URL to the SSH version
    git remote set-url origin "$SSH_URL"
else
    echo "Error: REPO_URL does not contain https://github.com" >&2
    exit 1
fi
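To confirm the change took effect, list the remotes afterwards; the origin URL should now use the SSH form (the repository path shown is just a placeholder):

git remote -v
# origin  git@github.com:user/repo.git (fetch)
# origin  git@github.com:user/repo.git (push)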
for region in `aws ec2 describe-regions --output text | cut -f4`
do
    echo -e "\nListing Instances in region: '$region'..."
    aws ec2 describe-instances --query 'Reservations[*].Instances[*].{Instance:InstanceId,Subnet:SubnetId}' --region $region
done
#This script is to be used with any AWS CLI configured environment; it will list all EC2 instances and their associated subnet IDs in JSON format.