waldl
#!/bin/sh
# waldl: find and download wallpapers from wallhaven

die() { printf '%s\n' "$1" >&2; exit 1; }

[ -z "$HOME" ] && die '$HOME not set.'
walldir="$HOME/.local/share/wallhaven"   # where selected wallpapers end up
cachedir="$HOME/.cache/wallhaven"        # thumbnail cache, recreated each run
mkdir -p "$walldir"
rm -rf "$cachedir"
mkdir -p "$cachedir"

sxiv_opts="-tfpo -z 200" # -o is required: it prints the marked files
max_pages=4
# sorting : date_added, relevance, random, views, favorites, toplist
sorting=relevance
# quality : large, original, small
quality=large
# atleast : minimum resolution
atleast=1920x1080

# print a status message, mirrored as a desktop notification if possible
sh_info () {
	printf '%s\n' "$*" >&2
	command -v notify-send >/dev/null 2>&1 && notify-send "wallhaven" "$*"
}

# verify that every required command is installed
dep_ck () {
	for pr; do
		command -v "$pr" >/dev/null 2>&1 ||
			die "command $pr not found, install: $pr"
	done
}
dep_ck sxiv curl jq

query=$*
if [ -z "$query" ]; then
	printf 'Search wallhaven: '
	read -r query
	[ -z "$query" ] && exit 1
fi

datafile=$(mktemp "${TMPDIR:-/tmp}/wald.XXXXXX")

clean_up () {
	printf '%s\n' "cleaning up..." >&2
	rm -f "$datafile"
	rm -rf "$cachedir"
}

# clean up if killed
trap 'exit' INT TERM
trap 'clean_up' EXIT

# fetch all result pages in parallel; each page's JSON is appended
# to $datafile
get_ids () {
	for page_no in $(seq "$max_pages")
	do
		curl -s -G "https://wallhaven.cc/api/v1/search" \
			--data-urlencode "q=$1" \
			-d "page=$page_no" \
			-d "atleast=$atleast" \
			-d "sorting=$sorting" >> "$datafile" &
		sleep 0.001 # stagger the requests slightly
	done
	wait
}

# search wallpapers
sh_info "getting data..."
get_ids "$query"

# bail out early if the API returned nothing at all
read -r t < "$datafile"
[ -z "$t" ] && { sh_info "no images found"; exit 1; }

thumbnails=$(jq -r '.data[]?|.thumbs.'"$quality" < "$datafile")
[ -z "$thumbnails" ] && { sh_info "no results found"; exit 1; }

# download the thumbnails: one url/output pair per image is fed to
# curl as a config file on stdin (-K -)
sh_info "caching thumbnails..."
for url in $thumbnails
do
	printf 'url = %s\n' "$url"
	printf 'output = %s\n' "$cachedir/${url##*/}"
done | curl -s -K -

# let the user mark wallpapers in sxiv (mark with m, quit with q);
# -o prints the marked paths, and the 6-character wallhaven id is
# the filename minus its 4-character extension
image_ids=$(sxiv $sxiv_opts "$cachedir" | rev | cut -c5-10 | rev)

[ -z "$image_ids" ] && exit

# download the selected wallpapers at full resolution
cd "$walldir" || die "could not cd to $walldir"
sh_info "downloading wallpapers..."
for id in $image_ids
do
	url=$(jq -r '.data[]?|select(.id == "'"$id"'")|.path' < "$datafile")
	printf 'url = %s\n' "$url"
	printf -- '-O\n'
done | curl -s -K -

sh_info "downloaded wallpapers..."
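
A quick usage sketch (assuming the script is saved as waldl somewhere on
$PATH and made executable; the query string below is only illustrative):

	$ chmod +x waldl
	$ waldl "nature mountains"   # or run with no arguments to be prompted
	# mark thumbnails in sxiv with m, quit with q; the marked wallpapers
	# are downloaded at full resolution into ~/.local/share/wallhaven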