@linuxjournal
#!/usr/bin/env bash
#
# Download every free Linux Journal e-book.
#
# Scrapes the https://www.linuxjournal.com/books index for per-book pages,
# then pulls the last 'sites/default/files/201*' download link from each
# book page and fetches it with wget into the current directory.
#
# NOTE(review): this is screen-scraping, so it is inherently fragile —
# the grep patterns ('field--name-field-image', 'sites/default/files/201')
# depend on the site's current HTML and will silently break if it changes.
set -euo pipefail

# Single base URL with an explicit scheme; the original relied on curl/wget
# defaulting scheme-less 'linuxjournal.com/...' to plain http.
readonly base_url="https://www.linuxjournal.com"

# Extract each book's page path from the index, one per line, and read them
# safely (IFS= read -r) instead of word-splitting a $( ) expansion.
curl -s "${base_url}/books" \
  | grep 'field--name-field-image' \
  | awk -F'href=' '{print $2}' \
  | cut -d'>' -f1 \
  | tr -d '"' \
  | while IFS= read -r book_path; do
      # The last matching link on the book page is the newest download URL.
      node_path=$(curl -s "${base_url}${book_path}" \
        | grep 'sites/default/files/201' \
        | tail -1 \
        | awk -F'href=' '{print $2}' \
        | cut -d'>' -f1 \
        | tr -d '"')
      # Skip books whose page yielded no download link rather than
      # fetching a bogus URL (set -e would otherwise be bypassed here anyway).
      [[ -n "$node_path" ]] || continue
      wget "${base_url}/node/${node_path}"
    done

@linuxjournal I hope you are OK with this script existing, btw ;)

Sign in to participate in the conversation
LinuxRocks.Online

Linux Geeks doing what Linux Geeks do…