#!/bin/bash
#
# Spider a site with wget and write the de-duplicated list of every URL
# it visits to urls_<sitedomain>.txt in the current directory.
#
# Usage: ./slurp_urls.sh <sitedomain>
#
# Requires: wget, awk, sort (standard on any Linux box).

set -euo pipefail

if [[ $# -ne 1 || -z "$1" ]]; then
  printf 'Usage: %s <sitedomain>\n' "${0##*/}" >&2
  exit 2
fi

sitedomain=$1
outfile="urls_${sitedomain}.txt"

echo "Slurping URLs ..."
echo

# Private temp file for wget's spider log (the old fixed-name get_urls.txt
# could clobber a user's file and leaked on failure).
log=$(mktemp) || { echo "mktemp failed" >&2; exit 1; }

cleanup() {
  rm -f -- "$log"
  # wget's recursive spider can leave a directory tree named after the
  # domain behind; remove it only if it actually exists.
  [[ -n "$sitedomain" && -d "$sitedomain" ]] && rm -r -- "$sitedomain"
}
trap cleanup EXIT

# --spider: check links without saving pages; --level=inf: no depth limit.
# wget exits non-zero when it meets broken links, which is expected and
# routine during a spider run — don't let set -e abort on it.
wget --spider --recursive --level=inf --no-verbose \
  --output-file="$log" -- "$sitedomain" || true

# Each interesting log line contains "URL:<url> ...". Split on "URL:",
# then take the first whitespace-delimited word of the remainder; 'NF'
# drops lines with nothing after the marker (replaces the old redundant
# awk '{$1=$1};1' and sed '/^$/d' stages).
grep -i 'URL' -- "$log" \
  | awk -F 'URL:' '{ print $2 }' \
  | awk 'NF { print $1 }' \
  | sort -u > "$outfile"

echo "Done !"
echo
ls -l -- "$outfile"