#!/bin/bash
# Spider a site with wget and collect every URL it finds into <domain>.urls
# Usage: pass the site domain as the first argument.

echo "Slurping URLs ..."
echo

sitedomain=$1

# Crawl the whole site without saving content; write wget's log to a temp file
wget --spider --recursive --level=inf --no-verbose --output-file=geturls.tmp "$sitedomain"

# Extract the URL from each log line, trim whitespace, de-duplicate, drop blank lines
grep -i URL geturls.tmp | awk -F 'URL:' '{print $2}' | awk '{$1=$1};1' | awk '{print $1}' | sort -u | sed '/^$/d' > "$sitedomain".urls

# Clean up the temp log and the directory tree wget created during the crawl
rm geturls.tmp
rm -r "$sitedomain"

echo "Done!"
echo
ls -lArth | grep '\.urls$'