allow the user to not specify a new url while crawling

lickthecheese 2020-03-19 16:16:11 -04:00
parent 285a583492
commit 2d759ed8f7
1 changed file with 6 additions and 3 deletions

crawly

@@ -4,9 +4,6 @@
 cleanxss(){ sed 's/&/\&amp;/g; s/</\&lt;/g; s/>/\&gt;/g; s/"/\&quot;/g; s/'"'"'/\&#39;/g' | tr "\n" " " | sed -e '$a\'; }
-SITE=$1
 TEMPIDC=/tmp/$RANDOM
 function visit(){
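
For context: the cleanxss helper shown above HTML-escapes &, <, >, and both quote characters, then flattens the input to a single space-joined line; the closing sed -e '$a\' guarantees the output ends with a newline. A quick sanity check, assuming the function has been sourced into the shell:

$ printf '<b>hi</b> & "bye"\n' | cleanxss
&lt;b&gt;hi&lt;/b&gt; &amp; &quot;bye&quot;

(the input's newline becomes a trailing space in the output)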
@@ -35,10 +32,16 @@ echo "link $1 is invalid"
 fi
 }
+if [ ! -z $1 ];then
+SITE=$1
+visit $SITE urls.txt
+fi
 URLLIST=`cat urls.txt`
 rm urls.txt
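
With this change the start URL becomes optional: when no argument is given, the SITE=/visit seeding step is skipped and the crawl continues from whatever urls.txt the previous run left behind (presumably written by visit() as new links were found). A sketch of the two invocation modes, where example.com is a placeholder:

$ ./crawly http://example.com   # seed urls.txt with a fresh start URL, then crawl
$ ./crawly                      # no argument: resume from the existing urls.txt

Note that [ ! -z $1 ] happens to behave correctly when $1 is unset, but [ -n "$1" ] is the more robust spelling if the argument could ever contain whitespace.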