From 2fcc9cfc70320cda8d824a9b9fddf0ba4663ca5c Mon Sep 17 00:00:00 2001
From: kleozzy <29057778+kleozzy@users.noreply.github.com>
Date: Sun, 24 Mar 2024 21:52:11 +0200
Subject: [PATCH 1/7] Fix nuclei fuzzing -fuzz flag required now for fuzzing

---
 reconftw.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index d6b5fdf2..355cf296 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -2554,10 +2554,10 @@ function fuzzparams() {
 			if [[ $AXIOM != true ]]; then
 				nuclei -update 2>>"$LOGFILE" >/dev/null
 				git -C ${tools}/fuzzing-templates pull 2>>"$LOGFILE"
-				cat webs/url_extract.txt 2>/dev/null | nuclei -silent -retries 3 -rl $NUCLEI_RATELIMIT -t ${tools}/fuzzing-templates -o .tmp/fuzzparams.txt
+				cat webs/url_extract.txt 2>/dev/null | nuclei -silent -retries 3 -rl $NUCLEI_RATELIMIT -t ${tools}/fuzzing-templates -fuzz -o .tmp/fuzzparams.txt
 			else
 				axiom-exec "git clone https://github.com/projectdiscovery/fuzzing-templates /home/op/fuzzing-templates" &>/dev/null
-				axiom-scan webs/url_extract.txt -m nuclei -nh -retries 3 -w /home/op/fuzzing-templates -rl $NUCLEI_RATELIMIT -o .tmp/fuzzparams.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				axiom-scan webs/url_extract.txt -m nuclei -nh -retries 3 -w /home/op/fuzzing-templates -fuzz -rl $NUCLEI_RATELIMIT -o .tmp/fuzzparams.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
 			fi
 			[ -s ".tmp/fuzzparams.txt" ] && cat .tmp/fuzzparams.txt | anew -q vulns/fuzzparams.txt
 			end_func "Results are saved in vulns/fuzzparams.txt" ${FUNCNAME[0]}

From bff75050ad7a350cfc431d75837095f06c9233e3 Mon Sep 17 00:00:00 2001
From: kleoz <29057778+kleozzy@users.noreply.github.com>
Date: Wed, 27 Mar 2024 21:47:34 +0200
Subject: [PATCH 2/7] fix iis shortname scanner dir creations

---
 reconftw.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index 355cf296..c4fb2126 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1758,8 +1758,8 @@ function iishortname() {
 		start_func ${FUNCNAME[0]} "IIS Shortname Scanner"
 		[ -s "nuclei_output/info.txt" ] && cat nuclei_output/info.txt | grep "iis-version" | cut -d " " -f4 > .tmp/iis_sites.txt
 		if [[ -s ".tmp/iis_sites.txt" ]]; then
-			mkdir -p $$dir/vulns/iis-shortname-shortscan/
-			mkdir -p $$dir/vulns/iis-shortname-sns/
+			mkdir -p $dir/vulns/iis-shortname-shortscan/
+			mkdir -p $dir/vulns/iis-shortname-sns/
 			interlace -tL .tmp/iis_sites.txt -threads ${INTERLACE_THREADS} -c "shortscan _target_ -F -s -p 1 > _output_/_cleantarget_.txt" -o $dir/vulns/iis-shortname-shortscan/ 2>>"$LOGFILE" >/dev/null
 			find $dir/vulns/iis-shortname-shortscan/ -type f -print0 | xargs --null grep -Z -L 'Vulnerable: Yes' | xargs --null rm 2>>"$LOGFILE" >/dev/null
 			interlace -tL .tmp/iis_sites.txt -threads ${INTERLACE_THREADS} -c "sns -u _target_ > _output_/_cleantarget_.txt" -o $dir/vulns/iis-shortname-sns/ 2>>"$LOGFILE" >/dev/null
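Review note on PATCH 1: per the commit message, current nuclei v3 releases no longer execute the fuzzing-templates unless fuzzing is explicitly enabled, so without the extra flag the fuzzparams step completes silently with no findings (PATCH 3 below applies the same fix on the other base). A minimal way to confirm the behaviour outside reconftw, assuming nuclei v3.x on PATH, the projectdiscovery/fuzzing-templates repo cloned to ./fuzzing-templates, and an illustrative urls.txt; none of these paths come from the patch itself:

    # compare the same template run with and without the flag
    nuclei -version
    cat urls.txt | nuclei -silent -t ./fuzzing-templates -o without_flag.txt
    cat urls.txt | nuclei -silent -t ./fuzzing-templates -fuzz -o with_flag.txt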
From 135fb16f4a9fec2b734c2b3a57ce238874c89068 Mon Sep 17 00:00:00 2001
From: j0hnZ3RA
Date: Thu, 28 Mar 2024 22:10:28 -0300
Subject: [PATCH 3/7] fix the fuzzparams function with the -fuzz flag

---
 reconftw.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index d6b5fdf2..c8da8828 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -2554,10 +2554,10 @@ function fuzzparams() {
 			if [[ $AXIOM != true ]]; then
 				nuclei -update 2>>"$LOGFILE" >/dev/null
 				git -C ${tools}/fuzzing-templates pull 2>>"$LOGFILE"
-				cat webs/url_extract.txt 2>/dev/null | nuclei -silent -retries 3 -rl $NUCLEI_RATELIMIT -t ${tools}/fuzzing-templates -o .tmp/fuzzparams.txt
+				cat webs/url_extract.txt 2>/dev/null | nuclei -silent -retries 3 -rl $NUCLEI_RATELIMIT -t ${tools}/fuzzing-templates -fuzz -o .tmp/fuzzparams.txt
 			else
 				axiom-exec "git clone https://github.com/projectdiscovery/fuzzing-templates /home/op/fuzzing-templates" &>/dev/null
-				axiom-scan webs/url_extract.txt -m nuclei -nh -retries 3 -w /home/op/fuzzing-templates -rl $NUCLEI_RATELIMIT -o .tmp/fuzzparams.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				axiom-scan webs/url_extract.txt -m nuclei -nh -retries 3 -w /home/op/fuzzing-templates -rl $NUCLEI_RATELIMIT -fuzz -o .tmp/fuzzparams.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
 			fi
 			[ -s ".tmp/fuzzparams.txt" ] && cat .tmp/fuzzparams.txt | anew -q vulns/fuzzparams.txt
 			end_func "Results are saved in vulns/fuzzparams.txt" ${FUNCNAME[0]}

From 521137571e9f2e6cb328d395ec3550b9c9a0955b Mon Sep 17 00:00:00 2001
From: j0hnZ3RA
Date: Wed, 3 Apr 2024 21:18:07 -0300
Subject: [PATCH 4/7] Update flags and change base to dev

---
 reconftw.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index c8da8828..334aae3a 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -2554,10 +2554,10 @@ function fuzzparams() {
 			if [[ $AXIOM != true ]]; then
 				nuclei -update 2>>"$LOGFILE" >/dev/null
 				git -C ${tools}/fuzzing-templates pull 2>>"$LOGFILE"
-				cat webs/url_extract.txt 2>/dev/null | nuclei -silent -retries 3 -rl $NUCLEI_RATELIMIT -t ${tools}/fuzzing-templates -fuzz -o .tmp/fuzzparams.txt
+				cat webs/url_extract.txt 2>/dev/null | nuclei -silent -retries 3 -rl $NUCLEI_RATELIMIT -t ${tools}/fuzzing-templates -dast -o .tmp/fuzzparams.txt
 			else
 				axiom-exec "git clone https://github.com/projectdiscovery/fuzzing-templates /home/op/fuzzing-templates" &>/dev/null
-				axiom-scan webs/url_extract.txt -m nuclei -nh -retries 3 -w /home/op/fuzzing-templates -rl $NUCLEI_RATELIMIT -fuzz -o .tmp/fuzzparams.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				axiom-scan webs/url_extract.txt -m nuclei -nh -retries 3 -w /home/op/fuzzing-templates -rl $NUCLEI_RATELIMIT -dast -o .tmp/fuzzparams.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
 			fi
 			[ -s ".tmp/fuzzparams.txt" ] && cat .tmp/fuzzparams.txt | anew -q vulns/fuzzparams.txt
 			end_func "Results are saved in vulns/fuzzparams.txt" ${FUNCNAME[0]}
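Review note on PATCH 4: it replaces -fuzz with -dast on top of PATCH 3's base, so the series now carries -dast in patches 3-4 and -fuzz in patches 1 and 6; only one spelling will match whichever nuclei build ends up installed. If both generations of nuclei must be supported, one option is to probe the installed binary before picking the flag. A sketch only, assuming the supported flag is listed in nuclei's help output; FUZZ_FLAG and the sample pipeline are illustrative, not part of the patch:

    # prefer the newer -dast spelling, fall back to -fuzz for older nuclei v3 builds
    if nuclei -h 2>&1 | grep -q -- "-dast"; then
        FUZZ_FLAG="-dast"
    else
        FUZZ_FLAG="-fuzz"
    fi
    cat webs/url_extract.txt | nuclei -silent -t "${tools}/fuzzing-templates" "$FUZZ_FLAG" -o .tmp/fuzzparams.txt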
From 3025410327ee5945d445ee63221c642ccaf7b220 Mon Sep 17 00:00:00 2001
From: kleoz <29057778+kleozzy@users.noreply.github.com>
Date: Thu, 4 Apr 2024 16:53:52 +0300
Subject: [PATCH 5/7] Added p1radup to optimize vuln scan speed

---
 reconftw.sh      | 20 +++++++++++---------
 requirements.txt |  1 +
 2 files changed, 12 insertions(+), 9 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index c4fb2126..d14d9a56 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1836,7 +1836,8 @@ function urlchecks() {
 					cat webs/webs_all.txt | unfurl -u domains >.tmp/waymore_input.txt
 					waymore -i .tmp/waymore_input.txt -mode U -f -oU .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null
 				else
-					cat webs/webs_all.txt | gau --threads $GAU_THREADS | anew -q .tmp/url_extract_tmp.txt
+					cat webs/webs_all.txt | unfurl -u domains >.tmp/waymore_input.txt
+					waymore -i .tmp/waymore_input.txt -mode U -f -oU .tmp/url_extract_tmp.txt 2>>"$LOGFILE" >/dev/null # could add -xcc to remove commoncrawl wich takes a bit longer
 				fi
 				if [[ -s ${GITHUB_TOKENS} ]]; then
 					github-endpoints -q -k -d $domain -t ${GITHUB_TOKENS} -o .tmp/github-endpoints.txt 2>>"$LOGFILE" >/dev/null
@@ -1909,16 +1910,17 @@ function url_gf() {
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $URL_GF == true ]]; then
 		start_func ${FUNCNAME[0]} "Vulnerable Pattern Search"
 		if [[ -s "webs/url_extract.txt" ]]; then
-			gf xss webs/url_extract.txt | anew -q gf/xss.txt
-			gf ssti webs/url_extract.txt | anew -q gf/ssti.txt
-			gf ssrf webs/url_extract.txt | anew -q gf/ssrf.txt
-			gf sqli webs/url_extract.txt | anew -q gf/sqli.txt
-			gf redirect webs/url_extract.txt | anew -q gf/redirect.txt
+			p1radup -i webs/url_extract.txt -o webs/url_extract_nodupes.txt
+			gf xss webs/url_extract_nodupes.txt | anew -q gf/xss.txt
+			gf ssti webs/url_extract_nodupes.txt | anew -q gf/ssti.txt
+			gf ssrf webs/url_extract_nodupes.txt | anew -q gf/ssrf.txt
+			gf sqli webs/url_extract_nodupes.txt | anew -q gf/sqli.txt
+			gf redirect webs/url_extract_nodupes.txt | anew -q gf/redirect.txt
 			[ -s "gf/ssrf.txt" ] && cat gf/ssrf.txt | anew -q gf/redirect.txt
-			gf rce webs/url_extract.txt | anew -q gf/rce.txt
-			gf potential webs/url_extract.txt | cut -d ':' -f3-5 | anew -q gf/potential.txt
+			gf rce webs/url_extract_nodupes.txt | anew -q gf/rce.txt
+			gf potential webs/url_extract_nodupes.txt | cut -d ':' -f3-5 | anew -q gf/potential.txt
 			[ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep -aEiv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)$" | unfurl -u format %s://%d%p 2>>"$LOGFILE" | anew -q gf/endpoints.txt
-			gf lfi webs/url_extract.txt | anew -q gf/lfi.txt
+			gf lfi webs/url_extract_nodupes.txt | anew -q gf/lfi.txt
 		fi
 		end_func "Results are saved in $domain/gf folder" ${FUNCNAME[0]}
 	else
diff --git a/requirements.txt b/requirements.txt
index c9230f8c..01646765 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -35,5 +35,6 @@ tqdm # multiple
 ujson # multiple
 urllib3 # multiple
 porch-pirate # Tool
+p1radup # Tool
 git+https://github.com/xnl-h4ck3r/waymore.git # Tool
 git+https://github.com/xnl-h4ck3r/xnLinkFinder.git # Tool
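Review note on PATCH 5: the gf pattern search now reads webs/url_extract_nodupes.txt, which p1radup produces from the raw url_extract.txt; per the commit message the goal is simply a smaller URL list before the vuln-oriented steps. The only interface the patch relies on is `p1radup -i <input> -o <output>`. A quick way for a reviewer to gauge the reduction on an existing scan directory (same paths as the patch; the wc/echo lines are only illustrative):

    # re-run the dedup step and compare line counts before/after
    p1radup -i webs/url_extract.txt -o webs/url_extract_nodupes.txt
    before=$(wc -l < webs/url_extract.txt)
    after=$(wc -l < webs/url_extract_nodupes.txt)
    echo "p1radup kept ${after} of ${before} URLs"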
From f56ef45e6f038d79a25a0d10be5819c486a53b80 Mon Sep 17 00:00:00 2001
From: kleoz <29057778+kleozzy@users.noreply.github.com>
Date: Thu, 4 Apr 2024 17:36:41 +0300
Subject: [PATCH 6/7] updated nuclei fuzzing , pfuzz with no dupe as well

---
 reconftw.sh | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index d14d9a56..96d9dd20 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -2470,8 +2470,8 @@ function prototype_pollution() {
 	mkdir -p {.tmp,webs,vulns}
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $PROTO_POLLUTION == true ]]; then
 		start_func ${FUNCNAME[0]} "Prototype Pollution checks"
-		if [[ $DEEP == true ]] || [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT ]]; then
-			[ -s "webs/url_extract.txt" ] && cat webs/url_extract.txt | ppmap &>.tmp/prototype_pollution.txt
+		if [[ $DEEP == true ]] || [[ $(cat webs/url_extract_nodupes.txt | wc -l) -le $DEEP_LIMIT ]]; then
+			[ -s "webs/url_extract_nodupes.txt" ] && cat webs/url_extract_nodupes.txt | ppmap &>.tmp/prototype_pollution.txt
 			[ -s ".tmp/prototype_pollution.txt" ] && cat .tmp/prototype_pollution.txt | grep "EXPL" | anew -q vulns/prototype_pollution.txt
 			end_func "Results are saved in vulns/prototype_pollution.txt" ${FUNCNAME[0]}
 		else
@@ -2552,14 +2552,14 @@ function fuzzparams() {
 	mkdir -p {.tmp,webs,vulns}
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $FUZZPARAMS == true ]]; then
 		start_func ${FUNCNAME[0]} "Fuzzing params values checks"
-		if [[ $DEEP == true ]] || [[ $(cat webs/url_extract.txt | wc -l) -le $DEEP_LIMIT2 ]]; then
+		if [[ $DEEP == true ]] || [[ $(cat webs/url_extract_nodupes.txt | wc -l) -le $DEEP_LIMIT2 ]]; then
 			if [[ $AXIOM != true ]]; then
 				nuclei -update 2>>"$LOGFILE" >/dev/null
 				git -C ${tools}/fuzzing-templates pull 2>>"$LOGFILE"
-				cat webs/url_extract.txt 2>/dev/null | nuclei -silent -retries 3 -rl $NUCLEI_RATELIMIT -t ${tools}/fuzzing-templates -fuzz -o .tmp/fuzzparams.txt
+				cat webs/url_extract_nodupes.txt 2>/dev/null | nuclei -silent -retries 3 -rl $NUCLEI_RATELIMIT -t ${tools}/fuzzing-templates -fuzz -o .tmp/fuzzparams.txt
 			else
 				axiom-exec "git clone https://github.com/projectdiscovery/fuzzing-templates /home/op/fuzzing-templates" &>/dev/null
-				axiom-scan webs/url_extract.txt -m nuclei -nh -retries 3 -w /home/op/fuzzing-templates -fuzz -rl $NUCLEI_RATELIMIT -o .tmp/fuzzparams.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
+				axiom-scan webs/url_extract_nodupes.txt -m nuclei -nh -retries 3 -w /home/op/fuzzing-templates -fuzz -rl $NUCLEI_RATELIMIT -o .tmp/fuzzparams.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
 			fi
 			[ -s ".tmp/fuzzparams.txt" ] && cat .tmp/fuzzparams.txt | anew -q vulns/fuzzparams.txt
 			end_func "Results are saved in vulns/fuzzparams.txt" ${FUNCNAME[0]}

From 05c0071989a8020f882b0c4669205871e0ecf77e Mon Sep 17 00:00:00 2001
From: kleoz <29057778+kleozzy@users.noreply.github.com>
Date: Thu, 4 Apr 2024 18:05:58 +0300
Subject: [PATCH 7/7] add url_extract_nodupe to nuclei list as well

---
 reconftw.sh | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/reconftw.sh b/reconftw.sh
index 96d9dd20..f9fb9f7c 100755
--- a/reconftw.sh
+++ b/reconftw.sh
@@ -1680,7 +1680,7 @@ function nuclei_check() {
 	mkdir -p nuclei_output
 	[[ -n $multi ]] && [ ! -f "$dir/subdomains/subdomains.txt" ] && echo "$domain" >"$dir/subdomains/subdomains.txt" && touch webs/webs.txt webs/webs_uncommon_ports.txt
 	[ ! -s "webs/webs_all.txt" ] && cat webs/webs.txt webs/webs_uncommon_ports.txt 2>/dev/null | anew -q webs/webs_all.txt
-	[ ! -s ".tmp/webs_subs.txt" ] && cat subdomains/subdomains.txt webs/webs_all.txt 2>>"$LOGFILE" | anew -q .tmp/webs_subs.txt
+	[ ! -s ".tmp/webs_subs.txt" ] && cat webs/url_extract_nodupes.txt subdomains/subdomains.txt webs/webs_all.txt 2>>"$LOGFILE" | anew -q .tmp/webs_subs.txt
 	[ -s "$dir/fuzzing/fuzzing_full.txt" ] && cat $dir/fuzzing/fuzzing_full.txt | grep -e "^200" | cut -d " " -f3 | anew -q .tmp/webs_fuzz.txt
 	cat .tmp/webs_subs.txt .tmp/webs_fuzz.txt 2>>"$LOGFILE" | anew -q .tmp/webs_nuclei.txt
 	if [[ $AXIOM != true ]]; then # avoid globbing (expansion of *).
@@ -3175,11 +3175,11 @@ function recon() {
 	geo_info
 	waf_checks
 	fuzz
-	nuclei_check
 	iishortname
 	urlchecks
 	jschecks
-
+	nuclei_check
+
 	if [[ $AXIOM == true ]]; then
 		axiom_shutdown
 	fi
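Review note on PATCHES 6-7: once fuzzparams, prototype_pollution and nuclei_check read webs/url_extract_nodupes.txt, that file has to exist before they run, which is presumably why PATCH 7 also moves nuclei_check after urlchecks and jschecks in recon(). On runs where the URL-extraction steps are skipped, the cat in nuclei_check only logs a missing-file error thanks to 2>>"$LOGFILE", but the $(cat ... | wc -l) checks in fuzzparams and prototype_pollution would treat the missing file as zero lines. A defensive variant a reviewer might consider, not part of the patch, shown here for the webs_subs aggregation only:

    # build .tmp/webs_subs.txt from whichever inputs actually exist
    for f in webs/url_extract_nodupes.txt subdomains/subdomains.txt webs/webs_all.txt; do
        [ -s "$f" ] && cat "$f"
    done | anew -q .tmp/webs_subs.txt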