Fixing some small bugs #192

Merged 4 commits on Jun 28, 2021
dnsrecon.py (9 additions, 8 deletions)
@@ -453,7 +453,7 @@ def brute_domain(res, dictfile, dom, filter_=None, verbose=False, ignore_wildcar
         if type_ in ['A', 'AAAA']:
             # Filter Records if filtering was enabled
             if filter_:
-                if address_or_target_ not in wildcard_set:
+                if wildcard_set and address_or_target_ not in wildcard_set:
                     print_and_append = True
                     found_dict["address"] = address_or_target_
             else:
@@ -907,7 +907,7 @@ def check_recursive(res, ns_server, timeout):
     return is_recursive


-def general_enum(res, domain, do_axfr, do_bing, do_yandex, do_spf, do_whois, do_crt, zw, thread_num=None):
+def general_enum(res, domain, do_axfr, do_bing, do_yandex, do_spf, do_whois, do_crt, zw, request_timeout, thread_num=None):
     """
     Function for performing general enumeration of a domain. It gets SOA, NS, MX
     A, AAAA and SRV records for a given domain. It will first try a Zone Transfer
@@ -1073,10 +1073,11 @@ def general_enum(res, domain, do_axfr, do_bing, do_yandex, do_spf, do_whois, do_
     if do_crt:
         print_status("Performing Crt.sh Search Enumeration")
         crt_rcd = se_result_process(res, scrape_crtsh(domain))
-        for r in crt_rcd:
-            if "address" in crt_rcd:
-                ip_for_whois.append(r["address"])
-        returned_records.extend(crt_rcd)
+        if crt_rcd:
+            for r in crt_rcd:
+                if "address" in crt_rcd:
+                    ip_for_whois.append(r["address"])
+            returned_records.extend(crt_rcd)

     if do_whois:
         whois_rcd = whois_ips(res, ip_for_whois)
@@ -1648,9 +1649,9 @@ def main():
         elif type_ == 'std':
             print_status(f"{type_}: Performing General Enumeration against: {domain}...")
             std_enum_records = general_enum(res, domain, xfr, bing, yandex,
-                                            spf_enum, do_whois, do_crt, zonewalk,
+                                            spf_enum, do_whois, do_crt, zonewalk, request_timeout,
                                             thread_num=thread_num)
-            if do_output:
+            if do_output and std_enum_records:
                 returned_records.extend(std_enum_records)

         elif type_ == 'rvl':
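Taken together, the dnsrecon.py changes apply one defensive pattern: confirm a collection is truthy before running membership tests against it or extending another list with it, since the helpers involved may return None or an empty set. A minimal standalone sketch of the pattern, assuming illustrative names (filter_wildcards, wildcard_set, and records are not dnsrecon identifiers):

def filter_wildcards(wildcard_set, candidates):
    """Keep addresses not covered by a wildcard; tolerate wildcard_set being None or empty."""
    kept = []
    for address in candidates:
        # Guard first: "address not in None" raises TypeError, so test truthiness.
        if wildcard_set and address not in wildcard_set:
            kept.append(address)
    return kept or None  # mimic helpers that return None when nothing was found


records = []
found = filter_wildcards({"198.51.100.7"}, ["203.0.113.9", "198.51.100.7"])
if found:  # same guard as "if do_output and std_enum_records" above
    records.extend(found)
print(records)  # ['203.0.113.9']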
lib/bingenum.py (9 additions, 1 deletion)
@@ -37,7 +37,15 @@ def scrape_bing(dom):

     for n in searches:
         url = "http://www.bing.com/search?q=domain%3A" + dom + "&qs=n&first=" + n
-        sock = urllib.request.urlopen(url)
+        req = urllib.request.Request(
+            url,
+            data=None,
+            headers={
+                'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36'
+            }
+        )
+
+        sock = urllib.request.urlopen(req, timeout=10)
         data = sock.read().decode("utf-8")
         results.extend(re.findall(r"([a-zA-Z0-9\-.]+" + dom + ")/?", data))
         sock.close()
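The rewritten request is the stock urllib pattern: wrap the URL in a Request that carries a browser User-Agent (search engines often reject the default Python-urllib agent), then open it with a timeout so one stalled page cannot hang the whole enumeration. A self-contained sketch along the same lines; the shortened User-Agent string and the example query are placeholders:

import urllib.request

url = "https://www.bing.com/search?q=domain%3Aexample.com&qs=n&first=0"
req = urllib.request.Request(
    url,
    data=None,  # GET request, no body
    headers={"User-Agent": "Mozilla/5.0"},  # anything but the default Python-urllib/X.Y
)
try:
    # the timeout bounds the connect and each blocking read on the socket
    with urllib.request.urlopen(req, timeout=10) as sock:
        data = sock.read().decode("utf-8")
except Exception as err:  # URLError, socket.timeout, decode errors, ...
    data = ""
    print(f"Bing scrape failed: {err}")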
lib/crtenum.py (1 addition, 1 deletion)
@@ -34,7 +34,7 @@ def scrape_crtsh(dom):

     req = Request(url=url, headers=headers)
     try:
-        resp = urlopen(req)
+        resp = urlopen(req, timeout=30)
         data = resp.read()
     except HTTPError as e:
         print_error(f'Bad http status from crt.sh: "{e.code}"')
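crt.sh queries can be slow, so the budget here is more generous than the scrapers' 10 seconds. The surrounding code already separates HTTP status errors from other failures; a sketch of that shape, with the query URL illustrative and print standing in for dnsrecon's print_error helper:

from urllib.error import HTTPError
from urllib.request import Request, urlopen

req = Request(url="https://crt.sh/?q=example.com&output=json",
              headers={"User-Agent": "Mozilla/5.0"})
try:
    resp = urlopen(req, timeout=30)  # without a timeout, a silent peer blocks forever
    data = resp.read()
except HTTPError as e:  # only HTTP status errors are handled on this path
    print(f'Bad http status from crt.sh: "{e.code}"')
    data = b""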
lib/yandexenum.py (1 addition, 2 deletions)
@@ -16,7 +16,6 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA

-
 import urllib
 import re
 import time
@@ -42,7 +41,7 @@ def scrape_yandex(dom):
     for _ in searches:
         url = "https://yandex.com/search/?text=site%3A" + dom
         try:
-            sock = urllib.request.urlopen(url)
+            sock = urllib.request.urlopen(url, timeout=10)
             data = sock.read().decode("utf-8")
             sock.close()
         except Exception as e:
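Because scrape_yandex already wraps the fetch in a broad except Exception, adding the timeout turns a hung connection into a caught socket.timeout instead of an indefinite stall. The control flow, reduced to a sketch (the handler body is illustrative; the URL and 10-second value mirror the diff):

import urllib.request

data = ""
try:
    sock = urllib.request.urlopen("https://yandex.com/search/?text=site%3Aexample.com",
                                  timeout=10)
    data = sock.read().decode("utf-8")
    sock.close()
except Exception as e:  # socket.timeout is an OSError subclass, so it lands here too
    print(f"Yandex scrape failed: {e}")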