fix: use already existing regexes
Signed-off-by: Felipe Zipitria <[email protected]>
fzipi committed Jan 11, 2025
1 parent 408ab9f commit ec9e447
Showing 5 changed files with 22 additions and 19 deletions.
4 changes: 2 additions & 2 deletions lambda_code/scan/scan.py
@@ -4,6 +4,7 @@
 
 from utils.utils_aws import eb_susceptible
 from utils.utils_aws import get_cloudfront_s3_origin_takeover
+from utils.utils_aws import is_s3_website_endpoint_url
 from utils.utils_aws import list_domains
 from utils.utils_aws import list_hosted_zones
 from utils.utils_aws import list_resource_record_sets
@@ -236,8 +237,7 @@ def cname_s3(account_name, record_sets):
         for r in record_sets
         if r["Type"] in ["CNAME"]
         and "ResourceRecords" in r
-        and "amazonaws.com" in r["ResourceRecords"][0]["Value"]
-        and ".s3-website" in r["ResourceRecords"][0]["Value"]
+        and is_s3_website_endpoint_url(r["ResourceRecords"][0]["Value"])
     ]
 
     for record in record_sets_filtered:
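
The filter in cname_s3 now delegates the endpoint check to the shared helper. A minimal sketch of the behaviour, with invented CNAME records and the helper inlined so the snippet runs on its own (the regex is copied from utils/utils_aws.py; the sample data is not from the repo):

# Sketch only: sample records are made up; the helper mirrors utils/utils_aws.py.
import re

BUCKET_WEBSITE_ENDPOINT = re.compile(r"^.+\.s3-website[-.]([a-z0-9-]+\.)?amazonaws\.com$")

def is_s3_website_endpoint_url(url):
    return url and BUCKET_WEBSITE_ENDPOINT.match(url)

record_sets = [
    {"Type": "CNAME", "ResourceRecords": [{"Value": "mybucket.s3-website-eu-west-1.amazonaws.com"}]},
    {"Type": "CNAME", "ResourceRecords": [{"Value": "example.cloudfront.net"}]},
    {"Type": "A"},  # no ResourceRecords key, skipped by the second condition
]

record_sets_filtered = [
    r
    for r in record_sets
    if r["Type"] in ["CNAME"]
    and "ResourceRecords" in r
    and is_s3_website_endpoint_url(r["ResourceRecords"][0]["Value"])
]
# Only the s3-website CNAME survives the filter.
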
2 changes: 1 addition & 1 deletion lambda_code/takeover/takeover.py
@@ -409,7 +409,7 @@ def lambda_handler(event, context): # pylint:disable=unused-argument
 
                 takeover_domains.append(finding["Domain"])
 
-        elif ".elasticbeanstalk.com" in finding["Takeover"]:
+        elif finding["Takeover"].endswith(".elasticbeanstalk.com"):
             resource_type = "Elastic Beanstalk instance"
 
             if eb_takeover(finding["Takeover"], finding["Domain"], finding["Account"]):
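
The Elastic Beanstalk branch switches from a substring test to a suffix test, so the marker can no longer match in the middle of an unrelated hostname. A small illustration with made-up domains:

# Illustration of the substring-vs-suffix change (domains here are invented).
takeover = "myapp.us-east-1.elasticbeanstalk.com"
spoofed = "victim.elasticbeanstalk.com.attacker.example"

print(".elasticbeanstalk.com" in spoofed)           # True  - the old check also matched this
print(spoofed.endswith(".elasticbeanstalk.com"))    # False - the new check rejects it
print(takeover.endswith(".elasticbeanstalk.com"))   # True  - a genuine EB endpoint still matches
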
4 changes: 2 additions & 2 deletions manual_scans/aws/aws_alias_s3.py
@@ -2,6 +2,7 @@
 import boto3
 import requests
 
+from utils.utils_aws import is_s3_website_endpoint_url
 from utils.utils_aws_manual import list_hosted_zones_manual_scan
 from utils.utils_print import my_print
 from utils.utils_print import print_list
@@ -45,8 +46,7 @@ def route53():
        record_sets = [
            r
            for r in page_records["ResourceRecordSets"]
-           if "AliasTarget" in r
-           if ("amazonaws.com" in r["AliasTarget"]["DNSName"]) and "s3-website" in (r["AliasTarget"]["DNSName"])
+           if "AliasTarget" in r and is_s3_website_endpoint_url(r["AliasTarget"]["DNSName"])
        ]
        for record in record_sets:
            i = i + 1
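
The manual alias scan applies the same helper to Route 53 alias targets, collapsing the two stacked if clauses into a single condition. A sketch with invented alias records (it assumes the repo's utils package is importable, exactly as the script itself does):

# Sketch only: alias records are invented; the import matches the one added in this diff
# and requires running from the repository root.
from utils.utils_aws import is_s3_website_endpoint_url

page_records = {
    "ResourceRecordSets": [
        {"Name": "www.example.com.", "AliasTarget": {"DNSName": "site.s3-website.ap-southeast-2.amazonaws.com"}},
        {"Name": "cdn.example.com.", "AliasTarget": {"DNSName": "d111111abcdef8.cloudfront.net"}},
        {"Name": "mail.example.com."},  # no AliasTarget key
    ]
}

record_sets = [
    r
    for r in page_records["ResourceRecordSets"]
    if "AliasTarget" in r and is_s3_website_endpoint_url(r["AliasTarget"]["DNSName"])
]
# Only www.example.com. remains: its alias target is an S3 website endpoint.
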
15 changes: 15 additions & 0 deletions utils/utils_aws.py
@@ -6,6 +6,11 @@
 from botocore import exceptions
 
 
+# Compile once
+BUCKET_URL_ENDPOINT = re.compile(r"^.+\.s3\.([a-z0-9-]+\.)?amazonaws\.com$")
+BUCKET_WEBSITE_ENDPOINT = re.compile(r"^.+\.s3-website[-.]([a-z0-9-]+\.)?amazonaws\.com$")
+
+
 def generate_role_arn(account, role_name):
     return "arn:aws:iam::" + account + ":role/" + role_name
 
@@ -313,3 +318,13 @@ def eb_susceptible(domain):
 
     # domain is not an Elastic Beanstalk domain
     return False
+
+
+def is_s3_bucket_url(url):
+    # bucket.s3.amazonaws.com or bucket.s3.region.amazonaws.com
+    return url and BUCKET_URL_ENDPOINT.match(url)
+
+
+def is_s3_website_endpoint_url(url):
+    # bucket.s3-website-region.amazonaws.com
+    return url and BUCKET_WEBSITE_ENDPOINT.match(url)
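
These two patterns, previously local to utils/utils_aws_manual.py, are now the single shared definition. A quick check of what each one accepts, using invented hostnames chosen to exercise the patterns:

# Quick illustration of the two regexes now centralized in utils/utils_aws.py
# (example hostnames are invented).
import re

BUCKET_URL_ENDPOINT = re.compile(r"^.+\.s3\.([a-z0-9-]+\.)?amazonaws\.com$")
BUCKET_WEBSITE_ENDPOINT = re.compile(r"^.+\.s3-website[-.]([a-z0-9-]+\.)?amazonaws\.com$")

for host in (
    "mybucket.s3.amazonaws.com",                         # legacy global bucket endpoint
    "mybucket.s3.eu-west-1.amazonaws.com",               # regional bucket endpoint
    "mybucket.s3-website-us-east-1.amazonaws.com",       # website endpoint, dash style
    "mybucket.s3-website.ap-southeast-2.amazonaws.com",  # website endpoint, dot style
    "mybucket.example.com",                              # matches neither
):
    print(host, bool(BUCKET_URL_ENDPOINT.match(host)), bool(BUCKET_WEBSITE_ENDPOINT.match(host)))
# The bucket pattern matches the first two, the website pattern the next two,
# and the last host matches neither.
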
16 changes: 2 additions & 14 deletions utils/utils_aws_manual.py
@@ -4,10 +4,8 @@
 import boto3
 import requests
 import urllib3
-
-# Compile once
-BUCKET_URL_ENDPOINT = re.compile(r"^.+\.s3\.([a-z0-9-]+\.)?amazonaws\.com$")
-BUCKET_WEBSITE_ENDPOINT = re.compile(r"^.+\.s3-website[-.]([a-z0-9-]+\.)?amazonaws\.com$")
+from utils_aws import is_s3_bucket_url
+from utils_aws import is_s3_website_endpoint_url
 
 
@@ -65,16 +63,6 @@ def get_cloudfront_origin_url(domain_name):
         return distribution["Origins"]["Items"][0]["DomainName"]
 
 
-def is_s3_bucket_url(url):
-    # bucket.s3.amazonaws.com or bucket.s3.region.amazonaws.com
-    return url and BUCKET_URL_ENDPOINT.match(url)
-
-
-def is_s3_website_endpoint_url(url):
-    # bucket.s3-website-region.amazonaws.com
-    return url and BUCKET_WEBSITE_ENDPOINT.match(url)
-
-
 def vulnerable_cloudfront_s3_manual(domain_name):
     try:
         response = requests.get(f"https://{domain_name}", timeout=1)
