Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

#789 catalogs removed #790

Merged
merged 36 commits into from
Jun 19, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
36 commits
Select commit Hold shift + click to select a range
cc4891f
#710 extract interesting artifacts from email source
jeromeleonard Apr 22, 2020
f58287d
malwarebazaar hash search
dadokkio Mar 24, 2020
903c5e0
Added more info in long template
dadokkio Mar 30, 2020
68eca6c
hash_not_found error
dadokkio Mar 31, 2020
cbce69d
fix class name
dadokkio Apr 14, 2020
36f4206
add anyrun analyzer
dadokkio Apr 1, 2020
8fce93d
Improve long template: score, tags, fix small errors
dadokkio Apr 1, 2020
26fa591
fix infos_domain
dadokkio Apr 21, 2020
7ce4cd4
yeti fix issues
dadokkio Apr 7, 2020
4c80f7d
Fix bug emlparser when 'content-type' string in mail is in lower case
TofBaasken Mar 26, 2020
22cfd43
Updated vendor lib to python3
milesflo Feb 12, 2020
50f3d27
Minor fix to address different error handling in py3
milesflo Feb 12, 2020
5c5ab21
JSON and datetime were moved to stdlib
milesflo Feb 12, 2020
3fba935
future lib was a bandage left over from python2
milesflo Feb 12, 2020
a80fd98
Adding requests as it will be the long-term solution
milesflo Feb 12, 2020
1330213
vbump to python:3
milesflo Feb 12, 2020
2f1cdda
fix map object vs json
dadokkio Apr 13, 2020
576e673
Add CyberChef analyzer
weslambert Jan 26, 2020
5e2a992
Add cortexutils
weslambert Feb 14, 2020
8d13606
Fix typo
weslambert Feb 14, 2020
0a434a2
Updated script, as results are currently failing
weslambert Mar 21, 2020
9c1fda3
check server response before decode
dadokkio Mar 22, 2020
59375f1
#599 #600 #697 Update short report to avoid being too long, remove in…
jeromeleonard Apr 24, 2020
ac9204b
Add OpenCTI Analyzer
amr-cossi Mar 20, 2020
8077569
Fix template color variable and remove class on ExternalRefs
amr-cossi Mar 21, 2020
091f2d5
added api and emailrep lib to EmailRep analyzer
dadokkio Apr 21, 2020
8c866e0
Add v1 files
mdavis332 Oct 10, 2019
c71ecf0
update "Applies To" section
mdavis332 Oct 10, 2019
fba016d
insert and use postgres as backend for mispwarninglist
dadokkio Mar 26, 2020
d3d06b1
bump version to 2.0
dadokkio Apr 6, 2020
2a7742a
add sqlalchemy in requirements
dadokkio Apr 6, 2020
c7ed4be
added psycopg2-binary in requirements
dadokkio Apr 7, 2020
0e21020
Update changelog
nadouani May 12, 2020
a8cd905
add version to Changelog
jeromeleonard May 12, 2020
8b74836
add version to Changelog
jeromeleonard May 12, 2020
f5daf3b
#789 catalogs removed
jeromeleonard Jun 11, 2020
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
332 changes: 243 additions & 89 deletions CHANGELOG.md

Large diffs are not rendered by default.

28 changes: 28 additions & 0 deletions analyzers/AnyRun/AnyRun_Sandbox_Analysis.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
{
"name": "AnyRun_Sandbox_Analysis",
"version": "1.0",
"author": "Andrea Garavaglia, Davide Arcuri, LDO-CERT",
"url": "https://github.com/TheHive-Project/Cortex-Analyzers",
"license": "AGPL-V3",
"description": "Any.Run Sandbox file analysis",
"dataTypeList": ["file", "url"],
"command": "AnyRun/anyrun_analyzer.py",
"baseConfig": "AnyRun",
"configurationItems": [
{
"name": "token",
"description": "API token",
"type": "string",
"multi": false,
"required": false
},
{
"name": "verify_ssl",
"description": "Verify SSL certificate",
"type": "boolean",
"multi": false,
"required": true,
"defaultValue": true
}
]
}
130 changes: 130 additions & 0 deletions analyzers/AnyRun/anyrun_analyzer.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
#!/usr/bin/env python3
# encoding: utf-8
import time
import requests
from os.path import basename
from cortexutils.analyzer import Analyzer
from requests.packages.urllib3.exceptions import InsecureRequestWarning


class AnyRunAnalyzer(Analyzer):
    """Cortex analyzer submitting files or URLs to the Any.Run sandbox.

    The observable is POSTed to https://api.any.run/v1/analysis, polled
    until the analysis is done (up to ~15 minutes), and a pruned copy of
    the final report is returned to Cortex.
    """

    def __init__(self):
        Analyzer.__init__(self)
        self.url = "https://api.any.run/v1"
        self.token = self.get_param("config.token", None, "Service token is missing")
        self.verify_ssl = self.get_param("config.verify_ssl", True, None)
        if not self.verify_ssl:
            # Silence urllib3 warnings when certificate checking is disabled.
            requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

    def summary(self, raw):
        """Build the short taxonomy shown in TheHive: AnyRun:Sandbox = "<score>/100".

        Score 0-50 maps to level "safe", 51-99 to "suspicious", 100 to "malicious".
        """
        taxonomies = []
        level = "safe"
        namespace = "AnyRun"
        predicate = "Sandbox"
        value = (
            raw.get("analysis", {}).get("scores", {}).get("verdict", {}).get("score", 0)
        )
        if 50 < value < 100:
            level = "suspicious"
        elif value == 100:
            level = "malicious"

        taxonomies.append(
            self.build_taxonomy(level, namespace, predicate, "{0}/100".format(value))
        )

        return {"taxonomies": taxonomies}

    def _submit(self, headers, filepath=None, filename=None, target_url=None):
        """Submit a sample (filepath/filename) or a URL (target_url) for analysis.

        Any.Run does not support parallel runs, so an HTTP 429 answer is
        retried every 60 seconds, at most 16 attempts in total. Returns the
        task id on success; on any other failure calls self.error() (which
        terminates the analyzer and does not return).
        """
        tries = 0
        while tries <= 15:
            if filepath is not None:
                # Re-open the sample on every attempt so a retry re-sends the
                # full content instead of an already-consumed file handle.
                with open(filepath, "rb") as sample:
                    response = requests.post(
                        "{0}/analysis".format(self.url),
                        files={"file": (filename, sample)},
                        headers=headers,
                        verify=self.verify_ssl,
                    )
            else:
                response = requests.post(
                    "{0}/analysis".format(self.url),
                    data={"obj_type": "url", "obj_url": target_url},
                    headers=headers,
                    verify=self.verify_ssl,
                )
            if response.status_code == 200:
                return response.json()["data"]["taskid"]
            elif response.status_code == 201:
                return response.json()["taskid"]
            elif response.status_code == 429:
                # Parallel runs are not supported, so wait and resubmit later.
                time.sleep(60)
                tries += 1
            else:
                self.error(response.json()["message"])
        # Previously exhausting the retries fell through with task_id unbound,
        # producing a NameError instead of a meaningful message.
        self.error("AnyRun submission kept being throttled (HTTP 429), giving up")

    def run(self):
        Analyzer.run(self)

        try:
            headers = {"Authorization": "API-Key {0}".format(self.token)}

            if self.data_type == "file":
                filepath = self.get_param("file", None, "File is missing")
                filename = self.get_param("filename", basename(filepath))
                task_id = self._submit(headers, filepath=filepath, filename=filename)
            elif self.data_type == "url":
                url = self.get_param("data", None, "Url is missing")
                task_id = self._submit(headers, target_url=url)
            else:
                self.error("Invalid data type!")

            # Poll the task until Any.Run reports it as done.
            finished = False
            tries = 0
            while not finished and tries <= 15:  # wait max 15 mins
                time.sleep(60)
                response = requests.get(
                    "{0}/analysis/{1}".format(self.url, task_id),
                    headers=headers,
                    verify=self.verify_ssl,
                )
                if response.status_code == 200:
                    finished = response.json()["data"]["status"] == "done"
                elif 400 < response.status_code < 500:
                    self.error(response.json()["message"])
                tries += 1
            if not finished:
                self.error("AnyRun analysis timed out")

            # These items can be huge and the report links back to Any.Run,
            # so drop them to keep the Cortex report small.
            final_report = response.json()["data"]
            final_report.pop("environments", None)
            final_report.pop("modified", None)
            for incident in final_report.get("incidents", []):
                incident.pop("events", None)
            for process in final_report.get("processes", []):
                process.pop("modules", None)
            self.report(final_report)

        except requests.exceptions.RequestException as e:
            self.error(str(e))

        except Exception as e:
            self.unexpectedError(e)


if __name__ == "__main__":
    # Script entry point: instantiate the analyzer and hand control to it.
    analyzer = AnyRunAnalyzer()
    analyzer.run()
2 changes: 2 additions & 0 deletions analyzers/AnyRun/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
cortexutils
requests
24 changes: 24 additions & 0 deletions analyzers/CyberChef/CyberChef_FromBase64.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
{
"name": "CyberChef_FromBase64",
"version": "1.0",
"author": "Wes Lambert",
"url": "https://github.com/TheHive-Project/Cortex-Analyzers",
"license": "AGPL-V3",
"description": "Convert Base64 with CyberChef Server",
"dataTypeList": ["other"],
"baseConfig": "CyberChef",
"config": {
"service": "FromBase64"
},
"command": "CyberChef/cyberchef.py",
"configurationItems": [
{
"name": "url",
"description": "CyberChef Server URL",
"type": "string",
"multi": false,
"required": true,
"defaultValue": "http://192.168.1.178:3000/"
}
]
}
24 changes: 24 additions & 0 deletions analyzers/CyberChef/CyberChef_FromCharCode.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
{
"name": "CyberChef_FromCharCode",
"version": "1.0",
"author": "Wes Lambert",
"url": "https://github.com/TheHive-Project/Cortex-Analyzers",
"license": "AGPL-V3",
"description": "Convert Char Code with CyberChef Server",
"dataTypeList": ["other"],
"baseConfig": "CyberChef",
"config": {
"service": "FromCharCode"
},
"command": "CyberChef/cyberchef.py",
"configurationItems": [
{
"name": "url",
"description": "CyberChef Server URL",
"type": "string",
"multi": false,
"required": true,
"defaultValue": "http://192.168.1.178:3000/"
}
]
}
24 changes: 24 additions & 0 deletions analyzers/CyberChef/CyberChef_FromHex.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
{
"name": "CyberChef_FromHex",
"version": "1.0",
"author": "Wes Lambert",
"url": "https://github.com/TheHive-Project/Cortex-Analyzers",
"license": "AGPL-V3",
"description": "Convert Hex with CyberChef Server",
"dataTypeList": ["other"],
"baseConfig": "CyberChef",
"config": {
"service": "FromHex"
},
"command": "CyberChef/cyberchef.py",
"configurationItems": [
{
"name": "url",
"description": "CyberChef Server URL",
"type": "string",
"multi": false,
"required": true,
"defaultValue": "http://192.168.1.178:3000/"
}
]
}
49 changes: 49 additions & 0 deletions analyzers/CyberChef/cyberchef.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
#!/usr/bin/env python3
# encoding: utf-8

import json
import requests
from cortexutils.analyzer import Analyzer

class CyberchefAnalyzer(Analyzer):
    """Cortex analyzer that "bakes" an observable with a CyberChef server.

    Supported services: FromHex, FromBase64, FromCharCode. The observable
    is POSTed with a fixed recipe to the server's /bake endpoint.
    """

    def __init__(self):
        Analyzer.__init__(self)
        self.observable = self.get_param('data', None, 'Data missing!')
        self.service = self.get_param('config.service', None, 'Service is missing')
        self.url = self.get_param('config.url', None, 'URL is missing')

    def summary(self, raw):
        """Short taxonomy shown in TheHive: CyberChef:<service> = "baked!"."""
        taxonomies = []
        level = 'info'
        namespace = 'CyberChef'

        # Set predicate for output_data
        predicate = self.service
        taxonomies.append(self.build_taxonomy(level, namespace, predicate, "baked!"))

        return {"taxonomies": taxonomies}

    def run(self):
        """Build the recipe for the configured service, POST it to the
        CyberChef server and report the baked output."""
        try:
            observable = str(self.observable)
            url = self.url
            if self.service == 'FromHex':
                # NOTE(review): bare recipe object here vs. one-element lists
                # below; cyberchef-server accepts both forms.
                data = {"input": observable, "recipe":{"op":"From Hex", "args": ["Auto"]}}
            elif self.service == "FromBase64":
                data = { "input": observable, "recipe":[{"op":"From Base64","args":["A-Za-z0-9+/=",True]}]}
            elif self.service == "FromCharCode":
                # Recipe from https://github.com/mattnotmax/cyberchef-recipes#recipe-3---from-charcode
                data = { "input": observable, "recipe":[{"op":"Regular expression","args":["User defined","([0-9]{2,3}(,\\s|))+",True,True,False,False,False,False,"List matches"]},{"op":"From Charcode","args":["Comma",10]},{"op":"Regular expression","args":["User defined","([0-9]{2,3}(,\\s|))+",True,True,False,False,False,False,"List matches"]},{"op":"From Charcode","args":["Space",10]}]}
            else:
                # Previously an unknown service fell through with `data`
                # unbound, producing an unrelated NameError.
                self.error('Unknown service: %s' % self.service)
            headers = { 'Content-Type': 'application/json' }
            r = requests.post(url.strip('/') + '/bake', headers=headers, data=json.dumps(data))
            if r.status_code == 200:
                # Assumes /bake answers with a list of character codes in
                # "value" (byteArray output) -- matches the chr() decoding.
                output_data = "".join(chr(x) for x in r.json().get('value', []))
                self.report({ 'input_data': observable, 'output_data': output_data })
            else:
                self.error('Server responded with %d: %s' % (r.status_code, r.text))
        except Exception:
            # "except Exception" instead of the previous bare "except:", which
            # also caught the SystemExit raised by self.error() above and
            # replaced the specific server error with this generic message.
            self.error("Could not convert provided data.")

if __name__ == '__main__':
    # Script entry point: instantiate the analyzer and hand control to it.
    analyzer = CyberchefAnalyzer()
    analyzer.run()

2 changes: 2 additions & 0 deletions analyzers/CyberChef/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
cortexutils
dnspython
requests
2 changes: 1 addition & 1 deletion analyzers/DNSDB/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM python:2
FROM python:3

WORKDIR /worker
COPY . DNSDB
Expand Down
20 changes: 9 additions & 11 deletions analyzers/DNSDB/dnsdb.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
#!/usr/bin/env python2
# encoding: utf-8
#!/usr/bin/env python3

import datetime
from urllib2 import HTTPError
from urllib3.exceptions import HTTPError
from dnsdb_query import DnsdbClient, QueryError
from cortexutils.analyzer import Analyzer

Expand Down Expand Up @@ -62,15 +62,13 @@ def run(self):
try:
client = DnsdbClient(self.dnsdb_server, self.dnsdb_key)
self.report({
"records": map(lambda r: self.update_date('time_first', self.update_date('time_last', r)),
self.execute_dnsdb_service(client))
"records": list(map(lambda r: self.update_date('time_first', self.update_date('time_last', r)),
self.execute_dnsdb_service(client)))
})
except HTTPError, e:
if e.code != 404:
self.unexpectedError(e)
else:
self.report({"records": []})
except Exception as e:
self.unexpectedError(e)
self.report({"records": []})


if __name__ == '__main__':
DnsDbAnalyzer().run()
DnsDbAnalyzer().run()
Loading