
Commit e4b96a4
Merge pull request #790 from TheHive-Project/remove_catalogs
#789 catalogs removed
jeromeleonard authored Jun 19, 2020
2 parents e1dbd8f + f5daf3b commit e4b96a4
Showing 63 changed files with 2,687 additions and 11,804 deletions.
332 changes: 243 additions & 89 deletions CHANGELOG.md

Large diffs are not rendered by default.

28 changes: 28 additions & 0 deletions analyzers/AnyRun/AnyRun_Sandbox_Analysis.json
@@ -0,0 +1,28 @@
{
"name": "AnyRun_Sandbox_Analysis",
"version": "1.0",
"author": "Andrea Garavaglia, Davide Arcuri, LDO-CERT",
"url": "https://github.com/TheHive-Project/Cortex-Analyzers",
"license": "AGPL-V3",
"description": "Any.Run Sandbox file analysis",
"dataTypeList": ["file", "url"],
"command": "AnyRun/anyrun_analyzer.py",
"baseConfig": "AnyRun",
"configurationItems": [
{
"name": "token",
"description": "API token",
"type": "string",
"multi": false,
"required": false
},
{
"name": "verify_ssl",
"description": "Verify SSL certificate",
"type": "boolean",
"multi": false,
"required": true,
"defaultValue": true
}
]
}
130 changes: 130 additions & 0 deletions analyzers/AnyRun/anyrun_analyzer.py
@@ -0,0 +1,130 @@
#!/usr/bin/env python3
# encoding: utf-8
import time
import requests
from os.path import basename
from cortexutils.analyzer import Analyzer
from requests.packages.urllib3.exceptions import InsecureRequestWarning


class AnyRunAnalyzer(Analyzer):
def __init__(self):
Analyzer.__init__(self)
self.url = "https://api.any.run/v1"
self.token = self.get_param("config.token", None, "Service token is missing")
self.verify_ssl = self.get_param("config.verify_ssl", True, None)
if not self.verify_ssl:
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

def summary(self, raw):
taxonomies = []
level = "safe"
namespace = "AnyRun"
predicate = "Sandbox"
value = (
raw.get("analysis", {}).get("scores", {}).get("verdict", {}).get("score", 0)
)
if 50 < value < 100:
level = "suspicious"
elif value == 100:
level = "malicious"

taxonomies.append(
self.build_taxonomy(level, namespace, predicate, "{0}/100".format(value))
)

return {"taxonomies": taxonomies}

def run(self):
Analyzer.run(self)

try:
headers = {"Authorization": "API-Key {0}".format(self.token)}

status_code = None
tries = 0
if self.data_type == "file":
filepath = self.get_param("file", None, "File is missing")
filename = self.get_param("filename", basename(filepath))
while status_code in (None, 429) and tries <= 15:
with open(filepath, "rb") as sample:
files = {"file": (filename, sample)}
response = requests.post(
"{0}/analysis".format(self.url),
files=files,
headers=headers,
verify=self.verify_ssl,
)
status_code = response.status_code
if status_code == 200:
task_id = response.json()["data"]["taskid"]
elif status_code == 201:
task_id = response.json()["taskid"]
elif status_code == 429:
# the service does not support parallel runs, so wait and resubmit later
time.sleep(60)
tries += 1
else:
self.error(response.json()["message"])
elif self.data_type == "url":
url = self.get_param("data", None, "URL is missing")
data = {"obj_type": "url", "obj_url": url}
while status_code in (None, 429) and tries <= 15:
response = requests.post(
"{0}/analysis".format(self.url),
data=data,
headers=headers,
verify=self.verify_ssl,
)
status_code = response.status_code
if status_code == 200:
task_id = response.json()["data"]["taskid"]
elif status_code == 201:
task_id = response.json()["taskid"]
elif status_code == 429:
# the service does not support parallel runs, so wait and resubmit later
time.sleep(60)
tries += 1
else:
self.error(response.json()["message"])
else:
self.error("Invalid data type!")

finished = False
tries = 0
while not finished and tries <= 15: # wait max 15 mins
time.sleep(60)
response = requests.get(
"{0}/analysis/{1}".format(self.url, task_id),
headers=headers,
verify=self.verify_ssl,
)
if response.status_code == 200:
finished = (
True if response.json()["data"]["status"] == "done" else False
)
elif 400 < response.status_code < 500:
self.error(response.json()["message"])
tries += 1
if not finished:
self.error("AnyRun analysis timed out")

# these items can be huge; a link to the full report is provided, so omit them from the Cortex report
final_report = response.json()["data"]
final_report.pop("environments", None)
final_report.pop("modified", None)
for incident in final_report.get("incidents", []):
incident.pop("events", None)
for process in final_report.get("processes", []):
process.pop("modules", None)
self.report(final_report)

except requests.exceptions.RequestException as e:
self.error(str(e))

except Exception as e:
self.unexpectedError(e)


if __name__ == "__main__":
AnyRunAnalyzer().run()
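
For local testing, the analyzer above can be fed a Cortex-style job object on stdin. The sketch below is illustrative only: the field names follow the cortexutils conventions the code assumes, the token is a placeholder, and the script path reflects this repository layout.

# Hypothetical local run of the new analyzer; not part of this commit.
# cortexutils workers read the job input from stdin when no job directory is passed.
import json
import subprocess

job_input = {
    "dataType": "url",
    "data": "http://example.com/suspicious",      # observable to detonate
    "config": {
        "token": "REPLACE_WITH_ANYRUN_API_KEY",   # placeholder value
        "verify_ssl": True,
    },
}

proc = subprocess.run(
    ["python3", "analyzers/AnyRun/anyrun_analyzer.py"],
    input=json.dumps(job_input),
    capture_output=True,
    text=True,
)
print(proc.stdout)  # JSON emitted by self.report() or self.error()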
2 changes: 2 additions & 0 deletions analyzers/AnyRun/requirements.txt
@@ -0,0 +1,2 @@
cortexutils
requests
24 changes: 24 additions & 0 deletions analyzers/CyberChef/CyberChef_FromBase64.json
@@ -0,0 +1,24 @@
{
"name": "CyberChef_FromBase64",
"version": "1.0",
"author": "Wes Lambert",
"url": "https://github.com/TheHive-Project/Cortex-Analyzers",
"license": "AGPL-V3",
"description": "Convert Base64 with CyberChef Server",
"dataTypeList": ["other"],
"baseConfig": "CyberChef",
"config": {
"service": "FromBase64"
},
"command": "CyberChef/cyberchef.py",
"configurationItems": [
{
"name": "url",
"description": "CyberChef Server URL",
"type": "string",
"multi": false,
"required": true,
"defaultValue": "http://192.168.1.178:3000/"
}
]
}
24 changes: 24 additions & 0 deletions analyzers/CyberChef/CyberChef_FromCharCode.json
@@ -0,0 +1,24 @@
{
"name": "CyberChef_FromCharCode",
"version": "1.0",
"author": "Wes Lambert",
"url": "https://github.com/TheHive-Project/Cortex-Analyzers",
"license": "AGPL-V3",
"description": "Convert Char Code with CyberChef Server",
"dataTypeList": ["other"],
"baseConfig": "CyberChef",
"config": {
"service": "FromCharCode"
},
"command": "CyberChef/cyberchef.py",
"configurationItems": [
{
"name": "url",
"description": "CyberChef Server URL",
"type": "string",
"multi": false,
"required": true,
"defaultValue": "http://192.168.1.178:3000/"
}
]
}
24 changes: 24 additions & 0 deletions analyzers/CyberChef/CyberChef_FromHex.json
@@ -0,0 +1,24 @@
{
"name": "CyberChef_FromHex",
"version": "1.0",
"author": "Wes Lambert",
"url": "https://github.com/TheHive-Project/Cortex-Analyzers",
"license": "AGPL-V3",
"description": "Convert Hex with CyberChef Server",
"dataTypeList": ["other"],
"baseConfig": "CyberChef",
"config": {
"service": "FromHex"
},
"command": "CyberChef/cyberchef.py",
"configurationItems": [
{
"name": "url",
"description": "CyberChef Server URL",
"type": "string",
"multi": false,
"required": true,
"defaultValue": "http://192.168.1.178:3000/"
}
]
}
49 changes: 49 additions & 0 deletions analyzers/CyberChef/cyberchef.py
@@ -0,0 +1,49 @@
#!/usr/bin/env python3
# encoding: utf-8

import json
import requests
from cortexutils.analyzer import Analyzer

class CyberchefAnalyzer(Analyzer):
def __init__(self):
Analyzer.__init__(self)
self.observable = self.get_param('data', None, 'Data missing!')
self.service = self.get_param('config.service', None, 'Service is missing')
self.url = self.get_param('config.url', None, 'URL is missing')

def summary(self, raw):
taxonomies = []
level = 'info'
namespace = 'CyberChef'

# Set predicate for output_data
predicate = self.service
taxonomies.append(self.build_taxonomy(level, namespace, predicate, "baked!"))

return {"taxonomies": taxonomies}

def run(self):
try:
observable = str(self.observable)
url = self.url
if self.service == 'FromHex':
data = {"input": observable, "recipe":{"op":"From Hex", "args": ["Auto"]}}
elif self.service == "FromBase64":
data = { "input": observable, "recipe":[{"op":"From Base64","args":["A-Za-z0-9+/=",True]}]}
elif self.service == "FromCharCode":
# Recipe from https://github.com/mattnotmax/cyberchef-recipes#recipe-3---from-charcode
data = { "input": observable, "recipe":[{"op":"Regular expression","args":["User defined","([0-9]{2,3}(,\\s|))+",True,True,False,False,False,False,"List matches"]},{"op":"From Charcode","args":["Comma",10]},{"op":"Regular expression","args":["User defined","([0-9]{2,3}(,\\s|))+",True,True,False,False,False,False,"List matches"]},{"op":"From Charcode","args":["Space",10]}]}
headers = { 'Content-Type': 'application/json' }
r = requests.post(url.strip('/') + '/bake', headers=headers, data=json.dumps(data))
if r.status_code == 200:
output_data = "".join([chr(x) for x in r.json().get('value', [])])
self.report({ 'input_data': observable, 'output_data': output_data })
else:
self.error('Server responded with %d: %s' % (r.status_code, r.text))
except Exception as e:
self.error("Could not convert provided data: {}".format(e))

if __name__ == '__main__':
CyberchefAnalyzer().run()
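
The analyzer above assumes a CyberChef-server instance whose /bake endpoint accepts an input/recipe payload and returns the baked output as a list of character codes under "value". A minimal sketch of that exchange, mirroring the request built in run() (the server URL is a placeholder):

# Illustrative call against a CyberChef server; not part of this commit.
import json
import requests

payload = {
    "input": "48656c6c6f",                               # "Hello" encoded as hex
    "recipe": [{"op": "From Hex", "args": ["Auto"]}],
}
r = requests.post(
    "http://localhost:3000/bake",                        # placeholder URL
    headers={"Content-Type": "application/json"},
    data=json.dumps(payload),
)
r.raise_for_status()
# The analyzer expects the result as character codes in "value".
print("".join(chr(x) for x in r.json().get("value", [])))  # -> "Hello"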

2 changes: 2 additions & 0 deletions analyzers/CyberChef/requirements.txt
@@ -0,0 +1,2 @@
cortexutils
requests
2 changes: 1 addition & 1 deletion analyzers/DNSDB/Dockerfile
@@ -1,4 +1,4 @@
FROM python:2
FROM python:3

WORKDIR /worker
COPY . DNSDB
20 changes: 9 additions & 11 deletions analyzers/DNSDB/dnsdb.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python2
# encoding: utf-8
#!/usr/bin/env python3

import datetime
from urllib2 import HTTPError
from urllib3.exceptions import HTTPError
from dnsdb_query import DnsdbClient, QueryError
from cortexutils.analyzer import Analyzer

@@ -62,15 +62,13 @@ def run(self):
try:
client = DnsdbClient(self.dnsdb_server, self.dnsdb_key)
self.report({
"records": map(lambda r: self.update_date('time_first', self.update_date('time_last', r)),
self.execute_dnsdb_service(client))
"records": list(map(lambda r: self.update_date('time_first', self.update_date('time_last', r)),
self.execute_dnsdb_service(client)))
})
except HTTPError, e:
if e.code != 404:
self.unexpectedError(e)
else:
self.report({"records": []})
except Exception as e:
self.unexpectedError(e)
self.report({"records": []})


if __name__ == '__main__':
DnsDbAnalyzer().run()
DnsDbAnalyzer().run()
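
The list(map(...)) change in the hunk above is needed because Python 3's map() returns a lazy iterator, which the JSON encoding behind self.report() cannot serialize. A minimal illustration (the sample records are made up):

# Why the wrapper matters under Python 3; not part of this commit.
import json

records = [{"time_first": 1592524800}, {"time_last": 1592524800}]  # dummy data

json.dumps(list(map(lambda r: r, records)))  # works: a list is JSON-serializable
# json.dumps(map(lambda r: r, records))      # TypeError: Object of type map is not JSON serializable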
