
Commit

Merge branch 'release/1.6.0'
jeromeleonard committed Jul 28, 2017
2 parents ac5acc3 + b14f023 commit 4e6cd23
Showing 22 changed files with 796 additions and 6 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -5,3 +5,5 @@ thehive-templates/*.sh

.idea
.DS_Store

Cortex-analyzers.iml
9 changes: 6 additions & 3 deletions analyzers/Abuse_Finder/abusefinder.py
@@ -1,6 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# -*- coding: utf-8 -*
"""This analyzer leverages abuse_finder, an Open Source Python library provided by CERT Société Générale to help
automatically find the most appropriate contact for abuse reports.
See https://github.com/certsocietegenerale/abuse_finder for further reference.
"""

import sys
import json
@@ -32,7 +35,7 @@ def abuse(self):
elif self.data_type == "url":
return url_abuse(self.getData())
else:
self.error("datatype not handled")
self.error("invalid datatype")

def run(self):
self.report({'abuse_finder':self.abuse()})
4 changes: 2 additions & 2 deletions analyzers/CIRCLPassiveDNS/circl_passivedns.py
@@ -68,9 +68,9 @@ def run(self):
elif self.data_type == 'domain':
query = self.getData()
if '/' in query:
self.error('\'/\' in domain. use url data type instead.')
self.error('\'/\' found in the supplied domain. use the URL datatype instead')
else:
self.error('Incompatible data type.')
self.error('invalid datatype')
self.report({'results': self.query(query)})

if __name__ == '__main__':
16 changes: 16 additions & 0 deletions analyzers/CuckooSandbox/CuckooSandbox_File_Analysis.json
@@ -0,0 +1,16 @@
{
"name": "CuckooSandbox_File_Analysis_Inet",
"version": "1.0",
"author": "Andrea Garavaglia, LDO-CERT",
"url": "https://github.com/garanews/Cortex-Analyzers",
"license": "AGPL-V3",
"baseConfig": "CuckooSandbox",
"config": {
"check_tlp": true,
"max_tlp":1,
"service": "file_analysis"
},
"description": "Cuckoo Sandbox file analysis with Internet access",
"dataTypeList": ["file"],
"command": "CuckooSandbox/cuckoosandbox_analyzer.py"
}
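
A note on the "check_tlp"/"max_tlp" pair above: Cortex uses a numeric TLP scale (WHITE=0, GREEN=1, AMBER=2, RED=3), so "max_tlp": 1 means jobs on AMBER and RED observables are rejected before the analyzer runs. A minimal sketch of that gate, for illustration only (cortexutils performs the actual check):

# Illustrative sketch of the TLP gate implied by check_tlp / max_tlp.
# Assumes the usual Cortex mapping: WHITE=0, GREEN=1, AMBER=2, RED=3.
TLP = {"WHITE": 0, "GREEN": 1, "AMBER": 2, "RED": 3}

def tlp_allowed(observable_tlp, check_tlp=True, max_tlp=1):
    # With max_tlp = 1, only WHITE and GREEN observables are analyzed.
    return (not check_tlp) or observable_tlp <= max_tlp
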
16 changes: 16 additions & 0 deletions analyzers/CuckooSandbox/CuckooSandbox_Url_Analysis.json
@@ -0,0 +1,16 @@
{
"name": "CuckooSandbox_Url_Analysis",
"version": "1.0",
"author": "Andrea Garavaglia, LDO-CERT",
"url": "https://github.com/garanews/Cortex-Analyzers",
"license": "AGPL-V3",
"baseConfig": "CuckooSandbox",
"config": {
"check_tlp": true,
"max_tlp":1,
"service": "url_analysis"
},
"description": "Cuckoo Sandbox URL analysis",
"dataTypeList": ["url"],
"command": "CuckooSandbox/cuckoosandbox_analyzer.py"
}
121 changes: 121 additions & 0 deletions analyzers/CuckooSandbox/cuckoosandbox_analyzer.py
@@ -0,0 +1,121 @@
#!/usr/bin/env python
# encoding: utf-8

from cortexutils.analyzer import Analyzer

import requests
import time
from os.path import basename

class CuckooSandboxAnalyzer(Analyzer):

def __init__(self):
Analyzer.__init__(self)
self.service = self.getParam('config.service', None, 'CuckooSandbox service is missing')
self.url = self.getParam('config.url', None, 'CuckooSandbox url is missing')
#self.analysistimeout = self.getParam('config.analysistimeout', 30*60, None)
#self.networktimeout = self.getParam('config.networktimeout', 30, None)

def summary(self, raw):
taxonomies = []
level = "safe"
namespace = "Cuckoo"
predicate = "Malscore"
value = "\"0\""

result = {
'service': self.service,
'dataType': self.data_type
}
result["malscore"] = raw.get("malscore", None)
result["malfamily"] = raw.get("malfamily", None)

if result["malscore"] > 6.5:
level = "malicious"
elif result["malscore"] > 2:
level = "suspicious"
elif result["malscore"] > 0:
level = "safe"

taxonomies.append(self.build_taxonomy(level, namespace, predicate, "\"{}\"".format(result["malscore"])))
taxonomies.append(self.build_taxonomy(level, namespace, "Malfamily", "\"{}\"".format(result["malfamily"])))

return {"taxonomies": taxonomies}

def run(self):
Analyzer.run(self)

try:

# file analysis
if self.service in ['file_analysis']:
filepath = self.getParam('file', None, 'File is missing')
filename = basename(filepath)
with open(filepath, "rb") as sample:
files = {"file": (filename, sample)}
response = requests.post(self.url + 'tasks/create/file', files=files)
task_id = response.json()['task_ids'][0]

# url analysis
elif self.service == 'url_analysis':
data = {"url": self.getData()}
response = requests.post(self.url + 'tasks/create/url', data=data)
task_id = response.json()['task_id']

else:
self.error('Unknown CuckooSandbox service')

finished = False
tries = 0
while not finished and tries <= 15: #wait max 15 mins
time.sleep(60)
response = requests.get(self.url + 'tasks/view/' + str(task_id))
content = response.json()['task']['status']
if content == 'reported':
finished = True
tries += 1
if not finished:
self.error('CuckooSandbox analysis timed out')

# Download the report
response = requests.get(self.url + 'tasks/report/' + str(task_id) + '/json')
resp_json = response.json()
list_description = [x['description'] for x in resp_json['signatures']]
if 'suricata' in resp_json.keys() and 'alerts' in resp_json['suricata'].keys():
suri_alerts = [(x['signature'],x['dstip'],x['dstport'],x['severity']) for x in resp_json['suricata']['alerts']]
else:
suri_alerts = []
hosts = [(x['ip'],x['hostname'],x['country_name']) for x in resp_json['network']['hosts']]
uri = [(x['uri']) for x in resp_json['network']['http']]
if self.service == 'url_analysis':
self.report({
'signatures': list_description,
'suricata_alerts': suri_alerts,
'hosts': hosts,
'uri': uri,
'malscore': resp_json['malscore'],
'malfamily': resp_json['malfamily'],
'file_type': 'url',
'yara': resp_json['target']['url'] if 'target' in resp_json.keys() and 'url' in resp_json['target'].keys() else '-'
})
else:
self.report({
'signatures': list_description,
'suricata_alerts': suri_alerts,
'hosts': hosts,
'uri': uri,
'malscore': resp_json['malscore'],
'malfamily': resp_json['malfamily'],
'file_type': "".join([x for x in resp_json['target']['file']['type']]),
'yara': [ x['name'] + " - " + x['meta']['description'] if 'description' in x['meta'].keys() else x['name'] for x in resp_json['target']['file']['yara'] ]
})

except requests.exceptions.RequestException as e:
self.error(e)

except Exception as e:
self.unexpectedError(e)

if __name__ == '__main__':
CuckooSandboxAnalyzer().run()
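
For local testing, a hedged sketch of driving a cortexutils 1.x analyzer such as the one above by piping a job object to it on stdin; the Cuckoo API base URL and the observable below are placeholders, not values taken from this commit:

# Hypothetical local test harness; the URL and observable are placeholders.
import json
import subprocess

job = {
    "dataType": "url",
    "data": "http://example.com",
    "tlp": 1,
    "config": {
        "service": "url_analysis",
        "url": "http://cuckoo.local:8090/"  # Cuckoo REST API base, trailing slash expected
    }
}

proc = subprocess.run(
    ["python", "analyzers/CuckooSandbox/cuckoosandbox_analyzer.py"],
    input=json.dumps(job).encode(),
    stdout=subprocess.PIPE
)
print(proc.stdout.decode())  # JSON emitted by self.report() or self.error()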

2 changes: 2 additions & 0 deletions analyzers/CuckooSandbox/requirements.txt
@@ -0,0 +1,2 @@
cortexutils
requests
2 changes: 1 addition & 1 deletion analyzers/MISP/MISP.json
@@ -9,7 +9,7 @@
"check_tlp": false,
"max_tlp": 3
},
"description": "Check if this IOC has been processed in different MISP instances.",
"description": "Query multiple MISP instances for events containing an observable.",
"dataTypeList": ["domain", "ip", "url", "fqdn", "uri_path","user-agent", "hash", "email", "mail", "mail_subject" , "registry", "regexp", "other", "filename"],
"command": "MISP/misp.py"
}
16 changes: 16 additions & 0 deletions analyzers/WOT/WOT_lookup.json
@@ -0,0 +1,16 @@
{
"name": "WOT_Lookup",
"version": "1.0",
"author": "Andrea Garavaglia - LDO-CERT",
"url": "https://github.com/garanews/Cortex-Analyzers",
"license": "AGPL-V3",
"description": "Check a Domain against Web of Trust (WOT) a website reputation service",
"dataTypeList": ["domain", "fqdn"],
"baseConfig": "WOT",
"config": {
"check_tlp": true,
"max_tlp": 1,
"service": "query"
},
"command": "WOT/WOT_lookup.py"
}
115 changes: 115 additions & 0 deletions analyzers/WOT/WOT_lookup.py
@@ -0,0 +1,115 @@
#!/usr/bin/env python
# encoding: utf-8

import json
import requests
import datetime
from cortexutils.analyzer import Analyzer

class WOTAnalyzer(Analyzer):

def __init__(self):
Analyzer.__init__(self)
self.service = self.getParam(
'config.service', None, 'Service parameter is missing')
self.WOT_key = self.getParam('config.key', None,
'Missing WOT API key')
self.categories = {
"101": "Malware or viruses",
"102": "Poor customer experience",
"103": "Phishing",
"104": "Scam",
"105": "Potentially illegal",
"201": "Misleading claims or unethical",
"202": "Privacy risks",
"203": "Suspicious",
"204": "Hate, discrimination",
"205": "Spam",
"206": "Potentially unwanted programs",
"207": "Ads / pop-ups",
"301": "Online tracking",
"302": "Alternative or controversial medicine",
"303": "Opinions, religion, politics",
"304": "Other",
"401": "Adult content",
"402": "Incidental nudity",
"403": "Gruesome or shocking",
"404": "Site for kids",
"501": "Good site"
}

def points_to_verbose(self, points):
if points >= 80:
return "Excellent"
elif points >= 60:
return "Good"
elif points >= 40:
return "Unsatisfactory"
elif points >= 20:
return "Poor"
else:
return "Very poor"

def WOT_checkurl(self, data):
url = 'http://api.mywot.com/0.4/public_link_json2?hosts=' + data + '/&callback=process&key=' + self.WOT_key
r = requests.get(url)
return json.loads(r.text.replace("process(","").replace(")",""))

def summary(self, raw):
taxonomies = []
level = "safe"
value = "-"

categories = raw.get("Categories", None)
blacklists = raw.get("Blacklists", None)
num_categories = raw.get("Categories Identifier", None)

if categories:
value = "|".join(categories)
if blacklists:
value = "|".join([x[0] for x in blacklists])
level = "malicious"
else:
if num_categories:
min_cat = min([int(x) for x in num_categories])
else:
min_cat = 501
if min_cat > 300:
level = "safe"
elif min_cat > 200:
level = "suspicious"
else:
level = "malicious"

taxonomies.append(self.build_taxonomy(level, "WOT", "Category", "\"{}\"".format(value)))
return {"taxonomies": taxonomies}

def run(self):
if self.service == 'query':
if self.data_type in ['domain', 'fqdn']:
data = self.getParam('data', None, 'Data is missing')
r = self.WOT_checkurl(data)
if data in r.keys():
info = r[data]
r_dict = {}
if '0' in info.keys():
r_dict['Trustworthiness'] = {}
r_dict['Trustworthiness']['Reputation'] = self.points_to_verbose(info['0'][0])
r_dict['Trustworthiness']['Confidence'] = self.points_to_verbose(info['0'][1])
if '4' in info.keys():
r_dict['Child_Safety'] = {}
r_dict['Child_Safety']['Reputation'] = self.points_to_verbose(info['4'][0])
r_dict['Child_Safety']['Confidence'] = self.points_to_verbose(info['4'][1])
if 'blacklists' in info.keys():
r_dict['Blacklists'] = [(k, datetime.datetime.fromtimestamp(v).strftime('%Y-%m-%d %H:%M:%S') ) for k,v in info['blacklists'].items()]
if 'categories' in info.keys():
r_dict['Categories'] = [self.categories[x] for x in list(info['categories'].keys())]
r_dict['Categories Identifier'] = list(info['categories'].keys())
self.report(r_dict)
else:
self.error('Invalid data type')
else:
self.error('Invalid service')

if __name__ == '__main__':
WOTAnalyzer().run()
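
For reference, a sketch (with made-up values) of the parsed WOT response shape that run() expects back from WOT_checkurl(), based on the keys accessed above:

# Illustrative only: hypothetical values showing the structure run() consumes.
example_response = {
    "example.com": {
        "0": [93, 52],                           # Trustworthiness: [reputation, confidence]
        "4": [95, 48],                           # Child safety: [reputation, confidence]
        "categories": {"501": 97},               # category id -> confidence
        "blacklists": {"phishing": 1499990400},  # list name -> UNIX timestamp
    }
}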
1 change: 1 addition & 0 deletions analyzers/WOT/requirements.txt
@@ -0,0 +1 @@
cortexutils
15 changes: 15 additions & 0 deletions analyzers/Yeti/Yeti.json
@@ -0,0 +1,15 @@
{
"name": "Yeti",
"author": "CERT-BDF",
"license": "AGPL-V3",
"url": "https://github.com/CERT/cortex-analyzers",
"version": "1.0",
"baseConfig": "Yeti",
"config": {
"check_tlp": false,
"max_tlp": 3
},
"description": "Fetch observable details from a Yeti",
"dataTypeList": ["domain", "fqdn", "ip", "url", "hash"],
"command": "Yeti/yeti.py"
}
2 changes: 2 additions & 0 deletions analyzers/Yeti/requirements.txt
@@ -0,0 +1,2 @@
cortexutils
git+https://github.com/yeti-platform/pyeti