Quick updates #1262

Merged · 13 commits · Oct 17, 2024
37 changes: 33 additions & 4 deletions analyzers/AbuseIPDB/abuseipdb.py
@@ -14,6 +14,8 @@ class AbuseIPDBAnalyzer(Analyzer):
    def extract_abuse_ipdb_category(category_number):
        # Reference: https://www.abuseipdb.com/categories
        mapping = {
            "1": "DNS Compromise",
            "2": "DNS Poisoning",
            "3": "Fraud Orders",
            "4": "DDOS Attack",
            "5": "FTP Brute-Force",
@@ -36,7 +38,7 @@ def extract_abuse_ipdb_category(category_number):
"22": "SSH",
"23": "IoT Targeted",
}
return mapping.get(str(category_number), 'unknown category')
return mapping.get(str(category_number), 'Unknown Category')

    def run(self):

@@ -76,11 +78,38 @@ def run(self):
        except Exception as e:
            self.unexpectedError(e)


    def summary(self, raw):
        taxonomies = []
        taxonomies = []  # level, namespace, predicate, value

        is_whitelisted = False
        data = {}
        if raw and 'values' in raw:
            data = raw['values'][0]['data']
        else:
            return {'taxonomies': []}

        if data.get('isWhitelisted', False):
            is_whitelisted = True
            taxonomies.append(self.build_taxonomy('info', 'AbuseIPDB', 'Is Whitelist', 'True'))

        if data.get('isTor', False):
            taxonomies.append(self.build_taxonomy('info', 'AbuseIPDB', 'Is Tor', 'True'))

        if raw and 'values' in raw and raw['values'][0]['data']['totalReports'] > 0 :
            taxonomies.append(self.build_taxonomy('malicious', 'AbuseIPDB', 'Records', raw['values'][0]['data']['totalReports']))
        if 'usageType' in data:
            taxonomies.append(self.build_taxonomy('info', 'AbuseIPDB', 'Usage Type', data['usageType']))

        if 'abuseConfidenceScore' in data:
            if data['abuseConfidenceScore'] > 0:
                taxonomies.append(self.build_taxonomy('suspicious', 'AbuseIPDB', 'Abuse Confidence Score', data['abuseConfidenceScore']))
            else:
                taxonomies.append(self.build_taxonomy('safe', 'AbuseIPDB', 'Abuse Confidence Score', 0))

        if data['totalReports'] > 0 :
            if is_whitelisted:
                taxonomies.append(self.build_taxonomy('info', 'AbuseIPDB', 'Records', data['totalReports']))
            else:
                taxonomies.append(self.build_taxonomy('malicious', 'AbuseIPDB', 'Records', data['totalReports']))
        else:
            taxonomies.append(self.build_taxonomy('safe', 'AbuseIPDB', 'Records', 0))

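For context, a rough sketch (not part of the PR) of what the reworked summary() produces for a hypothetical AbuseIPDB check result; the sample values below are invented:

# Invented example of the structure summary() expects in `raw`.
raw = {
    "values": [{"data": {
        "isWhitelisted": False,
        "isTor": True,
        "usageType": "Data Center/Web Hosting/Transit",
        "abuseConfidenceScore": 42,
        "totalReports": 7,
    }}]
}
# With the new logic this would yield, roughly:
#   info:AbuseIPDB:Is Tor = True
#   info:AbuseIPDB:Usage Type = Data Center/Web Hosting/Transit
#   suspicious:AbuseIPDB:Abuse Confidence Score = 42
#   malicious:AbuseIPDB:Records = 7   (info instead of malicious if isWhitelisted were true)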
18 changes: 9 additions & 9 deletions analyzers/Abuse_Finder/abusefinder.py
@@ -15,16 +15,16 @@
class AbuseFinderAnalyzer(Analyzer):

    def summary(self, raw):

        taxonomies = []
        if raw['abuse_finder'] and raw['abuse_finder'].get('abuse'):
            for abuse in raw['abuse_finder']['abuse']:
                taxonomies.append(self.build_taxonomy("info", "Abuse_Finder", "Address", abuse))
        else:
            taxonomies.append(self.build_taxonomy("info", "Abuse_Finder", "Address", "None"))
        return {"taxonomies": taxonomies}

        return {}
        try:
            if raw and raw['abuse_finder'].get('abuse'):
                for abuse in raw['abuse_finder']['abuse']:
                    taxonomies.append(self.build_taxonomy("info", "Abuse_Finder", "Address", abuse))
            else:
                taxonomies.append(self.build_taxonomy("info", "Abuse_Finder", "Address", "None"))
        except:
            pass
        return {"taxonomies": taxonomies}

    def abuse(self):
        if self.data_type == "ip":
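A quick illustration (mine, not from the PR) of why the summary body is now wrapped in try/except: if the full report is missing the abuse_finder section, the old code raised a KeyError and Cortex showed no short report, while the new version falls through to an empty taxonomy list. The sample input is hypothetical:

report_missing_section = {"some_other_key": {}}   # hypothetical malformed report
# Old: report_missing_section['abuse_finder'] -> KeyError propagated out of summary().
# New: the bare except catches it and summary() still returns {"taxonomies": []}.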
1 change: 1 addition & 0 deletions analyzers/Abuse_Finder/requirements.txt
@@ -1,3 +1,4 @@
cortexutils
git+https://github.com/monoidic/ipwhois.git
abuse_finder
future
3 changes: 2 additions & 1 deletion analyzers/Censys/requirements.txt
@@ -1,2 +1,3 @@
cortexutils
censys==1.1.1
argcomplete
censys==2.2.11
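The bump from censys 1.1.1 to 2.2.11 is a breaking API change, so the analyzer presumably moves to the 2.x client as well; as I understand the 2.x library, an IP lookup looks roughly like the sketch below (credentials and the IP are placeholders, and this is not code from the PR):

from censys.search import CensysHosts   # censys>=2.x package layout

hosts = CensysHosts(api_id="YOUR_API_ID", api_secret="YOUR_API_SECRET")
host = hosts.view("8.8.8.8")             # roughly replaces the 1.x CensysIPv4().view()
print(host.get("services", []))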
23 changes: 12 additions & 11 deletions analyzers/MISPWarningLists/mispwarninglists.py
@@ -161,18 +161,19 @@ def run(self):
"SELECT list_name, list_version, concat(subdomain, '.', domain, '.', tld) as value FROM warninglists WHERE (subdomain = '%s' or subdomain = '*') and domain = '%s' and tld = '%s'"
% (subdomain, domain, tld)
)
values = self.engine.execute(sql)
with self.engine.connect() as conn:
values = conn.execute(db.text(sql))
if values.rowcount > 0:
for row in values:
results.append(
{
key: value
for (key, value) in zip(
["list_name", "list_version", "value"], row
)
}
)
self.engine.dispose()
if values.rowcount > 0:
for row in values:
results.append(
{
key: value
for (key, value) in zip(
["list_name", "list_version", "value"], row
)
}
)
self.report({"results": results, "mode": "db", "is_uptodate": "N/A"})

    def summary(self, raw):
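The rewritten query path follows the SQLAlchemy 1.4/2.0 idiom: raw SQL goes through text() and is executed on a short-lived connection from a context manager. A self-contained sketch of that pattern, using a throwaway in-memory SQLite database rather than the analyzer's Postgres warninglists table:

import sqlalchemy as db

engine = db.create_engine("sqlite:///:memory:")
with engine.connect() as conn:
    conn.execute(db.text("CREATE TABLE warninglists (list_name TEXT, list_version TEXT)"))
    conn.execute(db.text("INSERT INTO warninglists VALUES ('tlds', '2024')"))
    rows = conn.execute(db.text("SELECT list_name, list_version FROM warninglists"))
    for row in rows:
        print(dict(zip(["list_name", "list_version"], row)))
engine.dispose()   # mirrors the analyzer's explicit cleanup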
7 changes: 5 additions & 2 deletions analyzers/MISPWarningLists/warninglists_create_db.py
@@ -20,7 +20,7 @@
conn_string = "<insert_postgres_conn_string>"
warninglists_path = "misp-warninglists/**/list.json"

engine = create_engine(conn_string, use_batch_mode=True)
engine = create_engine(conn_string)
conn = engine.connect()

# UPDATE TLD FROM MOZILLA
@@ -148,7 +148,10 @@


# CHECK IF OLD RELEASE ARE IN DB
s = select([warninglists.c.list_name, warninglists.c.list_version]).distinct()
try:
    s = select([warninglists.c.list_name, warninglists.c.list_version]).distinct()
except sqlalchemy.exc.ArgumentError:
    s = select(warninglists.c.list_name, warninglists.c.list_version).distinct()
last_versions = [x for x in conn.execute(s)]
print(f"{len(last_versions)} list already available in db")

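The try/except exists because select() changed signature: SQLAlchemy 1.3 takes the columns as a list, while 1.4/2.0 takes them positionally (and 2.0 rejects the list form with ArgumentError). A minimal illustration of the two call styles; the table definition is just for the example:

import sqlalchemy as sa

metadata = sa.MetaData()
warninglists = sa.Table(
    "warninglists", metadata,
    sa.Column("list_name", sa.String),
    sa.Column("list_version", sa.String),
)

# 1.3 style, rejected by 2.0 with sqlalchemy.exc.ArgumentError:
# s = sa.select([warninglists.c.list_name, warninglists.c.list_version]).distinct()

# 1.4/2.0 style:
s = sa.select(warninglists.c.list_name, warninglists.c.list_version).distinct()
print(s)   # roughly: SELECT DISTINCT warninglists.list_name, warninglists.list_version FROM warninglists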
4 changes: 2 additions & 2 deletions analyzers/MalwareClustering/requirements.txt
@@ -3,5 +3,5 @@ requests
pyimpfuzzy==0.5
# py2neo is EOL and older versions were deleted from PyPI https://github.com/neo4j-contrib/py2neo
py2neo==2021.2.4
apiscout==1.1.5
python-magic==0.4.22
apiscout
python-magic==0.4.27
2 changes: 1 addition & 1 deletion analyzers/Malwares/malwares_api.py
@@ -9,7 +9,7 @@ class Api():

    def __init__(self, api_key=None):
        self.api_key = api_key
        self.base = 'https://public.api.malwares.com/v3/'
        self.base = 'https://public.api.ctx.io/api/v22/'
        self.version = 2
        if api_key is None:
            raise ApiError("You must supply a valid Malwares API key.")
6 changes: 5 additions & 1 deletion analyzers/ProofPoint/proofpoint_lookup.py
@@ -64,7 +64,11 @@ def run(self):
            filename = self.get_param('attachment.name', 'noname.ext')
            filepath = self.get_param('file', None, 'File is missing')
            with open(filepath, "rb") as f:
                digest = hashlib.file_digest(f, "sha256")
                try:
                    digest = hashlib.file_digest(f, "sha256")
                except AttributeError:
                    # python 3.9
                    digest = hashlib.sha256(open(filepath, 'rb').read())
            sha256 = digest.hexdigest()
        elif self.data_type == 'hash' and len(self.get_data()) == 64:
            sha256 = self.get_data()
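hashlib.file_digest() only exists on Python 3.11+, which is what the AttributeError branch is guarding against. A standalone sketch of the same idea; the chunked fallback is my own wording, not the PR's:

import hashlib

def sha256_of_file(path: str) -> str:
    with open(path, "rb") as f:
        try:
            digest = hashlib.file_digest(f, "sha256")   # Python 3.11+
        except AttributeError:
            # Older interpreters: hash the open file manually, in chunks.
            digest = hashlib.sha256()
            for chunk in iter(lambda: f.read(8192), b""):
                digest.update(chunk)
    return digest.hexdigest()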
30 changes: 16 additions & 14 deletions analyzers/Virusshare/getHashes.sh
@@ -3,7 +3,7 @@


display_usage() {
    echo "getHashes v0.2"
    echo "getHashes v0.3"
    echo " Fetch all Virusshare.com hashes"
    echo -e "\n Usage: $0 <path> \n"
}
@@ -20,17 +20,19 @@ if [ ! -d $1 ]; then

fi

cd $1
for u in `curl https://virusshare.com/hashes.4n6 | grep -E "VirusShare_[0-9]{5}\.md5" | c\
ut -d\" -f2 | cut -d\/ -f2`
WD=$1
declare -a base_urls=($(printf 'url=https://virusshare.com/hashfiles/%0.s\n' {1..1}))
declare -a base_outs=($(printf 'output=./%0.s\n' {1..1}))

pushd $WD
while mapfile -t -n 8 ary && ((${#ary[@]}));
do
    echo $u
    if [ -e $1/$u ]; then
        echo "File already downloaded"
    else
        wget https://virusshare.com/hashes/$u
        sleep 3
    fi

done | tee -a ../$0.log
cd ..
    rm -f ../config
    IFS=,
    eval echo "${base_urls[*]}"{"${ary[*]}"} | tr " " "\n" >> ../config
    eval echo "${base_outs[*]}"{"${ary[*]}"} | tr " " "\n" >> ../config
    curl -s -N --parallel --parallel-immediate --parallel-max 8 --config config | tee -a ../$0.log
    sleep 3
done <<< `curl -s -L https://virusshare.com/hashes.4n6 | grep -E "VirusShare_[0-9]{5}\.md5" | cut -d\" -f2 | cut -d\/ -f2`
popd
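For reference, each pass of the loop turns a batch of up to eight hash-file names into a curl config of url=/output= pairs; with a hypothetical batch the generated config would look roughly like this (file names follow the VirusShare_NNNNN.md5 pattern matched by the grep above):

url=https://virusshare.com/hashfiles/VirusShare_00000.md5
url=https://virusshare.com/hashfiles/VirusShare_00001.md5
output=./VirusShare_00000.md5
output=./VirusShare_00001.md5

curl then downloads the batch with up to eight parallel transfers before the script sleeps and moves on to the next batch.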