Skip to content

Commit

Permalink
Added links to child jobs in interface (thanks to marirs). Bugfix in
Browse files Browse the repository at this point in the history
Sedreco package. Sigs for Loki and Cryptoshield payloads.
  • Loading branch information
kevoreilly committed Jun 13, 2017
1 parent d2a4769 commit 54a4bc0
Show file tree
Hide file tree
Showing 12 changed files with 192 additions and 5 deletions.
Binary file modified analyzer/windows/dll/CAPE.dll
Binary file not shown.
Binary file modified analyzer/windows/dll/CAPE_x64.dll
Binary file not shown.
Binary file modified analyzer/windows/dll/Sedreco.dll
Binary file not shown.
Binary file modified analyzer/windows/dll/Sedreco_x64.dll
Binary file not shown.
13 changes: 13 additions & 0 deletions data/yara/CAPE/Cryptoshield.yar
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
// Detects CryptoShield ransomware payloads extracted by CAPE.
rule Cryptoshield
{
meta:
author = "kevoreilly"
description = "Cryptoshield Payload"
// cape_type is read by CAPE's processing module to label the payload.
cape_type = "Cryptoshield Payload"
strings:
// Ransom-note / UI artifacts specific to CryptoShield.
$a1 = "CRYPTOSHIELD." wide
$a2 = "Click on Yes in the next window for restore work explorer" wide
$a3 = "[email protected] - SUPPORT"
condition:
// 0x5A4D == "MZ": require a PE header, plus all three marker strings.
uint16(0) == 0x5A4D and (all of ($a*))
}
12 changes: 12 additions & 0 deletions data/yara/CAPE/Loki.yar
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
// Detects Loki (Loki-Bot stealer) payloads extracted by CAPE.
rule Loki
{
meta:
author = "kevoreilly"
description = "Loki Payload"
// cape_type is read by CAPE's processing module to label the payload.
cape_type = "Loki Payload"
strings:
// $a1 is a hard-coded alphabet/key string found in Loki samples.
$a1 = "DlRycq1tP2vSeaogj5bEUFzQiHT9dmKCn6uf7xsOY0hpwr43VINX8JGBAkLMZW"
$a2 = "last_compatible_version"
condition:
// 0x5A4D == "MZ": require a PE header, plus both marker strings.
uint16(0) == 0x5A4D and (all of ($a*))
}
3 changes: 2 additions & 1 deletion modules/processing/CAPE.py
Original file line number Diff line number Diff line change
Expand Up @@ -433,7 +433,8 @@ def process_file(self, file_path, CAPE_output, append_file):
cape_config["cape_name"] = format(cape_name)
if not "cape" in self.results:
#self.results["cape"] = []
self.results["cape"] = cape_name
if cape_name != "UPX":
self.results["cape"] = cape_name
#if cape_name not in self.results["cape"]:
# self.results["cape"].append(cape_name)

Expand Down
99 changes: 99 additions & 0 deletions modules/reporting/mongodb.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,18 @@

import logging
import os
import json
import six
import imagehash
import zlib

from lib.cuckoo.common.abstracts import Report
from lib.cuckoo.common.exceptions import CuckooDependencyError
from lib.cuckoo.common.exceptions import CuckooReportError
from lib.cuckoo.common.objects import File
from bson import ObjectId
from bson.binary import Binary
from PIL import Image

try:
from pymongo import MongoClient
Expand All @@ -19,6 +26,46 @@

log = logging.getLogger(__name__)

class JSONEncoder(json.JSONEncoder):
    """JSON encoder that also accepts bson ``ObjectId`` values.

    MongoDB documents carry ``ObjectId`` instances, which the stock
    encoder rejects; this subclass serializes them as their string form.
    """

    def default(self, o):
        # ObjectId is not natively JSON-serializable: emit its string
        # representation. Anything else is deferred to the base class,
        # which raises TypeError for unsupported types as usual.
        if isinstance(o, ObjectId):
            return str(o)
        return super(JSONEncoder, self).default(o)

def deduplicate_images(userpath, hashfunc=imagehash.average_hash):
    """Return a sorted list of unique image basenames found in *userpath*.

    Files are grouped by perceptual hash so visually identical screenshots
    collapse into one group; the lexicographically first file of each group
    is kept as its representative.

    Available imagehash functions:
        ahash:      Average hash
        phash:      Perceptual hash
        dhash:      Difference hash
        whash-haar: Haar wavelet hash
        whash-db4:  Daubechies wavelet hash

    :param userpath: directory containing the image files
    :param hashfunc: imagehash hashing callable used for grouping
    :return: sorted list of de-duplicated image file basenames
    """
    # ".gif" appeared twice in the original list; tuple also lets us use
    # the multi-suffix form of str.endswith below.
    img_exts = (".jpg", ".png", ".gif", ".bmp")

    def is_image(filename):
        # Cheap extension-based check; file contents are not inspected.
        return filename.lower().endswith(img_exts)

    image_filenames = [os.path.join(userpath, path)
                       for path in os.listdir(userpath) if is_image(path)]

    # Group files by perceptual hash; sorted() makes the first file of each
    # group (and thus the chosen representative) deterministic.
    images = {}
    for img in sorted(image_filenames):
        img_hash = hashfunc(Image.open(img))  # renamed: don't shadow builtin hash()
        images.setdefault(img_hash, []).append(img)

    # Keep one representative per hash group.
    dd_img_set = [os.path.basename(img_list[0]) for img_list in images.values()]
    dd_img_set.sort()
    return dd_img_set

class MongoDB(Report):
"""Stores report in MongoDB."""
order = 9999
Expand All @@ -43,6 +90,9 @@ def connect(self):
raise CuckooReportError("Cannot connect to MongoDB")

def debug_dict_size(self, dct):
if type(dct) == list:
dct = dct[0]

totals = dict((k, 0) for k in dct)
def walk(root, key, val):
if isinstance(val, dict):
Expand Down Expand Up @@ -93,8 +143,23 @@ def run(self, results):

# Add screenshot paths
report["shots"] = []
report["deduplicated_shots"] = []

hashmethod = "whash-db4"
if hashmethod == 'ahash':
hashfunc = imagehash.average_hash
elif hashmethod == 'phash':
hashfunc = imagehash.phash
elif hashmethod == 'dhash':
hashfunc = imagehash.dhash
elif hashmethod == 'whash-haar':
hashfunc = imagehash.whash
elif hashmethod == 'whash-db4':
hashfunc = lambda img: imagehash.whash(img, mode='db4') # sg_052017

shots_path = os.path.join(self.analysis_path, "shots")
if os.path.exists(shots_path):
report["deduplicated_shots"] = [f.replace(".jpg","") for f in deduplicate_images(userpath=shots_path, hashfunc=hashfunc)] #sg_052017
shots = [shot for shot in os.listdir(shots_path)
if shot.endswith(".jpg")]
for shot_file in sorted(shots):
Expand Down Expand Up @@ -176,6 +241,31 @@ def run(self, results):
# Note: Silently ignores the creation if the index already exists.
self.db.analysis.create_index("info.id", background=True)

# In case data exceeds mongodb limit of 16MB,
# be prepared to save into a json file
save_json_analyses = os.path.join(self.analysis_path, "analyses.json")
json_data = JSONEncoder().encode(report)

# Compress CAPE output
if "CAPE" in report:
cape_json = json.dumps(report["CAPE"]).encode('utf8')
compressed_CAPE = zlib.compress(cape_json)
report["CAPE"] = Binary(compressed_CAPE)
#log.debug("CAPE output size before compression: {}, after compression: {}".format(len(cape_json), len(compressed_CAPE)))

# Compress behavioural analysis (enhanced & summary)
if "enhanced" in report["behavior"]:
compressed_behavior_enhanced = zlib.compress(JSONEncoder().encode(report["behavior"]["enhanced"]).encode('utf8'))
report["behavior"]["enhanced"] = Binary(compressed_behavior_enhanced)
if "summary" in report["behavior"]:
compressed_behavior_summary = zlib.compress(JSONEncoder().encode(report["behavior"]["summary"]).encode('utf8'))
report["behavior"]["summary"] = Binary(compressed_behavior_summary)

# Compress virustotal results
if "virustotal" in report:
compressed_vt = zlib.compress(JSONEncoder().encode(report["virustotal"]).encode('utf8'))
report["virustotal"] = Binary(compressed_vt)

# Store the report and retrieve its object id.
try:
self.db.analysis.save(report)
Expand All @@ -193,6 +283,15 @@ def run(self, results):
while error_saved:
log.warn("results['%s']['%s'] deleted due to >16MB size (%dMB)" %
(parent_key, child_key, int(psize) / 1048576))

if type(report) == list:
report = report[0]

with open(save_json_analyses, "w") as f:
f.write(json_data)
log.warn("results['%s']['%s'](%dMB) > saved as %s" %
(parent_key, child_key, int(psize) / 1048576, save_json_analyses))

del report[parent_key][child_key]
try:
self.db.analysis.save(report)
Expand Down
15 changes: 13 additions & 2 deletions modules/reporting/submitCAPE.py
Original file line number Diff line number Diff line change
Expand Up @@ -262,6 +262,10 @@ def run(self, results):
self.task_options=','.join(self.task_options_stack)

if package:
self.task_custom="Parent_Task_ID:%s" % report["info"]["id"]
if report["info"].has_key("custom") and report["info"]["custom"]:
self.task_custom = "%s Parent_Custom:%s" % (self.task_custom,report["info"]["custom"])

task_id = db.add_path(file_path=self.task["target"],
package=package,
timeout=self.task["timeout"],
Expand All @@ -272,7 +276,8 @@ def run(self, results):
memory=self.task["memory"],
enforce_timeout=self.task["enforce_timeout"],
clock=None,
tags=None)
tags=None,
parent_id=int(report["info"]["id"]))
if task_id:
log.info(u"CAPE detection on file \"{0}\": {1} - added as CAPE task with ID {2}".format(self.task["target"], package, task_id))
else:
Expand All @@ -281,6 +286,11 @@ def run(self, results):
else: # nothing submitted, only 'dumpers' left
if parent_package == "Extraction" or parent_package == "Injection" or parent_package == "Compression":
return

self.task_custom="Parent_Task_ID:%s" % report["info"]["id"]
if report["info"].has_key("custom") and report["info"]["custom"]:
self.task_custom = "%s Parent_Custom:%s" % (self.task_custom,report["info"]["custom"])

for dumper in detections:
task_id = db.add_path(file_path=self.task["target"],
package=dumper,
Expand All @@ -292,7 +302,8 @@ def run(self, results):
memory=self.task["memory"],
enforce_timeout=self.task["enforce_timeout"],
clock=None,
tags=None)
tags=None,
parent_id=int(report["info"]["id"]))
if task_id:
log.info(u"CAPE detection on file \"{0}\": {1} - added as CAPE task with ID {2}".format(self.task["target"], dumper, task_id))
else:
Expand Down
36 changes: 36 additions & 0 deletions web/analysis/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,10 @@
import json
import zipfile
import tempfile
import zlib

from bson.binary import Binary
from bson.binary import Binary
from django.conf import settings
from wsgiref.util import FileWrapper
from django.http import HttpResponse, StreamingHttpResponse
Expand Down Expand Up @@ -284,6 +287,7 @@ def index(request, page=1):
paging["urls_page_range"] = urls_pages
paging["pcaps_page_range"] = pcaps_pages
paging["current_page"] = page
analyses_files.sort(key=lambda x: x["id"], reverse=True)
return render(request, "analysis/index.html",
{"files": analyses_files, "urls": analyses_urls, "pcaps": analyses_pcaps,
"paging": paging, "config": enabledconf})
Expand Down Expand Up @@ -741,6 +745,37 @@ def report(request, task_id):
return render(request, "error.html",
{"error": "The specified analysis does not exist"})

children = 0
# decompress CAPE data
if "CAPE" in report:
try:
report["CAPE"] = json.loads(zlib.decompress(report["CAPE"]))
session = db.Session()
children = [c for c in session.query(Task.id,Task.package).filter(Task.parent_id == task_id)]
except:
# backward compatibility
pass

# decompress behaviour analysis (enhanced & summary)
if "enhanced" in report["behavior"]:
try:
report["behavior"]["enhanced"] = json.loads(zlib.decompress(report["behavior"]["enhanced"]))
except:
# backward compatibility
pass
if "summary" in report["behavior"]:
try:
report["behavior"]["summary"] = json.loads(zlib.decompress(report["behavior"]["summary"]))
except:
pass

# decompress virustotal
if "virustotal" in report:
try:
report["virustotal"] = json.loads(zlib.decompress(report["virustotal"]))
except:
pass

if settings.MOLOCH_ENABLED and "suricata" in report:
suricata = report["suricata"]
if settings.MOLOCH_BASE[-1] != "/":
Expand Down Expand Up @@ -805,6 +840,7 @@ def report(request, task_id):

return render(request, "analysis/report.html",
{"analysis": report,
"children" : children,
"domainlookups": domainlookups,
"iplookups": iplookups,
"similar": similarinfo,
Expand Down
10 changes: 9 additions & 1 deletion web/templates/analysis/overview/_screenshots.html
Original file line number Diff line number Diff line change
@@ -1,6 +1,14 @@
<section id="screenshots">
<h4>Screenshots</h4>
{% if analysis.shots %}
{% if analysis.deduplicated_shots %}
<div>
{% for shot in analysis.deduplicated_shots %}
<a data-lightbox="screenshot" href="{% url "file" "screenshot" analysis.info.id shot %}">
<img class="opaque" src="{% url "file" "screenshot" analysis.info.id shot %}" style="height: 120px;" />
</a>
{% endfor %}
</div>
{% elif analysis.shots %}
<div>
{% for shot in analysis.shots %}
<a data-lightbox="screenshot" href="{% url "file" "screenshot" analysis.info.id shot %}">
Expand Down
9 changes: 8 additions & 1 deletion web/templates/analysis/overview/index.html
Original file line number Diff line number Diff line change
@@ -1,4 +1,11 @@
{% include "analysis/overview/_info.html" %}
{% if children|length > 0 %}
<hr />
<h4>Related CAPE Tasks</h4>
{% for child in children %}
Task <a href="/submit/status/{{ child.0 }}/">#{{ child.0 }}: {{ child.1 }}</a><br/>
{% endfor %}
{% endif %}
<hr />
{% if analysis.info.category != "pcap" %}
{% if analysis.info.category == "file" and analysis.target %}
Expand All @@ -7,7 +14,7 @@
{% include "analysis/overview/_url.html" %}
{% endif %}
{% endif %}
<hr />
<hr/>
{% include "analysis/overview/_signatures.html" %}
{% if analysis.info.category != "pcap" %}
{% if analysis.usage %}
Expand Down

0 comments on commit 54a4bc0

Please sign in to comment.