diff --git a/analyzer/windows/dll/CAPE.dll b/analyzer/windows/dll/CAPE.dll
index dd3220eeb..e69a4c358 100644
Binary files a/analyzer/windows/dll/CAPE.dll and b/analyzer/windows/dll/CAPE.dll differ
diff --git a/analyzer/windows/dll/CAPE_x64.dll b/analyzer/windows/dll/CAPE_x64.dll
index 1482f5f27..f6a710956 100644
Binary files a/analyzer/windows/dll/CAPE_x64.dll and b/analyzer/windows/dll/CAPE_x64.dll differ
diff --git a/analyzer/windows/dll/Sedreco.dll b/analyzer/windows/dll/Sedreco.dll
index 6ac6fb02f..fa27f97af 100644
Binary files a/analyzer/windows/dll/Sedreco.dll and b/analyzer/windows/dll/Sedreco.dll differ
diff --git a/analyzer/windows/dll/Sedreco_x64.dll b/analyzer/windows/dll/Sedreco_x64.dll
index eea8cf065..2b750e822 100644
Binary files a/analyzer/windows/dll/Sedreco_x64.dll and b/analyzer/windows/dll/Sedreco_x64.dll differ
diff --git a/data/yara/CAPE/Cryptoshield.yar b/data/yara/CAPE/Cryptoshield.yar
new file mode 100644
index 000000000..1e2c3affd
--- /dev/null
+++ b/data/yara/CAPE/Cryptoshield.yar
@@ -0,0 +1,13 @@
+rule Cryptoshield
+{
+    meta:
+        author = "kevoreilly"
+        description = "Cryptoshield Payload"
+        cape_type = "Cryptoshield Payload"
+    strings:
+        $a1 = "CRYPTOSHIELD." wide
+        $a2 = "Click on Yes in the next window for restore work explorer" wide
+        $a3 = "r_sp@india.com - SUPPORT"
+    condition:
+        uint16(0) == 0x5A4D and (all of ($a*))
+}
diff --git a/data/yara/CAPE/Loki.yar b/data/yara/CAPE/Loki.yar
new file mode 100644
index 000000000..d11baec15
--- /dev/null
+++ b/data/yara/CAPE/Loki.yar
@@ -0,0 +1,12 @@
+rule Loki
+{
+    meta:
+        author = "kevoreilly"
+        description = "Loki Payload"
+        cape_type = "Loki Payload"
+    strings:
+        $a1 = "DlRycq1tP2vSeaogj5bEUFzQiHT9dmKCn6uf7xsOY0hpwr43VINX8JGBAkLMZW"
+        $a2 = "last_compatible_version"
+    condition:
+        uint16(0) == 0x5A4D and (all of ($a*))
+}
diff --git a/modules/processing/CAPE.py b/modules/processing/CAPE.py
index 4cbae49e0..953ef8dbf 100644
--- a/modules/processing/CAPE.py
+++ b/modules/processing/CAPE.py
@@ -433,7 +433,8 @@ def process_file(self, file_path, CAPE_output, append_file):
             cape_config["cape_name"] = format(cape_name)
             if not "cape" in self.results:
                 #self.results["cape"] = []
-                self.results["cape"] = cape_name
+                if cape_name != "UPX":
+                    self.results["cape"] = cape_name
             #if cape_name not in self.results["cape"]:
             #    self.results["cape"].append(cape_name)
diff --git a/modules/reporting/mongodb.py b/modules/reporting/mongodb.py
index 98b6ac3d8..8f1502663 100644
--- a/modules/reporting/mongodb.py
+++ b/modules/reporting/mongodb.py
@@ -4,11 +4,18 @@
 import logging
 import os
+import json
+import six
+import imagehash
+import zlib
 
 from lib.cuckoo.common.abstracts import Report
 from lib.cuckoo.common.exceptions import CuckooDependencyError
 from lib.cuckoo.common.exceptions import CuckooReportError
 from lib.cuckoo.common.objects import File
+from bson import ObjectId
+from bson.binary import Binary
+from PIL import Image
 
 try:
     from pymongo import MongoClient
@@ -19,6 +26,46 @@
 
 log = logging.getLogger(__name__)
 
+class JSONEncoder(json.JSONEncoder):
+    def default(self, o):
+        if isinstance(o, ObjectId):
+            return str(o)
+        return json.JSONEncoder.default(self, o)
+
+def deduplicate_images(userpath, hashfunc=imagehash.average_hash):
+    """
+    Remove duplicate images from a path.
+    :userpath: path of the image files
+    :hashfunc: type of image hashing method
+    """
+    def is_image(filename):
+        img_ext = [".jpg", ".png", ".gif", ".bmp"]
+        f = filename.lower()
+        return any(f.endswith(ext) for ext in img_ext)
+
+    #log.debug("Deduplicate images...{}".format(userpath))
+    """
+    Available hash functions:
+    ahash:      Average hash
+    phash:      Perceptual hash
+    dhash:      Difference hash
+    whash-haar: Haar wavelet hash
+    whash-db4:  Daubechies wavelet hash
+    """
+    dd_img_set = []
+
+    image_filenames = [os.path.join(userpath, path) for path in os.listdir(userpath) if is_image(path)]
+    images = {}
+    for img in sorted(image_filenames):
+        hash = hashfunc(Image.open(img))
+        images[hash] = images.get(hash, []) + [img]
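+    # Screenshots with the same perceptual hash share a bucket; only the
+    # first image (in sorted filename order) of each bucket is kept below.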
+    for k, img_list in six.iteritems(images):
+        #if len(img_list) > 1:
+        dd_img_set.append(os.path.basename(img_list[0]))
+        #print(",".join(img_list))
+    dd_img_set.sort()
+    return dd_img_set
+
 class MongoDB(Report):
     """Stores report in MongoDB."""
 
     order = 9999
@@ -43,6 +90,9 @@ def connect(self):
             raise CuckooReportError("Cannot connect to MongoDB")
 
     def debug_dict_size(self, dct):
+        if type(dct) == list:
+            dct = dct[0]
+
         totals = dict((k, 0) for k in dct)
         def walk(root, key, val):
             if isinstance(val, dict):
@@ -93,8 +143,23 @@ def run(self, results):
 
         # Add screenshot paths
        report["shots"] = []
+        report["deduplicated_shots"] = []
+
+        hashmethod = "whash-db4"
+        if hashmethod == 'ahash':
+            hashfunc = imagehash.average_hash
+        elif hashmethod == 'phash':
+            hashfunc = imagehash.phash
+        elif hashmethod == 'dhash':
+            hashfunc = imagehash.dhash
+        elif hashmethod == 'whash-haar':
+            hashfunc = imagehash.whash
+        elif hashmethod == 'whash-db4':
+            hashfunc = lambda img: imagehash.whash(img, mode='db4') # sg_052017
+
         shots_path = os.path.join(self.analysis_path, "shots")
         if os.path.exists(shots_path):
+            report["deduplicated_shots"] = [f.replace(".jpg","") for f in deduplicate_images(userpath=shots_path, hashfunc=hashfunc)] #sg_052017
             shots = [shot for shot in os.listdir(shots_path)
                      if shot.endswith(".jpg")]
             for shot_file in sorted(shots):
@@ -176,6 +241,31 @@ def run(self, results):
         # Note: Silently ignores the creation if the index already exists.
         self.db.analysis.create_index("info.id", background=True)
 
+        # In case the data exceeds MongoDB's 16MB document limit,
+        # be prepared to save it into a JSON file instead.
+        save_json_analyses = os.path.join(self.analysis_path, "analyses.json")
+        json_data = JSONEncoder().encode(report)
+
+        # Compress CAPE output
+        if "CAPE" in report:
+            cape_json = json.dumps(report["CAPE"]).encode('utf8')
+            compressed_CAPE = zlib.compress(cape_json)
+            report["CAPE"] = Binary(compressed_CAPE)
+            #log.debug("CAPE output size before compression: {}, after compression: {}".format(len(cape_json), len(compressed_CAPE)))
+
+        # Compress behavioural analysis (enhanced & summary)
+        if "enhanced" in report["behavior"]:
+            compressed_behavior_enhanced = zlib.compress(JSONEncoder().encode(report["behavior"]["enhanced"]).encode('utf8'))
+            report["behavior"]["enhanced"] = Binary(compressed_behavior_enhanced)
+        if "summary" in report["behavior"]:
+            compressed_behavior_summary = zlib.compress(JSONEncoder().encode(report["behavior"]["summary"]).encode('utf8'))
+            report["behavior"]["summary"] = Binary(compressed_behavior_summary)
+
+        # Compress virustotal results
+        if "virustotal" in report:
+            compressed_vt = zlib.compress(JSONEncoder().encode(report["virustotal"]).encode('utf8'))
+            report["virustotal"] = Binary(compressed_vt)
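+        # These Binary fields are decompressed again in web/analysis/views.py
+        # when the report is rendered.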
+
         # Store the report and retrieve its object id.
         try:
             self.db.analysis.save(report)
@@ -193,6 +283,15 @@ def run(self, results):
             while error_saved:
                 log.warn("results['%s']['%s'] deleted due to >16MB size (%dMB)" %
                          (parent_key, child_key, int(psize) / 1048576))
+
+                if type(report) == list:
+                    report = report[0]
+
+                with open(save_json_analyses, "w") as f:
+                    f.write(json_data)
+                log.warn("results['%s']['%s'](%dMB) > saved as %s" %
+                         (parent_key, child_key, int(psize) / 1048576, save_json_analyses))
+
                 del report[parent_key][child_key]
                 try:
                     self.db.analysis.save(report)
diff --git a/modules/reporting/submitCAPE.py b/modules/reporting/submitCAPE.py
index 249d4800d..988980b82 100644
--- a/modules/reporting/submitCAPE.py
+++ b/modules/reporting/submitCAPE.py
@@ -262,6 +262,10 @@ def run(self, results):
             self.task_options = ','.join(self.task_options_stack)
 
         if package:
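+            # Record the parent task ID (and any original custom value) so the
+            # spawned CAPE task can be linked back to this analysis in the web UI.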
+            self.task_custom = "Parent_Task_ID:%s" % report["info"]["id"]
+            if report["info"].has_key("custom") and report["info"]["custom"]:
+                self.task_custom = "%s Parent_Custom:%s" % (self.task_custom, report["info"]["custom"])
+
             task_id = db.add_path(file_path=self.task["target"],
                                   package=package,
                                   timeout=self.task["timeout"],
@@ -272,7 +276,8 @@ def run(self, results):
                                   memory=self.task["memory"],
                                   enforce_timeout=self.task["enforce_timeout"],
                                   clock=None,
-                                  tags=None)
+                                  tags=None,
+                                  parent_id=int(report["info"]["id"]))
             if task_id:
                 log.info(u"CAPE detection on file \"{0}\": {1} - added as CAPE task with ID {2}".format(self.task["target"], package, task_id))
             else:
@@ -281,6 +286,11 @@ def run(self, results):
         else: # nothing submitted, only 'dumpers' left
             if parent_package == "Extraction" or parent_package == "Injection" or parent_package == "Compression":
                 return
+
+            self.task_custom = "Parent_Task_ID:%s" % report["info"]["id"]
+            if report["info"].has_key("custom") and report["info"]["custom"]:
+                self.task_custom = "%s Parent_Custom:%s" % (self.task_custom, report["info"]["custom"])
+
             for dumper in detections:
                 task_id = db.add_path(file_path=self.task["target"],
                                       package=dumper,
@@ -292,7 +302,8 @@ def run(self, results):
                                       memory=self.task["memory"],
                                       enforce_timeout=self.task["enforce_timeout"],
                                       clock=None,
-                                      tags=None)
+                                      tags=None,
+                                      parent_id=int(report["info"]["id"]))
                 if task_id:
                     log.info(u"CAPE detection on file \"{0}\": {1} - added as CAPE task with ID {2}".format(self.task["target"], dumper, task_id))
                 else:
diff --git a/web/analysis/views.py b/web/analysis/views.py
index 7d576e733..167b66d03 100644
--- a/web/analysis/views.py
+++ b/web/analysis/views.py
@@ -14,7 +14,9 @@
 import json
 import zipfile
 import tempfile
+import zlib
 
+from bson.binary import Binary
 from django.conf import settings
 from wsgiref.util import FileWrapper
 from django.http import HttpResponse, StreamingHttpResponse
@@ -284,6 +287,7 @@ def index(request, page=1):
     paging["urls_page_range"] = urls_pages
     paging["pcaps_page_range"] = pcaps_pages
     paging["current_page"] = page
+    analyses_files.sort(key=lambda x: x["id"], reverse=True)
     return render(request, "analysis/index.html",
                   {"files": analyses_files, "urls": analyses_urls, "pcaps": analyses_pcaps,
                    "paging": paging, "config": enabledconf})
@@ -741,6 +745,37 @@ def report(request, task_id):
         return render(request, "error.html",
                       {"error": "The specified analysis does not exist"})
+
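+    # children becomes a list of (task ID, package) pairs for CAPE tasks
+    # spawned from this analysis; it stays 0 (falsy) when there are none.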
+    children = 0
+    # decompress CAPE data
+    if "CAPE" in report:
+        try:
+            report["CAPE"] = json.loads(zlib.decompress(report["CAPE"]))
+            session = db.Session()
+            children = [c for c in session.query(Task.id, Task.package).filter(Task.parent_id == task_id)]
+        except:
+            # backward compatibility
+            pass
+
+    # decompress behaviour analysis (enhanced & summary)
+    if "enhanced" in report["behavior"]:
+        try:
+            report["behavior"]["enhanced"] = json.loads(zlib.decompress(report["behavior"]["enhanced"]))
+        except:
+            # backward compatibility
+            pass
+    if "summary" in report["behavior"]:
+        try:
+            report["behavior"]["summary"] = json.loads(zlib.decompress(report["behavior"]["summary"]))
+        except:
+            pass
+
+    # decompress virustotal
+    if "virustotal" in report:
+        try:
+            report["virustotal"] = json.loads(zlib.decompress(report["virustotal"]))
+        except:
+            pass
+
     if settings.MOLOCH_ENABLED and "suricata" in report:
         suricata = report["suricata"]
         if settings.MOLOCH_BASE[-1] != "/":
@@ -805,6 +840,7 @@ def report(request, task_id):
     return render(request, "analysis/report.html",
                   {"analysis": report,
+                   "children": children,
                    "domainlookups": domainlookups,
                    "iplookups": iplookups,
                    "similar": similarinfo,
diff --git a/web/templates/analysis/overview/_screenshots.html b/web/templates/analysis/overview/_screenshots.html
index 52b45498d..712cf07b4 100644
--- a/web/templates/analysis/overview/_screenshots.html
+++ b/web/templates/analysis/overview/_screenshots.html
@@ -1,6 +1,14 @@
 
 Screenshots
 
-{% if analysis.shots %}
+{% if analysis.deduplicated_shots %}
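+{# deduplicated_shots is produced by the image-hash pass in modules/reporting/mongodb.py; when it is empty, the full shot list below is used instead. #}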
+
+{% for shot in analysis.deduplicated_shots %}
+
+
+
+{% endfor %}
+
+{% elif analysis.shots %}
 {% for shot in analysis.shots %}
 
diff --git a/web/templates/analysis/overview/index.html b/web/templates/analysis/overview/index.html
index 33018e26f..36a068381 100644
--- a/web/templates/analysis/overview/index.html
+++ b/web/templates/analysis/overview/index.html
@@ -1,4 +1,11 @@
 {% include "analysis/overview/_info.html" %}
+{% if children|length > 0 %}
+
+Related CAPE Tasks
+{% for child in children %}
+Task #{{ child.0 }}: {{ child.1 }}
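+{# child.0 is the child task ID, child.1 its CAPE package, per the Task.id/Task.package query in web/analysis/views.py #}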
+{% endfor %}
+{% endif %}
 {% if analysis.info.category != "pcap" %}
 {% if analysis.info.category == "file" and analysis.target %}
 
@@ -7,7 +14,7 @@
 {% include "analysis/overview/_url.html" %}
 {% endif %}
 {% endif %}
-
+
 {% include "analysis/overview/_signatures.html" %}
 {% if analysis.info.category != "pcap" %}
 {% if analysis.usage %}