diff --git a/analyzer/windows/analyzer.py b/analyzer/windows/analyzer.py
index b6d21ddeb..84158dbf6 100644
--- a/analyzer/windows/analyzer.py
+++ b/analyzer/windows/analyzer.py
@@ -32,7 +32,7 @@
 from lib.common.defines import EVENT_MODIFY_STATE, SECURITY_DESCRIPTOR, SECURITY_ATTRIBUTES, SYSTEMTIME
 from lib.common.exceptions import CuckooError, CuckooPackageError
 from lib.common.hashing import hash_file
-from lib.common.results import upload_to_host
+from lib.common.results import upload_to_host, upload_to_host_with_metadata
 from lib.core.config import Config
 from lib.core.packages import choose_package
 from lib.core.privileges import grant_debug_privilege
@@ -49,6 +49,7 @@
 FILES_LIST = []
 DUMPED_LIST = []
 CAPE_DUMPED_LIST = []
+PROC_DUMPED_LIST = []
 UPLOADPATH_LIST = []
 PROCESS_LIST = []
 PROTECTED_PATH_LIST = []
@@ -205,8 +206,18 @@ def cape_file(file_path):
         upload_path = CAPE_DUMPED_LIST[idx]
     else:
         upload_path = os.path.join("CAPE", sha256)
+
+    if os.path.exists(file_path + "_info.txt"):
+        metadata = [line.strip() for line in open(file_path + "_info.txt")]
+        metastring = ""
+        for line in metadata:
+            metastring = metastring + line + ','
+    else:
+        log.warning("No metadata file for CAPE dump at path \"%s\"", file_path.encode("utf-8", "replace"))
+        metastring = file_path
+
     try:
-        upload_to_host(file_path, upload_path, duplicate)
+        upload_to_host_with_metadata(file_path, upload_path, metastring)
         if not duplicate:
             CAPE_DUMPED_LIST.append(sha256)
             CAPE_DUMPED_LIST.append(upload_path)
@@ -215,6 +226,51 @@
         log.error("Unable to upload CAPE file at path \"%s\": %s",
                   file_path.encode("utf-8", "replace"), e)
 
+def proc_dump(file_path):
+    """Create a copy of the given process dump file path."""
+    duplicate = False
+    try:
+        if os.path.exists(file_path):
+            sha256 = hash_file(hashlib.sha256, file_path)
+            if sha256 in PROC_DUMPED_LIST:
+                # The file was already dumped, just upload the alternate name for it.
+                duplicate = True
+        else:
+            log.warning("Process dump at path \"%s\" does not exist, skip.",
+                        file_path.encode("utf-8", "replace"))
+            return
+    except IOError as e:
+        log.warning("Unable to access process dump at path \"%s\"", file_path.encode("utf-8", "replace"))
+        return
+
+    if os.path.isdir(file_path):
+        return
+    file_name = os.path.basename(file_path)
+    if duplicate:
+        idx = PROC_DUMPED_LIST.index(sha256)
+        upload_path = PROC_DUMPED_LIST[idx]
+    else:
+        upload_path = os.path.join("procdump", sha256)
+
+    if os.path.exists(file_path + "_info.txt"):
+        metadata = [line.strip() for line in open(file_path + "_info.txt")]
+        metastring = ""
+        for line in metadata:
+            metastring = metastring + line + ','
+    else:
+        log.warning("No metadata file for process dump at path \"%s\"", file_path.encode("utf-8", "replace"))
+        metastring = file_path
+
+    try:
+        upload_to_host_with_metadata(file_path, upload_path, metastring)
+        if not duplicate:
+            PROC_DUMPED_LIST.append(sha256)
+            PROC_DUMPED_LIST.append(upload_path)
+        log.info("Added new process dump to list with path: %s", unicode(file_path).encode("utf-8", "replace"))
+    except (IOError, socket.error) as e:
+        log.error("Unable to upload process dump at path \"%s\": %s",
+                  file_path.encode("utf-8", "replace"), e)
+
 def del_file(fname):
     global FILES_LIST
 
@@ -557,9 +613,10 @@ def run(self):
             if event_handle:
                 KERNEL32.SetEvent(event_handle)
                 KERNEL32.CloseHandle(event_handle)
-            if self.options.get("procmemdump"):
-                p = Process(pid=process_id)
-                p.dump_memory()
+            # Process dumping is now handled in-process (CAPE)
+            #if self.options.get("procmemdump"):
+            #    p = Process(pid=process_id)
+            #    p.dump_memory()
             dump_files()
             PROCESS_LOCK.release()
         # Handle case of malware terminating a process -- notify the target
@@ -577,9 +634,9 @@
             else:
                 log.info("Notified of termination of process with pid %u.", process_id)
             # dump the memory of exiting processes
-            if self.options.get("procmemdump"):
-                p = Process(pid=process_id)
-                p.dump_memory()
+            #if self.options.get("procmemdump"):
+            #    p = Process(pid=process_id)
+            #    p.dump_memory()
             # make sure process is aware of the termination
             KERNEL32.SetEvent(event_handle)
             KERNEL32.CloseHandle(event_handle)
@@ -750,6 +807,11 @@
                     file_path = unicode(command[10:].decode("utf-8"))
                     # We dump immediately.
                     cape_file(file_path)
+                elif command.startswith("FILE_DUMP:"):
+                    # We extract the file path.
+                    file_path = unicode(command[10:].decode("utf-8"))
+                    # We dump immediately.
+                    proc_dump(file_path)
                 # In case of FILE_DEL, the client is trying to notify an ongoing
                 # deletion of an existing file, therefore we need to dump it
                 # straight away.
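Note that cape_file() and proc_dump() above build the comma-separated metadata string in exactly the same way. A shared helper along these lines could remove the duplication; this is a minimal sketch only, and read_dump_metadata() is a hypothetical name, not something this patch adds:

import os

def read_dump_metadata(file_path, log=None):
    """Hypothetical helper: flatten the companion <dump>_info.txt file into the
    comma-separated string expected by upload_to_host_with_metadata(), falling
    back to the dump path itself when no metadata file exists, mirroring
    cape_file() and proc_dump() above."""
    info_path = file_path + "_info.txt"
    if not os.path.exists(info_path):
        if log:
            log.warning("No metadata file for dump at path \"%s\"", file_path)
        return file_path
    with open(info_path) as info:
        # Keep the trailing comma so the result matches the existing metastring format.
        return "".join(line.strip() + "," for line in info)

# Usage sketch: upload_to_host_with_metadata(dump_path, upload_path, read_dump_metadata(dump_path, log))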
diff --git a/analyzer/windows/dll/CAPE.dll b/analyzer/windows/dll/CAPE.dll
index 213ddde11..9e3ec061f 100644
Binary files a/analyzer/windows/dll/CAPE.dll and b/analyzer/windows/dll/CAPE.dll differ
diff --git a/analyzer/windows/dll/CAPE_x64.dll b/analyzer/windows/dll/CAPE_x64.dll
index 817907e0d..1b87cb9f4 100644
Binary files a/analyzer/windows/dll/CAPE_x64.dll and b/analyzer/windows/dll/CAPE_x64.dll differ
diff --git a/analyzer/windows/lib/api/process.py b/analyzer/windows/lib/api/process.py
index 74ca4a0ee..02da9877c 100644
--- a/analyzer/windows/lib/api/process.py
+++ b/analyzer/windows/lib/api/process.py
@@ -621,6 +621,10 @@ def inject(self, dll=None, injectmode=INJECT_QUEUEUSERAPC, interest=None, noslee
                 if optname in cfgoptions:
                     config.write("{0}={1}\n".format(optname, cfgoptions[optname]))
 
+            if "procmemdump" in cfgoptions:
+                config.write("procmemdump={0}\n".format(cfgoptions["procmemdump"]))
+            if "import_reconstruction" in cfgoptions:
+                config.write("import_reconstruction={0}\n".format(cfgoptions["import_reconstruction"]))
             if "breakpoint" in cfgoptions:
                 config.write("breakpoint={0}\n".format(cfgoptions["breakpoint"]))
 
@@ -742,6 +746,10 @@ def debug_inject(self, dll=None, interest=None, childprocess=False, nosleepskip=
             if firstproc:
                 Process.first_process = False
 
+            if "procmemdump" in cfgoptions:
+                config.write("procmemdump={0}\n".format(cfgoptions["procmemdump"]))
+            if "import_reconstruction" in cfgoptions:
+                config.write("import_reconstruction={0}\n".format(cfgoptions["import_reconstruction"]))
             if "breakpoint" in cfgoptions:
                 config.write("breakpoint={0}\n".format(cfgoptions["breakpoint"]))
 
diff --git a/analyzer/windows/lib/common/abstracts.py b/analyzer/windows/lib/common/abstracts.py
index d0b21b8ce..ecc65320b 100644
--- a/analyzer/windows/lib/common/abstracts.py
+++ b/analyzer/windows/lib/common/abstracts.py
@@ -186,10 +186,11 @@ def finish(self):
         If specified to do so, this method dumps the memory of all running
         processes.
         """
-        if self.options.get("procmemdump"):
-            for pid in self.pids:
-                p = Process(pid=pid)
-                p.dump_memory()
+        # Process dumping is now handled in-process (CAPE)
+        #if self.options.get("procmemdump"):
+        #    for pid in self.pids:
+        #        p = Process(pid=pid)
+        #        p.dump_memory()
 
         return True
 
diff --git a/analyzer/windows/lib/common/results.py b/analyzer/windows/lib/common/results.py
index a537edb0d..5db2e0df3 100644
--- a/analyzer/windows/lib/common/results.py
+++ b/analyzer/windows/lib/common/results.py
@@ -30,6 +30,24 @@ def upload_to_host(file_path, dump_path, duplicate):
     if nc:
         nc.close()
 
+def upload_to_host_with_metadata(file_path, dump_path, metadata):
+    nc = infd = None
+    duplicate = 0
+    try:
+        nc = NetlogBinary(unicode(metadata).encode("utf-8", "replace"), dump_path, duplicate)
+        infd = open(file_path, "rb")
+        buf = infd.read(BUFSIZE)
+        while buf:
+            nc.send(buf, retry=True)
+            buf = infd.read(BUFSIZE)
+    except Exception as e:
+        log.error("Exception uploading file {0} to host: {1}".format(unicode(file_path).encode("utf-8", "replace"), e))
+    finally:
+        if infd:
+            infd.close()
+        if nc:
+            nc.close()
+
 class NetlogConnection(object):
     def __init__(self, proto=""):
         config = Config(cfg="analysis.conf")
diff --git a/conf/processing.conf b/conf/processing.conf
index bf97f66dc..e03b52ebc 100644
--- a/conf/processing.conf
+++ b/conf/processing.conf
@@ -22,6 +22,9 @@ enabled = yes
 # Amount of text to carve from plaintext files (bytes)
 buffer = 8192
 
+[procdump]
+enabled = yes
+
 [memory]
 enabled = no
 
diff --git a/lib/cuckoo/common/abstracts.py b/lib/cuckoo/common/abstracts.py
index e7ebd3baa..e5a95d380 100644
--- a/lib/cuckoo/common/abstracts.py
+++ b/lib/cuckoo/common/abstracts.py
@@ -636,6 +636,7 @@ def set_path(self, analysis_path):
         self.file_path = os.path.realpath(os.path.join(self.analysis_path, "binary"))
         self.dropped_path = os.path.join(self.analysis_path, "files")
+        self.procdump_path = os.path.join(self.analysis_path, "procdump")
         self.CAPE_path = os.path.join(self.analysis_path, "CAPE")
         self.logs_path = os.path.join(self.analysis_path, "logs")
         self.shots_path = os.path.join(self.analysis_path, "shots")
diff --git a/modules/processing/procdump.py b/modules/processing/procdump.py
new file mode 100644
index 000000000..06e1f96e2
--- /dev/null
+++ b/modules/processing/procdump.py
@@ -0,0 +1,61 @@
+# Copyright (C) 2010-2015 Cuckoo Foundation.
+# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
+# See the file 'docs/LICENSE' for copying permission.
+
+import os
+
+from lib.cuckoo.common.abstracts import Processing
+from lib.cuckoo.common.config import Config
+from lib.cuckoo.common.objects import File
+from lib.cuckoo.common.utils import convert_to_printable
+
+class ProcDump(Processing):
+    """ProcDump files analysis."""
+
+    def run(self):
+        """Run analysis.
+        @return: list of process dumps with related information.
+        """
+        self.key = "procdump"
+        procdump_files = []
+        buf = self.options.get("buffer", 8192)
+
+        if not os.path.exists(self.procdump_path):
+            return None
+        file_names = os.listdir(self.procdump_path)
+        for file_name in file_names:
+            file_path = os.path.join(self.procdump_path, file_name)
+            if not os.path.isfile(file_path):
+                continue
+            if file_name.endswith("_info.txt"):
+                continue
+            with open(file_path + "_info.txt", 'r') as f:
+                metastring = f.readline()
+            file_info = File(file_path=file_path, guest_paths=metastring, file_name=file_name).get_all()
+            metastrings = metastring.split(",")
+            file_info["process_path"] = metastrings[2]
+            file_info["module_path"] = metastrings[3]
+            file_info["process_name"] = file_info["process_path"].split("\\")[-1]
+            file_info["pid"] = metastrings[1]
+            texttypes = [
+                "ASCII",
+                "Windows Registry text",
+                "XML document text",
+                "Unicode text",
+            ]
+            readit = False
+            for texttype in texttypes:
+                if texttype in file_info["type"]:
+                    readit = True
+                    break
+            if readit:
+                with open(file_info["path"], "r") as drop_open:
+                    filedata = drop_open.read(buf + 1)
+                if len(filedata) > buf:
+                    file_info["data"] = convert_to_printable(filedata[:buf] + " ")
+                else:
+                    file_info["data"] = convert_to_printable(filedata)
+
+            procdump_files.append(file_info)
+
+        return procdump_files
diff --git a/modules/reporting/submitCAPE.py b/modules/reporting/submitCAPE.py
index 415984c56..e9cec40af 100644
--- a/modules/reporting/submitCAPE.py
+++ b/modules/reporting/submitCAPE.py
@@ -224,7 +224,7 @@ def run(self, results):
         if package:
             task_id = db.add_path(file_path=self.task["target"],
-                                  package=dumper,
+                                  package=package,
                                   timeout=self.task["timeout"],
                                   options=self.task_options,
                                   priority=self.task["priority"]+1, # increase priority to expedite related submission
diff --git a/modules/signatures/CAPE.py b/modules/signatures/CAPE.py
index 15c3ad40b..a25a059bc 100644
--- a/modules/signatures/CAPE.py
+++ b/modules/signatures/CAPE.py
@@ -74,12 +74,6 @@ def on_call(self, call, process):
             if "MZ" in buf:
                 self.compressed_binary = True
 
-        #if call["api"] == "memcpy":
-            #count = self.get_raw_argument(call, "count")
-            #if (count > 0xa00) and \
-            #(count < 0x5000):
-                #self.config_copy = True
-
     def on_complete(self):
         if self.config_copy == True and self.compressed_binary == True:
             self.plugx = True
@@ -179,4 +173,26 @@ def on_complete(self):
         else:
             return False
 
+class AllocationX(Signature):
+    name = "allocation_rwx"
+    description = "CAPE detection: Extraction"
+    severity = 1
+    categories = ["allocation"]
+    authors = ["Context"]
+    minimum = "1.2"
+    evented = True
+    def __init__(self, *args, **kwargs):
+        Signature.__init__(self, *args, **kwargs)
+
+    filter_apinames = set(["NtAllocateVirtualMemory"])
+
+    def on_call(self, call, process):
+        if call["api"] == "NtAllocateVirtualMemory":
+            protection = self.get_argument(call, "Protection")
+            regionsize = int(self.get_raw_argument(call, "RegionSize"), 0)
+            # PAGE_EXECUTE_READWRITE
+            if protection == "0x00000040" and regionsize > 0x2000:
+                return True
+            else:
+                return False
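For context on the signature change above (a sketch, not part of the patch): the new AllocationX check only fires on RWX allocations larger than 0x2000 bytes via NtAllocateVirtualMemory, which narrows the match compared with the allocation_rwx signature removed just below. A hypothetical condensation of the test:

PAGE_EXECUTE_READWRITE = 0x40   # the signature compares the string form, "0x00000040"
MIN_REGION_SIZE = 0x2000        # 8 KiB threshold used above

def is_interesting(protection, regionsize):
    # Hypothetical stand-in for AllocationX.on_call()'s condition.
    return protection == PAGE_EXECUTE_READWRITE and regionsize > MIN_REGION_SIZE

assert is_interesting(0x40, 0x3000)
assert not is_interesting(0x40, 0x1000)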
diff --git a/modules/signatures/allocation_rwx.py b/modules/signatures/allocation_rwx.py
deleted file mode 100644
index e08575b32..000000000
--- a/modules/signatures/allocation_rwx.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (C) 2014 Accuvant, Inc. (bspengler@accuvant.com)
-# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
-# See the file 'docs/LICENSE' for copying permission.
-
-from lib.cuckoo.common.abstracts import Signature
-
-class InjectionRWX(Signature):
-    name = "allocation_rwx"
-    description = "Allocates RWX memory"
-    severity = 2
-    categories = ["injection"]
-    authors = ["Accuvant"]
-    minimum = "1.2"
-    evented = True
-
-    def __init__(self, *args, **kwargs):
-        Signature.__init__(self, *args, **kwargs)
-
-    filter_apinames = set(["NtAllocateVirtualMemory","NtProtectVirtualMemory","VirtualProtectEx"])
-
-    def on_call(self, call, process):
-        if call["api"] == "NtAllocateVirtualMemory":
-            protection = self.get_argument(call, "Protection")
-            # PAGE_EXECUTE_READWRITE
-            if protection == "0x00000040":
-                return True
diff --git a/modules/signatures/injection_runpe.py b/modules/signatures/injection_runpe.py
index d85a194fe..946d4873e 100644
--- a/modules/signatures/injection_runpe.py
+++ b/modules/signatures/injection_runpe.py
@@ -16,7 +16,7 @@
 class InjectionRUNPE(Signature):
     name = "injection_runpe"
-    description = "RunPE or Process Hollowing detected"
+    description = "CAPE detection: Injection (Process Hollowing)"
     severity = 3
     categories = ["injection"]
     authors = ["glysbaysb", "Accuvant"]
diff --git a/web/analysis/views.py b/web/analysis/views.py
index bf2d46ab6..abdfc5305 100644
--- a/web/analysis/views.py
+++ b/web/analysis/views.py
@@ -861,6 +861,19 @@ def file(request, category, task_id, dlfile):
         else:
             path = buf
             file_name += ".bin"
+    elif category == "procdump":
+        buf = os.path.join(CUCKOO_ROOT, "storage", "analyses",
+                           task_id, "procdump", file_name)
+        if os.path.isdir(buf):
+            # Backward compat for when each dropped file was in a separate dir
+            # Grab the shorter file name, as we store guest paths in the
+            # [orig file name]_info.txt
+            dfile = min(os.listdir(buf), key=len)
+            path = os.path.join(buf, dfile)
+            file_name = dfile + ".bin"
+        else:
+            path = buf
+            file_name += ".bin"
     elif category == "CAPE":
         buf = os.path.join(CUCKOO_ROOT, "storage", "analyses",
                            task_id, "CAPE", file_name)
diff --git a/web/submission/views.py b/web/submission/views.py
index 8b3984cf8..8b9116cce 100644
--- a/web/submission/views.py
+++ b/web/submission/views.py
@@ -98,7 +98,16 @@ def index(request):
         if request.POST.get("process_memory"):
             if options:
                 options += ","
-            options += "procmemdump=yes"
+            options += "procmemdump=0"
+        else:
+            if options:
+                options += ","
+            options += "procmemdump=1"
+
+        if request.POST.get("import_reconstruction"):
+            if options:
+                options += ","
+            options += "import_reconstruction=1"
 
         if request.POST.get("kernel_analysis"):
             if options:
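A condensed sketch of the option strings the submission view above now emits; build_options() is a hypothetical stand-in for the inline view logic, and only the branches shown in this hunk are modelled. When present in the task options, these procmemdump and import_reconstruction values are what the process.py hunk earlier in this patch copies into the monitor configuration.

def build_options(post, options=""):
    # Hypothetical condensation of the view logic above.
    if post.get("process_memory"):
        options += ("," if options else "") + "procmemdump=0"
    else:
        options += ("," if options else "") + "procmemdump=1"
    if post.get("import_reconstruction"):
        options += ("," if options else "") + "import_reconstruction=1"
    return options

assert build_options({}) == "procmemdump=1"
assert build_options({"process_memory": "on"}) == "procmemdump=0"
assert build_options({"import_reconstruction": "on"}) == "procmemdump=1,import_reconstruction=1"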
diff --git a/web/templates/analysis/procdump/index.html b/web/templates/analysis/procdump/index.html
new file mode 100644
index 000000000..4508f4b24
--- /dev/null
+++ b/web/templates/analysis/procdump/index.html
@@ -0,0 +1,88 @@
+
+{% if analysis.procdump|length > 0 %}
+    {% for file in analysis.procdump %}
+        Process Name    {{file.process_name}}
+        PID             {{file.pid}}
+        Dump Size       {{file.size}} bytes
+        Module Path     {{file.module_path}}
+        PE Type         {{file.type}}
+        MD5             {{file.md5}}
+        SHA1            {{file.sha1}}
+        SHA256          {{file.sha256}}
+        CRC32           {{file.crc32}}
+        Ssdeep          {{file.ssdeep}}
+        ClamAV          {{file.clamav}}
+        Yara            {% if file.yara %}
+                            {% for sign in file.yara %}
+                                {{sign.name}} - {{sign.meta.description}}
+                            {% endfor %}
+                        {% else %}
+                            None matched
+                        {% endif %}
+        Dump Filename   {{file.name}}
+        {% if file.dropdir %}
+        Download
+        {% else %}
+        Download
+        {% endif %}
+    {% endfor %}
+{% else %}
+    Sorry! No process dumps.
+{% endif %}
diff --git a/web/templates/analysis/report.html b/web/templates/analysis/report.html
index d9735e1ea..e978cae0b 100644
--- a/web/templates/analysis/report.html
+++ b/web/templates/analysis/report.html
@@ -35,11 +35,14 @@
                         Dropped Files
                     {% endif %}
                 {% endif %}
-                {% if analysis.CAPE|length %}
-                    CAPE ({{analysis.CAPE|length}})
-                {% endif %}
                 {% if analysis.procmemory %}
                     Process Memory
                 {% endif %}
                 {% if analysis.memory %}
                     Memory Analysis
                 {% endif %}
+                {% if analysis.procdump|length %}
+                    Process Dumps ({{analysis.procdump|length}})
+                {% endif %}
+                {% if analysis.CAPE|length %}
+                    CAPE ({{analysis.CAPE|length}})
+                {% endif %}
                 {% if config.malheur %}
                     {% if analysis.info.category == "file" or analysis.info.category == "url" %}
                         {% if similar|length %}
@@ -86,6 +89,9 @@
                 {% include "analysis/CAPE/index.html" %}
+
+                {% include "analysis/procdump/index.html" %}
+
             {% if analysis.procmemory %}
                 {% include "analysis/procmemory/index.html" %}
diff --git a/web/templates/submission/index.html b/web/templates/submission/index.html
index 452d3f309..d28f9b373 100644
--- a/web/templates/submission/index.html
+++ b/web/templates/submission/index.html
@@ -307,61 +307,66 @@
+                        {% if config.procmemory %}
-                        {% if config.gateways %}
+                        {% endif %}
+
+                        {% if config.memory %}
+
+
                         {% endif %}
-                        {% if config.tor %}
-                        {% endif %}
-                        {% if config.kernel %}
+                        {% if config.gateways %}
                         {% endif %}
-                        {% if config.procmemory %}
-                        {% endif %}
-                        {% if config.memory %}
+                        {% if config.tor %}
                         {% endif %}
+                        {% if config.kernel %}
+                        {% endif %}
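For orientation, this is the rough shape of one entry in the resulting report["procdump"] list, as produced by ProcDump.run() and consumed by the procdump template above. The pid, process_path, process_name and module_path keys come from splitting the uploaded metadata string (metastrings[1], [2] and [3] in ProcDump.run()); the hash, type, yara and clamav fields come from File.get_all(). All values below are hypothetical.

procdump_entry = {
    "name": "1234.dmp",
    "size": 204800,
    "type": "PE32 executable (GUI) Intel 80386, for MS Windows",
    "md5": "0" * 32,      # plus sha1, sha256, crc32, ssdeep, clamav, yara
    "pid": "1234",
    "process_path": "C:\\Windows\\explorer.exe",
    "process_name": "explorer.exe",
    "module_path": "C:\\Windows\\explorer.exe",
}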