Skip to content

Commit

Permalink
Improved handling of routine process dumps, misc bugfixes.
Browse files Browse the repository at this point in the history
  • Loading branch information
kevoreilly committed Oct 11, 2016
1 parent d708f93 commit 62c2c5c
Show file tree
Hide file tree
Showing 18 changed files with 331 additions and 66 deletions.
78 changes: 70 additions & 8 deletions analyzer/windows/analyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
from lib.common.defines import EVENT_MODIFY_STATE, SECURITY_DESCRIPTOR, SECURITY_ATTRIBUTES, SYSTEMTIME
from lib.common.exceptions import CuckooError, CuckooPackageError
from lib.common.hashing import hash_file
from lib.common.results import upload_to_host
from lib.common.results import upload_to_host, upload_to_host_with_metadata
from lib.core.config import Config
from lib.core.packages import choose_package
from lib.core.privileges import grant_debug_privilege
Expand All @@ -49,6 +49,7 @@
FILES_LIST = []
DUMPED_LIST = []
CAPE_DUMPED_LIST = []
PROC_DUMPED_LIST = []
UPLOADPATH_LIST = []
PROCESS_LIST = []
PROTECTED_PATH_LIST = []
Expand Down Expand Up @@ -205,8 +206,18 @@ def cape_file(file_path):
upload_path = CAPE_DUMPED_LIST[idx]
else:
upload_path = os.path.join("CAPE", sha256)

if os.path.exists(file_path + "_info.txt"):
metadata = [line.strip() for line in open(file_path + "_info.txt")]
metastring = ""
for line in metadata:
metastring = metastring + line + ','
else:
log.warning("No metadata file for CAPE dump at path \"%s\"", file_path.encode("utf-8", "replace"))
metastring = file_path

try:
upload_to_host(file_path, upload_path, duplicate)
upload_to_host_with_metadata(file_path, upload_path, metastring)
if not duplicate:
CAPE_DUMPED_LIST.append(sha256)
CAPE_DUMPED_LIST.append(upload_path)
Expand All @@ -215,6 +226,51 @@ def cape_file(file_path):
log.error("Unable to upload CAPE file at path \"%s\": %s",
file_path.encode("utf-8", "replace"), e)

def proc_dump(file_path):
    """Upload a process memory dump (and its metadata) to the host.

    Mirrors cape_file(): hashes the dump with SHA256, avoids re-uploading
    duplicates via PROC_DUMPED_LIST, reads the monitor-written sidecar
    "<path>_info.txt" (if present) and ships the dump to the host under
    "procdump/<sha256>".
    @param file_path: path of the process dump on the guest.
    """
    duplicate = False
    try:
        if os.path.exists(file_path):
            sha256 = hash_file(hashlib.sha256, file_path)
            if sha256 in PROC_DUMPED_LIST:
                # The file was already dumped, just upload the alternate name for it.
                duplicate = True
        else:
            log.warning("Process dump at path \"%s\" does not exist, skip.",
                        file_path.encode("utf-8", "replace"))
            return
    except IOError as e:
        log.warning("Unable to access process dump at path \"%s\"", file_path.encode("utf-8", "replace"))
        return

    if os.path.isdir(file_path):
        return

    if duplicate:
        # PROC_DUMPED_LIST alternates sha256, upload_path entries, so the
        # recorded upload path sits right after the matching hash.
        idx = PROC_DUMPED_LIST.index(sha256)
        upload_path = PROC_DUMPED_LIST[idx + 1]
    else:
        upload_path = os.path.join("procdump", sha256)

    info_path = file_path + "_info.txt"
    if os.path.exists(info_path):
        # Comma-join the metadata lines (trailing comma kept for the
        # host-side parser).
        with open(info_path) as info_file:
            metadata = [line.strip() for line in info_file]
        metastring = ""
        for line in metadata:
            metastring = metastring + line + ','
    else:
        # No exception object is in scope here; just report the path.
        log.warning("No metadata file for process dump at path \"%s\"", file_path.encode("utf-8", "replace"))
        metastring = file_path

    try:
        upload_to_host_with_metadata(file_path, upload_path, metastring)
        if not duplicate:
            # Record hash followed by its upload path so future duplicates
            # resolve to the same destination.
            PROC_DUMPED_LIST.append(sha256)
            PROC_DUMPED_LIST.append(upload_path)
        log.info("Added new process dump to list with path: %s", unicode(file_path).encode("utf-8", "replace"))
    except (IOError, socket.error) as e:
        log.error("Unable to upload process dump at path \"%s\": %s",
                  file_path.encode("utf-8", "replace"), e)

def del_file(fname):
global FILES_LIST

Expand Down Expand Up @@ -557,9 +613,10 @@ def run(self):
if event_handle:
KERNEL32.SetEvent(event_handle)
KERNEL32.CloseHandle(event_handle)
if self.options.get("procmemdump"):
p = Process(pid=process_id)
p.dump_memory()
# Process dumping is now handled in-process (CAPE)
#if self.options.get("procmemdump"):
# p = Process(pid=process_id)
# p.dump_memory()
dump_files()
PROCESS_LOCK.release()
# Handle case of malware terminating a process -- notify the target
Expand All @@ -577,9 +634,9 @@ def run(self):
else:
log.info("Notified of termination of process with pid %u.", process_id)
# dump the memory of exiting processes
if self.options.get("procmemdump"):
p = Process(pid=process_id)
p.dump_memory()
#if self.options.get("procmemdump"):
# p = Process(pid=process_id)
# p.dump_memory()
# make sure process is aware of the termination
KERNEL32.SetEvent(event_handle)
KERNEL32.CloseHandle(event_handle)
Expand Down Expand Up @@ -750,6 +807,11 @@ def run(self):
file_path = unicode(command[10:].decode("utf-8"))
# We dump immediately.
cape_file(file_path)
elif command.startswith("FILE_DUMP:"):
# We extract the file path.
file_path = unicode(command[10:].decode("utf-8"))
# We dump immediately.
proc_dump(file_path)
# In case of FILE_DEL, the client is trying to notify an ongoing
# deletion of an existing file, therefore we need to dump it
# straight away.
Expand Down
Binary file modified analyzer/windows/dll/CAPE.dll
Binary file not shown.
Binary file modified analyzer/windows/dll/CAPE_x64.dll
Binary file not shown.
8 changes: 8 additions & 0 deletions analyzer/windows/lib/api/process.py
Original file line number Diff line number Diff line change
Expand Up @@ -621,6 +621,10 @@ def inject(self, dll=None, injectmode=INJECT_QUEUEUSERAPC, interest=None, noslee
if optname in cfgoptions:
config.write("{0}={1}\n".format(optname, cfgoptions[optname]))

if "procmemdump" in cfgoptions:
config.write("procmemdump={0}\n".format(cfgoptions["procmemdump"]))
if "import_reconstruction" in cfgoptions:
config.write("import_reconstruction={0}\n".format(cfgoptions["import_reconstruction"]))
if "breakpoint" in cfgoptions:
config.write("breakpoint={0}\n".format(cfgoptions["breakpoint"]))

Expand Down Expand Up @@ -742,6 +746,10 @@ def debug_inject(self, dll=None, interest=None, childprocess=False, nosleepskip=
if firstproc:
Process.first_process = False

if "procmemdump" in cfgoptions:
config.write("procmemdump={0}\n".format(cfgoptions["procmemdump"]))
if "import_reconstruction" in cfgoptions:
config.write("import_reconstruction={0}\n".format(cfgoptions["import_reconstruction"]))
if "breakpoint" in cfgoptions:
config.write("breakpoint={0}\n".format(cfgoptions["breakpoint"]))

Expand Down
9 changes: 5 additions & 4 deletions analyzer/windows/lib/common/abstracts.py
Original file line number Diff line number Diff line change
Expand Up @@ -186,10 +186,11 @@ def finish(self):
If specified to do so, this method dumps the memory of
all running processes.
"""
if self.options.get("procmemdump"):
for pid in self.pids:
p = Process(pid=pid)
p.dump_memory()
# Process dumping is now handled in-process (CAPE)
#if self.options.get("procmemdump"):
# for pid in self.pids:
# p = Process(pid=pid)
# p.dump_memory()

return True

Expand Down
18 changes: 18 additions & 0 deletions analyzer/windows/lib/common/results.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,24 @@ def upload_to_host(file_path, dump_path, duplicate):
if nc:
nc.close()

def upload_to_host_with_metadata(file_path, dump_path, metadata):
    """Stream a file to the host over netlog, tagging it with metadata.

    Unlike upload_to_host(), the metadata string (rather than the guest
    file path) is sent in the netlog header, and duplicate is fixed to 0
    so the host stores the file unconditionally.
    @param file_path: local path of the file to upload.
    @param dump_path: destination path on the host.
    @param metadata: metadata string to send in the netlog header.
    """
    nc = None
    duplicate = 0
    try:
        nc = NetlogBinary(unicode(metadata).encode("utf-8", "replace"), dump_path, duplicate)
        # Stream in BUFSIZE chunks so large dumps never have to fit in
        # memory; the context manager closes the file on any outcome.
        with open(file_path, "rb") as infd:
            buf = infd.read(BUFSIZE)
            while buf:
                nc.send(buf, retry=True)
                buf = infd.read(BUFSIZE)
    except Exception as e:
        log.error("Exception uploading file {0} to host: {1}".format(unicode(file_path).encode("utf-8", "replace"), e))
    finally:
        if nc:
            nc.close()

class NetlogConnection(object):
def __init__(self, proto=""):
config = Config(cfg="analysis.conf")
Expand Down
3 changes: 3 additions & 0 deletions conf/processing.conf
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,9 @@ enabled = yes
# Amount of text to carve from plaintext files (bytes)
buffer = 8192

[procdump]
enabled = yes

[memory]
enabled = no

Expand Down
1 change: 1 addition & 0 deletions lib/cuckoo/common/abstracts.py
Original file line number Diff line number Diff line change
Expand Up @@ -636,6 +636,7 @@ def set_path(self, analysis_path):
self.file_path = os.path.realpath(os.path.join(self.analysis_path,
"binary"))
self.dropped_path = os.path.join(self.analysis_path, "files")
self.procdump_path = os.path.join(self.analysis_path, "procdump")
self.CAPE_path = os.path.join(self.analysis_path, "CAPE")
self.logs_path = os.path.join(self.analysis_path, "logs")
self.shots_path = os.path.join(self.analysis_path, "shots")
Expand Down
61 changes: 61 additions & 0 deletions modules/processing/procdump.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
# Copyright (C) 2010-2015 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.

import os

from lib.cuckoo.common.abstracts import Processing
from lib.cuckoo.common.config import Config
from lib.cuckoo.common.objects import File
from lib.cuckoo.common.utils import convert_to_printable

class ProcDump(Processing):
    """ProcDump files analysis.

    Walks the analysis "procdump" directory and builds a result entry for
    each process dump, enriched with the metadata from the matching
    "<dump>_info.txt" sidecar written by the guest analyzer.
    """

    def run(self):
        """Run analysis.
        @return: list of process dumps with related information, or None
                 when the procdump directory does not exist.
        """
        self.key = "procdump"
        procdump_files = []
        buf = self.options.get("buffer", 8192)

        if not os.path.exists(self.procdump_path):
            return None
        for file_name in os.listdir(self.procdump_path):
            file_path = os.path.join(self.procdump_path, file_name)
            if not os.path.isfile(file_path):
                continue
            # Metadata sidecars are folded into their dump's entry below.
            if file_name.endswith("_info.txt"):
                continue
            info_path = file_path + "_info.txt"
            if not os.path.exists(info_path):
                # Dump arrived without its metadata file; skip it rather
                # than crash the whole processing module.
                continue
            with open(info_path, "r") as f:
                metastring = f.readline()
            # Expected format: <?>,<pid>,<process_path>,<module_path>,...
            metastrings = metastring.split(",")
            if len(metastrings) < 4:
                # Malformed metadata line; skip this dump.
                continue
            file_info = File(file_path=file_path, guest_paths=metastring, file_name=file_name).get_all()
            file_info["process_path"] = metastrings[2]
            file_info["module_path"] = metastrings[3]
            file_info["process_name"] = file_info["process_path"].split("\\")[-1]
            file_info["pid"] = metastrings[1]
            texttypes = [
                "ASCII",
                "Windows Registry text",
                "XML document text",
                "Unicode text",
            ]
            # Inline a preview of text-like dumps, truncated to the
            # configured buffer size.
            if any(texttype in file_info["type"] for texttype in texttypes):
                with open(file_info["path"], "r") as drop_open:
                    filedata = drop_open.read(buf + 1)
                if len(filedata) > buf:
                    file_info["data"] = convert_to_printable(filedata[:buf] + " <truncated>")
                else:
                    file_info["data"] = convert_to_printable(filedata)

            procdump_files.append(file_info)

        return procdump_files
2 changes: 1 addition & 1 deletion modules/reporting/submitCAPE.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,7 @@ def run(self, results):

if package:
task_id = db.add_path(file_path=self.task["target"],
package=dumper,
package=package,
timeout=self.task["timeout"],
options=self.task_options,
priority=self.task["priority"]+1, # increase priority to expedite related submission
Expand Down
28 changes: 22 additions & 6 deletions modules/signatures/CAPE.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,12 +74,6 @@ def on_call(self, call, process):
if "MZ" in buf:
self.compressed_binary = True

#if call["api"] == "memcpy":
#count = self.get_raw_argument(call, "count")
#if (count > 0xa00) and \
#(count < 0x5000):
#self.config_copy = True

def on_complete(self):
if self.config_copy == True and self.compressed_binary == True:
self.plugx = True
Expand Down Expand Up @@ -179,4 +173,26 @@ def on_complete(self):
else:
return False

class AllocationX(Signature):
    """Flags large PAGE_EXECUTE_READWRITE allocations as a CAPE extraction trigger."""
    name = "allocation_rwx"
    description = "CAPE detection: Extraction"
    severity = 1
    categories = ["allocation"]
    authors = ["Context"]
    minimum = "1.2"
    evented = True

    def __init__(self, *args, **kwargs):
        Signature.__init__(self, *args, **kwargs)

    filter_apinames = set(["NtAllocateVirtualMemory"])

    def on_call(self, call, process):
        # Guard clause: only NtAllocateVirtualMemory is of interest
        # (filter_apinames should already guarantee this).
        if call["api"] != "NtAllocateVirtualMemory":
            return
        prot = self.get_argument(call, "Protection")
        size = int(self.get_raw_argument(call, "RegionSize"), 0)
        # 0x40 == PAGE_EXECUTE_READWRITE; ignore small allocations.
        return prot == "0x00000040" and size > 0x2000
26 changes: 0 additions & 26 deletions modules/signatures/allocation_rwx.py

This file was deleted.

2 changes: 1 addition & 1 deletion modules/signatures/injection_runpe.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@

class InjectionRUNPE(Signature):
name = "injection_runpe"
description = "RunPE or Process Hollowing detected"
description = "CAPE detection: Injection (Process Hollowing)"
severity = 3
categories = ["injection"]
authors = ["glysbaysb", "Accuvant"]
Expand Down
13 changes: 13 additions & 0 deletions web/analysis/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -861,6 +861,19 @@ def file(request, category, task_id, dlfile):
else:
path = buf
file_name += ".bin"
elif category == "procdump":
buf = os.path.join(CUCKOO_ROOT, "storage", "analyses",
task_id, "procdump", file_name)
if os.path.isdir(buf):
# Backward compat for when each dropped file was in a separate dir
# Grab smaller file name as we store guest paths in the
# [orig file name]_info.txt
dfile = min(os.listdir(buf), key=len)
path = os.path.join(buf, dfile)
file_name = dfile + ".bin"
else:
path = buf
file_name += ".bin"
elif category == "CAPE":
buf = os.path.join(CUCKOO_ROOT, "storage", "analyses",
task_id, "CAPE", file_name)
Expand Down
11 changes: 10 additions & 1 deletion web/submission/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,16 @@ def index(request):
if request.POST.get("process_memory"):
if options:
options += ","
options += "procmemdump=yes"
options += "procmemdump=0"
else:
if options:
options += ","
options += "procmemdump=1"

if request.POST.get("import_reconstruction"):
if options:
options += ","
options += "import_reconstruction=1"

if request.POST.get("kernel_analysis"):
if options:
Expand Down
Loading

0 comments on commit 62c2c5c

Please sign in to comment.