Skip to content

Commit

Permalink
Merge branch 'main' into add_appinspect_rest_support
Browse files Browse the repository at this point in the history
  • Loading branch information
patel-bhavin authored Oct 23, 2023
2 parents 6df42f6 + 33ded94 commit 00e9435
Show file tree
Hide file tree
Showing 12 changed files with 99 additions and 23 deletions.
4 changes: 2 additions & 2 deletions contentctl/actions/generate.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ def execute(self, input_dto: GenerateInputDto) -> DirectorOutputDto:
return director_output_dto

elif input_dto.director_input_dto.product == SecurityContentProduct.SSA:
output_path = os.path.join(input_dto.director_input_dto.input_path, input_dto.director_input_dto.config.build_ssa.output_path)
output_path = os.path.join(input_dto.director_input_dto.input_path, input_dto.director_input_dto.config.build_ssa.path_root)
shutil.rmtree(output_path + '/srs/', ignore_errors=True)
shutil.rmtree(output_path + '/complex/', ignore_errors=True)
os.makedirs(output_path + '/complex/')
Expand All @@ -62,7 +62,7 @@ def execute(self, input_dto: GenerateInputDto) -> DirectorOutputDto:
ba_yml_output.writeObjects(director_output_dto.ssa_detections, output_path)

elif input_dto.director_input_dto.product == SecurityContentProduct.API:
output_path = os.path.join(input_dto.director_input_dto.input_path, input_dto.director_input_dto.config.build_api.output_path)
output_path = os.path.join(input_dto.director_input_dto.input_path, input_dto.director_input_dto.config.build_api.path_root)
shutil.rmtree(output_path, ignore_errors=True)
os.makedirs(output_path)
api_json_output = ApiJsonOutput()
Expand Down
24 changes: 20 additions & 4 deletions contentctl/input/backend_splunk_ba.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from sigma.exceptions import SigmaFeatureNotSupportedByBackendError
from sigma.pipelines.splunk.splunk import splunk_sysmon_process_creation_cim_mapping, splunk_windows_registry_cim_mapping, splunk_windows_file_event_cim_mapping

from contentctl.objects.detection import Detection
from contentctl.objects.ssa_detection import SSADetection

from typing import ClassVar, Dict, List, Optional, Pattern, Tuple

Expand Down Expand Up @@ -67,7 +67,7 @@ class SplunkBABackend(TextQueryBackend):
wildcard_match_expression : ClassVar[Optional[str]] = "{field} LIKE {value}"


def __init__(self, processing_pipeline: Optional["sigma.processing.pipeline.ProcessingPipeline"] = None, collect_errors: bool = False, min_time : str = "-30d", max_time : str = "now", detection : Detection = None, field_mapping: dict = None, **kwargs):
def __init__(self, processing_pipeline: Optional["sigma.processing.pipeline.ProcessingPipeline"] = None, collect_errors: bool = False, min_time : str = "-30d", max_time : str = "now", detection : SSADetection = None, field_mapping: dict = None, **kwargs):
super().__init__(processing_pipeline, collect_errors, **kwargs)
self.min_time = min_time or "-30d"
self.max_time = max_time or "now"
Expand Down Expand Up @@ -110,13 +110,29 @@ def finalize_query_data_model(self, rule: SigmaRule, query: str, index: int, sta
parent = new_val
i = i + 1
continue
parser_str = '| eval ' + new_val + ' = ' + parent + '.' + val + ' '
new_val_spaces = new_val + "="
if new_val_spaces not in query:
parser_str = '| eval ' + new_val + ' = ' + parent + '.' + val + ' '
else:
parser_str = '| eval ' + new_val + ' = ' + 'lower(' + parent + '.' + val + ') '
detection_str = detection_str + parser_str
parsed_fields.append(new_val)
parent = new_val
i = i + 1

detection_str = detection_str + "| where " + query
### Convert sigma values into lower case
lower_query = ""
in_quotes = False
for char in query:
if char == '"':
in_quotes = not in_quotes
if in_quotes:
lower_query += char.lower()
else:
lower_query += char

detection_str = detection_str + "| where " + lower_query

detection_str = detection_str.replace("\\\\\\\\", "\\\\")
return detection_str

Expand Down
6 changes: 3 additions & 3 deletions contentctl/input/director.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
from contentctl.input.story_builder import StoryBuilder
from contentctl.objects.enums import SecurityContentType
from contentctl.objects.enums import SecurityContentProduct
from contentctl.objects.enums import DetectionStatus
from contentctl.helper.utils import Utils
from contentctl.enrichments.attack_enrichment import AttackEnrichment
from contentctl.objects.config import Config
Expand Down Expand Up @@ -99,9 +100,8 @@ def execute(self, input_dto: DirectorInputDto) -> None:


def createSecurityContent(self, type: SecurityContentType) -> None:
objects = []
if type == SecurityContentType.ssa_detections:
files = Utils.get_all_yml_files_from_directory(os.path.join(self.input_dto.input_path, 'ssa_detections'))
files = Utils.get_all_yml_files_from_directory(os.path.join(self.input_dto.input_path, 'ssa_detections'))
elif type == SecurityContentType.unit_tests:
files = Utils.get_all_yml_files_from_directory(os.path.join(self.input_dto.input_path, 'tests'))
else:
Expand Down Expand Up @@ -167,7 +167,7 @@ def createSecurityContent(self, type: SecurityContentType) -> None:
elif type == SecurityContentType.ssa_detections:
self.constructSSADetection(self.ssa_detection_builder, file)
detection = self.ssa_detection_builder.getObject()
if detection.status == "production" or detection.status == "validated":
if detection.status in [DetectionStatus.production.value, DetectionStatus.validation.value]:
self.output_dto.ssa_detections.append(detection)

else:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,13 @@ def type_valid(cls, v, values):
def encode_error(cls, v, values, field):
return SecurityContentObject.free_text_field_valid(cls,v,values,field)

@root_validator
def validation_for_ba_only(cls, values):
    """Reject ``status: validation`` on non-SSA detections.

    The ``validation`` status is reserved for BA/SSA (``ssa_``-prefixed)
    detections; a regular detection carrying it is a content error, so we
    fail fast and include the offending file path in the message.

    Raises:
        ValueError: if this (non-ssa) detection declares ``status: validation``.
    """
    # Use .get(): inside a pydantic root_validator, fields that failed their
    # own validation are absent from `values`, and values["status"] would
    # raise an unrelated KeyError instead of a clear validation message.
    if values.get("status") == DetectionStatus.validation.value:
        # Quote placement fixed so the message reads
        # "... has 'status: validation' which may ONLY be used ..."
        raise ValueError(
            f"The following is NOT an ssa_ detection, but has "
            f"'status: {values['status']}' which may ONLY be used "
            f"for ssa_ detections: {values.get('file_path')}"
        )
    return values

# @root_validator
# def search_validation(cls, values):
# if 'ssa_' not in values['file_path']:
Expand Down
4 changes: 2 additions & 2 deletions contentctl/objects/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,10 +154,10 @@ class ConfigEnrichments(BaseModel):


class ConfigBuildSSA(BaseModel):
output_path: str
path_root: str

class ConfigBuildApi(BaseModel):
output_path: str
path_root: str

class Config(BaseModel, extra=Extra.forbid):
#general: ConfigGlobal = ConfigGlobal()
Expand Down
1 change: 1 addition & 0 deletions contentctl/objects/enums.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ class DetectionStatus(enum.Enum):
production = "production"
deprecated = "deprecated"
experimental = "experimental"
validation = "validation"

class LogLevel(enum.Enum):
NONE = "NONE"
Expand Down
3 changes: 2 additions & 1 deletion contentctl/objects/ssa_detection.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,9 @@ class SSADetection(BaseModel):
mappings: dict = None
file_path: str = None
source: str = None
runtime: str = None
test: Union[UnitTest, dict] = None
runtime: str = None
internalVersion: int = None

# @validator('name')v
# def name_max_length(cls, v, values):
Expand Down
6 changes: 4 additions & 2 deletions contentctl/objects/ssa_detection_tags.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ class SSADetectionTags(BaseModel):
asset_type: str
automated_detection_testing: str = None
cis20: list = None
confidence: str
confidence: int
impact: int
kill_chain_phases: list = None
message: str
Expand Down Expand Up @@ -40,7 +40,9 @@ class SSADetectionTags(BaseModel):
risk_level: str = None
observable_str: str = None
evidence_str: str = None
kill_chain_phases_id: list = None
analytics_story_str: str = None
kill_chain_phases_id:dict = None
kill_chain_phases_str:str = None
research_site_url: str = None
event_schema: str = None
mappings: list = None
Expand Down
9 changes: 7 additions & 2 deletions contentctl/output/api_json_output.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,9 @@ def writeObjects(self, objects: list, output_path: str, type: SecurityContentTyp
obj_array.append(baseline.dict(
exclude =
{
"deployment": True
"deployment": True,
"check_references":True,
"file_path":True,
}
))

Expand Down Expand Up @@ -116,6 +118,9 @@ def writeObjects(self, objects: list, output_path: str, type: SecurityContentTyp
obj_array = []
for deployment in objects:
deployment.id = str(deployment.id)
obj_array.append(deployment.dict(exclude_none=True))
obj_array.append(deployment.dict(exclude_none=True, exclude =
{
"file_path":True,
}))

JsonWriter.writeJsonObject(os.path.join(output_path, 'deployments.json'), {'deployments': obj_array })
4 changes: 3 additions & 1 deletion contentctl/output/ba_yml_output.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,8 @@ def writeObjects(self, objects: list, output_path: str, type: SecurityContentTyp
"nist": obj.tags.nist
}

obj.runtime = "SPL-DSP"
obj.runtime = "SPL2"
obj.internalVersion = 2

# remove unnecessary fields
YmlWriter.writeYmlFile(file_path, obj.dict(
Expand All @@ -78,6 +79,7 @@ def writeObjects(self, objects: list, output_path: str, type: SecurityContentTyp
"known_false_positives": True,
"references": True,
"runtime": True,
"internalVersion": True,
"tags":
{
#"analytic_story": True,
Expand Down
9 changes: 6 additions & 3 deletions contentctl/output/conf_output.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,19 +122,20 @@ def writeObjects(self, objects: list, type: SecurityContentType = None) -> None:
'transforms.j2',
self.config, objects)

#import code
#code.interact(local=locals())

if self.input_path is None:
raise(Exception(f"input_path is required for lookups, but received [{self.input_path}]"))

files = glob.iglob(os.path.join(self.input_path, 'lookups', '*.csv'))
#we want to copy all *.mlmodel files as well, not just csvs
files = list(glob.iglob(os.path.join(self.input_path, 'lookups', '*.csv'))) + list(glob.iglob(os.path.join(self.input_path, 'lookups', '*.mlmodel')))
lookup_folder = self.output_path/"lookups"
if lookup_folder.exists():
# Remove it since we want to remove any previous lookups that are not
# currently part of the app
if lookup_folder.is_dir():
shutil.rmtree(lookup_folder)
else:
#it's a file, but there should not be a file called lookups
lookup_folder.unlink()

# Make the new folder for the lookups
Expand All @@ -146,6 +147,8 @@ def writeObjects(self, objects: list, type: SecurityContentType = None) -> None:
if lookup_path.is_file():
lookup_target_path = self.output_path/"lookups"/lookup_path.name
shutil.copy(lookup_path, lookup_target_path)
else:
raise(Exception(f"Error copying lookup/mlmodel file. Path {lookup_path} does not exist or is not a file."))

elif type == SecurityContentType.macros:
ConfWriter.writeConfFile(self.output_path/'default/macros.conf',
Expand Down
45 changes: 42 additions & 3 deletions contentctl/output/finding_report_writer.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,37 @@ class FindingReportObject():
@staticmethod
def writeFindingReport(detection : SSADetection) -> None:

if detection.tags.confidence < 33:
detection.tags.confidence_id = 1
elif detection.tags.confidence < 66:
detection.tags.confidence_id = 2
else:
detection.tags.confidence_id = 3

if detection.tags.impact < 20:
detection.tags.impact_id = 1
elif detection.tags.impact < 40:
detection.tags.impact_id = 2
elif detection.tags.impact < 60:
detection.tags.impact_id = 3
elif detection.tags.impact < 80:
detection.tags.impact_id = 4
else:
detection.tags.impact_id = 5

detection.tags.kill_chain_phases_id = dict()
for kill_chain_phase in detection.tags.kill_chain_phases:
detection.tags.kill_chain_phases_id[kill_chain_phase] = SES_KILL_CHAIN_MAPPINGS[kill_chain_phase]

kill_chain_phase_str = "["
i = 0
for kill_chain_phase in detection.tags.kill_chain_phases_id.keys():
kill_chain_phase_str = kill_chain_phase_str + '{"phase": "' + kill_chain_phase + '", "phase_id": ' + str(detection.tags.kill_chain_phases_id[kill_chain_phase]) + "}"
if not i == (len(detection.tags.kill_chain_phases_id.keys()) - 1):
kill_chain_phase_str = kill_chain_phase_str + ', '
i = i + 1
kill_chain_phase_str = kill_chain_phase_str + ']'
detection.tags.kill_chain_phases_str = kill_chain_phase_str

if detection.tags.risk_score < 20:
detection.tags.risk_level_id = 0
Expand All @@ -27,15 +58,23 @@ def writeFindingReport(detection : SSADetection) -> None:
detection.tags.risk_level_id = 4
detection.tags.risk_level = "Critical"

evidence_str = "create_map("
evidence_str = "{"
for i in range(len(detection.tags.observable)):
evidence_str = evidence_str + '"' + detection.tags.observable[i]["name"] + '", ' + detection.tags.observable[i]["name"].replace(".", "_")
evidence_str = evidence_str + '"' + detection.tags.observable[i]["name"] + '": ' + detection.tags.observable[i]["name"].replace(".", "_")
if not i == (len(detection.tags.observable) - 1):
evidence_str = evidence_str + ', '
evidence_str = evidence_str + ')'
evidence_str = evidence_str + '}'

detection.tags.evidence_str = evidence_str

analytics_story_str = "["
for i in range(len(detection.tags.analytic_story)):
analytics_story_str = analytics_story_str + '"' + detection.tags.analytic_story[i] + '"'
if not i == (len(detection.tags.analytic_story) - 1):
analytics_story_str = analytics_story_str + ', '
analytics_story_str = analytics_story_str + ']'
detection.tags.analytics_story_str = analytics_story_str

if "actor.user.name" in detection.tags.required_fields:
actor_user_name = "actor_user_name"
else:
Expand Down

0 comments on commit 00e9435

Please sign in to comment.