From 7e0e62c5fbb903e2738ca0f1c8d04c4ac157145e Mon Sep 17 00:00:00 2001 From: Colton Myers Date: Wed, 22 Mar 2017 15:55:56 -0600 Subject: [PATCH 1/9] Add deb_packages query https://github.com/hubblestack/hubblestack_data/pull/10 --- hubblestack_nebula/hubblestack_nebula_queries.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/hubblestack_nebula/hubblestack_nebula_queries.yaml b/hubblestack_nebula/hubblestack_nebula_queries.yaml index 4f2e950..efed2d6 100644 --- a/hubblestack_nebula/hubblestack_nebula_queries.yaml +++ b/hubblestack_nebula/hubblestack_nebula_queries.yaml @@ -11,6 +11,8 @@ hour: day: - query_name: rpm_packages query: SELECT rpm.name, rpm.version, rpm.release, rpm.source AS package_source, rpm.size, rpm.sha1, rpm.arch, t.iso_8601 FROM rpm_packages AS rpm JOIN time AS t; + - query_name: deb_packages + query: SELECT deb.name, deb.version, deb.revision, deb.source AS package_source, deb.size, deb.arch, t.iso_8601 FROM deb_packages AS deb JOIN time AS t; - query_name: os_info query: select * from os_version; - query_name: interface_addresses From 8c1af5dd283faf3f3973f5fa78ed58efc9c502cd Mon Sep 17 00:00:00 2001 From: Colton Myers Date: Mon, 27 Mar 2017 14:52:10 -0600 Subject: [PATCH 2/9] Add win_pulsar as a module Since the FIM dependency is in the operating system itself and is completely independent of the running salt process, we can just run this as a module. The key is the run period in the scheduler must match the interval in the config. --- _modules/win_pulsar.py | 553 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 553 insertions(+) create mode 100644 _modules/win_pulsar.py diff --git a/_modules/win_pulsar.py b/_modules/win_pulsar.py new file mode 100644 index 0000000..9b8e9b9 --- /dev/null +++ b/_modules/win_pulsar.py @@ -0,0 +1,553 @@ +#win_notify +''' +This will setup your computer to enable auditing for specified folders inputted into a yaml file. It will +then scan the event log for changes to those folders and report when it finds one. +''' + + +from __future__ import absolute_import + +import collections +import datetime +import fnmatch +import logging +import os +import glob +import yaml +import re + +import salt.ext.six +import salt.loader + +log = logging.getLogger(__name__) +DEFAULT_MASK = ['ExecuteFile', 'Write', 'Delete', 'DeleteSubdirectoriesAndFiles', 'ChangePermissions', + 'TakeOwnership'] #ExecuteFile Is really chatty +DEFAULT_TYPE = 'all' + +__virtualname__ = 'pulsar' +CONFIG = None +CONFIG_STALENESS = 0 + + +def __virtual__(): + if not salt.utils.is_windows(): + return False, 'This module only works on windows' + return __virtualname__ + + +def process(configfile='salt://hubblestack_pulsar/hubblestack_pulsar_config.yaml', + verbose=False): + ''' + Watch the configured files + + Example yaml config on fileserver (targeted by configfile option) + + .. code-block:: yaml + + C:\Users: {} + C:\Windows: + mask: + - Write + - Delete + - DeleteSubdirectoriesAndFiles + - ChangePermissions + - TakeOwnership + exclude: + - C:\Windows\System32 + C:\temp: {} + win_notify_interval: 30 # MUST be the same as interval in schedule + return: splunk_pulsar_return + batch: True + + Note that if `batch: True`, the configured returner must support receiving + a list of events, rather than single one-off events. + + The mask list can contain the following events (the default mask is create, delete, and modify): + + 1. ExecuteFile - Traverse folder / execute file + 2. ReadData - List folder / read data + 3. ReadAttributes - Read attributes of object + 4. 
ReadExtendedAttributes - Read extended attributes of object + 5. CreateFiles - Create files / write data + 6. AppendData - Create folders / append data + 7. WriteAttributes - Write attributes of object + 8. WriteExtendedAttributes - Write extended attributes of object + 9. DeleteSubdirectoriesAndFiles - Delete subfolders and files + 10. Delete - Delete an object + 11. ReadPermissions - Read Permissions of an object + 12. ChangePermissions - Change permissions of an object + 13. TakeOwnership - Take ownership of an object + 14. Write - Combination of 5, 6, 7, 8 + 15. Read - Combination of 2, 3, 4, 11 + 16. ReadAndExecute - Combination of 1, 2, 3, 4, 11 + 17. Modify - Combination of 1, 2, 3, 4, 5, 6, 7, 8, 10, 11 + + *If you want to monitor everything (A.K.A. Full Control) then you want options 9, 12, 13, 17 + + wtype: + Type of Audit to watch for: + 1. Success - Only report successful attempts + 2. Fail - Only report failed attempts + 3. All - Report both Success and Fail + exclude: + Exclude directories or files from triggering events in the watched directory. + Note that directory excludes should *not* have a trailing slash. + + :return: + ''' + config = __opts__.get('pulsar', {}) + if isinstance(configfile, list): + config['paths'] = configfile + else: + config['paths'] = [configfile] + config['verbose'] = verbose + global CONFIG_STALENESS + global CONFIG + if config.get('verbose'): + log.debug('Pulsar module called.') + log.debug('Pulsar module config from pillar:\n{0}'.format(config)) + ret = [] + sys_check = 0 + + # Get config(s) from filesystem if we don't have them already + update_acls= False + if CONFIG and CONFIG_STALENESS < config.get('refresh_frequency', 60): + CONFIG_STALENESS += 1 + CONFIG.update(config) + CONFIG['verbose'] = config.get('verbose') + config = CONFIG + else: + if config.get('verbose'): + log.debug('No cached config found for pulsar, retrieving fresh from fileserver.') + new_config = config + if isinstance(config.get('paths'), list): + for path in config['paths']: + if 'salt://' in path: + path = __salt__['cp.cache_file'](path) + if os.path.isfile(path): + with open(path, 'r') as f: + new_config = _dict_update(new_config, + yaml.safe_load(f), + recursive_update=True, + merge_lists=True) + else: + log.error('Path {0} does not exist or is not a file'.format(path)) + else: + log.error('Pulsar beacon \'paths\' data improperly formatted. 
Should be list of paths') + update_acls = True + + new_config.update(config) + config = new_config + CONFIG_STALENESS = 0 + CONFIG = config + + if config.get('verbose'): + log.debug('Pulsar beacon config (compiled from config list):\n{0}'.format(config)) + + # Validate Global Auditing with Auditpol + global_check = __salt__['cmd.run']('auditpol /get /category:"Object Access" /r | find "File System"', + python_shell=True) + if global_check: + if not 'Success and Failure' in global_check: + __salt__['cmd.run']('auditpol /set /subcategory:"file system" /success:enable /failure:enable', + python_shell=True) + sys_check = 1 + + # Validate ACLs on watched folders/files and add if needed + if update_acls: + for path in config: + if path == 'win_notify_interval' or path == 'return' or path == 'batch' or path == 'checksum' or path == 'stats': + continue + if not os.path.exists(path): + continue + if isinstance(config[path], dict): + mask = config[path].get('mask', DEFAULT_MASK) + wtype = config[path].get('wtype', DEFAULT_TYPE) + recurse = config[path].get('recurse', True) + if isinstance(mask, list) and isinstance(wtype, str) and isinstance(recurse, bool): + success = _check_acl(path, mask, wtype, recurse) + if not success: + confirm = _add_acl(path, mask, wtype, recurse) + sys_check = 1 + if config[path].get('exclude', False): + for exclude in config[path]['exclude']: + if not isinstance(exclude, str): + continue + if '*' in exclude: + for wildcard_exclude in glob.iglob(exclude): + _remove_acl(wildcard_exclude) + else: + _remove_acl(exclude) + + # Read in events since last call. Time_frame in minutes + ret = _pull_events(config['win_notify_interval'], config.get('checksum', 'sha256')) + if sys_check == 1: + log.error('The ACLs were not setup correctly, or global auditing is not enabled. 
This could have ' + 'been remedied, but GP might need to be changed') + + if __salt__['config.get']('hubblestack:pulsar:maintenance', False): + # We're in maintenance mode, throw away findings + ret = [] + + # Handle excludes + new_ret = [] + for r in ret: + _append = True + config_found = False + for path in config: + if not r['Object Name'].startswith(path): + continue + config_found = True + if isinstance(config[path], dict) and 'exclude' in config[path]: + for exclude in config[path]['exclude']: + if isinstance(exclude, dict) and exclude.values()[0].get('regex', False): + if re.search(exclude.keys()[0], r['Object Name']): + _append = False + else: + if fnmatch.fnmatch(r['Object Name'], exclude): + _append = False + elif r['Object Name'].startswith(exclude): + # Startswith is well and good, but it needs to be a parent directory or it doesn't count + _, _, leftover = r['Object Name'].partition(exclude) + if leftover.startswith(os.sep): + _append = False + if _append and config_found: + new_ret.append(r) + ret = new_ret + + return ret + + +def _return(args, returner): + __returners__ = salt.loader.returners(__opts__, __salt__) + __returners__[returner](*args) + + +def _check_acl(path, mask, wtype, recurse): + audit_dict = {} + success = True + if 'all' in wtype.lower(): + wtype = ['Success', 'Failure'] + else: + wtype = [wtype] + + audit_acl = __salt__['cmd.run']('(Get-Acl {0} -Audit).Audit | fl'.format(path), shell='powershell', + python_shell=True) + if not audit_acl: + success = False + return success + audit_acl = audit_acl.replace('\r','').split('\n') + newlines= [] + count = 0 + for line in audit_acl: + if ':' not in line and count > 0: + newlines[count-1] += line.strip() + else: + newlines.append(line) + count += 1 + for line in newlines: + if line: + if ':' in line: + d = line.split(':') + audit_dict[d[0].strip()] = d[1].strip() + for item in mask: + if item not in audit_dict['FileSystemRights']: + success = False + for item in wtype: + if item not in audit_dict['AuditFlags']: + success = False + if 'Everyone' not in audit_dict['IdentityReference']: + success = False + if recurse: + if 'ContainerInherit' and 'ObjectInherit' not in audit_dict['InheritanceFlags']: + success = False + else: + if 'None' not in audit_dict['InheritanceFlags']: + success = False + if 'None' not in audit_dict['PropagationFlags']: + success = False + return success + + +def _add_acl(path, mask, wtype, recurse): + ''' + This will apply the needed audit ALC to the folder in question using PowerShells access to the .net library and + WMI with the code below: + $path = "C:\Path\here" + $path = path.replace("\","\\") + $user = "Everyone" + + $SD = ([WMIClass] "Win32_SecurityDescriptor").CreateInstance() + $Trustee = ([WMIClass] "Win32_Trustee").CreateInstance() + + # One for Success and other for Failure events + $ace1 = ([WMIClass] "Win32_ace").CreateInstance() + $ace2 = ([WMIClass] "Win32_ace").CreateInstance() + + $SID = (new-object security.principal.ntaccount $user).translate([security.principal.securityidentifier]) + + [byte[]] $SIDArray = ,0 * $SID.BinaryLength + $SID.GetBinaryForm($SIDArray,0) + + $Trustee.Name = $user + $Trustee.SID = $SIDArray + + # Auditing + $ace2.AccessMask = 2032127 # [System.Security.AccessControl.FileSystemRights]::FullControl + $ace2.AceFlags = 131 # FAILED_ACCESS_ACE_FLAG (128), CONTAINER_INHERIT_ACE (2), OBJECT_INHERIT_ACE (1) + $ace2.AceType =2 # Audit + $ace2.Trustee = $Trustee + + $SD.SACL += $ace1.psobject.baseobject + $SD.SACL += $ace2.psobject.baseobject + 
$SD.ControlFlags=16 + $wPrivilege = Get-WmiObject Win32_LogicalFileSecuritySetting -filter "path='$path'" -EnableAllPrivileges + $wPrivilege.setsecuritydescriptor($SD) + + The ACE accessmask map key is below: + + 1. ReadData - 1 + 2. CreateFiles - 2 + 3. AppendData - 4 + 4. ReadExtendedAttributes - 8 + 5. WriteExtendedAttributes - 16 + 6. ExecuteFile - 32 + 7. DeleteSubdirectoriesAndFiles - 64 + 8. ReadAttributes - 128 + 9. WriteAttributes - 256 + 10. Write - 278 (Combo of CreateFiles, AppendData, WriteAttributes, WriteExtendedAttributes) + 11. Delete - 65536 + 12. ReadPermissions - 131072 + 13. ChangePermissions - 262144 + 14. TakeOwnership - 524288 + 15. Read - 131209 (Combo of ReadData, ReadAttributes, ReadExtendedAttributes, ReadPermissions) + 16. ReadAndExecute - 131241 (Combo of ExecuteFile, ReadData, ReadAttributes, ReadExtendedAttributes, + ReadPermissions) + 17. Modify - 197055 (Combo of ExecuteFile, ReadData, ReadAttributes, ReadExtendedAttributes, + CreateFiles, AppendData, WriteAttributes, WriteExtendedAttributes, + Delete, ReadPermissions) + The Ace flags map key is below: + 1. ObjectInherit - 1 + 2. ContainerInherit - 2 + 3. NoPorpagateInherit - 4 + 4. SuccessfulAccess - 64 (Used with System-audit to generate audit messages for successful access + attempts) + 5. FailedAccess - 128 (Used with System-audit to generate audit messages for Failed access attempts) + + The Ace type map key is below: + 1. Access Allowed - 0 + 2. Access Denied - 1 + 3. Audit - 2 + + If you want multiple values you just add them together to get a desired outcome: + ACCESSMASK of file_add_file, file_add_subdirectory, delete, file_delete_child, write_dac, write_owner: + 852038 = 2 + 4 + 65536 + 64 + 262144i + + FLAGS of ObjectInherit, ContainerInherit, SuccessfullAccess, FailedAccess: + 195 = 1 + 2 + 64 + 128 + + This calls The function _get_ace_translation() to return the number it needs to set. + :return: + ''' + path = path.replace('\\','\\\\') + audit_user = 'Everyone' + audit_rules = ','.join(mask) + if recurse: + inherit_type = 'ContainerInherit,ObjectInherit' + if 'all' in wtype: + audit_type = 'Success,Failure' + else: + audit_type = wtype + + access_mask = _get_ace_translation(audit_rules) + flags = _get_ace_translation(inherit_type, audit_type) + + __salt__['cmd.run']('$SD = ([WMIClass] "Win32_SecurityDescriptor").CreateInstance();' + '$Trustee = ([WMIClass] "Win32_Trustee").CreateInstance();' + '$ace = ([WMIClass] "Win32_ace").CreateInstance();' + '$SID = (new-object System.Security.Principal.NTAccount {0}).translate([security.principal.securityidentifier]);' + '[byte[]] $SIDArray = ,0 * $SID.BinaryLength;' + '$SID.GetBinaryForm($SIDArray,0);' + '$Trustee.Name = "{0}";' + '$Trustee.SID = $SIDArray;' + '$ace.AccessMask = {1};' + '$ace.AceFlags = {2};' + '$ace.AceType = 2;' + '$ace.Trustee = $Trustee;' + '$SD.SACL += $ace.psobject.baseobject;' + '$SD.ControlFlags=16;' + '$wPrivilege = Get-WmiObject Win32_LogicalFileSecuritySetting -filter "path=\'{3}\'" -EnableAllPrivileges;' + '$wPrivilege.setsecuritydescriptor($SD)'.format(audit_user, access_mask, flags, path), + shell='powershell', python_shell=True) + return 'ACL set up for {0} - with {1} user, {2} access mask, {3} flags'.format(path, audit_user, access_mask, flags) + + +def _remove_acl(path): + ''' + This will remove a currently configured ACL on the folder submited as item. This will be needed when you have + a sub file or folder that you want to explicitly ignore within a folder being monitored. 
You need to pass in the + full folder path name for this to work properly + :param item: + :return: + ''' + path = path.replace('\\','\\\\') + __salt__['cmd.run']('$SD = ([WMIClass] "Win32_SecurityDescriptor").CreateInstance();' + '$SD.ControlFlags=16;' + '$wPrivilege = Get-WmiObject Win32_LogicalFileSecuritySetting -filter "path=\'{0}\'" -EnableAllPrivileges;' + '$wPrivilege.setsecuritydescriptor($SD)'.format(path), shell='powershell', python_shell=True) + + + +def _pull_events(time_frame, checksum): + events_list = [] + events_output = __salt__['cmd.run_stdout']('mode con:cols=1000 lines=1000; Get-EventLog -LogName Security ' + '-After ((Get-Date).AddSeconds(-{0})) -InstanceId 4663 | fl'.format( + time_frame), shell='powershell', python_shell=True) + events = events_output.split('\r\n\r\n') + for event in events: + if event: + event_dict = {} + items = event.split('\r\n') + for item in items: + if ':' in item: + item.replace('\t', '') + k, v = item.split(':', 1) + event_dict[k.strip()] = v.strip() + event_dict['Accesses'] = _get_access_translation(event_dict['Accesses']) + event_dict['Hash'] = _get_item_hash(event_dict['Object Name'], checksum) + #needs hostname, checksum, filepath, time stamp, action taken + # Generate the dictionary without a dictionary comp, for py2.6 + tmpdict = {} + for k in ('EntryType', 'Accesses', 'TimeGenerated', 'Object Name', 'Hash'): + tmpdict[k] = event_dict[k] + events_list.append(tmpdict) + return events_list + + +def _get_ace_translation(value, *args): + ''' + This will take the ace name and return the total number accosciated to all the ace accessmasks and flags + Below you will find all the names accosiated to the numbers: + + ''' + ret = 0 + ace_dict = {'ReadData': 1, 'CreateFiles': 2, 'AppendData': 4, 'ReadExtendedAttributes': 8, + 'WriteExtendedAttributes': 16, 'ExecuteFile': 32, 'DeleteSubdirectoriesAndFiles': 64, + 'ReadAttributes': 128, 'WriteAttributes': 256, 'Write': 278, 'Delete': 65536, 'ReadPermissions': 131072, + 'ChangePermissions': 262144, 'TakeOwnership': 524288, 'Read': 131209, 'ReadAndExecute': 131241, + 'Modify': 197055, 'ObjectInherit': 1, 'ContainerInherit': 2, 'NoPropagateInherit': 4, 'Success': 64, + 'Failure': 128} + aces = value.split(',') + for arg in args: + aces.extend(arg.split(',')) + + for ace in aces: + if ace in ace_dict: + ret += ace_dict[ace] + return ret + + +def _get_access_translation(access): + ''' + This will take the access number within the event, and return back a meaningful translation. + These are all the translations of accesses: + 1537 DELETE - used to grant or deny delete access. + 1538 READ_CONTROL - used to grant or deny read access to the security descriptor and owner. + 1539 WRITE_DAC - used to grant or deny write access to the discretionary ACL. + 1540 WRITE_OWNER - used to assign a write owner. + 1541 SYNCHRONIZE - used to synchronize access and to allow a process to wait for an object to enter the signaled state. 
+ 1542 ACCESS_SYS_SEC + 4416 ReadData + 4417 WriteData + 4418 AppendData + 4419 ReadEA (Extended Attribute) + 4420 WriteEA (Extended Attribute) + 4421 Execute/Traverse + 4423 ReadAttributes + 4424 WriteAttributes + 4432 Query Key Value + 4433 Set Key Value + 4434 Create Sub Key + 4435 Enumerate sub-keys + 4436 Notify about changes to keys + 4437 Create Link + 6931 Print + :param access: + :return access_return: + ''' + access_dict = {'1537': 'Delete', '1538': 'Read Control', '1539': 'Write DAC', '1540': 'Write Owner', + '1541': 'Synchronize', '1542': 'Access Sys Sec', '4416': 'Read Data', '4417': 'Write Data', + '4418': 'Append Data', '4419': 'Read EA', '4420': 'Write EA', '4421': 'Execute/Traverse', + '4423': 'Read Attributes', '4424': 'Write Attributes', '4432': 'Query Key Value', + '4433': 'Set Key Value', '4434': 'Create Sub Key', '4435': 'Enumerate Sub-Keys', + '4436': 'Notify About Changes to Keys', '4437': 'Create Link', '6931': 'Print', } + + access = access.replace('%%', '').strip() + ret_str = access_dict.get(access, False) + if ret_str: + return ret_str + else: + return 'Access number {0} is not a recognized access code.'.format(access) + + +def _get_item_hash(item, checksum): + item = item.replace('\\\\','\\') + test = os.path.isfile(item) + if os.path.isfile(item): + try: + hashy = __salt__['file.get_hash']('{0}'.format(item), form=checksum) + return hashy + except: + return '' + else: + return 'Item is a directory' + + +def _dict_update(dest, upd, recursive_update=True, merge_lists=False): + ''' + Recursive version of the default dict.update + + Merges upd recursively into dest + + If recursive_update=False, will use the classic dict.update, or fall back + on a manual merge (helpful for non-dict types like FunctionWrapper) + + If merge_lists=True, will aggregate list object types instead of replace. + This behavior is only activated when recursive_update=True. By default + merge_lists=False. 
+ ''' + if (not isinstance(dest, collections.Mapping)) \ + or (not isinstance(upd, collections.Mapping)): + raise TypeError('Cannot update using non-dict types in dictupdate.update()') + updkeys = list(upd.keys()) + if not set(list(dest.keys())) & set(updkeys): + recursive_update = False + if recursive_update: + for key in updkeys: + val = upd[key] + try: + dest_subkey = dest.get(key, None) + except AttributeError: + dest_subkey = None + if isinstance(dest_subkey, collections.Mapping) \ + and isinstance(val, collections.Mapping): + ret = update(dest_subkey, val, merge_lists=merge_lists) + dest[key] = ret + elif isinstance(dest_subkey, list) \ + and isinstance(val, list): + if merge_lists: + dest[key] = dest.get(key, []) + val + else: + dest[key] = upd[key] + else: + dest[key] = upd[key] + return dest + else: + try: + for k in upd.keys(): + dest[k] = upd[k] + except AttributeError: + # this mapping is not a dict + for k in upd: + dest[k] = upd[k] + return dest From 2ceb882163a606430d7aa5308d4cc768d8e51b67 Mon Sep 17 00:00:00 2001 From: Colton Myers Date: Mon, 27 Mar 2017 14:57:43 -0600 Subject: [PATCH 3/9] Fix pillar destination --- _modules/win_pulsar.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_modules/win_pulsar.py b/_modules/win_pulsar.py index 9b8e9b9..fac85ff 100644 --- a/_modules/win_pulsar.py +++ b/_modules/win_pulsar.py @@ -95,7 +95,7 @@ def process(configfile='salt://hubblestack_pulsar/hubblestack_pulsar_config.yaml :return: ''' - config = __opts__.get('pulsar', {}) + config = __opts__.get('hubblestack_pulsar', {}) if isinstance(configfile, list): config['paths'] = configfile else: From 40718d900615f13d650178c226792137865118d2 Mon Sep 17 00:00:00 2001 From: Colton Myers Date: Mon, 27 Mar 2017 14:58:55 -0600 Subject: [PATCH 4/9] Actually search pillar --- _modules/win_pulsar.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/_modules/win_pulsar.py b/_modules/win_pulsar.py index fac85ff..c9c0c08 100644 --- a/_modules/win_pulsar.py +++ b/_modules/win_pulsar.py @@ -95,7 +95,7 @@ def process(configfile='salt://hubblestack_pulsar/hubblestack_pulsar_config.yaml :return: ''' - config = __opts__.get('hubblestack_pulsar', {}) + config = __salt__['config.get']('hubblestack_pulsar', {}) if isinstance(configfile, list): config['paths'] = configfile else: @@ -218,11 +218,6 @@ def process(configfile='salt://hubblestack_pulsar/hubblestack_pulsar_config.yaml return ret -def _return(args, returner): - __returners__ = salt.loader.returners(__opts__, __salt__) - __returners__[returner](*args) - - def _check_acl(path, mask, wtype, recurse): audit_dict = {} success = True From dd27a0ba6c3c7643a5c476607cfa64375337f07d Mon Sep 17 00:00:00 2001 From: Colton Myers Date: Mon, 27 Mar 2017 15:02:56 -0600 Subject: [PATCH 5/9] Add version to win_pulsar.py --- _modules/win_pulsar.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/_modules/win_pulsar.py b/_modules/win_pulsar.py index c9c0c08..922c759 100644 --- a/_modules/win_pulsar.py +++ b/_modules/win_pulsar.py @@ -28,6 +28,8 @@ CONFIG = None CONFIG_STALENESS = 0 +__version__ = 'v2017.3.2' + def __virtual__(): if not salt.utils.is_windows(): From 56f0fcf0123cb7044bafe2a1cc4a26c396ef96c2 Mon Sep 17 00:00:00 2001 From: Chandler Newby Date: Wed, 29 Mar 2017 16:49:36 -0600 Subject: [PATCH 6/9] Extract time from events that contain them Only accept times within the past year. If something that isn't an epoch time gets into the time field, it should just be ignored. 
--- _returners/splunk_nebula_return.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/_returners/splunk_nebula_return.py b/_returners/splunk_nebula_return.py index 1e69983..303096d 100644 --- a/_returners/splunk_nebula_return.py +++ b/_returners/splunk_nebula_return.py @@ -46,6 +46,7 @@ import requests import json import time +from datetime import datetime import logging @@ -125,7 +126,17 @@ def returner(ret): payload.update({'index': opts['index']}) payload.update({'sourcetype': opts['sourcetype']}) payload.update({'event': event}) - hec.batchEvent(payload) + + # If the osquery query includes a field called 'time' it will be checked. + # If it's within the last year, it will be used as the eventtime. + event_time = query_result.get('time', '') + try: + if (datetime.fromtimestamp(time.time()) - datetime.fromtimestamp(float(event_time))).days > 365: + event_time = '' + except: + event_time = '' + finally: + hec.batchEvent(payload, eventtime=event_time) hec.flushBatch() return From cc58f9630965103e0a0d2f818f4e30787924c56e Mon Sep 17 00:00:00 2001 From: Colton Myers Date: Mon, 3 Apr 2017 10:41:37 -0600 Subject: [PATCH 7/9] Whoops (fix pulsar loading) --- _beacons/pulsar.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_beacons/pulsar.py b/_beacons/pulsar.py index 065ca7e..f051c95 100644 --- a/_beacons/pulsar.py +++ b/_beacons/pulsar.py @@ -50,7 +50,7 @@ def __virtual__(): if salt.utils.is_windows(): return False, 'This module only works on Linux' - return False + return True def _get_mask(mask): From ed6aaac2f8b99b97db5a07276648c71fa125cd69 Mon Sep 17 00:00:00 2001 From: Colton Myers Date: Mon, 3 Apr 2017 10:45:22 -0600 Subject: [PATCH 8/9] Rev to v2017.4.1 --- README.md | 2 +- _beacons/pulsar.py | 2 +- _beacons/win_pulsar.py | 2 +- _modules/hubble.py | 2 +- _modules/nebula_osquery.py | 2 +- _modules/win_pulsar.py | 2 +- _returners/slack_pulsar_returner.py | 2 +- _returners/splunk_nebula_return.py | 2 +- _returners/splunk_nova_return.py | 2 +- _returners/splunk_pulsar_return.py | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 772130e..cce5a7e 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ fileserver_backend: - git gitfs_remotes: - https://github.com/hubblestack/hubble-salt.git: - - base: v2017.3.2 + - base: v2017.4.1 - root: '' ``` diff --git a/_beacons/pulsar.py b/_beacons/pulsar.py index f051c95..9d9954d 100644 --- a/_beacons/pulsar.py +++ b/_beacons/pulsar.py @@ -39,7 +39,7 @@ DEFAULT_MASK = None __virtualname__ = 'pulsar' -__version__ = 'v2017.3.2' +__version__ = 'v2017.4.1' CONFIG = None CONFIG_STALENESS = 0 diff --git a/_beacons/win_pulsar.py b/_beacons/win_pulsar.py index fad8cf2..73b11e4 100644 --- a/_beacons/win_pulsar.py +++ b/_beacons/win_pulsar.py @@ -26,7 +26,7 @@ DEFAULT_TYPE = 'all' __virtualname__ = 'pulsar' -__version__ = 'v2017.3.2' +__version__ = 'v2017.4.1' CONFIG = None CONFIG_STALENESS = 0 diff --git a/_modules/hubble.py b/_modules/hubble.py index 6bd6885..08d88a4 100644 --- a/_modules/hubble.py +++ b/_modules/hubble.py @@ -35,7 +35,7 @@ from salt.loader import LazyLoader __nova__ = {} -__version__ = 'v2017.3.2' +__version__ = 'v2017.4.1' def audit(configs=None, diff --git a/_modules/nebula_osquery.py b/_modules/nebula_osquery.py index de291b2..999ba10 100644 --- a/_modules/nebula_osquery.py +++ b/_modules/nebula_osquery.py @@ -40,7 +40,7 @@ log = logging.getLogger(__name__) -__version__ = 'v2017.3.2' +__version__ = 'v2017.4.1' __virtualname__ = 'nebula' diff 
--git a/_modules/win_pulsar.py b/_modules/win_pulsar.py index 922c759..f567404 100644 --- a/_modules/win_pulsar.py +++ b/_modules/win_pulsar.py @@ -28,7 +28,7 @@ CONFIG = None CONFIG_STALENESS = 0 -__version__ = 'v2017.3.2' +__version__ = 'v2017.4.1' def __virtual__(): diff --git a/_returners/slack_pulsar_returner.py b/_returners/slack_pulsar_returner.py index 545564f..8647366 100644 --- a/_returners/slack_pulsar_returner.py +++ b/_returners/slack_pulsar_returner.py @@ -69,7 +69,7 @@ # Import Salt Libs import salt.returners -__version__ = 'v2017.3.2' +__version__ = 'v2017.4.1' log = logging.getLogger(__name__) diff --git a/_returners/splunk_nebula_return.py b/_returners/splunk_nebula_return.py index 303096d..1876b9a 100644 --- a/_returners/splunk_nebula_return.py +++ b/_returners/splunk_nebula_return.py @@ -50,7 +50,7 @@ import logging -__version__ = 'v2017.3.2' +__version__ = 'v2017.4.1' _max_content_bytes = 100000 http_event_collector_SSL_verify = False diff --git a/_returners/splunk_nova_return.py b/_returners/splunk_nova_return.py index bf104ec..3104787 100644 --- a/_returners/splunk_nova_return.py +++ b/_returners/splunk_nova_return.py @@ -49,7 +49,7 @@ import logging -__version__ = 'v2017.3.2' +__version__ = 'v2017.4.1' _max_content_bytes = 100000 http_event_collector_SSL_verify = False diff --git a/_returners/splunk_pulsar_return.py b/_returners/splunk_pulsar_return.py index 7987cf8..7f4ff3c 100644 --- a/_returners/splunk_pulsar_return.py +++ b/_returners/splunk_pulsar_return.py @@ -52,7 +52,7 @@ import logging -__version__ = 'v2017.3.2' +__version__ = 'v2017.4.1' _max_content_bytes = 100000 http_event_collector_SSL_verify = False From b5cb9308895d5b9660caf6692c60c83b2bfa94fc Mon Sep 17 00:00:00 2001 From: Colton Myers Date: Mon, 3 Apr 2017 10:47:40 -0600 Subject: [PATCH 9/9] Add latest queries from upstream hubblestack_data --- hubblestack_nebula/hubblestack_nebula_queries.yaml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/hubblestack_nebula/hubblestack_nebula_queries.yaml b/hubblestack_nebula/hubblestack_nebula_queries.yaml index efed2d6..cd08da3 100644 --- a/hubblestack_nebula/hubblestack_nebula_queries.yaml +++ b/hubblestack_nebula/hubblestack_nebula_queries.yaml @@ -4,7 +4,9 @@ fifteen_min: - query_name: established_outbound query: SELECT t.iso_8601 AS _time, pos.family, h.*, ltrim(pos.local_address, ':f') AS src_connection_ip, pos.local_port AS src_connection_port, pos.remote_port AS dest_connection_port, ltrim(remote_address, ':f') AS dest_connection_ip, name, p.path AS file_path, cmdline, pos.protocol FROM process_open_sockets AS pos JOIN processes AS p ON p.pid=pos.pid LEFT JOIN time AS t LEFT JOIN (SELECT * FROM listening_ports) AS lp ON lp.port=pos.local_port AND lp.protocol=pos.protocol LEFT JOIN hash AS h ON h.path=p.path WHERE NOT remote_address='' AND NOT remote_address='::' AND NOT remote_address='0.0.0.0' AND NOT remote_address='127.0.0.1' AND port is NULL; - query_name: listening_procs - query: SELECT t.iso_8601 AS _time, h.md5 AS md5, p.pid AS process_id, name AS process, ltrim(address, ':f') AS address, port AS dest_port, p.path AS file_path, cmdline, p.on_disk, root, parent, CASE lp.protocol WHEN 6 THEN 'tcp' WHEN 17 THEN 'udp' ELSE lp.protocol END as transport FROM listening_ports AS lp LEFT JOIN processes AS p ON lp.pid=p.pid LEFT JOIN time AS t LEFT JOIN hash AS h ON h.path=p.path; + query: SELECT t.iso_8601 AS _time, h.md5 AS md5, p.pid AS process_id, name AS process, ltrim(address, ':f') AS address, port AS dest_port, p.path AS 
file_path, cmdline, p.on_disk, root, parent, CASE lp.protocol WHEN 6 THEN 'tcp' WHEN 17 THEN 'udp' ELSE lp.protocol END as transport FROM listening_ports AS lp LEFT JOIN processes AS p ON lp.pid=p.pid LEFT JOIN time AS t LEFT JOIN hash AS h ON h.path=p.path; + - query_name: shell_history + query: SELECT uid, gid, username, groupname, command, time, history_file FROM users JOIN groups USING (gid) JOIN shell_history USING (uid) WHERE time > strftime('%s', 'now', '-15 minutes'); hour: - query_name: crontab query: SELECT c.*,t.iso_8601 AS _time FROM crontab AS c JOIN time AS t; @@ -21,3 +23,5 @@ day: query: SELECT total_seconds AS uptime FROM uptime; - query_name: suid_binaries query: SELECT sb.*, t.iso_8601 AS _time, h.sha1, h.sha256 FROM suid_bin AS sb JOIN time AS t LEFT JOIN hash AS h ON sb.path=h.path; + - query_name: ssh_key_files + query: SELECT u.username AS user, usk.uid, g.groupname AS groupname, f.gid, usk.path AS file_path, usk.encrypted, f.mode AS file_acl, f.device, f.size AS file_size, f.atime AS file_access_time, f.mtime AS file_modify_time, f.ctime AS file_change_time, h.md5, h.sha1, h.sha256 FROM user_ssh_keys AS usk LEFT JOIN hash AS h ON h.path=usk.path LEFT JOIN file AS f ON f.path=usk.path LEFT JOIN users AS u ON u.uid=usk.uid LEFT JOIN groups AS g ON g.gid=f.gid ;
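
The new shell_history, ssh_key_files and deb_packages queries can be smoke-tested locally before rolling the YAML out. A minimal sketch, assuming osquery is installed and the osqueryi binary is on the PATH (table availability depends on the osquery version and platform):

    import json
    import subprocess

    # Trimmed-down version of the shell_history query for a quick local check.
    SQL = ("SELECT uid, username, command, time "
           "FROM users JOIN shell_history USING (uid) LIMIT 5;")

    output = subprocess.check_output(['osqueryi', '--json', SQL])
    print(json.dumps(json.loads(output), indent=2))

Once the file is synced to the minions, the groups can be exercised end to end with the nebula execution module, e.g. salt '*' nebula.queries day (assuming the nebula.queries entry point from _modules/nebula_osquery.py).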