Commit 24bd9724 authored by Ad Schellevis's avatar Ad Schellevis

(ids) python style fixes (non functional)

parent b870cd79
......@@ -37,9 +37,9 @@ from lib import rule_source_directory
if __name__ == '__main__':
RuleCache = lib.rulecache.RuleCache()
rule_config_fn = ('%s../rules.config'%rule_source_directory)
rule_target_dir = ('%s../opnsense.rules'%rule_source_directory)
rule_yaml_list = ('%s../installed_rules.yaml'%rule_source_directory)
rule_config_fn = ('%s../rules.config' % rule_source_directory)
rule_target_dir = ('%s../opnsense.rules' % rule_source_directory)
rule_yaml_list = ('%s../installed_rules.yaml' % rule_source_directory)
# parse OPNsense rule config
rule_updates = {}
......@@ -59,9 +59,9 @@ if __name__ == '__main__':
# install ruleset
all_installed_files = []
for filename in RuleCache.listLocal():
for filename in RuleCache.list_local():
output_data = []
for rule_info_record in RuleCache.listRules(filename=filename):
for rule_info_record in RuleCache.list_rules(filename=filename):
# default behavior, do not touch rule, only copy to output
rule = rule_info_record['rule']
# change rule if in rule rule updates
......@@ -75,7 +75,7 @@ if __name__ == '__main__':
# generate altered rule
if 'enabled' in rule_updates[rule_info_record['metadata']['sid']]:
if (rule_updates[rule_info_record['metadata']['sid']]['enabled']) == '0':
rule = ('#%s'%rule[i:])
rule = ('#%s' % rule[i:])
else:
rule = rule[i:]
......@@ -83,12 +83,12 @@ if __name__ == '__main__':
# write data to file
all_installed_files.append(filename.split('/')[-1])
open('%s/%s'%(rule_target_dir, filename.split('/')[-1]), 'wb').write('\n'.join(output_data))
open('%s/%s' % (rule_target_dir, filename.split('/')[-1]), 'wb').write('\n'.join(output_data))
# flush all written rule filenames into yaml file
with open(rule_yaml_list,'wb') as f_out:
with open(rule_yaml_list, 'wb') as f_out:
f_out.write('%YAML 1.1\n')
f_out.write('---\n')
f_out.write('rule-files:\n')
for installed_file in all_installed_files:
f_out.write(' - %s\n'%installed_file)
f_out.write(' - %s\n' % installed_file)
......@@ -25,5 +25,5 @@
"""
# define paths used by suricata
rule_source_directory='/usr/local/etc/suricata/rules/'
suricata_alert_log='/var/log/suricata/eve.json'
rule_source_directory = '/usr/local/etc/suricata/rules/'
suricata_alert_log = '/var/log/suricata/eve.json'
......@@ -31,24 +31,25 @@
import syslog
import requests
class Downloader(object):
    """ Simple http(s) downloader used to fetch suricata rule files into a
        target directory. Unsupported protocols are silently ignored by
        download(); use is_supported() to test beforehand.
    """

    def __init__(self, target_dir):
        """ :param target_dir: directory to write downloaded rule files into """
        self._target_dir = target_dir

    def download(self, proto, url):
        """ Fetch a single file over http(s) and store it in the target directory.
        Outcome (success, write failure, download failure) is reported via syslog.
        :param proto: protocol name ('http' or 'https'); anything else is a no-op
        :param url: source url of the file to download
        :return: None
        """
        if proto in ('http', 'https'):
            # collapse accidental double slashes, then restore the '://' after the scheme
            frm_url = url.replace('//', '/').replace(':/', '://')
            req = requests.get(url=frm_url)
            if req.status_code == 200:
                target_filename = ('%s/%s' % (self._target_dir, frm_url.split('/')[-1])).replace('//', '/')
                try:
                    # context manager guarantees the file handle is closed,
                    # also on a partial write (the original leaked the handle)
                    with open(target_filename, 'wb') as f_out:
                        f_out.write(req.text)
                except IOError:
                    syslog.syslog(syslog.LOG_ERR, 'cannot write to %s' % target_filename)
                    return None
                syslog.syslog(syslog.LOG_INFO, 'download completed for %s' % frm_url)
            else:
                syslog.syslog(syslog.LOG_ERR, 'download failed for %s' % frm_url)

    @staticmethod
    def is_supported(proto):
        """ check if protocol is supported by this downloader
        :param proto: protocol name
        :return: True when proto is downloadable (http/https), False otherwise
        """
        if proto in ['http', 'https']:
            return True
        else:
            return False
......@@ -26,14 +26,15 @@
import os
def reverse_log_reader(filename, block_size = 8192, start_pos=None):
def reverse_log_reader(filename, block_size=8192, start_pos=None):
""" read log file in reverse order
:param filename: filename to parse
:param block_size: max block size to examine per loop
:param start_pos: start at position in file (None is end of file)
:return: generator
"""
with open(filename,'rU') as f_in:
with open(filename, 'rU') as f_in:
if start_pos is None:
f_in.seek(0, os.SEEK_END)
file_byte_start = f_in.tell()
......@@ -42,7 +43,7 @@ def reverse_log_reader(filename, block_size = 8192, start_pos=None):
data = ''
while True:
if file_byte_start-block_size < 0:
if file_byte_start - block_size < 0:
block_size = file_byte_start
file_byte_start = 0
else:
......@@ -59,10 +60,10 @@ def reverse_log_reader(filename, block_size = 8192, start_pos=None):
data = data[:eol]
eol = data.rfind('\n')
# field line and position in file
yield {'line':line.strip(),'pos':line_end}
yield {'line': line.strip(), 'pos': line_end}
if file_byte_start == 0 and eol == -1:
# flush last line
yield {'line':data.strip(),'pos':len(data)}
yield {'line': data.strip(), 'pos': len(data)}
if file_byte_start == 0:
break
......@@ -33,30 +33,31 @@ import syslog
import glob
import xml.etree.ElementTree
class Metadata(object):
def __init__(self):
self._rules_dir = '%s/../metadata/rules/'%(os.path.dirname(os.path.abspath(__file__)))
self._rules_dir = '%s/../metadata/rules/' % (os.path.dirname(os.path.abspath(__file__)))
def list_rules(self):
""" list all available rules
:return: generator method returning all known rulefiles
"""
for filename in sorted(glob.glob('%s*.xml'%self._rules_dir)):
for filename in sorted(glob.glob('%s*.xml' % self._rules_dir)):
try:
ruleXML=xml.etree.ElementTree.fromstring(open(filename).read())
rule_xml = xml.etree.ElementTree.fromstring(open(filename).read())
except xml.etree.ElementTree.ParseError:
# unparseable metadata
syslog.syslog(syslog.LOG_ERR,'suricata metadata unparsable @ %s'%filename)
syslog.syslog(syslog.LOG_ERR, 'suricata metadata unparsable @ %s' % filename)
continue
src_location = ruleXML.find('location')
src_location = rule_xml.find('location')
if src_location is None or 'url' not in src_location.attrib:
syslog.syslog(syslog.LOG_ERR,'suricata metadata missing location @ %s'%filename)
syslog.syslog(syslog.LOG_ERR, 'suricata metadata missing location @ %s' % filename)
else:
if ruleXML.find('files') is None:
syslog.syslog(syslog.LOG_ERR,'suricata metadata missing files @ %s'%filename)
if rule_xml.find('files') is None:
syslog.syslog(syslog.LOG_ERR, 'suricata metadata missing files @ %s' % filename)
else:
for rule_filename in ruleXML.find('files'):
for rule_filename in rule_xml.find('files'):
metadata_record = dict()
metadata_record['source'] = src_location.attrib
metadata_record['filename'] = rule_filename.text.strip()
......
......@@ -26,7 +26,8 @@
import sys
def update_params(parameters):
    """ update predefined parameters with given list from shell (as switches)
    for example /a valA /b valB
    converts to {'a': 'valA', 'b': 'valB'}
    Only keys already present in the dictionary are updated; empty values
    leave the existing entry untouched.
    :param parameters: parameter dictionary, updated in place
    :return: None
    """
    pending_key = None
    for token in sys.argv[1:]:
        if pending_key is None:
            # switch token: strip leading '/' to obtain the parameter name
            pending_key = token[1:]
        else:
            # value token: only accept known keys and non-empty values
            value = token.strip()
            if pending_key in parameters and value != '':
                parameters[pending_key] = value
            pending_key = None
......@@ -40,23 +40,23 @@ from lib.log import reverse_log_reader
if __name__ == '__main__':
result = []
for filename in sorted(glob.glob('%s*'%suricata_alert_log)):
for filename in sorted(glob.glob('%s*' % suricata_alert_log)):
row = dict()
row['size'] = os.stat(filename).st_size
# always list first file and non empty next.
if row['size'] > 0 or filename.split('/')[-1].count('.') == 1:
if row['size'] > 0 or filename.split('/')[-1].count('.') == 1:
row['modified'] = os.stat(filename).st_mtime
row['filename'] = filename.split('/')[-1]
# try to find actual timestamp from file
for line in reverse_log_reader(filename=filename):
if line['line'] != '':
record = ujson.loads(line['line'])
if record.has_key('timestamp'):
row['modified'] = int(time.mktime(datetime.datetime.strptime(record['timestamp'].split('.')[0], "%Y-%m-%dT%H:%M:%S").timetuple()))
if 'timestamp' in record:
row['modified'] = int(time.mktime(datetime.datetime.strptime(record['timestamp'].split('.')[0],
"%Y-%m-%dT%H:%M:%S").timetuple()))
break
ext=filename.split('.')[-1]
ext = filename.split('.')[-1]
if ext.isdigit():
row['sequence'] = int(ext)
else:
......
......@@ -35,10 +35,10 @@ from lib.rulecache import RuleCache
if __name__ == '__main__':
rc = RuleCache()
if rc.isChanged():
if rc.is_changed():
rc.create()
items=rc.listClassTypes()
result = {'items': items, 'count':len(items)}
items = rc.list_class_types()
result = {'items': items, 'count': len(items)}
print (ujson.dumps(result))
......@@ -43,10 +43,10 @@ if __name__ == '__main__':
items = dict()
for rule in md.list_rules():
items[rule['filename']] = rule
rule_filename = ('%s/%s'%(rule_source_directory, rule['filename'])).replace('//', '/')
rule_filename = ('%s/%s' % (rule_source_directory, rule['filename'])).replace('//', '/')
if os.path.exists(rule_filename):
items[rule['filename']]['modified_local'] = os.stat(rule_filename).st_mtime
else:
items[rule['filename']]['modified_local'] = None
result = {'items': items, 'count':len(items)}
result = {'items': items, 'count': len(items)}
print (ujson.dumps(result))
......@@ -36,17 +36,17 @@ import sre_constants
import shlex
import ujson
from lib.log import reverse_log_reader
from lib.params import updateParams
from lib.params import update_params
from lib import suricata_alert_log
if __name__ == '__main__':
# handle parameters
parameters = {'limit':'0','offset':'0', 'filter':'','fileid':''}
updateParams(parameters)
parameters = {'limit': '0', 'offset': '0', 'filter': '', 'fileid': ''}
update_params(parameters)
# choose logfile by number
if parameters['fileid'].isdigit():
suricata_log = '%s.%d'%(suricata_alert_log,int(parameters['fileid']))
suricata_log = '%s.%d' % (suricata_alert_log, int(parameters['fileid']))
else:
suricata_log = suricata_alert_log
......@@ -60,13 +60,12 @@ if __name__ == '__main__':
else:
offset = 0
data_filters = {}
data_filters_comp = {}
for filter in shlex.split(parameters['filter']):
filterField = filter.split('/')[0]
if filter.find('/') > -1:
data_filters[filterField] = '/'.join(filter.split('/')[1:])
for filter_txt in shlex.split(parameters['filter']):
filterField = filter_txt.split('/')[0]
if filter_txt.find('/') > -1:
data_filters[filterField] = '/'.join(filter_txt.split('/')[1:])
filter_regexp = data_filters[filterField]
filter_regexp = filter_regexp.replace('*', '.*')
filter_regexp = filter_regexp.lower()
......@@ -74,7 +73,7 @@ if __name__ == '__main__':
data_filters_comp[filterField] = re.compile(filter_regexp)
except sre_constants.error:
# remove illegal expression
#del data_filters[filterField]
# del data_filters[filterField]
data_filters_comp[filterField] = re.compile('.*')
# filter one specific log line
......@@ -84,7 +83,7 @@ if __name__ == '__main__':
log_start_pos = None
# query suricata eve log
result = {'filters':data_filters,'rows':[],'total_rows':0,'origin':suricata_log.split('/')[-1]}
result = {'filters': data_filters, 'rows': [], 'total_rows': 0, 'origin': suricata_log.split('/')[-1]}
if os.path.exists(suricata_log):
for line in reverse_log_reader(filename=suricata_log, start_pos=log_start_pos):
try:
......@@ -107,7 +106,8 @@ if __name__ == '__main__':
for filterKeys in data_filters:
filter_hit = False
for filterKey in filterKeys.split(','):
if record.has_key(filterKey) and data_filters_comp[filterKeys].match(('%s'%record[filterKey]).lower()):
if filterKey in record and data_filters_comp[filterKeys].match(
('%s' % record[filterKey]).lower()):
filter_hit = True
if not filter_hit:
......@@ -121,7 +121,7 @@ if __name__ == '__main__':
break
# only try to fetch one line when filepos is given
if log_start_pos != None:
if log_start_pos is not None:
break
# output results
......
......@@ -35,21 +35,24 @@
import ujson
from lib.rulecache import RuleCache
from lib.params import updateParams
from lib.params import update_params
# Because rule parsing isn't very useful when the rule definitions didn't change we create a single json file
# to hold the last results (combined with creation date and number of files).
if __name__ == '__main__':
rc = RuleCache()
if rc.isChanged():
if rc.is_changed():
rc.create()
# load parameters, ignore validation here the search method only processes valid input
parameters = {'limit':'0','offset':'0','sort_by':'', 'filter':''}
updateParams(parameters)
parameters = {'limit': '0', 'offset': '0', 'sort_by': '', 'filter': ''}
update_params(parameters)
# rename, filter tag to filter_txt
parameters['filter_txt'] = parameters['filter']
del parameters['filter']
# dump output
result=rc.search(**parameters)
result = rc.search(**parameters)
result['parameters'] = parameters
print (ujson.dumps(result))
......@@ -40,7 +40,7 @@ from lib import rule_source_directory
# check for a running update process, this may take a while so it's better to check...
try:
lck = open('/tmp/suricata-rule-updater.py','w+')
lck = open('/tmp/suricata-rule-updater.py', 'w+')
fcntl.flock(lck, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
# already running, exit status 99
......@@ -48,13 +48,13 @@ except IOError:
if __name__ == '__main__':
# load list of configured rules from generated config
enabled_rulefiles=[]
updater_conf='/usr/local/etc/suricata/rule-updater.config'
enabled_rulefiles = []
updater_conf = '/usr/local/etc/suricata/rule-updater.config'
if os.path.exists(updater_conf):
cnf = ConfigParser()
cnf.read(updater_conf)
for section in cnf.sections():
if cnf.has_option(section,'enabled') and cnf.getint(section,'enabled') == 1:
if cnf.has_option(section, 'enabled') and cnf.getint(section, 'enabled') == 1:
enabled_rulefiles.append(section.strip())
# download / remove rules
......@@ -62,14 +62,14 @@ if __name__ == '__main__':
dl = downloader.Downloader(target_dir=rule_source_directory)
for rule in md.list_rules():
if 'url' in rule['source']:
download_proto=str(rule['source']['url']).split(':')[0].lower()
download_proto = str(rule['source']['url']).split(':')[0].lower()
if dl.is_supported(download_proto):
if rule['filename'] not in enabled_rulefiles:
try:
# remove configurable but unselected file
os.remove(('%s/%s'%(rule_source_directory, rule['filename'])).replace('//', '/'))
except:
os.remove(('%s/%s' % (rule_source_directory, rule['filename'])).replace('//', '/'))
except OSError:
pass
else:
url = ('%s/%s'%(rule['source']['url'],rule['filename']))
url = ('%s/%s' % (rule['source']['url'], rule['filename']))
dl.download(proto=download_proto, url=url)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment