Commit 24bd9724 authored by Ad Schellevis

(ids) python style fixes (non functional)

parent b870cd79
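The hunks below are a purely cosmetic PEP 8 pass over the IDS helper scripts: spaces around the `%` string-formatting operator and after commas, snake_case names (listLocal → list_local, listRules → list_rules, updateParams → update_params, isChanged → is_changed, listClassTypes → list_class_types), uppercased SQL keywords, `is not None` instead of `!= None`, and `'key' in record` instead of the removed-in-Python-3 `dict.has_key()`. A minimal before/after sketch of the dominant pattern (illustrative lines, not taken from the diff itself):

# before: no whitespace around the formatting operator, redundant parentheses
target = '%s/%s'%(base_dir,name)
# after: PEP 8 spacing
target = '%s/%s' % (base_dir, name)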
@@ -37,9 +37,9 @@ from lib import rule_source_directory
 if __name__ == '__main__':
     RuleCache = lib.rulecache.RuleCache()
-    rule_config_fn = ('%s../rules.config'%rule_source_directory)
-    rule_target_dir = ('%s../opnsense.rules'%rule_source_directory)
-    rule_yaml_list = ('%s../installed_rules.yaml'%rule_source_directory)
+    rule_config_fn = ('%s../rules.config' % rule_source_directory)
+    rule_target_dir = ('%s../opnsense.rules' % rule_source_directory)
+    rule_yaml_list = ('%s../installed_rules.yaml' % rule_source_directory)

     # parse OPNsense rule config
     rule_updates = {}
@@ -59,9 +59,9 @@ if __name__ == '__main__':
     # install ruleset
     all_installed_files = []
-    for filename in RuleCache.listLocal():
+    for filename in RuleCache.list_local():
         output_data = []
-        for rule_info_record in RuleCache.listRules(filename=filename):
+        for rule_info_record in RuleCache.list_rules(filename=filename):
             # default behavior, do not touch rule, only copy to output
             rule = rule_info_record['rule']
             # change rule if in rule rule updates
@@ -75,7 +75,7 @@ if __name__ == '__main__':
                 # generate altered rule
                 if 'enabled' in rule_updates[rule_info_record['metadata']['sid']]:
                     if (rule_updates[rule_info_record['metadata']['sid']]['enabled']) == '0':
-                        rule = ('#%s'%rule[i:])
+                        rule = ('#%s' % rule[i:])
                     else:
                         rule = rule[i:]
@@ -83,12 +83,12 @@ if __name__ == '__main__':
         # write data to file
         all_installed_files.append(filename.split('/')[-1])
-        open('%s/%s'%(rule_target_dir, filename.split('/')[-1]), 'wb').write('\n'.join(output_data))
+        open('%s/%s' % (rule_target_dir, filename.split('/')[-1]), 'wb').write('\n'.join(output_data))

     # flush all written rule filenames into yaml file
-    with open(rule_yaml_list,'wb') as f_out:
+    with open(rule_yaml_list, 'wb') as f_out:
         f_out.write('%YAML 1.1\n')
         f_out.write('---\n')
         f_out.write('rule-files:\n')
         for installed_file in all_installed_files:
-            f_out.write(' - %s\n'%installed_file)
+            f_out.write(' - %s\n' % installed_file)
@@ -25,5 +25,5 @@
 """
 # define paths used by suricata
-rule_source_directory='/usr/local/etc/suricata/rules/'
-suricata_alert_log='/var/log/suricata/eve.json'
+rule_source_directory = '/usr/local/etc/suricata/rules/'
+suricata_alert_log = '/var/log/suricata/eve.json'
@@ -31,24 +31,25 @@
 import syslog
 import requests

 class Downloader(object):
     def __init__(self, target_dir):
         self._target_dir = target_dir

     def download(self, proto, url):
-        if proto in ('http','https'):
-            frm_url = url.replace('//','/').replace(':/','://')
+        if proto in ('http', 'https'):
+            frm_url = url.replace('//', '/').replace(':/', '://')
             req = requests.get(url=frm_url)
             if req.status_code == 200:
-                target_filename = ('%s/%s'%(self._target_dir, frm_url.split('/')[-1])).replace('//','/')
+                target_filename = ('%s/%s' % (self._target_dir, frm_url.split('/')[-1])).replace('//', '/')
                 try:
-                    open(target_filename,'wb').write(req.text)
+                    open(target_filename, 'wb').write(req.text)
                 except IOError:
-                    syslog.syslog(syslog.LOG_ERR, 'cannot write to %s'%(target_filename))
+                    syslog.syslog(syslog.LOG_ERR, 'cannot write to %s' % target_filename)
                     return None
-                syslog.syslog(syslog.LOG_INFO, 'download completed for %s'%(frm_url))
+                syslog.syslog(syslog.LOG_INFO, 'download completed for %s' % frm_url)
             else:
-                syslog.syslog(syslog.LOG_ERR, 'download failed for %s'%(frm_url))
+                syslog.syslog(syslog.LOG_ERR, 'download failed for %s' % frm_url)

     @staticmethod
     def is_supported(proto):
@@ -56,7 +57,7 @@ class Downloader(object):
         :param proto:
         :return:
         """
-        if proto in ['http','https']:
+        if proto in ['http', 'https']:
             return True
         else:
             return False
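For context, a hedged usage sketch of the Downloader class changed above; the target directory matches rule_source_directory from lib/__init__.py, while the URL is purely illustrative:

from lib import downloader

dl = downloader.Downloader(target_dir='/usr/local/etc/suricata/rules/')
proto = 'https'
if dl.is_supported(proto):
    # fetches the file and writes it into target_dir under its URL basename,
    # logging success or failure to syslog
    dl.download(proto=proto, url='https://example.org/rules/emerging-dns.rules')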
@@ -26,14 +26,15 @@
 import os

-def reverse_log_reader(filename, block_size = 8192, start_pos=None):
+def reverse_log_reader(filename, block_size=8192, start_pos=None):
     """ read log file in reverse order
     :param filename: filename to parse
     :param block_size: max block size to examine per loop
     :param start_pos: start at position in file (None is end of file)
     :return: generator
     """
-    with open(filename,'rU') as f_in:
+    with open(filename, 'rU') as f_in:
         if start_pos is None:
             f_in.seek(0, os.SEEK_END)
         file_byte_start = f_in.tell()
@@ -42,7 +43,7 @@ def reverse_log_reader(filename, block_size = 8192, start_pos=None):
         data = ''
         while True:
-            if file_byte_start-block_size < 0:
+            if file_byte_start - block_size < 0:
                 block_size = file_byte_start
                 file_byte_start = 0
             else:
@@ -59,10 +60,10 @@ def reverse_log_reader(filename, block_size = 8192, start_pos=None):
                 data = data[:eol]
                 eol = data.rfind('\n')
                 # field line and position in file
-                yield {'line':line.strip(),'pos':line_end}
+                yield {'line': line.strip(), 'pos': line_end}

             if file_byte_start == 0 and eol == -1:
                 # flush last line
-                yield {'line':data.strip(),'pos':len(data)}
+                yield {'line': data.strip(), 'pos': len(data)}

             if file_byte_start == 0:
                 break
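The reverse_log_reader() generator above yields one dict per line, newest line first, carrying the line text and its position in the file; a small sketch, assuming the eve log path from lib/__init__.py exists:

from lib.log import reverse_log_reader

# print the last three non-empty lines of the suricata eve log;
# item['pos'] holds the byte position usable as start_pos on a later call
shown = 0
for item in reverse_log_reader(filename='/var/log/suricata/eve.json'):
    if item['line'] != '':
        print(item['line'])
        shown += 1
        if shown == 3:
            break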
@@ -33,30 +33,31 @@ import syslog
 import glob
 import xml.etree.ElementTree

 class Metadata(object):
     def __init__(self):
-        self._rules_dir = '%s/../metadata/rules/'%(os.path.dirname(os.path.abspath(__file__)))
+        self._rules_dir = '%s/../metadata/rules/' % (os.path.dirname(os.path.abspath(__file__)))

     def list_rules(self):
         """ list all available rules
         :return: generator method returning all known rulefiles
         """
-        for filename in sorted(glob.glob('%s*.xml'%self._rules_dir)):
+        for filename in sorted(glob.glob('%s*.xml' % self._rules_dir)):
             try:
-                ruleXML=xml.etree.ElementTree.fromstring(open(filename).read())
+                rule_xml = xml.etree.ElementTree.fromstring(open(filename).read())
             except xml.etree.ElementTree.ParseError:
                 # unparseable metadata
-                syslog.syslog(syslog.LOG_ERR,'suricata metadata unparsable @ %s'%filename)
+                syslog.syslog(syslog.LOG_ERR, 'suricata metadata unparsable @ %s' % filename)
                 continue
-            src_location = ruleXML.find('location')
+            src_location = rule_xml.find('location')
             if src_location is None or 'url' not in src_location.attrib:
-                syslog.syslog(syslog.LOG_ERR,'suricata metadata missing location @ %s'%filename)
+                syslog.syslog(syslog.LOG_ERR, 'suricata metadata missing location @ %s' % filename)
             else:
-                if ruleXML.find('files') is None:
-                    syslog.syslog(syslog.LOG_ERR,'suricata metadata missing files @ %s'%filename)
+                if rule_xml.find('files') is None:
+                    syslog.syslog(syslog.LOG_ERR, 'suricata metadata missing files @ %s' % filename)
                 else:
-                    for rule_filename in ruleXML.find('files'):
+                    for rule_filename in rule_xml.find('files'):
                         metadata_record = dict()
                         metadata_record['source'] = src_location.attrib
                         metadata_record['filename'] = rule_filename.text.strip()
@@ -26,7 +26,8 @@
 import sys

-def updateParams(parameters):
+def update_params(parameters):
     """ update predefined parameters with given list from shell (as switches)
     for example /a valA /b valB
     converts to
@@ -35,11 +36,11 @@ def updateParams(parameters):
     :param parameters: parameter dictionary
     :return:
     """
-    cmd=None
+    cmd = None
    for arg in sys.argv[1:]:
         if cmd is None:
-            cmd=arg[1:]
+            cmd = arg[1:]
         else:
             if cmd in parameters and arg.strip() != '':
                 parameters[cmd] = arg.strip()
-            cmd=None
+            cmd = None
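As the docstring describes, update_params() fills a predefined dict from /switch value pairs on the command line, ignoring unknown switches; a minimal sketch of the calling convention the scripts below rely on:

from lib.params import update_params

# suppose the script was started as: script.py /limit 25 /filter dns
parameters = {'limit': '0', 'offset': '0', 'filter': ''}
update_params(parameters)
# parameters is now {'limit': '25', 'offset': '0', 'filter': 'dns'}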
@@ -39,50 +39,52 @@ from lib import rule_source_directory

 class RuleCache(object):
     """
     """
     def __init__(self):
         # suricata rule settings, source directory and cache json file to use
-        self.cachefile = '%srules.sqlite'%rule_source_directory
-        self._rule_fields = ['sid','msg','classtype','rev','gid','source','enabled','reference']
-        self._rule_defaults = {'classtype':'##none##'}
+        self.cachefile = '%srules.sqlite' % rule_source_directory
+        self._rule_fields = ['sid', 'msg', 'classtype', 'rev', 'gid', 'source', 'enabled', 'reference']
+        self._rule_defaults = {'classtype': '##none##'}

-    def listLocal(self):
-        all_rule_files=[]
-        for filename in glob.glob('%s*.rules'%(rule_source_directory)):
+    @staticmethod
+    def list_local():
+        all_rule_files = []
+        for filename in glob.glob('%s*.rules' % rule_source_directory):
             all_rule_files.append(filename)
         return all_rule_files

-    def listRules(self, filename):
+    def list_rules(self, filename):
         """ generator function to list rule file content including metadata
         :param filename:
         :return:
         """
         data = open(filename)
         for rule in data.read().split('\n'):
-            rule_info_record = {'rule':rule, 'metadata':None}
+            rule_info_record = {'rule': rule, 'metadata': None}
             if rule.find('msg:') != -1:
                 # define basic record
-                record = {'enabled':True, 'source':filename.split('/')[-1]}
-                if rule.strip()[0] =='#':
+                record = {'enabled': True, 'source': filename.split('/')[-1]}
+                if rule.strip()[0] == '#':
                     record['enabled'] = False

                 rule_metadata = rule[rule.find('msg:'):-1]
                 for field in rule_metadata.split(';'):
-                    fieldName = field[0:field.find(':')].strip()
-                    fieldContent = field[field.find(':')+1:].strip()
-                    if fieldName in self._rule_fields:
-                        if fieldContent[0] == '"':
-                            content = fieldContent[1:-1]
+                    fieldname = field[0:field.find(':')].strip()
+                    fieldcontent = field[field.find(':') + 1:].strip()
+                    if fieldname in self._rule_fields:
+                        if fieldcontent[0] == '"':
+                            content = fieldcontent[1:-1]
                         else:
-                            content = fieldContent
+                            content = fieldcontent

-                        if fieldName in record:
+                        if fieldname in record:
                             # if same field repeats, put items in list
-                            if type(record[fieldName]) != list:
-                                record[fieldName] = [record[fieldName]]
-                            record[fieldName].append(content)
+                            if type(record[fieldname]) != list:
+                                record[fieldname] = [record[fieldname]]
+                            record[fieldname].append(content)
                         else:
-                            record[fieldName] = content
+                            record[fieldname] = content

                 for rule_field in self._rule_fields:
                     if rule_field not in record:
@@ -100,13 +102,13 @@ class RuleCache(object):
             yield rule_info_record

-    def isChanged(self):
+    def is_changed(self):
         """ check if rules on disk are probably different from rules in cache
         :return: boolean
         """
         if os.path.exists(self.cachefile):
             last_mtime = 0
-            all_rule_files = self.listLocal()
+            all_rule_files = self.list_local()
             for filename in all_rule_files:
                 file_mtime = os.stat(filename).st_mtime
                 if file_mtime > last_mtime:
@@ -115,7 +117,7 @@ class RuleCache(object):
             try:
                 db = sqlite3.connect(self.cachefile)
                 cur = db.cursor()
-                cur.execute('select max(timestamp), max(files) from stats')
+                cur.execute('SELECT max(timestamp), max(files) FROM stats')
                 results = cur.fetchall()
                 if last_mtime == results[0][0] and len(all_rule_files) == results[0][1]:
                     return False
@@ -133,37 +135,37 @@ class RuleCache(object):
         db = sqlite3.connect(self.cachefile)
         cur = db.cursor()
-        cur.execute('create table stats (timestamp number, files number)')
-        cur.execute("""create table rules (sid number, msg text, classtype text,
-                                           rev integer, gid integer,reference text,
-                                           enabled boolean,source text)""")
-        last_mtime=0
-        all_rule_files = self.listLocal()
+        cur.execute('CREATE TABLE stats (timestamp number, files number)')
+        cur.execute("""CREATE TABLE rules (sid number, msg TEXT, classtype TEXT,
+                                           rev INTEGER, gid INTEGER,reference TEXT,
+                                           enabled BOOLEAN,source TEXT)""")
+        last_mtime = 0
+        all_rule_files = self.list_local()
         for filename in all_rule_files:
             file_mtime = os.stat(filename).st_mtime
             if file_mtime > last_mtime:
                 last_mtime = file_mtime
             rules = []
-            for rule_info_record in self.listRules(filename=filename):
+            for rule_info_record in self.list_rules(filename=filename):
                 if rule_info_record['metadata'] is not None:
                     rules.append(rule_info_record['metadata'])

             cur.executemany('insert into rules(%(fieldnames)s) '
-                            'values (%(fieldvalues)s)'%{'fieldnames':(','.join(self._rule_fields)),
-                                                        'fieldvalues':':'+(',:'.join(self._rule_fields))}, rules)
-        cur.execute('insert into stats (timestamp,files) values (?,?) ',(last_mtime,len(all_rule_files)))
+                            'values (%(fieldvalues)s)' % {'fieldnames': (','.join(self._rule_fields)),
+                                                          'fieldvalues': ':' + (',:'.join(self._rule_fields))}, rules)
+        cur.execute('INSERT INTO stats (timestamp,files) VALUES (?,?) ', (last_mtime, len(all_rule_files)))
         db.commit()

-    def search(self, limit, offset, filter, sort_by):
+    def search(self, limit, offset, filter_txt, sort_by):
         """ search installed rules
         :param limit: limit number of rows
         :param offset: limit offset
-        :param filter: text to search, used format fieldname1,fieldname2/searchphrase include % to match on a part
-        :param sort: order by, list of fields and possible asc/desc parameter
+        :param filter_txt: text to search, used format fieldname1,fieldname2/searchphrase include % to match on a part
+        :param sort_by: order by, list of fields and possible asc/desc parameter
         :return: dict
         """
-        result = {'rows':[]}
+        result = {'rows': []}
         if os.path.exists(self.cachefile):
             db = sqlite3.connect(self.cachefile)
             cur = db.cursor()
@@ -172,7 +174,7 @@ class RuleCache(object):
             sql = 'select * from rules '
             sql_filters = {}
-            for filtertag in shlex.split(filter):
+            for filtertag in shlex.split(filter_txt):
                 fieldnames = filtertag.split('/')[0]
                 searchcontent = '/'.join(filtertag.split('/')[1:])
                 if len(sql_filters) > 0:
@@ -184,9 +186,9 @@ class RuleCache(object):
                     if fieldname != fieldnames.split(',')[0].strip():
                         sql += ' or '
                     if searchcontent.find('*') == -1:
-                        sql += 'cast('+fieldname + " as text) like :"+fieldname+" "
+                        sql += 'cast(' + fieldname + " as text) like :" + fieldname + " "
                     else:
-                        sql += 'cast('+fieldname + " as text) like '%'|| :"+fieldname+" || '%' "
+                        sql += 'cast(' + fieldname + " as text) like '%'|| :" + fieldname + " || '%' "
                     sql_filters[fieldname] = searchcontent.replace('*', '')
                 else:
                     # not a valid fieldname, add a tag to make sure our sql statement is valid
@@ -194,28 +196,28 @@ class RuleCache(object):
             sql += ' ) '

             # apply sort order (if any)
-            sql_sort =[]
+            sql_sort = []
             for sortField in sort_by.split(','):
                 if sortField.split(' ')[0] in self._rule_fields:
                     if sortField.split(' ')[-1].lower() == 'desc':
-                        sql_sort.append('%s desc'%sortField.split()[0])
+                        sql_sort.append('%s desc' % sortField.split()[0])
                     else:
-                        sql_sort.append('%s asc'%sortField.split()[0])
+                        sql_sort.append('%s asc' % sortField.split()[0])

             # count total number of rows
-            cur.execute('select count(*) from (%s) a'%sql, sql_filters)
+            cur.execute('select count(*) from (%s) a' % sql, sql_filters)
             result['total_rows'] = cur.fetchall()[0][0]

             if len(sql_sort) > 0:
-                sql += ' order by %s'%(','.join(sql_sort))
+                sql += ' order by %s' % (','.join(sql_sort))

             if str(limit) != '0' and str(limit).isdigit():
-                sql += ' limit %s'%(limit)
+                sql += ' limit %s' % limit

             if str(offset) != '0' and str(offset).isdigit():
-                sql += ' offset %s'%(offset)
+                sql += ' offset %s' % offset

             # fetch results
-            cur.execute(sql,sql_filters)
+            cur.execute(sql, sql_filters)
             while True:
                 row = cur.fetchone()
                 if row is None:
@@ -228,7 +230,7 @@ class RuleCache(object):
         return result

-    def listClassTypes(self):
+    def list_class_types(self):
         """
         :return: list of installed classtypes
         """
@@ -236,7 +238,7 @@ class RuleCache(object):
         if os.path.exists(self.cachefile):
             db = sqlite3.connect(self.cachefile)
             cur = db.cursor()
-            cur.execute('select distinct classtype from rules')
+            cur.execute('SELECT DISTINCT classtype FROM rules')
             for record in cur.fetchall():
                 result.append(record[0])
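Taken together, the renamed RuleCache methods are used roughly as follows; a hedged sketch in which the limit and filter values are illustrative (filter_txt follows the documented fieldname/searchphrase format, with * for partial matches):

from lib.rulecache import RuleCache

rc = RuleCache()
if rc.is_changed():
    rc.create()  # (re)build the sqlite cache from the .rules files on disk
# first 20 rules whose msg mentions dns, ordered by sid
result = rc.search(limit='20', offset='0', filter_txt='msg/*dns*', sort_by='sid')
print(result['total_rows'])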
@@ -40,23 +40,23 @@ from lib.log import reverse_log_reader
 if __name__ == '__main__':
     result = []
-    for filename in sorted(glob.glob('%s*'%suricata_alert_log)):
+    for filename in sorted(glob.glob('%s*' % suricata_alert_log)):
         row = dict()
         row['size'] = os.stat(filename).st_size
         # always list first file and non empty next.
-        if row['size'] > 0 or filename.split('/')[-1].count('.') == 1:
+        if row['size'] > 0 or filename.split('/')[-1].count('.') == 1:
             row['modified'] = os.stat(filename).st_mtime
             row['filename'] = filename.split('/')[-1]
             # try to find actual timestamp from file
             for line in reverse_log_reader(filename=filename):
                 if line['line'] != '':
                     record = ujson.loads(line['line'])
-                    if record.has_key('timestamp'):
-                        row['modified'] = int(time.mktime(datetime.datetime.strptime(record['timestamp'].split('.')[0], "%Y-%m-%dT%H:%M:%S").timetuple()))
+                    if 'timestamp' in record:
+                        row['modified'] = int(time.mktime(datetime.datetime.strptime(record['timestamp'].split('.')[0],
+                                                                                     "%Y-%m-%dT%H:%M:%S").timetuple()))
                     break
-        ext=filename.split('.')[-1]
+        ext = filename.split('.')[-1]
         if ext.isdigit():
             row['sequence'] = int(ext)
         else:
@@ -35,10 +35,10 @@ from lib.rulecache import RuleCache
 if __name__ == '__main__':
     rc = RuleCache()
-    if rc.isChanged():
+    if rc.is_changed():
         rc.create()
-    items=rc.listClassTypes()
-    result = {'items': items, 'count':len(items)}
+    items = rc.list_class_types()
+    result = {'items': items, 'count': len(items)}
     print (ujson.dumps(result))
@@ -43,10 +43,10 @@ if __name__ == '__main__':
     items = dict()
     for rule in md.list_rules():
         items[rule['filename']] = rule
-        rule_filename = ('%s/%s'%(rule_source_directory, rule['filename'])).replace('//', '/')
+        rule_filename = ('%s/%s' % (rule_source_directory, rule['filename'])).replace('//', '/')
         if os.path.exists(rule_filename):
             items[rule['filename']]['modified_local'] = os.stat(rule_filename).st_mtime
         else:
             items[rule['filename']]['modified_local'] = None
-    result = {'items': items, 'count':len(items)}
+    result = {'items': items, 'count': len(items)}
     print (ujson.dumps(result))
@@ -36,17 +36,17 @@ import sre_constants
 import shlex
 import ujson
 from lib.log import reverse_log_reader
-from lib.params import updateParams
+from lib.params import update_params
 from lib import suricata_alert_log

 if __name__ == '__main__':
     # handle parameters
-    parameters = {'limit':'0','offset':'0', 'filter':'','fileid':''}
-    updateParams(parameters)
+    parameters = {'limit': '0', 'offset': '0', 'filter': '', 'fileid': ''}
+    update_params(parameters)

     # choose logfile by number
     if parameters['fileid'].isdigit():
-        suricata_log = '%s.%d'%(suricata_alert_log,int(parameters['fileid']))
+        suricata_log = '%s.%d' % (suricata_alert_log, int(parameters['fileid']))
     else:
         suricata_log = suricata_alert_log
@@ -60,13 +60,12 @@ if __name__ == '__main__':
     else:
         offset = 0

     data_filters = {}
     data_filters_comp = {}
-    for filter in shlex.split(parameters['filter']):
-        filterField = filter.split('/')[0]
-        if filter.find('/') > -1:
-            data_filters[filterField] = '/'.join(filter.split('/')[1:])
+    for filter_txt in shlex.split(parameters['filter']):
+        filterField = filter_txt.split('/')[0]
+        if filter_txt.find('/') > -1:
+            data_filters[filterField] = '/'.join(filter_txt.split('/')[1:])
             filter_regexp = data_filters[filterField]
             filter_regexp = filter_regexp.replace('*', '.*')
             filter_regexp = filter_regexp.lower()
@@ -74,7 +73,7 @@ if __name__ == '__main__':
                 data_filters_comp[filterField] = re.compile(filter_regexp)
             except sre_constants.error:
                 # remove illegal expression
-                #del data_filters[filterField]
+                # del data_filters[filterField]
                 data_filters_comp[filterField] = re.compile('.*')

     # filter one specific log line
@@ -84,7 +83,7 @@ if __name__ == '__main__':
         log_start_pos = None

     # query suricata eve log
-    result = {'filters':data_filters,'rows':[],'total_rows':0,'origin':suricata_log.split('/')[-1]}
+    result = {'filters': data_filters, 'rows': [], 'total_rows': 0, 'origin': suricata_log.split('/')[-1]}
     if os.path.exists(suricata_log):
         for line in reverse_log_reader(filename=suricata_log, start_pos=log_start_pos):
             try:
@@ -107,7 +106,8 @@ if __name__ == '__main__':
                 for filterKeys in data_filters:
                     filter_hit = False
                     for filterKey in filterKeys.split(','):
-                        if record.has_key(filterKey) and data_filters_comp[filterKeys].match(('%s'%record[filterKey]).lower()):
+                        if filterKey in record and data_filters_comp[filterKeys].match(
+                                ('%s' % record[filterKey]).lower()):
                             filter_hit = True

                 if not filter_hit:
@@ -121,7 +121,7 @@ if __name__ == '__main__':
                     break

             # only try to fetch one line when filepos is given
-            if log_start_pos != None:
+            if log_start_pos is not None:
                 break

     # output results
@@ -35,21 +35,24 @@
 import ujson
 from lib.rulecache import RuleCache
-from lib.params import updateParams
+from lib.params import update_params

 # Because rule parsing isn't very useful when the rule definitions didn't change we create a single json file
 # to hold the last results (combined with creation date and number of files).
 if __name__ == '__main__':
     rc = RuleCache()
-    if rc.isChanged():
+    if rc.is_changed():
         rc.create()

     # load parameters, ignore validation here the search method only processes valid input
-    parameters = {'limit':'0','offset':'0','sort_by':'', 'filter':''}
-    updateParams(parameters)
+    parameters = {'limit': '0', 'offset': '0', 'sort_by': '', 'filter': ''}
+    update_params(parameters)
+
+    # rename, filter tag to filter_txt
+    parameters['filter_txt'] = parameters['filter']
+    del parameters['filter']

     # dump output
-    result=rc.search(**parameters)
+    result = rc.search(**parameters)
     result['parameters'] = parameters
     print (ujson.dumps(result))
@@ -40,7 +40,7 @@ from lib import rule_source_directory

 # check for a running update process, this may take a while so it's better to check...
 try:
-    lck = open('/tmp/suricata-rule-updater.py','w+')
+    lck = open('/tmp/suricata-rule-updater.py', 'w+')
     fcntl.flock(lck, fcntl.LOCK_EX | fcntl.LOCK_NB)
 except IOError:
     # already running, exit status 99
@@ -48,13 +48,13 @@ except IOError:

 if __name__ == '__main__':
     # load list of configured rules from generated config
-    enabled_rulefiles=[]
-    updater_conf='/usr/local/etc/suricata/rule-updater.config'
+    enabled_rulefiles = []
+    updater_conf = '/usr/local/etc/suricata/rule-updater.config'
     if os.path.exists(updater_conf):
         cnf = ConfigParser()
         cnf.read(updater_conf)
         for section in cnf.sections():
-            if cnf.has_option(section,'enabled') and cnf.getint(section,'enabled') == 1:
+            if cnf.has_option(section, 'enabled') and cnf.getint(section, 'enabled') == 1:
                 enabled_rulefiles.append(section.strip())

     # download / remove rules
@@ -62,14 +62,14 @@ if __name__ == '__main__':
     dl = downloader.Downloader(target_dir=rule_source_directory)
     for rule in md.list_rules():
         if 'url' in rule['source']:
-            download_proto=str(rule['source']['url']).split(':')[0].lower()
+            download_proto = str(rule['source']['url']).split(':')[0].lower()
             if dl.is_supported(download_proto):
                 if rule['filename'] not in enabled_rulefiles:
                     try:
                         # remove configurable but unselected file
-                        os.remove(('%s/%s'%(rule_source_directory, rule['filename'])).replace('//', '/'))
-                    except:
+                        os.remove(('%s/%s' % (rule_source_directory, rule['filename'])).replace('//', '/'))
+                    except OSError:
                         pass
                 else:
-                    url = ('%s/%s'%(rule['source']['url'],rule['filename']))
+                    url = ('%s/%s' % (rule['source']['url'], rule['filename']))
                     dl.download(proto=download_proto, url=url)