Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
O
OpnSense
Project
Project
Details
Activity
Releases
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Boards
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Kulya
OpnSense
Commits
2c79de82
Commit
2c79de82
authored
Sep 09, 2015
by
Franco Fichtner
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
scripts: merge suricata/queryAlertLog.py from master
parent
1c4867a9
Changes
1
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
87 additions
and
86 deletions
+87
-86
queryAlertLog.py
src/opnsense/scripts/suricata/queryAlertLog.py
+87
-86
No files found.
src/opnsense/scripts/suricata/queryAlertLog.py
View file @
2c79de82
...
...
@@ -39,89 +39,90 @@ from lib.log import reverse_log_reader
from
lib.params
import
updateParams
from
lib
import
suricata_alert_log
# Query the suricata eve alert log: read it backwards, apply the requested
# field filters, and emit a JSON result set (paged via limit/offset).
# handle parameters: all values arrive as strings from the request layer
parameters = {'limit': '0', 'offset': '0', 'filter': '', 'fileid': ''}
updateParams(parameters)

# choose logfile by number (rotated logs carry a numeric suffix, e.g. ".1")
if parameters['fileid'].isdigit():
    suricata_log = '%s.%d' % (suricata_alert_log, int(parameters['fileid']))
else:
    suricata_log = suricata_alert_log

# non-numeric limit/offset fall back to 0 (0 limit means "no limit")
limit = int(parameters['limit']) if parameters['limit'].isdigit() else 0
offset = int(parameters['offset']) if parameters['offset'].isdigit() else 0

# parse filter expressions of the form "field/pattern"; '*' is a wildcard.
# loop variable renamed so the 'filter' builtin is not shadowed.
data_filters = {}
data_filters_comp = {}
for filter_txt in shlex.split(parameters['filter']):
    filterField = filter_txt.split('/')[0]
    if filter_txt.find('/') > -1:
        data_filters[filterField] = '/'.join(filter_txt.split('/')[1:])
        filter_regexp = data_filters[filterField]
        filter_regexp = filter_regexp.replace('*', '.*')
        filter_regexp = filter_regexp.lower()
        try:
            data_filters_comp[filterField] = re.compile(filter_regexp)
        except sre_constants.error:
            # remove illegal expression
            #del data_filters[filterField]
            data_filters_comp[filterField] = re.compile('.*')

# filter one specific log line
if 'filepos' in data_filters and data_filters['filepos'].isdigit():
    log_start_pos = int(data_filters['filepos'])
else:
    log_start_pos = None

# query suricata eve log
result = {'filters': data_filters, 'rows': [], 'total_rows': 0,
          'origin': suricata_log.split('/')[-1]}
if os.path.exists(suricata_log):
    for line in reverse_log_reader(filename=suricata_log, start_pos=log_start_pos):
        try:
            record = ujson.loads(line['line'])
        except ValueError:
            # can not handle line
            record = {}
        # only process valid alert items
        if 'alert' in record:
            # add position in file
            record['filepos'] = line['pos']
            record['fileid'] = parameters['fileid']
            # flatten structure
            record['alert_sid'] = record['alert']['signature_id']
            record['alert'] = record['alert']['signature']
            # use filters on data (using regular expressions)
            do_output = True
            for filterKeys in data_filters:
                filter_hit = False
                for filterKey in filterKeys.split(','):
                    # 'in' instead of dict.has_key(), which was removed in Python 3
                    if filterKey in record and data_filters_comp[filterKeys].match(
                            ('%s' % record[filterKey]).lower()):
                        filter_hit = True
                if not filter_hit:
                    do_output = False

            if do_output:
                result['total_rows'] += 1
                if (len(result['rows']) < limit or limit == 0) and result['total_rows'] >= offset:
                    result['rows'].append(record)
                elif result['total_rows'] > offset + limit:
                    # do not fetch data until end of file...
                    break
        # only try to fetch one line when filepos is given
        if log_start_pos is not None:
            break
# output results
print(ujson.dumps(result))
if __name__ == '__main__':
    # Query the suricata eve alert log: read it backwards, apply the requested
    # field filters, and emit a JSON result set (paged via limit/offset).
    # handle parameters: all values arrive as strings from the request layer
    parameters = {'limit': '0', 'offset': '0', 'filter': '', 'fileid': ''}
    updateParams(parameters)

    # choose logfile by number (rotated logs carry a numeric suffix, e.g. ".1")
    if parameters['fileid'].isdigit():
        suricata_log = '%s.%d' % (suricata_alert_log, int(parameters['fileid']))
    else:
        suricata_log = suricata_alert_log

    # non-numeric limit/offset fall back to 0 (0 limit means "no limit")
    limit = int(parameters['limit']) if parameters['limit'].isdigit() else 0
    offset = int(parameters['offset']) if parameters['offset'].isdigit() else 0

    # parse filter expressions of the form "field/pattern"; '*' is a wildcard.
    # loop variable renamed so the 'filter' builtin is not shadowed.
    data_filters = {}
    data_filters_comp = {}
    for filter_txt in shlex.split(parameters['filter']):
        filterField = filter_txt.split('/')[0]
        if filter_txt.find('/') > -1:
            data_filters[filterField] = '/'.join(filter_txt.split('/')[1:])
            filter_regexp = data_filters[filterField]
            filter_regexp = filter_regexp.replace('*', '.*')
            filter_regexp = filter_regexp.lower()
            try:
                data_filters_comp[filterField] = re.compile(filter_regexp)
            except sre_constants.error:
                # remove illegal expression
                #del data_filters[filterField]
                data_filters_comp[filterField] = re.compile('.*')

    # filter one specific log line
    if 'filepos' in data_filters and data_filters['filepos'].isdigit():
        log_start_pos = int(data_filters['filepos'])
    else:
        log_start_pos = None

    # query suricata eve log
    result = {'filters': data_filters, 'rows': [], 'total_rows': 0,
              'origin': suricata_log.split('/')[-1]}
    if os.path.exists(suricata_log):
        for line in reverse_log_reader(filename=suricata_log, start_pos=log_start_pos):
            try:
                record = ujson.loads(line['line'])
            except ValueError:
                # can not handle line
                record = {}
            # only process valid alert items
            if 'alert' in record:
                # add position in file
                record['filepos'] = line['pos']
                record['fileid'] = parameters['fileid']
                # flatten structure
                record['alert_sid'] = record['alert']['signature_id']
                record['alert'] = record['alert']['signature']
                # use filters on data (using regular expressions)
                do_output = True
                for filterKeys in data_filters:
                    filter_hit = False
                    for filterKey in filterKeys.split(','):
                        # 'in' instead of dict.has_key(), which was removed in Python 3
                        if filterKey in record and data_filters_comp[filterKeys].match(
                                ('%s' % record[filterKey]).lower()):
                            filter_hit = True
                    if not filter_hit:
                        do_output = False

                if do_output:
                    result['total_rows'] += 1
                    if (len(result['rows']) < limit or limit == 0) and result['total_rows'] >= offset:
                        result['rows'].append(record)
                    elif result['total_rows'] > offset + limit:
                        # do not fetch data until end of file...
                        break
            # only try to fetch one line when filepos is given
            if log_start_pos is not None:
                break
    # output results
    print(ujson.dumps(result))
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment