Changed the base module file to be more user-friendly by providing data to the lifecycle hooks as method arguments instead of as less discoverable instance attributes.
@@ -2,3 +2,6 @@ We wanted a very flexible python library with which we could parse any log file
 # Differences with the fully fledged log parsers
 Instead of coming with an incomplete amount of parsers for specific services we wanted a few lifecycle hooks from which to parse arbitrary log files.
 There is no default reporting from this library, for that a few lifecycle hooks are offered for you to extract the accumulated data.
+
+# TODO
+[ ] generic parsers with a flexible parser collection that can be adjusted on the go
@@ -9,34 +9,32 @@ class BaseOrderedLogParser:
     """

     def execute(self):
-        self.retrieve_status()
-        self.parser_init()
+        status = self.retrieve_status()

         context_manager = self.get_context_manager()

         with context_manager(*self.context_manager_args) as file:
-            self.aggregators_init(file)
+            aggregates = self.aggregators_init(file, status)
             line = file.readline()
             new_flag = False

             while (line):
                 # Only check for a new line if we're not at new entries yet
                 if not new_flag:
-                    new_flag = self.new_flag_condition(line)
+                    new_flag = self.new_flag_condition(line, status)

                 # Ignore old entries and only consider new ones
                 if new_flag:
-                    break_condition = self.parse_line(line)
+                    break_condition = self.parse_line(line, aggregates, status)

                     if not break_condition:
                         break

                 line = file.readline()

-            self.aggregators_deinit(file)
+            self.aggregators_deinit(file, aggregates, status)

-        self.update_status()
-        self.parser_deinit()
+        self.update_status(aggregates, status)

     def get_context_manager(self):
         """
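For readability, this is how execute() reads once the change is applied, reassembled from the new side of the hunk above (the diff view drops indentation, so the nesting shown here is inferred): status and aggregates are now explicit return values threaded through every hook instead of living on self.

    def execute(self):
        status = self.retrieve_status()

        context_manager = self.get_context_manager()

        with context_manager(*self.context_manager_args) as file:
            aggregates = self.aggregators_init(file, status)
            line = file.readline()
            new_flag = False

            while (line):
                # Only check for a new line if we're not at new entries yet
                if not new_flag:
                    new_flag = self.new_flag_condition(line, status)

                # Ignore old entries and only consider new ones
                if new_flag:
                    break_condition = self.parse_line(line, aggregates, status)

                    if not break_condition:
                        break

                line = file.readline()

            self.aggregators_deinit(file, aggregates, status)

        self.update_status(aggregates, status)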
@@ -46,27 +44,16 @@ class BaseOrderedLogParser:
         """
         return open

-    def parser_init(self):
+    def aggregators_deinit(self, file, aggregates, status):
         """
-        Extra hook that runs after the status has been retrieved but before
-        the parser starts going through the file
+        Hook that runs right before the context manager closes the file. It
+        comes with the aggregates and the status date so sometimes closing the
+        status of the parser here makes more sense.
         """
         pass

-    def aggregators_deinit(self, file):
+    def update_status(self, aggregates, status):
         """
-        Hook that runs right before the context manager closes the file.
-        """
-        pass
-
-    def update_status(self):
-        """
-        Updates the saved status of the parser
+        Updates the saved status of the parser.
         """
         pass
-
-    def parser_deinit(self):
-        """
-        Extra hook that runs after the parser updated the saved status
-        """
-        pass
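With the attributes gone, a subclass only implements the hooks and works with the values execute() hands it. Below is a minimal sketch under the new signatures: the hook names, their arguments, and the context_manager_args attribute come from the diff above, while the subclass name, import path, log path, status file, and log line format are hypothetical.

import json
import os

from base import BaseOrderedLogParser  # hypothetical import path


class AppErrorParser(BaseOrderedLogParser):
    # Unpacked into the context manager returned by get_context_manager(),
    # which is plain open() by default.
    context_manager_args = ("/var/log/app.log",)

    STATUS_FILE = "/tmp/app_error_parser.json"  # hypothetical persistence

    def retrieve_status(self):
        # The status returned here is threaded through every other hook.
        if os.path.exists(self.STATUS_FILE):
            with open(self.STATUS_FILE) as fh:
                return json.load(fh)
        return {"last_seen": ""}

    def aggregators_init(self, file, status):
        # Fresh accumulators for this run; execute() passes them on to the
        # per-line and teardown hooks.
        return {"errors": 0, "last_seen": status["last_seen"]}

    def new_flag_condition(self, line, status):
        # Entries are ordered, so anything timestamped after the saved status
        # counts as new. Assumes lines like "2024-01-01T00:00:00 LEVEL msg".
        return line.split(" ", 1)[0] > status["last_seen"]

    def parse_line(self, line, aggregates, status):
        timestamp, _, rest = line.partition(" ")
        if rest.startswith("ERROR"):
            aggregates["errors"] += 1
        aggregates["last_seen"] = timestamp
        return True  # a falsy return would break out of the read loop early

    def aggregators_deinit(self, file, aggregates, status):
        # Last chance to inspect the open file alongside the aggregates.
        print(f"new ERROR entries since last run: {aggregates['errors']}")

    def update_status(self, aggregates, status):
        with open(self.STATUS_FILE, "w") as fh:
            json.dump({"last_seen": aggregates["last_seen"]}, fh)


if __name__ == "__main__":
    AppErrorParser().execute()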