diff --git a/README.md b/README.md
index 23c8c9b..a090069 100644
--- a/README.md
+++ b/README.md
@@ -2,3 +2,6 @@ We wanted a very flexible python library with which we could parse any log file
 # Differences with the fully fledged log parsers
 Instead of coming with an incomplete amount of parsers for specific services we wanted a few lifecycle hooks from which to parse arbitrary log files.
 There is no default reporting from this library, for that a few lifecycle hooks are offered for you to extract the accumulated data.
+
+# TODO
+- [ ] Generic parsers with a flexible parser collection that can be adjusted on the go
diff --git a/starfields_logparser/base_parsers.py b/starfields_logparser/base_parsers.py
index 9189593..e2070d2 100644
--- a/starfields_logparser/base_parsers.py
+++ b/starfields_logparser/base_parsers.py
@@ -9,34 +9,32 @@ class BaseOrderedLogParser:
     """
 
     def execute(self):
-        self.retrieve_status()
-        self.parser_init()
+        status = self.retrieve_status()
 
         context_manager = self.get_context_manager()
         with context_manager(*self.context_manager_args) as file:
-            self.aggregators_init(file)
+            aggregates = self.aggregators_init(file, status)
 
             line = file.readline()
             new_flag = False
             while (line):
                 # Only check for a new line if we're not at new entries yet
                 if not new_flag:
-                    new_flag = self.new_flag_condition(line)
+                    new_flag = self.new_flag_condition(line, status)
 
                 # Ignore old entries and only consider new ones
                 if new_flag:
-                    break_condition = self.parse_line(line)
+                    break_condition = self.parse_line(line, aggregates, status)
                     if not break_condition:
                         break
 
                 line = file.readline()
 
-            self.aggregators_deinit(file)
+            self.aggregators_deinit(file, aggregates, status)
 
-        self.update_status()
-        self.parser_deinit()
+        self.update_status(aggregates, status)
 
     def get_context_manager(self):
         """
@@ -46,27 +44,16 @@ class BaseOrderedLogParser:
         """
         return open
 
-    def parser_init(self):
+    def aggregators_deinit(self, file, aggregates, status):
         """
-        Extra hook that runs after the status has been retrieved but before
-        the parser starts going through the file
+        Hook that runs right before the context manager closes the file. It
+        receives the aggregates and the status, so sometimes it makes more
+        sense to save the parser status here instead of in update_status().
         """
         pass
 
-    def aggregators_deinit(self, file):
+    def update_status(self, aggregates, status):
        """
-        Hook that runs right before the context manager closes the file.
-        """
-        pass
-
-    def update_status(self):
-        """
-        Updates the saved status of the parser
-        """
-        pass
-
-    def parser_deinit(self):
-        """
-        Extra hook that runs after the parser updated the saved status
+        Updates the saved status of the parser.
         """
         pass
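
For reference, here is a minimal sketch of a subclass wired against the refactored hook signatures (`retrieve_status`, `aggregators_init`, `new_flag_condition`, `parse_line`, `update_status` and the `context_manager_args` attribute). The `ErrorCountParser` name, the log path, the status file and the ISO-timestamp line format are assumptions made up for illustration; only the hook names and their new parameters come from the diff above.

```python
import json
import os

from starfields_logparser.base_parsers import BaseOrderedLogParser


class ErrorCountParser(BaseOrderedLogParser):
    """Counts ERROR lines that appeared after the last recorded timestamp.

    The log path, status file and line format used here are illustrative only.
    """

    # Arguments passed to the context manager returned by get_context_manager()
    # (the default context manager is the built-in open()).
    context_manager_args = ("/var/log/myapp.log",)  # hypothetical log file

    STATUS_FILE = "/tmp/myapp_parser_status.json"  # hypothetical status location

    def retrieve_status(self):
        # The "status" is whatever execute() should thread through the hooks;
        # here it is the timestamp of the last line parsed on a previous run.
        if os.path.exists(self.STATUS_FILE):
            with open(self.STATUS_FILE) as fh:
                return json.load(fh).get("last_timestamp", "")
        return ""

    def aggregators_init(self, file, status):
        # The return value becomes the `aggregates` object handed to the
        # remaining hooks.
        return {"errors": 0, "last_timestamp": status}

    def new_flag_condition(self, line, status):
        # Entries are assumed to start with an ISO timestamp, so a plain
        # string comparison is enough to detect lines newer than the status.
        timestamp = line.split(" ", 1)[0]
        return timestamp > status

    def parse_line(self, line, aggregates, status):
        timestamp, _, message = line.partition(" ")
        if "ERROR" in message:
            aggregates["errors"] += 1
        aggregates["last_timestamp"] = timestamp
        # execute() stops as soon as this hook returns something falsy,
        # so return True to keep consuming lines.
        return True

    def update_status(self, aggregates, status):
        with open(self.STATUS_FILE, "w") as fh:
            json.dump({"last_timestamp": aggregates["last_timestamp"]}, fh)
        print(f"new errors since last run: {aggregates['errors']}")


if __name__ == "__main__":
    ErrorCountParser().execute()
```

The point of the refactor is visible here: the subclass no longer stashes state on `self` between `parser_init`/`parser_deinit` calls; instead `execute()` passes the `status` and `aggregates` values explicitly into each hook.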