From c6d3a00d2016473744a7a4b43c2517ace5c457e0 Mon Sep 17 00:00:00 2001
From: Pelagic
Date: Fri, 15 Dec 2023 09:53:28 +0200
Subject: [PATCH] Revert "Added the new PKGBUILD for starfields-logparser."

This reverts commit b21df6a09e68967d143a681664c1773143770d47.
---
 PKGBUILD                             | 10 ++---
 README.md                            |  7 ++++
 pyproject.toml                       | 24 +++++++++++
 starfields_logparser/base_parsers.py | 59 ++++++++++++++++++++++++++++
 4 files changed, 95 insertions(+), 5 deletions(-)
 create mode 100644 README.md
 create mode 100644 pyproject.toml
 create mode 100644 starfields_logparser/base_parsers.py

diff --git a/PKGBUILD b/PKGBUILD
index b79608a..d7f24a6 100644
--- a/PKGBUILD
+++ b/PKGBUILD
@@ -8,11 +8,11 @@ pkgrel=1
 pkgdesc='A basic python logparser for arbitrary log files'
 arch=('any')
 depends=("python>=3.8" "python-django>=3.0")
-url="https://git.vickys-corner.xyz/StarFields/starfields-logparser"
+url="https://git.vickys-corner.xyz/ace/starfields-logparser"
 license=('GPLv3')
 
-source=("https://git.vickys-corner.xyz/StarFields/starfields-logparser/archive/starfields-logparser-0.1.0.tar.gz")
-sha256sums=('4998937ddb7648bc7d8fdd4636cb1cdc097893bc889c2f0269e82b49bd451c06')
+source=("https://git.vickys-corner.xyz/StarFields/starfields-drf-generics/archive/starfields-drf-generics-0.2.0.tar.gz")
+sha256sums=('4803c2574e2232181c4c512aa0a80f7e087bbc8da59279d7ffc99d3167092496')
 
 makedepends=(
   'python-build'
@@ -23,11 +23,11 @@ makedepends=(
 )
 
 build() {
-  cd ${srcdir}/starfields-logparser
+  cd ${srcdir}/starfields-drf-generics
   python -m build --wheel --no-isolation
 }
 
 package() {
-  cd ${srcdir}/starfields-logparser/${_pkgname}
+  cd ${srcdir}/starfields-drf-generics/${_pkgname}
   python -m installer --destdir="${pkgdir}" ../dist/*.whl
 }
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..a090069
--- /dev/null
+++ b/README.md
@@ -0,0 +1,7 @@
+We wanted a very flexible python library with which we could parse any log file with any format. 
+# Differences with the fully fledged log parsers
+Instead of coming with an incomplete amount of parsers for specific services we wanted a few lifecycle hooks from which to parse arbitrary log files.
+There is no default reporting from this library, for that a few lifecycle hooks are offered for you to extract the accumulated data.
+
+# TODO
+[ ] generic parsers with a flexible parser collection that can be adjusted on the go
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..fe1c973
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,24 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "starfields-logparser"
+version = "0.1.0"
+authors = [
+  { name="Anastasios Svolis", email="support@starfields.gr" },
+]
+description = "A basic python library for parsing arbitrary log files"
+readme = "README.md"
+requires-python = ">=3.8"
+classifiers = [
+    "Programming Language :: Python :: 3",
+    "License :: OSI Approved :: MIT License",
+    "Operating System :: OS Independent",
+]
+
+[project.urls]
+"Homepage" = "https://git.vickys-corner.xyz/StarFields/starfields-logparser"
+
+[tool.setuptools.packages.find]
+where = ["starfields_logparser"]
diff --git a/starfields_logparser/base_parsers.py b/starfields_logparser/base_parsers.py
new file mode 100644
index 0000000..e2070d2
--- /dev/null
+++ b/starfields_logparser/base_parsers.py
@@ -0,0 +1,59 @@
+#!/usr/bin/python
+
+
+class BaseOrderedLogParser:
+    """
+    This parser is to be inherited from. It goes through log entries
+    until it finds an entry it hasn't gone through, in which case it
+    parses until the end of the entries. 
+    """
+
+    def execute(self):
+        status = self.retrieve_status()
+
+        context_manager = self.get_context_manager()
+
+        with context_manager(*self.context_manager_args) as file:
+            aggregates = self.aggregators_init(file, status)
+            line = file.readline()
+            new_flag = False
+
+            while (line):
+                # Only check for a new line if we're not at new entries yet
+                if not new_flag:
+                    new_flag = self.new_flag_condition(line, status)
+
+                # Ignore old entries and only consider new ones
+                if new_flag:
+                    break_condition = self.parse_line(line, aggregates, status)
+
+                    if not break_condition:
+                        break
+
+                line = file.readline()
+
+            self.aggregators_deinit(file, aggregates, status)
+
+        self.update_status(aggregates, status)
+
+    def get_context_manager(self):
+        """
+        Hook to customize the context manager. Defaults to the file context
+        manager. The context manager file should have a readline() method
+        that reads the next "line".
+        """
+        return open
+
+    def aggregators_deinit(self, file, aggregates, status):
+        """
+        Hook that runs right before the context manager closes the file. It
+        comes with the aggregates and the status date so sometimes closing the
+        status of the parser here makes more sense.
+        """
+        pass
+
+    def update_status(self, aggregates, status):
+        """
+        Updates the saved status of the parser.
+        """
+        pass