Refines the log reader to read only the last X lines of the log file for faster log viewing. Adds configuration options for log reading.

This commit is contained in:
Michael Beck
2025-02-05 23:31:23 +01:00
parent d868a181a9
commit 3282270164
3 changed files with 86 additions and 15 deletions

74
app.py
View File

@@ -120,6 +120,55 @@ def generate_statistics(df):
activity_by_hour = df.groupby('hour').size()
return activity_by_hour
# Taken from:
# https://gist.github.com/amitsaha/5990310?permalink_comment_id=3017951#gistcomment-3017951
def tail(filename, n):
stat = os.stat(filename)
n = int(n)
if stat.st_size == 0 or n == 0:
yield ''
return
page_size = config['LOGGING']['TAIL_PAGE_SIZE']
offsets = []
count = _n = n if n >= 0 else -n
last_byte_read = last_nl_byte = starting_offset = stat.st_size - 1
with open(filename, 'r') as f:
while count > 0:
starting_byte = last_byte_read - page_size
if last_byte_read == 0:
offsets.append(0)
break
elif starting_byte < 0:
f.seek(0)
text = f.read(last_byte_read)
else:
f.seek(starting_byte)
text = f.read(page_size)
for i in range(-1, -1*len(text)-1, -1):
last_byte_read -= 1
if text[i] == '\n':
last_nl_byte = last_byte_read
starting_offset = last_nl_byte + 1
offsets.append(starting_offset)
count -= 1
offsets = offsets[len(offsets)-_n:]
offsets.reverse()
with open(filename, 'r') as f:
for i, offset in enumerate(offsets):
f.seek(offset)
if i == len(offsets) - 1:
yield f.read()
else:
bytes_to_read = offsets[i+1] - offset
yield f.read(bytes_to_read)
@app.route('/')
def index():
form = ScrapingForm()
@@ -177,23 +226,27 @@ def logs():
@app.route('/logfile', methods=['GET'])
def logfile():
    """Return a paginated slice of the current log file as JSON.

    Query args:
        page: Zero-based page number (default 0).
        lines_per_page: Lines per page; defaults to the configured
            [LOGGING] VIEW_PAGE_LINES value.

    Returns:
        JSON with the page's lines (newest first), total line count,
        page count, and the starting line number of this page --
        or a 404 JSON error if the log file does not exist.
    """
    page = int(request.args.get('page', 0))  # Page number
    lines_per_page = int(request.args.get('lines_per_page', config['LOGGING']['VIEW_PAGE_LINES']))  # Lines per page
    log_file_path = logFile  # Path to the current log file
    if not os.path.isfile(log_file_path):
        logging.error("Log file not found")
        return jsonify({"error": "Log file not found"}), 404
    # Only read the configured maximum number of trailing lines instead
    # of the whole file (see tail() above).
    log_lines = list(tail(log_file_path, config['LOGGING']['VIEW_MAX_LINES']))
    log_lines = log_lines[::-1]  # Reverse so the newest lines come first
    start = page * lines_per_page
    end = start + lines_per_page
    paginated_lines = log_lines[start:end] if start < len(log_lines) else []
    return jsonify({
        "log": paginated_lines,
        "total_lines": len(log_lines),
        # Ceiling division so a final partial page is still a page.
        "pages": (len(log_lines) + lines_per_page - 1) // lines_per_page,
        "start_line": len(log_lines) - start  # Starting line number for the current page
    })
@app.route('/results')
def results():
# Assuming the scraping is done and data is saved somewhere
@@ -272,5 +325,10 @@ def download_data_file(filename):
def download_log_file(filename):
    """Serve *filename* out of the local 'logs' directory."""
    logs_directory = 'logs'
    return send_from_directory(logs_directory, filename)
@app.route('/config/lines_per_page')
def get_lines_per_page():
    """Expose the configured log-view page size to the frontend."""
    configured_value = config['LOGGING']['VIEW_PAGE_LINES']
    return jsonify({"lines_per_page": configured_value})
# Start the Flask development server when executed directly
# (debug mode with the threaded request handler enabled).
if __name__ == '__main__':
    app.run(debug=True, threaded=True)