Refines the log reader to read only the last X lines for faster log reading. Adds configuration options for log reading.

This commit is contained in:
Michael Beck
2025-02-05 23:31:23 +01:00
parent d868a181a9
commit 3282270164
3 changed files with 86 additions and 15 deletions

70
app.py
View File

@@ -120,6 +120,55 @@ def generate_statistics(df):
activity_by_hour = df.groupby('hour').size() activity_by_hour = df.groupby('hour').size()
return activity_by_hour return activity_by_hour
# Adapted from:
# https://gist.github.com/amitsaha/5990310?permalink_comment_id=3017951#gistcomment-3017951
def tail(filename, n, page_size=None):
    """Yield the last *n* lines of *filename*, oldest first.

    Scans the file backwards in fixed-size byte chunks, so only the tail
    of the file is read — this keeps large log files fast to serve.

    Args:
        filename: Path of the file to read.
        n: Number of trailing lines to yield (the absolute value is used,
            so a negative n behaves like its positive counterpart).
        page_size: Bytes to read per backward scan step.  When None, the
            value is taken from config['LOGGING']['TAIL_PAGE_SIZE'].

    Yields:
        One string per line, each keeping its trailing newline (the very
        last line may lack one).  Yields a single '' for an empty file or
        when n == 0.
    """
    stat = os.stat(filename)
    n = int(n)
    if stat.st_size == 0 or n == 0:
        yield ''
        return
    if page_size is None:
        page_size = config['LOGGING']['TAIL_PAGE_SIZE']
    # configparser values are strings; convert before any byte arithmetic.
    page_size = int(page_size)

    offsets = []  # byte offsets where wanted lines start, newest first
    count = _n = abs(n)
    # Start one byte before EOF so a trailing newline does not register
    # as the start of an extra, empty line.
    last_byte_read = stat.st_size - 1
    # Scan in binary mode: os.stat sizes and seek positions are byte
    # counts, which text mode does not honour (multi-byte encodings,
    # and arbitrary text-mode seeks are undefined).
    with open(filename, 'rb') as f:
        while count > 0:
            starting_byte = last_byte_read - page_size
            if last_byte_read == 0:
                # Hit the start of the file before finding n newlines:
                # the file's first line is part of the tail.
                offsets.append(0)
                break
            elif starting_byte < 0:
                # Less than a full page remains; read from the beginning.
                f.seek(0)
                chunk = f.read(last_byte_read)
            else:
                f.seek(starting_byte)
                chunk = f.read(page_size)
            for i in range(len(chunk) - 1, -1, -1):
                last_byte_read -= 1
                if chunk[i] == 0x0A:  # b'\n'
                    # The line starts one byte past the newline.
                    offsets.append(last_byte_read + 1)
                    count -= 1
                    if count == 0:
                        # Stop immediately: scanning further would record
                        # offsets of older lines and corrupt the result.
                        break

    offsets = offsets[:_n]
    offsets.reverse()  # oldest wanted line first
    with open(filename, 'rb') as f:
        for i, offset in enumerate(offsets):
            f.seek(offset)
            if i == len(offsets) - 1:
                data = f.read()
            else:
                data = f.read(offsets[i + 1] - offset)
            # Assumes UTF-8 log encoding — undecodable bytes are replaced
            # rather than raising mid-stream.
            yield data.decode('utf-8', errors='replace')
@app.route('/') @app.route('/')
def index(): def index():
form = ScrapingForm() form = ScrapingForm()
@@ -177,23 +226,27 @@ def logs():
@app.route('/logfile', methods=['GET']) @app.route('/logfile', methods=['GET'])
def logfile(): def logfile():
page = int(request.args.get('page', 0)) # Page number page = int(request.args.get('page', 0)) # Page number
lines_per_page = int(request.args.get('lines_per_page', config['DEFAULT']['LOG_VIEW_LINES'])) # Lines per page lines_per_page = int(request.args.get('lines_per_page', config['LOGGING']['VIEW_PAGE_LINES'])) # Lines per page
log_file_path = logFile # Path to the current log file log_file_path = logFile # Path to the current log file
if not os.path.isfile(log_file_path): if not os.path.isfile(log_file_path):
logging.error("Log file not found")
return jsonify({"error": "Log file not found"}), 404 return jsonify({"error": "Log file not found"}), 404
with open(log_file_path, 'r') as file: log_lines = list(tail(log_file_path, config['LOGGING']['VIEW_MAX_LINES']))
log_lines = file.readlines()
log_lines = log_lines[::-1] log_lines = log_lines[::-1] # Reverse the list
start = page * lines_per_page start = page * lines_per_page
end = start + lines_per_page end = start + lines_per_page
paginated_lines = log_lines[start:end] if start < len(log_lines) else [] paginated_lines = log_lines[start:end] if start < len(log_lines) else []
return jsonify({"log": paginated_lines, "total_lines": len(log_lines), "pages": len(log_lines) // lines_per_page}) return jsonify({
"log": paginated_lines,
"total_lines": len(log_lines),
"pages": (len(log_lines) + lines_per_page - 1) // lines_per_page,
"start_line": len(log_lines) - start # Starting line number for the current page
})
@app.route('/results') @app.route('/results')
def results(): def results():
# Assuming the scraping is done and data is saved somewhere # Assuming the scraping is done and data is saved somewhere
@@ -272,5 +325,10 @@ def download_data_file(filename):
def download_log_file(filename): def download_log_file(filename):
return send_from_directory('logs', filename) return send_from_directory('logs', filename)
@app.route('/config/lines_per_page')
def get_lines_per_page():
lines_per_page = config['LOGGING']['VIEW_PAGE_LINES']
return jsonify({"lines_per_page": lines_per_page})
if __name__ == '__main__': if __name__ == '__main__':
app.run(debug=True, threaded=True) app.run(debug=True, threaded=True)

View File

@@ -1,3 +1,8 @@
[DEFAULT] [DEFAULT]
SECRET_KEY = your_secret_key SECRET_KEY = your_secret_key # Secret key for session management
API_KEY = your_api_key API_KEY = your_api_key # API key for accessing the TORN API. Public key should be enough
[LOGGING]
# Maximum number of lines to display in the log viewer
VIEW_MAX_LINES = 500
# Number of lines to display per page in the log viewer
VIEW_PAGE_LINES = 50
# Number of bytes to read at a time when tailing the log file
TAIL_PAGE_SIZE = 100

View File

@@ -6,10 +6,18 @@ document.addEventListener('DOMContentLoaded', () => {
const nextPageButton = document.getElementById('nextPage'); const nextPageButton = document.getElementById('nextPage');
const pageInfo = document.getElementById('pageInfo'); const pageInfo = document.getElementById('pageInfo');
let currentPage = 0; let currentPage = 0;
let linesPerPage;
const linesPerPage = 50;
let autoRefreshInterval; let autoRefreshInterval;
// Fetch the configuration value for linesPerPage
fetch('/config/lines_per_page')
.then(response => response.json())
.then(data => {
linesPerPage = data.lines_per_page;
// Initial fetch of logs after getting the config value
fetchLogs(currentPage);
});
const fetchLogs = (page) => { const fetchLogs = (page) => {
fetch(`/logfile?page=${page}&lines_per_page=${linesPerPage}`) fetch(`/logfile?page=${page}&lines_per_page=${linesPerPage}`)
.then(response => response.json()) .then(response => response.json())
@@ -18,7 +26,7 @@ document.addEventListener('DOMContentLoaded', () => {
logsElement.textContent = data.error; logsElement.textContent = data.error;
} else { } else {
logsElement.innerHTML = data.log.map((line, index) => { logsElement.innerHTML = data.log.map((line, index) => {
const lineNumber = data.total_lines - (page * linesPerPage + index); const lineNumber = data.start_line - index;
return `<span class="line-number">${lineNumber}</span> ${line}`; return `<span class="line-number">${lineNumber}</span> ${line}`;
}).join(''); }).join('');
// Disable/enable pagination buttons based on the page number and total lines // Disable/enable pagination buttons based on the page number and total lines
@@ -29,11 +37,11 @@ document.addEventListener('DOMContentLoaded', () => {
}); });
}; };
const startAutoRefresh = () => { function startAutoRefresh() {
autoRefreshInterval = setInterval(() => { autoRefreshInterval = setInterval(() => {
fetchLogs(currentPage); fetchLogs(currentPage);
}, 5000); // Refresh every 5 seconds }, 5000); // Refresh every 5 seconds
}; }
const stopAutoRefresh = () => { const stopAutoRefresh = () => {
clearInterval(autoRefreshInterval); clearInterval(autoRefreshInterval);