diff --git a/app.py b/app.py
index 9d59021..7a60093 100644
--- a/app.py
+++ b/app.py
@@ -120,6 +120,55 @@ def generate_statistics(df):
     activity_by_hour = df.groupby('hour').size()
     return activity_by_hour
 
+# Taken from:
+# https://gist.github.com/amitsaha/5990310?permalink_comment_id=3017951#gistcomment-3017951
+def tail(filename, n):
+    stat = os.stat(filename)
+    n = int(n)
+    if stat.st_size == 0 or n == 0:
+        yield ''
+        return
+
+    page_size = int(config['LOGGING']['TAIL_PAGE_SIZE'])
+    offsets = []
+    count = _n = n if n >= 0 else -n
+
+    last_byte_read = last_nl_byte = starting_offset = stat.st_size - 1
+
+    with open(filename, 'r') as f:
+        while count > 0:
+            starting_byte = last_byte_read - page_size
+            if last_byte_read == 0:
+                offsets.append(0)
+                break
+            elif starting_byte < 0:
+                f.seek(0)
+                text = f.read(last_byte_read)
+            else:
+                f.seek(starting_byte)
+                text = f.read(page_size)
+
+            for i in range(-1, -1*len(text)-1, -1):
+                last_byte_read -= 1
+                if text[i] == '\n':
+                    last_nl_byte = last_byte_read
+                    starting_offset = last_nl_byte + 1
+                    offsets.append(starting_offset)
+                    count -= 1
+
+    offsets = offsets[len(offsets)-_n:]
+    offsets.reverse()
+
+    with open(filename, 'r') as f:
+        for i, offset in enumerate(offsets):
+            f.seek(offset)
+
+            if i == len(offsets) - 1:
+                yield f.read()
+            else:
+                bytes_to_read = offsets[i+1] - offset
+                yield f.read(bytes_to_read)
+
 @app.route('/')
 def index():
     form = ScrapingForm()
@@ -177,23 +226,27 @@ def logs():
 @app.route('/logfile', methods=['GET'])
 def logfile():
     page = int(request.args.get('page', 0)) # Page number
-    lines_per_page = int(request.args.get('lines_per_page', config['DEFAULT']['LOG_VIEW_LINES'])) # Lines per page
+    lines_per_page = int(request.args.get('lines_per_page', config['LOGGING']['VIEW_PAGE_LINES'])) # Lines per page
     log_file_path = logFile # Path to the current log file
 
     if not os.path.isfile(log_file_path):
+        logging.error("Log file not found")
         return jsonify({"error": "Log file not found"}), 404
 
-    with open(log_file_path, 'r') as file:
-        log_lines = file.readlines()
-
-    log_lines = log_lines[::-1]
-
+    log_lines = list(tail(log_file_path, config['LOGGING']['VIEW_MAX_LINES']))
+
+    log_lines = log_lines[::-1] # Reverse the list
+
     start = page * lines_per_page
     end = start + lines_per_page
     paginated_lines = log_lines[start:end] if start < len(log_lines) else []
 
-    return jsonify({"log": paginated_lines, "total_lines": len(log_lines), "pages": len(log_lines) // lines_per_page})
-
+    return jsonify({
+        "log": paginated_lines,
+        "total_lines": len(log_lines),
+        "pages": (len(log_lines) + lines_per_page - 1) // lines_per_page,
+        "start_line": len(log_lines) - start # Starting line number for the current page
+    })
 
 @app.route('/results')
 def results():
     # Assuming the scraping is done and data is saved somewhere
@@ -272,5 +325,10 @@ def download_data_file(filename):
 def download_log_file(filename):
     return send_from_directory('logs', filename)
 
+@app.route('/config/lines_per_page')
+def get_lines_per_page():
+    lines_per_page = int(config['LOGGING']['VIEW_PAGE_LINES'])
+    return jsonify({"lines_per_page": lines_per_page})
+
 if __name__ == '__main__':
     app.run(debug=True, threaded=True)
\ No newline at end of file
diff --git a/example_config.ini b/example_config.ini
index 50655ba..8c10e53 100644
--- a/example_config.ini
+++ b/example_config.ini
@@ -1,3 +1,13 @@
 [DEFAULT]
-SECRET_KEY = your_secret_key
-API_KEY = your_api_key
+# Secret key for session management
+SECRET_KEY = your_secret_key
+# API key for accessing the TORN API. Public key should be enough
+API_KEY = your_api_key
+
+[LOGGING]
+# Maximum number of lines to display in the log viewer
+VIEW_MAX_LINES = 500
+# Number of lines to display per page in the log viewer
+VIEW_PAGE_LINES = 50
+# Number of bytes to read at a time
+TAIL_PAGE_SIZE = 100
\ No newline at end of file
diff --git a/static/app.js b/static/app.js
index 8351421..6e10f0d 100644
--- a/static/app.js
+++ b/static/app.js
@@ -6,10 +6,18 @@ document.addEventListener('DOMContentLoaded', () => {
     const nextPageButton = document.getElementById('nextPage');
     const pageInfo = document.getElementById('pageInfo');
     let currentPage = 0;
-
-    const linesPerPage = 50;
+    let linesPerPage;
     let autoRefreshInterval;
 
+    // Fetch the configuration value for linesPerPage
+    fetch('/config/lines_per_page')
+        .then(response => response.json())
+        .then(data => {
+            linesPerPage = data.lines_per_page;
+            // Initial fetch of logs after getting the config value
+            fetchLogs(currentPage);
+        });
+
     const fetchLogs = (page) => {
         fetch(`/logfile?page=${page}&lines_per_page=${linesPerPage}`)
             .then(response => response.json())
@@ -18,7 +26,7 @@ document.addEventListener('DOMContentLoaded', () => {
                 logsElement.textContent = data.error;
             } else {
                 logsElement.innerHTML = data.log.map((line, index) => {
-                    const lineNumber = data.total_lines - (page * linesPerPage + index);
+                    const lineNumber = data.start_line - index;
                     return `${lineNumber} ${line}`;
                 }).join('');
                 // Disable/enable pagination buttons based on the page number and total lines
@@ -29,11 +37,11 @@ document.addEventListener('DOMContentLoaded', () => {
         });
     };
 
-    const startAutoRefresh = () => {
+    function startAutoRefresh() {
         autoRefreshInterval = setInterval(() => {
             fetchLogs(currentPage);
         }, 5000); // Refresh every 5 seconds
-    };
+    }
 
     const stopAutoRefresh = () => {
         clearInterval(autoRefreshInterval);