refactors logging and config

commit 33621bdec4
parent d1f562ce94
date   2025-02-10 16:34:11 +01:00
14 changed files with 114 additions and 147 deletions

@@ -7,15 +7,10 @@ from datetime import datetime
 import pandas as pd
 from app.models import Scraper
-from app.util import create_zip, delete_old_zips, tail, get_size
+from app.util import create_zip, delete_old_zips, tail
-from app.config import load_config
-from app.logging_config import get_logger
 from app.forms import ScrapingForm
-config = load_config()
-logger = get_logger()
-log_file_name = logger.handlers[0].baseFilename
 scraping_thread = None
 scraper = None
 scrape_lock = threading.Lock()
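
The import hunk above removes the module-level load_config() / get_logger() singletons; every later hunk reads configuration and the logger from Flask's current_app instead. A minimal sketch of the wiring this pattern assumes — create_app, the handler setup, and the concrete config values are illustrative, not part of this commit:

import logging
import os
from logging.handlers import RotatingFileHandler
from flask import Flask

def create_app():
    app = Flask(__name__)
    # Nested sections mirror the keys the routes below read.
    app.config['LOGGING'] = {'LOG_DIR': 'logs', 'VIEW_PAGE_LINES': 50,
                             'VIEW_MAX_LINES': 1000}
    app.config['DATA'] = {'DATA_DIR': 'data'}
    app.config['TEMP'] = {'TEMP_DIR': 'tmp'}

    # /logfile reads current_app.logger.handlers[0].baseFilename, so the
    # first handler on app.logger must be a file handler.
    os.makedirs('logs', exist_ok=True)
    app.logger.handlers.clear()
    app.logger.addHandler(RotatingFileHandler('logs/app.log', maxBytes=1_000_000))
    app.logger.setLevel(logging.DEBUG)
    return app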
@@ -23,10 +18,11 @@ scrape_lock = threading.Lock()
 def register_api(app):
     @app.route('/start_scraping', methods=['POST'])
     def start_scraping():
         global scraping_thread, scraper
         with scrape_lock:
+            scraper = current_app.config.get('SCRAPER')
             if scraper is not None and scraper.scraping_active:
-                logger.warning("Can't start scraping process: scraping already in progress")
+                current_app.logger.warning("Can't start scraping process: scraping already in progress")
                 return jsonify({"status": "Scraping already in progress"})
             form = ScrapingForm()
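
start_scraping now looks the shared Scraper instance up in current_app.config under the 'SCRAPER' key rather than trusting the module global alone. Where that key gets stored is not shown in this diff; a hedged sketch of the convention:

from flask import Flask, current_app, jsonify

app = Flask(__name__)

class Scraper:
    """Hypothetical stand-in; only the attribute the route checks."""
    scraping_active = False

@app.route('/demo_status')
def demo_status():
    # app.config doubles as a per-app registry: one Scraper instance is
    # stashed under 'SCRAPER' and read back on every request.
    scraper = current_app.config.get('SCRAPER')   # None until one is stored
    active = scraper is not None and scraper.scraping_active
    return jsonify({"scraping_active": active})

app.config['SCRAPER'] = Scraper()   # presumably done where scraping starts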
@@ -35,10 +31,10 @@ def register_api(app):
             fetch_interval = form.fetch_interval.data
             run_interval = form.run_interval.data
-            scraper = Scraper(faction_id, fetch_interval, run_interval, current_app)
+            scraper = Scraper(faction_id, fetch_interval, run_interval, app)
             scraper.scraping_active = True
-            scraping_thread = threading.Thread(target=scraper.start_scraping)
+            scraping_thread = threading.Thread(target=scraper.start_scraping, args=(app,))
             scraping_thread.daemon = True
             scraping_thread.start()
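
The hunk above swaps current_app for the real app object in both the Scraper constructor and the thread arguments. That matters because current_app is a context-local proxy that only resolves during a request; touching it from a bare worker thread raises RuntimeError. A sketch of how the thread side presumably uses it — the Scraper body here is hypothetical, only the call shapes come from the diff:

import threading
import time
from flask import Flask, current_app

app = Flask(__name__)

class Scraper:
    """Hypothetical stand-in for app.models.Scraper."""
    def __init__(self, faction_id, fetch_interval, run_interval, app):
        self.faction_id = faction_id
        self.fetch_interval = fetch_interval
        self.run_interval = run_interval
        self.scraping_active = False
        self.app = app

    def start_scraping(self, app):
        # The worker pushes its own application context so that
        # current_app.logger and current_app.config work off-request.
        with app.app_context():
            while self.scraping_active:
                current_app.logger.info("fetching faction %s", self.faction_id)
                time.sleep(self.fetch_interval)

scraper = Scraper(42, 1.0, 60, app)
scraper.scraping_active = True
thread = threading.Thread(target=scraper.start_scraping, args=(app,), daemon=True)
thread.start()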
@@ -56,19 +52,21 @@ def register_api(app):
         scraper.stop_scraping()
         current_app.config['SCRAPING_ACTIVE'] = False
-        logger.debug("Scraping stopped by user")
+        current_app.logger.debug("Scraping stopped by user")
         return jsonify({"status": "Scraping stopped"})

     @app.route('/logfile', methods=['GET'])
     def logfile():
+        log_file_name = current_app.logger.handlers[0].baseFilename
         page = int(request.args.get('page', 0))  # Page number
-        lines_per_page = int(request.args.get('lines_per_page', config['LOGGING']['VIEW_PAGE_LINES']))  # Lines per page
+        lines_per_page = int(request.args.get('lines_per_page', current_app.config['LOGGING']['VIEW_PAGE_LINES']))  # Lines per page
         log_file_path = log_file_name  # Path to the current log file
         if not os.path.isfile(log_file_path):
-            logger.error("Log file not found")
+            current_app.logger.error("Log file not found")
             return jsonify({"error": "Log file not found"}), 404
-        log_lines = list(tail(log_file_path, config['LOGGING']['VIEW_MAX_LINES']))
+        log_lines = list(tail(log_file_path, current_app.config['LOGGING']['VIEW_MAX_LINES']))
         log_lines = log_lines[::-1]  # Reverse the list
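
The /logfile route pages through at most VIEW_MAX_LINES trailing lines via app.util.tail, whose body is not part of this diff. A sketch of a helper with the same call shape — path plus line count, returning an iterable of lines:

from collections import deque

def tail(path, n):
    """Sketch of an app.util.tail-style helper: yield the last n lines.
    The real implementation may differ, e.g. by seeking backwards from
    the end of large files instead of reading them through."""
    with open(path, encoding='utf-8', errors='replace') as f:
        # A bounded deque keeps only the trailing n lines in memory.
        yield from deque(f, maxlen=n)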
@@ -123,14 +121,15 @@ def register_api(app):
     @app.route('/delete_files', methods=['POST'])
     def delete_files():
+        log_file_name = current_app.logger.handlers[0].baseFilename
         file_paths = request.json.get('file_paths', [])
         if not file_paths:
             return jsonify({"error": "No files specified"}), 400

         errors = []
-        data_dir = os.path.abspath(config['DATA']['DATA_DIR'])
-        log_dir = os.path.abspath(config['LOGGING']['LOG_DIR'])
+        data_dir = os.path.abspath(current_app.config['DATA']['DATA_DIR'])
+        log_dir = os.path.abspath(current_app.config['LOGGING']['LOG_DIR'])

         for file_path in file_paths:
             if file_path.startswith('/data/'):
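
The hunk truncates inside the loop, but the abspath'd data_dir/log_dir plus the '/data/' prefix check suggest request paths are mapped onto the configured directories with a containment test. A hypothetical helper along those lines — the names and the '/log/' branch are assumptions, since the rest of the loop is not shown:

import os

def resolve_request_path(file_path, data_dir, log_dir):
    """Map '/data/x' or '/log/x' onto an absolute path and refuse
    anything that escapes its base directory. data_dir and log_dir are
    assumed absolute, as in the route above."""
    if file_path.startswith('/data/'):
        base, rel = data_dir, file_path[len('/data/'):]
    elif file_path.startswith('/log/'):
        base, rel = log_dir, file_path[len('/log/'):]
    else:
        return None
    full = os.path.abspath(os.path.join(base, rel))
    # commonpath() catches '..' segments that would climb out of base.
    if os.path.commonpath([full, base]) != base:
        return None
    return full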
@@ -171,40 +170,39 @@ def register_api(app):
     @app.route('/data/<path:filename>')
     def download_data_file(filename):
-        data_dir = os.path.abspath(config['DATA']['DATA_DIR'])
+        data_dir = os.path.abspath(current_app.config['DATA']['DATA_DIR'])
         file_path = os.path.join(data_dir, filename)
         return send_from_directory(directory=data_dir, path=filename, as_attachment=True)

     @app.route('/log/<path:filename>')
     def download_log_file(filename):
-        log_dir = os.path.abspath(config['LOGGING']['LOG_DIR'])
+        log_dir = os.path.abspath(current_app.config['LOGGING']['LOG_DIR'])
         file_path = os.path.join(log_dir, filename)
         return send_from_directory(directory=log_dir, path=filename, as_attachment=True)

     @app.route('/tmp/<path:filename>')
     def download_tmp_file(filename):
-        tmp_dir = os.path.abspath(config['TEMP']['TEMP_DIR'])
+        tmp_dir = os.path.abspath(current_app.config['TEMP']['TEMP_DIR'])
         file_path = os.path.join(tmp_dir, filename)
         return send_from_directory(directory=tmp_dir, path=filename, as_attachment=True)

     @app.route('/config/lines_per_page')
     def get_lines_per_page():
-        lines_per_page = config['LOGGING']['VIEW_PAGE_LINES']
+        lines_per_page = current_app.config['LOGGING']['VIEW_PAGE_LINES']
         return jsonify({"lines_per_page": lines_per_page})

     @app.route('/scraping_status', methods=['GET'])
     def scraping_status():
         if scraper is None:
-            logger.debug("Scraper is not initialized.")
+            current_app.logger.debug("Scraper is not initialized.")
             return jsonify({"scraping_active": False})
         if scraper.scraping_active:
-            logger.debug("Scraping is active.")
+            current_app.logger.debug("Scraping is active.")
             return jsonify({"scraping_active": True})
         else:
-            logger.debug("Scraping is not active.")
+            current_app.logger.debug("Scraping is not active.")
             return jsonify({"scraping_active": False})
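
For reference, the JSON these routes return can be exercised from a Python shell once the dev server is up. The base URL is an assumption, and /logfile's success payload is not fully shown in this hunk but is presumably JSON as well:

import requests

BASE = "http://localhost:5000"   # assumed local dev-server address

# {"scraping_active": true} or {"scraping_active": false}
print(requests.get(f"{BASE}/scraping_status").json())

# Page 0 of the log view, overriding the configured lines_per_page.
print(requests.get(f"{BASE}/logfile",
                   params={"page": 0, "lines_per_page": 50}).json())

# {"lines_per_page": <configured VIEW_PAGE_LINES>}
print(requests.get(f"{BASE}/config/lines_per_page").json())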