Files
TornActivityTracker/app/api.py
2025-02-07 04:13:17 +01:00

210 lines
8.2 KiB
Python

# filepath: /home/michaelb/Dokumente/TornActivityTracker/app/api.py
from flask import jsonify, request, Response, send_from_directory, current_app
import threading
import os
import glob
from datetime import datetime
import pandas as pd
from app.models import Scraper, generate_statistics
from app.util import create_zip, delete_old_zips, tail, get_size
from app.config import load_config
from app.logging_config import get_logger
from app.forms import ScrapingForm
# Application-wide configuration and logger, loaded once at import time.
config = load_config()
logger = get_logger()
# Path of the active log file; used by /logfile paging and as a deletion guard.
log_file_name = logger.handlers[0].baseFilename
# NOTE(review): these module-level globals are never reassigned by the route
# handlers below — start_scraping stores the scraper/thread in app.config
# instead, so reading these globals elsewhere always yields None.
scraping_thread = None
scraper = None
# Serializes /start_scraping requests so only one scraper can be launched.
scrape_lock = threading.Lock()
def register_api(app):
    """Attach all tracker API endpoints to *app*."""

    @app.route('/start_scraping', methods=['POST'])
    def start_scraping():
        """Validate the submitted form and launch a background scraping thread.

        The whole check-and-start sequence runs under ``scrape_lock`` so two
        concurrent requests cannot both spawn a scraper.
        """
        with scrape_lock:
            running = current_app.config.get('SCRAPER')
            if running is not None and running.scraping_active:
                logger.warning("Can't start scraping process: scraping already in progress")
                return jsonify({"status": "Scraping already in progress"})
            form = ScrapingForm()
            if not form.validate_on_submit():
                return jsonify({"status": "Invalid form data"})
            new_scraper = Scraper(
                form.faction_id.data,
                form.fetch_interval.data,
                form.run_interval.data,
                current_app,
            )
            new_scraper.scraping_active = True
            worker = threading.Thread(target=new_scraper.start_scraping)
            worker.daemon = True  # don't block interpreter shutdown
            worker.start()
            # The scraper and its thread live in app config, not module globals.
            current_app.config['SCRAPER'] = new_scraper
            current_app.config['SCRAPING_THREAD'] = worker
            return jsonify({"status": "Scraping started"})
@app.route('/stop_scraping', methods=['POST'])
def stop_scraping():
scraper = current_app.config.get('SCRAPER')
if scraper is None or not scraper.scraping_active:
return jsonify({"status": "Scraping is not running"})
scraper.stop_scraping()
current_app.config['SCRAPING_ACTIVE'] = False
logger.debug("Scraping stopped by user")
return jsonify({"status": "Scraping stopped"})
@app.route('/logfile', methods=['GET'])
def logfile():
page = int(request.args.get('page', 0)) # Page number
lines_per_page = int(request.args.get('lines_per_page', config['LOGGING']['VIEW_PAGE_LINES'])) # Lines per page
log_file_path = log_file_name # Path to the current log file
if not os.path.isfile(log_file_path):
logger.error("Log file not found")
return jsonify({"error": "Log file not found"}), 404
log_lines = list(tail(log_file_path, config['LOGGING']['VIEW_MAX_LINES']))
log_lines = log_lines[::-1] # Reverse the list
start = page * lines_per_page
end = start + lines_per_page
paginated_lines = log_lines[start:end] if start < len(log_lines) else []
return jsonify({
"log": paginated_lines,
"total_lines": len(log_lines),
"pages": (len(log_lines) + lines_per_page - 1) // lines_per_page,
"start_line": len(log_lines) - start
})
@app.route('/download_files', methods=['POST'])
def download_files():
delete_old_zips() # Clean up old zip files
file_paths = request.json.get('file_paths')
if not file_paths:
return jsonify({"error": "No files specified"}), 400
# Get the absolute path of the parent directory
parent_dir = os.path.abspath(os.path.join(app.root_path, os.pardir))
# Validate and correct file paths
valid_file_paths = []
for file_path in file_paths:
if file_path.startswith('/data/'):
corrected_path = file_path.lstrip('/')
full_path = os.path.join(parent_dir, corrected_path)
if os.path.isfile(full_path):
valid_file_paths.append(full_path)
elif file_path.startswith('/log/'):
corrected_path = file_path.lstrip('/')
full_path = os.path.join(parent_dir, corrected_path)
if os.path.isfile(full_path):
valid_file_paths.append(full_path)
if not valid_file_paths:
return jsonify({"error": "No valid files specified"}), 400
# Create a unique zip file name
zip_name = f"files_{datetime.now().strftime('%Y%m%d%H%M%S')}.zip"
zip_path = create_zip(valid_file_paths, zip_name)
# Log the directory and file path for debugging
current_app.logger.debug(f"Sending file from directory: temp, file: {zip_name}")
return download_tmp_file(zip_name)
@app.route('/delete_files', methods=['POST'])
def delete_files():
file_paths = request.json.get('file_paths', [])
if not file_paths:
return jsonify({"error": "No files specified"}), 400
errors = []
data_dir = os.path.abspath(config['DATA']['DATA_DIR'])
log_dir = os.path.abspath(config['LOGGING']['LOG_DIR'])
for file_path in file_paths:
if file_path.startswith('/data/'):
full_file_path = os.path.join(data_dir, file_path.lstrip('/data/'))
elif file_path.startswith('/log/'):
full_file_path = os.path.join(log_dir, file_path.lstrip('/log/'))
else:
errors.append({"file": file_path, "error": "File not in allowed directory"})
continue
# Check if the file is in either the logs or the data files folder
#if not (full_file_path.startswith(data_dir) or full_file_path.startswith(log_dir)):
# errors.append({"file": file_path, "error": "File not in allowed directory"})
# continue
# Check if it's the currently active log file
if full_file_path == log_file_name:
errors.append({"file": file_path, "error": "Cannot delete active log file."})
continue
# Check if it's an active data file
if scraper and scraper.data_file_name == full_file_path:
errors.append({"file": file_path, "error": "Cannot delete active data file."})
continue
if not os.path.isfile(full_file_path):
errors.append({"file": file_path, "error": "File not found"})
continue
try:
os.remove(full_file_path)
except Exception as e:
errors.append({"file": file_path, "error": str(e)})
if errors:
return jsonify({"errors": errors}), 207 # Multi-Status response
return jsonify({"success": True}), 200
@app.route('/data/<path:filename>')
def download_data_file(filename):
data_dir = os.path.abspath(config['DATA']['DATA_DIR'])
file_path = os.path.join(data_dir, filename)
return send_from_directory(directory=data_dir, path=filename, as_attachment=True)
@app.route('/log/<path:filename>')
def download_log_file(filename):
log_dir = os.path.abspath(config['LOGGING']['LOG_DIR'])
file_path = os.path.join(log_dir, filename)
return send_from_directory(directory=log_dir, path=filename, as_attachment=True)
@app.route('/tmp/<path:filename>')
def download_tmp_file(filename):
tmp_dir = os.path.abspath(config['TEMP']['TEMP_DIR'])
file_path = os.path.join(tmp_dir, filename)
return send_from_directory(directory=tmp_dir, path=filename, as_attachment=True)
@app.route('/config/lines_per_page')
def get_lines_per_page():
lines_per_page = config['LOGGING']['VIEW_PAGE_LINES']
return jsonify({"lines_per_page": lines_per_page})
@app.route('/scraping_status', methods=['GET'])
def scraping_status():
if scraper is None:
logger.debug("Scraper is not initialized.")
return jsonify({"scraping_active": False})
if scraper.scraping_active:
logger.debug("Scraping is active.")
return jsonify({"scraping_active": True})
else:
logger.debug("Scraping is not active.")
return jsonify({"scraping_active": False})