Refactor views to track scraper state via Redis and run scraping through Celery
This commit is contained in:
102
app/views.py
102
app/views.py
@@ -2,6 +2,8 @@ import os
|
||||
import glob
|
||||
from flask import render_template, Blueprint, current_app, request
|
||||
|
||||
from app.tasks import get_redis
|
||||
|
||||
from app.forms import ScrapingForm
|
||||
from app.util import get_size
|
||||
from app.config import load_config
|
||||
@@ -13,6 +15,14 @@ from datetime import datetime
|
||||
|
||||
views_bp = Blueprint("views", __name__)
|
||||
|
||||
def sizeof_fmt(num, suffix="B"):
    """Convert a byte count to a human-readable string using binary prefixes.

    Args:
        num: Size in bytes (may be negative; magnitude is compared).
        suffix: Unit suffix appended after the prefix (default "B").

    Returns:
        A string such as "1.5 KiB" or "512.0 B"; falls through to "Yi"
        for values of 1024**8 and above.
    """
    for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]:
        if abs(num) < 1024.0:
            return f"{num:3.1f} {unit}{suffix}"
        num /= 1024.0
    return f"{num:.1f} Yi{suffix}"
|
||||
|
||||
def register_views(app):
|
||||
@app.route('/')
|
||||
def index():
|
||||
@@ -29,50 +39,60 @@ def register_views(app):
|
||||
|
||||
@app.route('/download_results')
|
||||
def download_results():
|
||||
log_file_name = os.path.abspath(app.config['LOG_FILE_NAME'])
|
||||
scraper = app.config.get('SCRAPER')
|
||||
|
||||
if scraper:
|
||||
print(scraper.data_file_name)
|
||||
if not scraper:
|
||||
print("Scraper not initialized")
|
||||
# Get the current active log file and data file from Redis and app config
|
||||
redis_client = get_redis()
|
||||
current_faction_id = redis_client.get("current_faction_id")
|
||||
|
||||
data_dir = os.path.abspath(current_app.config['DATA']['DATA_DIR'])
|
||||
log_dir = os.path.abspath(current_app.config['LOGGING']['LOG_DIR'])
|
||||
|
||||
data_files = glob.glob(os.path.join(data_dir, "*.csv"))
|
||||
log_files = glob.glob(os.path.join(log_dir, "*.log"))
|
||||
active_data_file = None
|
||||
if current_faction_id:
|
||||
active_data_file = redis_client.hget(f"scraper:{current_faction_id}", "data_file_name")
|
||||
|
||||
# Name of the log file currently being written to (used to flag it as active).
active_log_file = app.config['LOG_FILE_NAME']

def get_file_info(file_path, file_type='data'):
    """Build a metadata dict for one data/log file for the download page.

    Args:
        file_path: Path to the file on disk.
        file_type: 'data' or 'log'; selects how "active" is decided —
            by full path against the Redis-tracked data file, or by
            basename against the configured log file.

    Returns:
        Dict with display name, full path, raw mtime/ctime timestamps,
        human-readable size and an 'active' flag.
    """
    stats = os.stat(file_path)
    name = os.path.basename(file_path)

    # A file is "active" when it is the one the running scraper/logger
    # is currently writing to.
    is_active = False
    if file_type == 'data' and active_data_file:
        is_active = os.path.abspath(file_path) == os.path.abspath(active_data_file)
    elif file_type == 'log' and active_log_file:
        is_active = os.path.basename(file_path) == os.path.basename(active_log_file)

    return {
        'name': file_path,                # full path for internal use
        'name_display': name,             # just the filename for display
        'last_modified': stats.st_mtime,  # raw timestamp, not datetime
        'created': stats.st_ctime,        # raw timestamp, not datetime
        'size': sizeof_fmt(stats.st_size),
        'active': is_active,
    }
|
||||
|
||||
data_files_info = [get_file_info(file) for file in data_files]
|
||||
log_files_info = [get_file_info(file) for file in log_files]
|
||||
|
||||
if scraper and scraper.scraping_active:
|
||||
for data_file in data_files_info:
|
||||
if os.path.abspath(scraper.data_file_name) == data_file['name']:
|
||||
data_file['active'] = True
|
||||
else:
|
||||
data_file['active'] = False
|
||||
|
||||
for log_file in log_files_info:
|
||||
if log_file_name == os.path.abspath(log_file['name']):
|
||||
log_file['active'] = True
|
||||
else:
|
||||
log_file['active'] = False
|
||||
|
||||
data_files_info.sort(key=lambda x: x['last_modified'], reverse=True)
|
||||
log_files_info.sort(key=lambda x: x['last_modified'], reverse=True)
|
||||
|
||||
files = {"data": data_files_info, "log": log_files_info}
|
||||
|
||||
|
||||
data_files = []
|
||||
log_files = []
|
||||
|
||||
# Get data files
|
||||
data_dir = os.path.abspath(app.config['DATA']['DATA_DIR'])
|
||||
if os.path.exists(data_dir):
|
||||
for file in glob.glob(os.path.join(data_dir, "*.csv")):
|
||||
data_files.append(get_file_info(file, 'data'))
|
||||
|
||||
# Get log files
|
||||
log_dir = os.path.abspath(app.config['LOGGING']['LOG_DIR'])
|
||||
if os.path.exists(log_dir):
|
||||
for file in glob.glob(os.path.join(log_dir, "*.log")):
|
||||
log_files.append(get_file_info(file, 'log'))
|
||||
|
||||
# Sort files by modification time, newest first
|
||||
data_files.sort(key=lambda x: x['last_modified'], reverse=True)
|
||||
log_files.sort(key=lambda x: x['last_modified'], reverse=True)
|
||||
|
||||
files = {
|
||||
'data': data_files,
|
||||
'log': log_files
|
||||
}
|
||||
|
||||
return render_template('download_results.html', files=files)
|
||||
|
||||
views_bp = Blueprint("views", __name__)
|
||||
@@ -120,7 +140,7 @@ def register_views(app):
|
||||
|
||||
@views_bp.route('/server_time')
|
||||
def server_time():
    """Return the current UTC server time as {'server_time': 'YYYY-MM-DD HH:MM:SS'}."""
    # Local import: the file imports only the datetime CLASS, and
    # datetime.timezone does not exist on the class — the previous
    # datetime.now(datetime.timezone.utc) raised AttributeError.
    # datetime.now(timezone.utc) is the non-deprecated replacement
    # for datetime.utcnow().
    from datetime import timezone
    current_time = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
    return {'server_time': current_time}
|
||||
|
||||
app.register_blueprint(views_bp)
|
||||
|
||||
Reference in New Issue
Block a user