15 Commits

Author SHA1 Message Date
07e3af444c adds docker & fly.toml 2025-02-20 14:26:39 +01:00
11e6348a8e updates 2025-02-11 02:49:55 +01:00
c078017b5f fucked up the config file 2025-02-11 02:18:49 +01:00
f3da58e202 adds template config variables (like app title) 2025-02-11 02:16:02 +01:00
57e969a647 adds activity indicators in header (ugly af) 2025-02-11 01:58:06 +01:00
0340dea4f8 adds status and server time indicators. fixes checkboxes 2025-02-10 18:22:06 +01:00
2b6aebdab4 moves app initialization to correct file 2025-02-10 17:45:24 +01:00
a6292d2d0f adds ending time to activity indicator 2025-02-10 17:33:37 +01:00
a44c2bfc04 separates index and log viewer 2025-02-10 16:52:29 +01:00
33621bdec4 refactors logging and config 2025-02-10 16:34:11 +01:00
d1f562ce94 removed state file as current state will be stored in scraper class 2025-02-10 14:12:23 +01:00
5e00df4e13 Merge pull request 'feature/analysis-form' (#10) from feature/analysis-form into master
Reviewed-on: #10
2025-02-10 03:11:57 +01:00
293d3e26a6 Merge pull request 'corrects button display in download_results' (#9) from develop into master
Reviewed-on: #9
2025-02-10 03:11:34 +01:00
ae5310d764 corrects button display in download_results 2025-02-09 18:32:43 +01:00
487d59512a Merge pull request 'adds correct license' (#7) from develop into master
Reviewed-on: #7
2025-02-09 16:07:20 +01:00
34 changed files with 611 additions and 319 deletions

5
.dockerignore Normal file
View File

@@ -0,0 +1,5 @@
fly.toml
.git/
__pycache__/
.envrc
.venv/

15
Dockerfile Normal file
View File

@@ -0,0 +1,15 @@
# --- Build stage: install dependencies into a virtualenv so the final image
# --- can reuse it without carrying build tooling.
FROM python:3.13.1 AS builder
# Unbuffered stdio for immediate container logs; skip writing .pyc files.
ENV PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1
WORKDIR /app
RUN python -m venv .venv
# Copy requirements.txt alone first so the pip layer stays cached until deps change.
COPY requirements.txt ./
RUN .venv/bin/pip install -r requirements.txt
# --- Runtime stage: slim base plus the prebuilt virtualenv and app source.
FROM python:3.13.1-slim
WORKDIR /app
COPY --from=builder /app/.venv .venv/
# Copies the whole project (filtered by .dockerignore: fly.toml, .git/, caches).
COPY . .
# NOTE(review): this launches the Flask development server on :8080; for
# production a WSGI server (e.g. gunicorn) is the usual choice — confirm intent.
CMD ["/app/.venv/bin/flask", "run", "--host=0.0.0.0", "--port=8080"]

View File

@@ -0,0 +1,50 @@
import os
from flask import Flask
from flask_bootstrap import Bootstrap5
from datetime import datetime
from app.views import register_views
from app.api import register_api
from app.config import load_config
from app.filters import register_filters
from app.logging_config import init_logger
def create_app():
    """Application factory: build and configure the Flask app.

    Loads config.ini, applies Bootstrap settings, initializes logging,
    registers views/API routes/template filters, and seeds the mutable
    scraping state kept in ``app.config``.

    Returns:
        The fully configured :class:`flask.Flask` instance.
    """
    app = Flask(__name__)
    # Pin the process timezone to UTC so log timestamps and the displayed
    # "server time" agree — presumably to match Torn City time; verify.
    os.environ['TZ'] = 'UTC'
    config = load_config()
    app.config['SECRET_KEY'] = config['DEFAULT']['SECRET_KEY']
    # Move bootstrap settings to root level: Bootstrap-Flask reads its
    # options as top-level upper-cased keys, not from a nested section.
    for key, value in config.get('BOOTSTRAP', {}).items():
        app.config[key.upper()] = value
    bootstrap = Bootstrap5(app)
    # Store the entire config in Flask app so nested sections (DATA,
    # LOGGING, TEMP, MAIN, ...) stay reachable via current_app.config.
    app.config.update(config)
    # Initialize other settings: runtime scraping state shared across requests.
    app.config['SCRAPING_ACTIVE'] = False
    app.config['SCRAPING_THREAD'] = None
    app.config['DATA_FILE_NAME'] = None
    app.config['LOG_FILE_NAME'] = "log/" + datetime.now().strftime('%Y-%m-%d-%H-%M') + '.log'
    # Initialize logging.
    # NOTE(review): Flask's ``app.logger`` is normally a read-only property;
    # confirm this assignment works on the installed Flask version.
    app.logger = init_logger(app.config)
    # Register routes
    register_views(app)
    register_api(app)
    register_filters(app)

    @app.context_processor
    def inject_main_config():
        # Expose the [MAIN] config section to every template render.
        main_config = app.config.get('MAIN', {})
        return dict(main_config=main_config)

    return app

View File

@@ -2,7 +2,6 @@ import os
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from flask import url_for
from abc import ABC, abstractmethod
from .base import BaseAnalysis

View File

@@ -1,7 +1,6 @@
import os
import pandas as pd
import plotly.graph_objects as go
from flask import url_for
from abc import ABC, abstractmethod
from .base import BaseAnalysis

View File

@@ -4,13 +4,9 @@ import seaborn as sns
from .basePlotAnalysis import BasePlotAnalysis
from flask import current_app, url_for
from app.logging_config import get_logger
import matplotlib
matplotlib.use('Agg')
logger = get_logger()
class PlotTopActiveUsers(BasePlotAnalysis):
"""
Class for analyzing the most active users and generating a bar chart.

View File

@@ -3,7 +3,6 @@ import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from .basePlotAnalysis import BasePlotAnalysis
from flask import current_app, url_for
import matplotlib
matplotlib.use('Agg')

View File

@@ -4,13 +4,9 @@ import seaborn as sns
from .basePlotAnalysis import BasePlotAnalysis
from flask import current_app, url_for
from app.logging_config import get_logger
import matplotlib
matplotlib.use('Agg')
logger = get_logger()
class PlotLineActivityAllUsers(BasePlotAnalysis):
"""
Class for analyzing user activity trends over multiple days and generating a line graph.

View File

@@ -5,10 +5,6 @@ import plotly.graph_objects as go
from .basePlotlyAnalysis import BasePlotlyAnalysis
from flask import current_app, url_for
from app.logging_config import get_logger
logger = get_logger()
class PlotlyActivityHeatmap(BasePlotlyAnalysis):
"""
Class for analyzing user activity trends over multiple days and generating an interactive heatmap.

View File

@@ -4,10 +4,6 @@ from plotly.subplots import make_subplots
from .basePlotlyAnalysis import BasePlotlyAnalysis
from flask import current_app, url_for
from app.logging_config import get_logger
logger = get_logger()
class PlotlyLineActivityAllUsers(BasePlotlyAnalysis):
"""
Class for analyzing user activity trends over multiple days and generating an interactive line graph.

View File

@@ -7,15 +7,10 @@ from datetime import datetime
import pandas as pd
from app.models import Scraper
from app.util import create_zip, delete_old_zips, tail, get_size
from app.util import create_zip, delete_old_zips, tail
from app.config import load_config
from app.logging_config import get_logger
from app.forms import ScrapingForm
config = load_config()
logger = get_logger()
log_file_name = logger.handlers[0].baseFilename
scraping_thread = None
scraper = None
scrape_lock = threading.Lock()
@@ -23,10 +18,11 @@ scrape_lock = threading.Lock()
def register_api(app):
@app.route('/start_scraping', methods=['POST'])
def start_scraping():
global scraping_thread, scraper
with scrape_lock:
scraper = current_app.config.get('SCRAPER')
if scraper is not None and scraper.scraping_active:
logger.warning("Can't start scraping process: scraping already in progress")
current_app.logger.warning("Can't start scraping process: scraping already in progress")
return jsonify({"status": "Scraping already in progress"})
form = ScrapingForm()
@@ -35,10 +31,10 @@ def register_api(app):
fetch_interval = form.fetch_interval.data
run_interval = form.run_interval.data
scraper = Scraper(faction_id, fetch_interval, run_interval, current_app)
scraper = Scraper(faction_id, fetch_interval, run_interval, app)
scraper.scraping_active = True
scraping_thread = threading.Thread(target=scraper.start_scraping)
scraping_thread = threading.Thread(target=scraper.start_scraping, args=(app,))
scraping_thread.daemon = True
scraping_thread.start()
@@ -56,19 +52,21 @@ def register_api(app):
scraper.stop_scraping()
current_app.config['SCRAPING_ACTIVE'] = False
logger.debug("Scraping stopped by user")
current_app.logger.debug("Scraping stopped by user")
return jsonify({"status": "Scraping stopped"})
@app.route('/logfile', methods=['GET'])
def logfile():
log_file_name = current_app.logger.handlers[0].baseFilename
page = int(request.args.get('page', 0)) # Page number
lines_per_page = int(request.args.get('lines_per_page', config['LOGGING']['VIEW_PAGE_LINES'])) # Lines per page
lines_per_page = int(request.args.get('lines_per_page', current_app.config['LOGGING']['VIEW_PAGE_LINES'])) # Lines per page
log_file_path = log_file_name # Path to the current log file
if not os.path.isfile(log_file_path):
logger.error("Log file not found")
current_app.logger.error("Log file not found")
return jsonify({"error": "Log file not found"}), 404
log_lines = list(tail(log_file_path, config['LOGGING']['VIEW_MAX_LINES']))
log_lines = list(tail(log_file_path, current_app.config['LOGGING']['VIEW_MAX_LINES']))
log_lines = log_lines[::-1] # Reverse the list
@@ -123,14 +121,15 @@ def register_api(app):
@app.route('/delete_files', methods=['POST'])
def delete_files():
log_file_name = current_app.logger.handlers[0].baseFilename
file_paths = request.json.get('file_paths', [])
if not file_paths:
return jsonify({"error": "No files specified"}), 400
errors = []
data_dir = os.path.abspath(config['DATA']['DATA_DIR'])
log_dir = os.path.abspath(config['LOGGING']['LOG_DIR'])
data_dir = os.path.abspath(current_app.config['DATA']['DATA_DIR'])
log_dir = os.path.abspath(current_app.config['LOGGING']['LOG_DIR'])
for file_path in file_paths:
if file_path.startswith('/data/'):
@@ -171,40 +170,46 @@ def register_api(app):
@app.route('/data/<path:filename>')
def download_data_file(filename):
data_dir = os.path.abspath(config['DATA']['DATA_DIR'])
data_dir = os.path.abspath(current_app.config['DATA']['DATA_DIR'])
file_path = os.path.join(data_dir, filename)
return send_from_directory(directory=data_dir, path=filename, as_attachment=True)
@app.route('/log/<path:filename>')
def download_log_file(filename):
log_dir = os.path.abspath(config['LOGGING']['LOG_DIR'])
log_dir = os.path.abspath(current_app.config['LOGGING']['LOG_DIR'])
file_path = os.path.join(log_dir, filename)
return send_from_directory(directory=log_dir, path=filename, as_attachment=True)
@app.route('/tmp/<path:filename>')
def download_tmp_file(filename):
tmp_dir = os.path.abspath(config['TEMP']['TEMP_DIR'])
tmp_dir = os.path.abspath(current_app.config['TEMP']['TEMP_DIR'])
file_path = os.path.join(tmp_dir, filename)
return send_from_directory(directory=tmp_dir, path=filename, as_attachment=True)
@app.route('/config/lines_per_page')
def get_lines_per_page():
lines_per_page = config['LOGGING']['VIEW_PAGE_LINES']
lines_per_page = current_app.config['LOGGING']['VIEW_PAGE_LINES']
return jsonify({"lines_per_page": lines_per_page})
@app.route('/scraping_status', methods=['GET'])
def scraping_status():
if scraper is None:
logger.debug("Scraper is not initialized.")
current_app.logger.debug("Scraper is not initialized.")
return jsonify({"scraping_active": False})
if scraper.scraping_active:
logger.debug("Scraping is active.")
current_app.logger.debug("Scraping is active.")
return jsonify({"scraping_active": True})
else:
logger.debug("Scraping is not active.")
current_app.logger.debug("Scraping is not active.")
return jsonify({"scraping_active": False})
@app.route('/scraping_get_end_time')
def scraping_get_end_time():
if scraper is None:
current_app.logger.debug("Scraper is not initialized.")
return jsonify({"scraping_active":False})
return jsonify({"end_time": scraper.end_time})

View File

@@ -1,41 +0,0 @@
from flask import Flask
from flask_bootstrap import Bootstrap5
from datetime import datetime
from app.views import register_views
from app.api import register_api
from app.config import load_config
from app.filters import register_filters
def init_app():
config = load_config()
# Initialize app
app = Flask(__name__)
# Load configuration
app.config['SECRET_KEY'] = config['DEFAULT']['SECRET_KEY']
app.config['API_KEY'] = config['DEFAULT']['API_KEY']
app.config['DATA'] = config['DATA']
app.config['TEMP'] = config['TEMP']
app.config['LOGGING'] = config['LOGGING']
# Move bootstrap settings to root level
for key in config['BOOTSTRAP']:
app.config[key.upper()] = config['BOOTSTRAP'][key]
bootstrap = Bootstrap5(app)
# Initialize global variables
app.config['SCRAPING_ACTIVE'] = False
app.config['SCRAPING_THREAD'] = None
app.config['DATA_FILE_NAME'] = None
app.config['LOG_FILE_NAME'] = "log/" + datetime.now().strftime('%Y-%m-%d-%H-%M') + '.log'
# Register routes
register_views(app)
register_api(app)
register_filters(app)
return app

View File

@@ -1,7 +1,8 @@
import configparser
from configobj import ConfigObj
import os
def load_config():
config = configparser.ConfigParser()
config.read(os.path.join(os.path.dirname(__file__), '..', 'config.ini'))
return config
config_path = os.path.join(os.path.dirname(__file__), '..', 'config.ini')
# Load config while preserving sections as nested dicts
return ConfigObj(config_path)

View File

@@ -4,36 +4,31 @@ from queue import Queue
import os
from datetime import datetime
from app.config import load_config
from flask import current_app
config = load_config()
def init_logger(config):
LOG_DIR = config.get('LOGGING', {}).get('LOG_DIR', 'log')
# Define the log directory and ensure it exists
LOG_DIR = config['LOGGING']['LOG_DIR']
if not os.path.exists(LOG_DIR):
if not os.path.exists(LOG_DIR):
os.makedirs(LOG_DIR)
# Generate the log filename dynamically
log_file_name = os.path.join(LOG_DIR, datetime.now().strftime('%Y-%m-%d-%H-%M') + '.log')
log_file_name = os.path.join(LOG_DIR, datetime.now().strftime('%Y-%m-%d-%H-%M') + '.log')
# Initialize the logger
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# File handler
file_handler = logging.FileHandler(log_file_name, mode='w')
file_handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(levelname)s: %(message)s',
file_handler = logging.FileHandler(log_file_name, mode='w')
file_handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(levelname)s: %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p')
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
# Queue handler for real-time logging
log_queue = Queue()
queue_handler = QueueHandler(log_queue)
queue_handler.setLevel(logging.DEBUG)
logger.addHandler(queue_handler)
log_queue = Queue()
queue_handler = QueueHandler(log_queue)
queue_handler.setLevel(logging.DEBUG)
logger.addHandler(queue_handler)
logger.debug("Logger initialized")
# Function to get logger in other modules
def get_logger():
return logger

View File

@@ -6,14 +6,7 @@ import time
from datetime import datetime, timedelta
from requests.exceptions import ConnectionError, Timeout, RequestException
from app.logging_config import get_logger
from app.config import load_config
config = load_config()
API_KEY = config['DEFAULT']['API_KEY']
logger = get_logger()
from flask import current_app
class Scraper:
def __init__(self, faction_id, fetch_interval, run_interval, app):
@@ -23,19 +16,21 @@ class Scraper:
self.end_time = datetime.now() + timedelta(days=run_interval)
self.data_file_name = os.path.join(app.config['DATA']['DATA_DIR'], f"{self.faction_id}-{datetime.now().strftime('%Y-%m-%d-%H-%M')}.csv")
self.scraping_active = False
self.API_KEY = app.config['DEFAULT']['API_KEY']
self.logger = app.logger
print(self.data_file_name)
def fetch_faction_data(self):
url = f"https://api.torn.com/faction/{self.faction_id}?selections=&key={API_KEY}"
url = f"https://api.torn.com/faction/{self.faction_id}?selections=&key={self.API_KEY}"
response = requests.get(url)
if response.status_code == 200:
return response.json()
logger.warning(f"Failed to fetch faction data for faction ID {self.faction_id}. Response: {response.text}")
current_app.logger.warning(f"Failed to fetch faction data for faction ID {self.faction_id}. Response: {response.text}")
return None
def fetch_user_activity(self, user_id):
url = f"https://api.torn.com/user/{user_id}?selections=basic,profile&key={API_KEY}"
url = f"https://api.torn.com/user/{user_id}?selections=basic,profile&key={self.API_KEY}"
retries = 3
for attempt in range(retries):
try:
@@ -43,46 +38,52 @@ class Scraper:
response.raise_for_status()
return response.json()
except ConnectionError as e:
logger.error(f"Connection error while fetching user activity for user ID {user_id}: {e}")
current_app.logger.error(f"Connection error while fetching user activity for user ID {user_id}: {e}")
except Timeout as e:
logger.error(f"Timeout error while fetching user activity for user ID {user_id}: {e}")
current_app.logger.error(f"Timeout error while fetching user activity for user ID {user_id}: {e}")
except RequestException as e:
logger.error(f"Error while fetching user activity for user ID {user_id}: {e}")
current_app.logger.error(f"Error while fetching user activity for user ID {user_id}: {e}")
if attempt < retries - 1:
current_app.logger.debug(f"Retrying {attempt + 1}/{retries} for user {user_id}")
time.sleep(2 ** attempt) # Exponential backoff
return None
def start_scraping(self) -> None:
def start_scraping(self, app) -> None:
"""Starts the scraping process until the end time is reached or stopped manually."""
self.scraping_active = True
logger.info(f"Starting scraping for faction ID {self.faction_id}")
logger.debug(f"Fetch interval: {self.fetch_interval}s, Run interval: {self.run_interval} days, End time: {self.end_time}")
# Explicitly enter the application context
with app.app_context():
current_app.logger.info(f"Starting scraping for faction ID {self.faction_id}")
current_app.logger.debug(f"Fetch interval: {self.fetch_interval}s, Run interval: {self.run_interval} days, End time: {self.end_time}")
MAX_FAILURES = 5 # Stop after 5 consecutive failures
failure_count = 0
while datetime.now() < self.end_time and self.scraping_active:
logger.info(f"Fetching data at {datetime.now()}")
current_app.logger.info(f"Fetching data at {datetime.now()}")
faction_data = self.fetch_faction_data()
if not faction_data or "members" not in faction_data:
logger.warning(f"No faction data found for ID {self.faction_id} (Failure {failure_count + 1}/{MAX_FAILURES})")
current_app.logger.warning(f"No faction data found for ID {self.faction_id} (Failure {failure_count + 1}/{MAX_FAILURES})")
failure_count += 1
if failure_count >= MAX_FAILURES:
logger.error(f"Max failures reached ({MAX_FAILURES}). Stopping scraping.")
current_app.logger.error(f"Max failures reached ({MAX_FAILURES}). Stopping scraping.")
break
time.sleep(self.fetch_interval)
continue
current_app.logger.info(f"Fetched {len(faction_data['members'])} members for faction {self.faction_id}")
failure_count = 0 # Reset failure count on success
user_activity_data = self.process_faction_members(faction_data["members"])
self.save_data(user_activity_data)
logger.info(f"Data appended to {self.data_file_name}")
current_app.logger.info(f"Data appended to {self.data_file_name}")
time.sleep(self.fetch_interval)
self.handle_scraping_end()
def process_faction_members(self, members: Dict[str, Dict]) -> List[Dict]:
"""Processes and retrieves user activity for all faction members."""
user_activity_data = []
@@ -96,16 +97,16 @@ class Scraper:
"status": user_activity.get("status", {}).get("state", ""),
"timestamp": datetime.now().timestamp(),
})
logger.info(f"Fetched data for user {user_id} ({user_activity.get('name', '')})")
current_app.logger.info(f"Fetched data for user {user_id} ({user_activity.get('name', '')})")
else:
logger.warning(f"Failed to fetch data for user {user_id}")
current_app.logger.warning(f"Failed to fetch data for user {user_id}")
return user_activity_data
def save_data(self, user_activity_data: List[Dict]) -> None:
"""Saves user activity data to a CSV file."""
if not user_activity_data:
logger.warning("No data to save.")
current_app.logger.warning("No data to save.")
return
df = pd.DataFrame(user_activity_data)
@@ -117,22 +118,22 @@ class Scraper:
try:
with open(self.data_file_name, "a" if file_exists else "w") as f:
df.to_csv(f, mode="a" if file_exists else "w", header=not file_exists, index=False)
logger.info(f"Data successfully saved to {self.data_file_name}")
current_app.logger.info(f"Data successfully saved to {self.data_file_name}")
except Exception as e:
logger.error(f"Error saving data to {self.data_file_name}: {e}")
current_app.logger.error(f"Error saving data to {self.data_file_name}: {e}")
def handle_scraping_end(self) -> None:
"""Handles cleanup and logging when scraping ends."""
if not self.scraping_active:
logger.warning(f"Scraping stopped manually at {datetime.now()}")
current_app.logger.warning(f"Scraping stopped manually at {datetime.now()}")
elif datetime.now() >= self.end_time:
logger.warning(f"Scraping stopped due to timeout at {datetime.now()} (Run interval: {self.run_interval} days)")
current_app.logger.warning(f"Scraping stopped due to timeout at {datetime.now()} (Run interval: {self.run_interval} days)")
else:
logger.error(f"Unexpected stop at {datetime.now()}")
current_app.logger.error(f"Unexpected stop at {datetime.now()}")
logger.info("Scraping completed.")
current_app.logger.info("Scraping completed.")
self.scraping_active = False
def stop_scraping(self):
self.scraping_active = False
logger.debug("Scraping stopped by user")
current_app.logger.debug("Scraping stopped by user")

View File

@@ -1,2 +0,0 @@
data_file_name = None
log_file_name = None

View File

@@ -1,7 +1,38 @@
function checkAllCheckboxes(tableId, checkAllCheckboxId) {
const table = document.getElementById(tableId);
const checkboxes = table.querySelectorAll('input[type="checkbox"]');
const checkAllCheckbox = document.getElementById(checkAllCheckboxId);
import { ScraperUtils } from './scraper_utils.js';
checkboxes.forEach(checkbox => checkbox.checked = checkAllCheckbox.checked);
class Common {
constructor() {
this.utils = new ScraperUtils();
this.addEventListeners();
this.scheduleUpdates();
}
scheduleUpdates() {
// Ensure server time updates every minute but only after initial fetch
setTimeout(() => {
setInterval(() => this.utils.updateServerTime(), 60000);
}, 5000); // Delay first scheduled update to prevent duplicate initial request
}
addEventListeners() {
if (this.utils.stopButton) {
this.utils.stopButton.addEventListener('click', () => this.utils.checkScrapingStatus());
}
}
}
document.addEventListener('DOMContentLoaded', () => {
new Common();
});
window.checkAllCheckboxes = function(tableId, checkAllId) {
var table = document.getElementById(tableId);
var checkAll = document.getElementById(checkAllId);
var checkboxes = table.querySelectorAll('input[type="checkbox"]');
checkboxes.forEach(function(checkbox) {
if (!checkbox.disabled) {
checkbox.checked = checkAll.checked;
}
});
};

View File

@@ -1,91 +1,21 @@
class LogScraperApp {
import { ScraperUtils } from './scraper_utils.js';
class ScraperApp {
constructor() {
this.utils = new ScraperUtils();
this.form = document.getElementById('scrapingForm');
this.stopButton = document.getElementById('stopButton');
this.logsElement = document.getElementById('logs');
this.prevPageButton = document.getElementById('prevPage');
this.nextPageButton = document.getElementById('nextPage');
this.pageInfo = document.getElementById('pageInfo');
this.startButton = document.getElementById('startButton');
this.currentPage = 0;
this.linesPerPage = null;
this.autoRefreshInterval = null;
this.init();
}
async init() {
await this.fetchConfig();
await this.checkScrapingStatus();
init() {
this.utils.checkScrapingStatus();
this.addEventListeners();
}
async fetchConfig() {
try {
const response = await fetch('/config/lines_per_page');
const data = await response.json();
this.linesPerPage = data.lines_per_page;
this.fetchLogs(this.currentPage);
} catch (error) {
console.error('Error fetching config:', error);
}
}
async fetchLogs(page) {
try {
const response = await fetch(`/logfile?page=${page}&lines_per_page=${this.linesPerPage}`);
const data = await response.json();
if (data.error) {
this.logsElement.textContent = data.error;
} else {
this.logsElement.innerHTML = data.log.map((line, index) => {
const lineNumber = data.start_line - index;
return `<span class="line-number">${lineNumber}</span> ${line}`;
}).join('');
this.updatePagination(data.total_lines);
}
} catch (error) {
console.error('Error fetching logs:', error);
}
}
updatePagination(totalLines) {
this.prevPageButton.disabled = this.currentPage === 0;
this.nextPageButton.disabled = (this.currentPage + 1) * this.linesPerPage >= totalLines;
this.pageInfo.textContent = `Page ${this.currentPage + 1} of ${Math.ceil(totalLines / this.linesPerPage)}`;
}
startAutoRefresh() {
this.autoRefreshInterval = setInterval(() => this.fetchLogs(this.currentPage), 5000);
}
stopAutoRefresh() {
clearInterval(this.autoRefreshInterval);
}
async checkScrapingStatus() {
try {
const response = await fetch('/scraping_status');
const data = await response.json();
if (data.scraping_active) {
this.startButton.disabled = true;
this.stopButton.disabled = false;
this.startAutoRefresh();
} else {
this.startButton.disabled = false;
this.stopButton.disabled = true;
}
this.fetchLogs(this.currentPage);
} catch (error) {
console.error('Error checking scraping status:', error);
}
}
async startScraping(event) {
event.preventDefault();
event.preventDefault(); // Prevent default form submission
const formData = new FormData(this.form);
try {
const response = await fetch('/start_scraping', {
@@ -93,12 +23,8 @@ class LogScraperApp {
body: formData
});
const data = await response.json();
console.log(data);
if (data.status === "Scraping started") {
this.startButton.disabled = true;
this.stopButton.disabled = false;
this.startAutoRefresh();
this.utils.checkScrapingStatus(); // Update UI
}
} catch (error) {
console.error('Error starting scraping:', error);
@@ -107,14 +33,12 @@ class LogScraperApp {
async stopScraping() {
try {
const response = await fetch('/stop_scraping', { method: 'POST' });
const response = await fetch('/stop_scraping', {
method: 'POST'
});
const data = await response.json();
console.log(data);
if (data.status === "Scraping stopped") {
this.startButton.disabled = false;
this.stopButton.disabled = true;
this.stopAutoRefresh();
this.utils.checkScrapingStatus(); // Update UI
}
} catch (error) {
console.error('Error stopping scraping:', error);
@@ -122,23 +46,11 @@ class LogScraperApp {
}
addEventListeners() {
this.prevPageButton.addEventListener('click', () => {
if (this.currentPage > 0) {
this.currentPage--;
this.fetchLogs(this.currentPage);
}
});
this.nextPageButton.addEventListener('click', () => {
this.currentPage++;
this.fetchLogs(this.currentPage);
});
this.form.addEventListener('submit', (event) => this.startScraping(event));
this.stopButton.addEventListener('click', () => this.stopScraping());
}
}
// Initialize the application when DOM is fully loaded
document.addEventListener('DOMContentLoaded', () => new LogScraperApp());
document.addEventListener('DOMContentLoaded', () => {
new ScraperApp();
});

97
app/static/log_viewer.js Normal file
View File

@@ -0,0 +1,97 @@
// Paginated, auto-refreshing viewer for the server log file.
class LogViewerApp {
    constructor() {
        this.logsElement = document.getElementById('logs');
        this.prevPageButton = document.getElementById('prevPage');
        this.nextPageButton = document.getElementById('nextPage');
        this.pageInfo = document.getElementById('pageInfo');
        this.currentPage = 0;
        this.linesPerPage = null;
        this.autoRefreshInterval = null;
        this.init();
    }

    async init() {
        await this.fetchConfig();
        await this.checkScrapingStatus();
        this.addEventListeners();
    }

    // Pull the page size from the server, then render the first page.
    async fetchConfig() {
        try {
            const res = await fetch('/config/lines_per_page');
            const cfg = await res.json();
            this.linesPerPage = cfg.lines_per_page;
            this.fetchLogs(this.currentPage);
        } catch (err) {
            console.error('Error fetching config:', err);
        }
    }

    // Fetch one page of log lines and render them with descending line numbers.
    async fetchLogs(page) {
        try {
            const res = await fetch(`/logfile?page=${page}&lines_per_page=${this.linesPerPage}`);
            const payload = await res.json();
            if (payload.error) {
                this.logsElement.textContent = payload.error;
                return;
            }
            const rendered = payload.log.map(
                (line, idx) => `<span class="line-number">${payload.start_line - idx}</span> ${line}`
            );
            this.logsElement.innerHTML = rendered.join('');
            this.updatePagination(payload.total_lines);
        } catch (err) {
            console.error('Error fetching logs:', err);
        }
    }

    // Enable/disable the nav buttons and refresh the "Page X of Y" label.
    updatePagination(totalLines) {
        const pageCount = Math.ceil(totalLines / this.linesPerPage);
        this.prevPageButton.disabled = this.currentPage === 0;
        this.nextPageButton.disabled = (this.currentPage + 1) * this.linesPerPage >= totalLines;
        this.pageInfo.textContent = `Page ${this.currentPage + 1} of ${pageCount}`;
    }

    startAutoRefresh() {
        this.autoRefreshInterval = setInterval(() => this.fetchLogs(this.currentPage), 5000);
    }

    stopAutoRefresh() {
        clearInterval(this.autoRefreshInterval);
    }

    // Query the scraping status once; keep the view auto-refreshing only
    // while a scrape is active, and refresh the current page either way.
    async checkScrapingStatus() {
        try {
            const res = await fetch('/scraping_status');
            const status = await res.json();
            if (status.scraping_active) {
                this.startAutoRefresh();
            } else {
                this.stopAutoRefresh();
            }
            this.fetchLogs(this.currentPage);
        } catch (err) {
            console.error('Error checking scraping status:', err);
        }
    }

    addEventListeners() {
        this.prevPageButton.addEventListener('click', () => {
            if (this.currentPage === 0) return;
            this.currentPage -= 1;
            this.fetchLogs(this.currentPage);
        });
        this.nextPageButton.addEventListener('click', () => {
            this.currentPage += 1;
            this.fetchLogs(this.currentPage);
        });
    }
}
// Boot the viewer once the DOM is ready.
document.addEventListener('DOMContentLoaded', () => new LogViewerApp());

180
app/static/scraper_utils.js Normal file
View File

@@ -0,0 +1,180 @@
export class ScraperUtils {
constructor() {
this.activityIndicator = document.getElementById('activity_indicator');
this.endTimeElement = document.getElementById('end_time');
this.serverTimeElement = document.getElementById('server_time');
this.timeLeftElement = document.getElementById('time-left'); // New element for countdown
this.stopButton = document.getElementById('stopButton');
this.startButton = document.getElementById('startButton');
this.statusContainer = document.getElementById('status_container');
this.loadingIndicator = document.getElementById('loading_indicator');
this.statusContent = document.querySelectorAll('#status_content');
this.serverTime = null;
this.endTime = null;
this.init();
}
async init() {
this.showLoadingIndicator();
try {
// Ensure each function runs only once
await Promise.all([
this.updateServerTime(),
this.checkScrapingStatus()
]);
} catch (error) {
console.error("Error during initialization:", error);
}
// Ensure end time is fetched only if scraping is active
if (this.endTime === null) {
try {
await this.fetchEndTime();
} catch (error) {
console.error("Error fetching end time:", error);
}
}
// Ensure UI is only updated once everything is ready
if (this.serverTime && this.endTime) {
this.startClock();
this.hideLoadingIndicator();
} else {
console.warn("Delaying hiding the loading indicator due to missing data...");
const checkDataInterval = setInterval(() => {
if (this.serverTime && this.endTime) {
clearInterval(checkDataInterval);
this.startClock();
this.hideLoadingIndicator();
}
}, 500);
}
}
showLoadingIndicator() {
this.statusContainer.classList.remove('d-none');
this.loadingIndicator.classList.remove('d-none');
this.statusContent.forEach(element => element.classList.add('d-none'));
}
hideLoadingIndicator() {
this.loadingIndicator.classList.add('d-none');
this.statusContent.forEach(element => element.classList.remove('d-none'));
}
async checkScrapingStatus() {
try {
const response = await fetch('/scraping_status');
const data = await response.json();
if (data.scraping_active) {
if (this.startButton) this.startButton.disabled = true;
if (this.stopButton) this.stopButton.disabled = false;
this.activityIndicator.classList.remove('text-bg-danger');
this.activityIndicator.classList.add('text-bg-success');
this.activityIndicator.textContent = 'Active';
console.log(`Scraping is active until ${data.end_time} TCT`);
// Only call fetchEndTime() if endTime is not already set
if (!this.endTime) {
await this.fetchEndTime();
}
this.endTimeElement.classList.remove('d-none');
this.timeLeftElement.classList.remove('d-none');
} else {
if (this.startButton) this.startButton.disabled = false;
if (this.stopButton) this.stopButton.disabled = true;
this.activityIndicator.classList.remove('text-bg-success');
this.activityIndicator.classList.add('text-bg-danger');
this.activityIndicator.textContent = 'Inactive';
this.endTimeElement.classList.add('d-none');
this.timeLeftElement.classList.add('d-none');
}
} catch (error) {
console.error('Error checking scraping status:', error);
}
}
async updateServerTime() {
try {
const response = await fetch('/server_time');
const data = await response.json();
this.serverTime = new Date(data.server_time.replace(' ', 'T'));
this.serverTimeElement.textContent = `Server Time (TCT): ${this.formatDateToHHMMSS(this.serverTime)}`;
} catch (error) {
console.error('Error fetching server time:', error);
}
}
async fetchEndTime() {
if (this.endTime) return;
try {
const response = await fetch('/scraping_get_end_time');
const data = await response.json();
if (data.end_time) {
this.endTime = new Date(data.end_time);
this.endTimeElement.textContent = `Running until ${this.formatDateToYYYYMMDDHHMMSS(this.endTime)} TCT`;
}
} catch (error) {
this.endTimeElement.textContent = 'Error fetching end time';
console.error('Error fetching end time:', error);
}
}
startClock() {
const updateClock = () => {
if (this.serverTime) {
this.serverTime.setSeconds(this.serverTime.getSeconds() + 1);
this.serverTimeElement.textContent = `Server Time (TCT): ${this.formatDateToHHMMSS(this.serverTime)}`;
}
if (this.endTime && this.serverTime) {
const timeLeft = this.endTime - this.serverTime;
this.timeLeftElement.textContent = `Time Left: ${timeLeft > 0 ? this.formatMillisecondsToHHMMSS(timeLeft) : '00:00:00'}`;
}
};
// Immediately update the clock
updateClock();
// Continue updating every second
setInterval(updateClock, 1000);
}
formatDateToYYYYMMDDHHMMSS(date) {
if (!(date instanceof Date) || isNaN(date)) {
console.error('Invalid date:', date);
return '';
}
return `${date.getFullYear()}-${String(date.getMonth() + 1).padStart(2, '0')}-${String(date.getDate()).padStart(2, '0')} ` +
`${String(date.getHours()).padStart(2, '0')}:${String(date.getMinutes()).padStart(2, '0')}:${String(date.getSeconds()).padStart(2, '0')}`;
}
formatDateToHHMMSS(date) {
if (!(date instanceof Date) || isNaN(date)) {
console.error('Invalid date:', date);
return '';
}
return `${String(date.getHours()).padStart(2, '0')}:${String(date.getMinutes()).padStart(2, '0')}:${String(date.getSeconds()).padStart(2, '0')}`;
}
formatMillisecondsToHHMMSS(ms) {
const totalSeconds = Math.floor(ms / 1000);
const hours = Math.floor(totalSeconds / 3600);
const minutes = Math.floor((totalSeconds % 3600) / 60);
const seconds = totalSeconds % 60;
return `${String(hours).padStart(2, '0')}:${String(minutes).padStart(2, '0')}:${String(seconds).padStart(2, '0')}`;
}
}

View File

@@ -22,6 +22,9 @@
{% block content %}
{% endblock %}
</main>
<footer>
{% include 'includes/footer.html' %}
</footer>
{% block scripts %}
{% include 'includes/scripts.html' %}
{% endblock %}

View File

@@ -7,7 +7,7 @@
<div class="col">
<h2>Data Files</h2>
</div>
<div class="col">
<div class="col text-end">
<div class="btn-group btn-group-sm">
<button class="btn btn-warning" onclick="deleteSelectedFiles()">Delete Selected Files</button>
<button class="btn btn-success" onclick="downloadSelectedFiles()">Download Selected Files</button>
@@ -67,7 +67,7 @@
<table id="logFilesTable" class="table table-striped table-bordered table-hover">
<thead>
<tr>
<th width="2%"><input type="checkbox" id="checkAllLog" onclick="checkAllCheckboxes('logFilesTable', 'checkAllLog')"></th>
<th width="2%"><input type="checkbox" id="checkAllLog" class="form-check-input" onclick="checkAllCheckboxes('logFilesTable', 'checkAllLog')"></th>
<th onclick="sortTable(1, 'logFilesTable')">File Name</th>
<th onclick="sortTable(2, 'logFilesTable')">Last Modified</th>
<th onclick="sortTable(3, 'logFilesTable')">Created</th>

View File

View File

@@ -1,7 +1,6 @@
<!-- app/templates/includes/navigation.html -->
<nav class="navbar navbar-nav navbar-expand-md bg-primary">
<div class="container-fluid">
<a class="navbar-brand" href="/">Torn User Activity Scraper</a>
<a class="navbar-brand" href="/">{{ main_config.APP_TITLE }}</a>
{% from 'bootstrap4/nav.html' import render_nav_item %}
{{ render_nav_item('views.analyze', 'Data Visualization') }}
{{ render_nav_item('download_results', 'Files') }}
@@ -15,3 +14,26 @@
</div>
</div>
</nav>
<div id="status_container" class="container-fluid d-flex justify-content-center">
<div class="container-md my-1 shadow p-4 pb-0 m-1 w-50" id="status_badges">
<div id="loading_indicator" class="alert alert-info">Loading...</div>
<div id="status_content">
<div class="row justify-content-center">
<div class="col col-6 p-1">
<div id="activity_indicator" class="alert alert-danger fw-bolder">Inactive</div>
</div>
<div class="col col-6 p-1">
<div id="server_time" class="alert alert-primary">Server Time (TCT):</div>
</div>
</div>
<div class="row justify-content-center">
<div class="col col-6 p-1">
<div id="end_time" class="alert alert-info">Running until:</div>
</div>
<div class="col p-1">
<div id="time-left" class="alert alert-info">Time Left:</div>
</div>
</div>
</div>
</div>
</div>

View File

@@ -1,3 +1,3 @@
{{ bootstrap.load_js() }}
<script src="{{url_for('static', filename='color_mode.js')}}"></script>
<script src="{{ url_for('static', filename='common.js') }}"></script>
<script type="module" src="{{ url_for('static', filename='common.js') }}"></script>

View File

@@ -2,7 +2,13 @@
{% block content %}
<section id="scrapingFormContainer" class="container-fluid d-flex justify-content-center">
<div class="container-md my-5 mx-2 shadow-lg p-4 ">
<h2>Scraper <span id="activity_indicator" class="badge text-bg-danger">Inactive</span></h2>
<div class="row">
<div class="col">
<h2>Scraper</h2>
</div>
<div class="col text-end">
</div>
</div>
<form id="scrapingForm" method="POST" action="{{ url_for('start_scraping') }}">
{{ form.hidden_tag() }}
<div class="form-group">
@@ -24,23 +30,5 @@
</div>
</div>
</section>
<section id="resultsContainer" class="container-fluid d-flex justify-content-center">
<div class="container-md my-5 mx-2 shadow-lg p-4" style="height: 500px;">
<div class="row">
<div class="col-8">
<h2>Logs</h2>
<pre id="logs" class="pre-scrollable" style="height: 350px; overflow:scroll; "><code></code></pre>
<div class="btn-group btn-group-sm">
<button class="btn btn-primary" id="prevPage">Previous</button>
<button class="btn btn-primary" id="pageInfo" disabled>Page 1 of 1</button>
<button class="btn btn-primary" id="nextPage">Next</button>
</div>
</div>
<div class="col">
<h2>Stats</h2>
</div>
</div>
</div>
</section>
<script src="{{url_for('static', filename='index.js')}}"></script>
<script type="module" src="{{url_for('static', filename='index.js')}}"></script>
{% endblock content %}

View File

@@ -1,3 +1,22 @@
{% extends 'base.html' %}
{% block content %}
<section id="resultsContainer" class="container-fluid d-flex justify-content-center">
<div class="container-md my-5 mx-2 shadow-lg p-4" style="height: 500px;">
<div class="row">
<div class="col-8">
<h2>Logs</h2>
<pre id="logs" class="pre-scrollable" style="height: 350px; overflow:scroll;"><code></code></pre>
<div class="btn-group btn-group-sm">
<button class="btn btn-primary" id="prevPage">Previous</button>
<button class="btn btn-primary" id="pageInfo" disabled>Page 1 of 1</button>
<button class="btn btn-primary" id="nextPage">Next</button>
</div>
</div>
<div class="col">
<h2>Stats</h2>
</div>
</div>
</div>
</section>
<script src="{{url_for('static', filename='log_viewer.js')}}"></script>
{% endblock content %}

View File

@@ -1,13 +1,10 @@
import os
import zipfile
from datetime import datetime, timedelta
from app.state import data_file_name, log_file_name
from flask import current_app
from app.config import load_config
config = load_config()
def create_zip(file_paths, zip_name, app):
temp_dir = os.path.abspath(app.config['TEMP']['TEMP_DIR'])
zip_path = os.path.join(temp_dir, zip_name)
@@ -18,7 +15,7 @@ def create_zip(file_paths, zip_name, app):
return zip_path
def delete_old_zips():
temp_dir = os.path.abspath(config['TEMP']['TEMP_DIR'])
temp_dir = os.path.abspath(current_app.config['TEMP']['TEMP_DIR'])
now = datetime.now()
for filename in os.listdir(temp_dir):
if filename.endswith('.zip'):
@@ -33,7 +30,7 @@ def tail(filename, n):
yield ''
return
page_size = int(config['LOGGING']['TAIL_PAGE_SIZE'])
page_size = int(current_app.config['LOGGING']['TAIL_PAGE_SIZE'])
offsets = []
count = _n = n if n >= 0 else -n

View File

@@ -6,17 +6,10 @@ from app.forms import ScrapingForm
from app.util import get_size
from app.config import load_config
from app.api import scraper as scraper
from app.logging_config import get_logger
from app.analysis import load_data, load_analysis_modules
from app.state import log_file_name
print(f"A imported log_file_name: {log_file_name}")
config = load_config()
logger = get_logger()
from datetime import datetime
views_bp = Blueprint("views", __name__)
@@ -44,8 +37,8 @@ def register_views(app):
if not scraper:
print("Scraper not initialized")
data_dir = os.path.abspath(config['DATA']['DATA_DIR'])
log_dir = os.path.abspath(config['LOGGING']['LOG_DIR'])
data_dir = os.path.abspath(current_app.config['DATA']['DATA_DIR'])
log_dir = os.path.abspath(current_app.config['LOGGING']['LOG_DIR'])
data_files = glob.glob(os.path.join(data_dir, "*.csv"))
log_files = glob.glob(os.path.join(log_dir, "*.log"))
@@ -124,4 +117,10 @@ def register_views(app):
context["results"] = results
return render_template("analyze.html", **context)
@views_bp.route('/server_time')
def server_time():
    """Return the current server (TCT/UTC) time.

    Returns:
        dict: ``{'server_time': 'YYYY-MM-DD HH:MM:SS'}`` — Flask serializes
        it to JSON; consumed by the front-end clock widget.
    """
    # Local import keeps the change self-contained; the module only does
    # `from datetime import datetime`.
    from datetime import timezone
    # datetime.utcnow() is deprecated since Python 3.12 (the app runs on
    # python:3.13.1); an aware UTC "now" yields the same wall-clock digits.
    current_time = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
    return {'server_time': current_time}
app.register_blueprint(views_bp)

View File

@@ -1,3 +1,7 @@
# All main config options will be passed to template engine
[MAIN]
APP_TITLE = 'Torn User Activity Grabber'
[DEFAULT]
SECRET_KEY = your_secret_key
API_KEY = your_api_key

20
fly.toml Normal file
View File

@@ -0,0 +1,20 @@
# fly.toml app configuration file generated for tornactivitytracker on 2025-02-11T02:59:23+01:00
#
# See https://fly.io/docs/reference/configuration/ for information about how to use this file.
#
app = 'tornactivitytracker'
primary_region = 'fra'
[build]
[http_service]
internal_port = 8080
force_https = true
auto_stop_machines = 'stop'
auto_start_machines = true
min_machines_running = 0
processes = ['app']
[[vm]]
size = 'shared-cpu-2x'

View File

@@ -7,3 +7,5 @@ requests
matplotlib
seaborn
configparser
plotly
configobj

View File

@@ -14,6 +14,8 @@ charset-normalizer==3.4.1
# via requests
click==8.1.8
# via flask
configobj==5.0.9
# via -r requirements.in
configparser==7.1.0
# via -r requirements.in
contourpy==1.3.1
@@ -48,6 +50,8 @@ matplotlib==3.10.0
# via
# -r requirements.in
# seaborn
narwhals==1.26.0
# via plotly
numpy==2.2.2
# via
# contourpy
@@ -55,13 +59,17 @@ numpy==2.2.2
# pandas
# seaborn
packaging==24.2
# via matplotlib
# via
# matplotlib
# plotly
pandas==2.2.3
# via
# -r requirements.in
# seaborn
pillow==11.1.0
# via matplotlib
plotly==6.0.0
# via -r requirements.in
pyparsing==3.2.1
# via matplotlib
python-dateutil==2.9.0.post0

4
run.py
View File

@@ -1,5 +1,5 @@
from app.app import init_app
from app import create_app
if __name__ == '__main__':
app = init_app()
app = create_app()
app.run(debug=True, threaded=True)