diff --git a/app.py b/app.py
index cd0d08a..9cb8d8b 100644
--- a/app.py
+++ b/app.py
@@ -203,33 +203,44 @@ def results():
def download_results():
    data_files = glob.glob("data/*.csv")
    log_files = glob.glob("log/*.log")
-
+
    def get_file_info(file_path):
        return {
            "name": file_path,
+            "name_display": os.path.basename(file_path),
            "last_modified": os.path.getmtime(file_path),
            "created": os.path.getctime(file_path),
            "size": get_size(file_path)
        }
-
+
    data_files_info = [get_file_info(file) for file in data_files]
    log_files_info = [get_file_info(file) for file in log_files]
    files = {"data": data_files_info, "log": log_files_info}
+
    return render_template('download_results.html', files=files)
-@app.route('/delete_file', methods=['POST'])
-def delete_file():
-    file_path = request.form.get('file_path')
+@app.route('/delete_files', methods=['POST'])
+def delete_files():
+    file_paths = request.form.getlist('file_paths')
-    if not file_path or not os.path.isfile(file_path):
-        return jsonify({"error": "File not found"}), 404
+    if not file_paths:
+        return jsonify({"error": "No files specified"}), 400
-    try:
-        os.remove(file_path)
-        return jsonify({"success": True}), 200
-    except Exception as e:
-        return jsonify({"error": str(e)}), 500
+    errors = []
+    for file_path in file_paths:
+        real_path = os.path.realpath(file_path)  # resolve '..' and symlinks before any check
+        allowed = any(real_path.startswith(os.path.realpath(d) + os.sep) for d in ("data", "log"))
+        if not allowed or not os.path.isfile(real_path):  # refuse paths outside data/ or log/
+            errors.append({"file": file_path, "error": "File not found"})
+            continue
+        try:
+            os.remove(real_path)
+        except OSError as e:  # narrower than Exception: only filesystem errors are expected
+            errors.append({"file": file_path, "error": str(e)})
+    if errors:
+        return jsonify({"errors": errors}), 207  # Multi-Status response
+    return jsonify({"success": True}), 200
@app.template_filter('datetimeformat')
def datetimeformat(value):
diff --git a/static/index.js b/static/index.js
new file mode 100644
index 0000000..1d94545
--- /dev/null
+++ b/static/index.js
@@ -0,0 +1,108 @@
+document.addEventListener('DOMContentLoaded', () => {
+    const form = document.getElementById('scrapingForm');
+    const stopButton = document.getElementById('stopButton');
+    const logsElement = document.getElementById('logs');
+    const prevPageButton = document.getElementById('prevPage');
+    const nextPageButton = document.getElementById('nextPage');
+    let currentPage = 0;
+    const linesPerPage = 50;
+    let autoRefreshInterval;
+    // "startButton" is the form's submit button (fixes ReferenceError in the status handler below).
+    const startButton = form ? form.querySelector('button[type="submit"]') : null;
+    if (form) {
+        console.log('Form:', form, 'Submit button:', startButton);
+    }
+
+    const fetchLogs = (page) => {
+        fetch(`/logfile?lines=${linesPerPage * (page + 1)}`)
+            .then(response => response.json())
+            .then(data => {
+                if (data.error) {
+                    logsElement.textContent = data.error;
+                } else {
+                    // Reverse the order of log lines
+                    const reversedLogs = data.log.reverse();
+                    logsElement.textContent = reversedLogs.join('');
+                }
+            });
+    };
+
+    const startAutoRefresh = () => {
+        autoRefreshInterval = setInterval(() => {
+            fetchLogs(currentPage);
+        }, 5000); // Refresh every 5 seconds
+    };
+
+    const stopAutoRefresh = () => {
+        clearInterval(autoRefreshInterval);
+    };
+
+    // Check scraping status on page load
+    fetch('/scraping_status')
+        .then(response => response.json())
+        .then(data => {
+            if (data.scraping_active) {
+                if (startButton) startButton.disabled = true; // guard: form may be absent on this page
+                stopButton.disabled = false;
+                startAutoRefresh(); // Start auto-refresh if scraping is active
+            } else {
+                if (startButton) startButton.disabled = false; // guard: form may be absent on this page
+                stopButton.disabled = true;
+            }
+            fetchLogs(currentPage);
+        });
+
+    prevPageButton.addEventListener('click', () => {
+        if (currentPage > 0) {
+            currentPage--;
+            fetchLogs(currentPage);
+        }
+    });
+
+    nextPageButton.addEventListener('click', () => {
+        currentPage++;
+        fetchLogs(currentPage);
+    });
+
+    form.addEventListener('submit', function(e) {
+        e.preventDefault();
+        const formData = new FormData(this);
+
+        fetch('/start_scraping', {
+            method: 'POST',
+            body: formData
+        }).then(response => response.json())
+        .then(data => {
+            console.log(data);
+            const submitButton = form.querySelector('button[type="submit"]');
+            if (data.status === "Scraping started") {
+                if (submitButton) {
+                    submitButton.disabled = true;
+                }
+                stopButton.disabled = false;
+                startAutoRefresh(); // Start auto-refresh when scraping starts
+            } else {
+                // Handle errors or other statuses
+            }
+        });
+    });
+
+    stopButton.addEventListener('click', function() {
+        fetch('/stop_scraping', {
+            method: 'POST'
+        }).then(response => response.json())
+        .then(data => {
+            console.log(data);
+            const submitButton = form.querySelector('button[type="submit"]');
+            if (data.status === "Scraping stopped") {
+                if (submitButton) {
+                    submitButton.disabled = false;
+                }
+                stopButton.disabled = true;
+                stopAutoRefresh(); // Stop auto-refresh when scraping stops
+            } else {
+                // Handle errors or other statuses
+            }
+        });
+    });
+});
\ No newline at end of file
diff --git a/templates/download_results.html b/templates/download_results.html
index 1ad0114..92b0795 100644
--- a/templates/download_results.html
+++ b/templates/download_results.html
@@ -7,7 +7,68 @@
{{ bootstrap.load_css() }}
-
+
+
@@ -25,50 +86,74 @@
-
Data
+
+
+
+
Data Files
+
+
+
+
+
+
+
-
Log
-
+
+
+
+
Log Files
+
+
+
+
+
+
+