refactors to use redis & celery
This commit is contained in:
50
stop_scraping.py
Normal file
50
stop_scraping.py
Normal file
@@ -0,0 +1,50 @@
|
||||
import redis
|
||||
import argparse
|
||||
|
||||
def get_redis(host='localhost', port=6379, db=0):
    """Create a Redis client for the scraper's coordination store.

    Args:
        host: Redis server hostname (defaults preserve the original
            hard-coded local setup).
        port: Redis server port.
        db: Redis logical database index.

    Returns:
        redis.StrictRedis: client with ``decode_responses=True`` so every
        value comes back as ``str`` instead of ``bytes``.
    """
    return redis.StrictRedis(
        host=host,
        port=port,
        db=db,
        decode_responses=True,
    )
|
||||
|
||||
def stop_scraping(flush=False, force=False):
    """Signal the currently active scraping session (if any) to stop.

    Args:
        flush: if True, wipe the ENTIRE Redis database instead of sending a
            targeted stop signal — destroys all keys, not just scraper state.
        force: accepted for interface compatibility with the CLI; the caller
            uses a ``False`` return to trigger its broadcast force-stop.

    Returns:
        bool: True when a stop signal was sent (or data was flushed);
        False when no active scraping session was found.
    """
    redis_client = get_redis()

    if flush:
        redis_client.flushall()
        print("Flushed all Redis data")
        return True

    current_faction_id = redis_client.get("current_faction_id")

    if not current_faction_id:
        print("No active scraping session found.")
        # BUG FIX: this used to be `return False if not force else True`,
        # i.e. it reported success under --force even though nothing was
        # stopped — which made the force-stop fallback in __main__
        # (`if not success and args.force`) unreachable. Report the miss
        # honestly so the caller's broadcast path can run.
        return False

    # The scraper process polls this hash field; "0" tells it to stop.
    redis_client.hset(f"scraper:{current_faction_id}", "scraping_active", "0")
    print(f"Sent stop signal to scraping process for faction {current_faction_id}")
    return True
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: parse flags, confirm the destructive flush if
    # requested, then try to stop the active scraping session.
    parser = argparse.ArgumentParser(description='Stop the Torn Activity Tracker scraping process.')
    parser.add_argument('--force', action='store_true', help='Force stop even if no active session is found')
    parser.add_argument('--flush', action='store_true', help='Flush all Redis data (WARNING: This will clear ALL Redis data)')
    args = parser.parse_args()

    if args.flush:
        # Flushing wipes every key in Redis, so require explicit consent.
        answer = input("WARNING: This will delete ALL Redis data. Are you sure? (y/N) ")
        if answer.lower() != 'y':
            print("Operation cancelled.")
            exit(0)

    success = stop_scraping(flush=args.flush, force=args.force)

    # Fallback: no single active session was found but --force was given —
    # broadcast a stop signal to every known scraper hash.
    if not success and args.force:
        print("Forcing stop for all potential scraping processes...")
        client = get_redis()
        # Enumerate all scraper hashes and flip each one's active flag.
        for scraper_key in client.keys("scraper:*"):
            client.hset(scraper_key, "scraping_active", "0")
        print("Sent stop signal to all potential scraping processes.")
|
||||
Reference in New Issue
Block a user