@@ -434,12 +434,17 @@ def run_rss_automator():
     try:
         lock_fd = acquire_lock()
         logging.info("***** RSS Automator Launched *****")
-        post_data, category, sleep_time = curate_from_rss()
+        # Load JSON files once
+        posted_titles_data = load_json_file(POSTED_TITLES_FILE, EXPIRATION_HOURS)
+        posted_titles = set(entry["title"] for entry in posted_titles_data)
+        used_images_data = load_json_file(USED_IMAGES_FILE, IMAGE_EXPIRATION_DAYS)
+        used_images = set(entry["title"] for entry in used_images_data if "title" in entry)
+
+        post_data, category, sleep_time = curate_from_rss(posted_titles_data, posted_titles, used_images_data, used_images)
         if not post_data:
             logging.info("No postable RSS article found")
         logging.info(f"Completed run with sleep time: {sleep_time} seconds")
         time.sleep(sleep_time)
-        return post_data, category, fixes
+        return post_data, category, sleep_time  # Fixed return to include sleep_time
     except Exception as e:
         logging.error(f"Fatal error in run_rss_automator: {e}", exc_info=True)
         return None, None, random.randint(600, 1800)
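The hunk relies on a load_json_file(path, max_age) helper defined elsewhere in the module and not shown in this diff. Below is a minimal sketch of what such a helper might look like, assuming each entry carries a "timestamp" field and that the second argument is an age in hours; the call with IMAGE_EXPIRATION_DAYS suggests the real helper may handle other units, so treat this only as an illustration of the expected return shape (a list of dicts with at least a "title" key).

# Hypothetical sketch of load_json_file; the real helper lives elsewhere in
# the repo. The "timestamp" field and the hours-based cutoff are assumptions.
import json
import os
import time


def load_json_file(path, max_age_hours):
    """Return the list of entries stored in `path`, dropping entries whose
    'timestamp' is older than `max_age_hours`. Missing or corrupt files
    yield an empty list so callers can build sets without extra checks."""
    if not os.path.exists(path):
        return []
    try:
        with open(path, "r", encoding="utf-8") as fh:
            entries = json.load(fh)
    except (OSError, ValueError):
        return []
    cutoff = time.time() - max_age_hours * 3600
    return [e for e in entries if e.get("timestamp", 0) >= cutoff]

Loading these files once in run_rss_automator() and passing the parsed data into curate_from_rss() avoids re-reading the same JSON on every feed entry and keeps the dedup sets (posted_titles, used_images) consistent across a single run.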