diff --git a/foodie_automator_google.py b/foodie_automator_google.py
index 0de3b87..371a5b5 100644
--- a/foodie_automator_google.py
+++ b/foodie_automator_google.py
@@ -27,6 +27,20 @@ from foodie_utils import (
 )
 from foodie_hooks import get_dynamic_hook, select_best_cta
 
+# Flag to indicate if we're in the middle of posting
+is_posting = False
+
+def signal_handler(sig, frame):
+    logging.info("Received termination signal, checking if safe to exit...")
+    if is_posting:
+        logging.info("Currently posting; ignoring signal to avoid a partial post.")
+    else:
+        logging.info("Safe to exit immediately.")
+        sys.exit(0)
+
+signal.signal(signal.SIGTERM, signal_handler)
+signal.signal(signal.SIGINT, signal_handler)
+
 logger = logging.getLogger()
 logger.setLevel(logging.INFO)
 file_handler = logging.FileHandler('/tmp/foodie_automator_google_trends.log', mode='a')
@@ -231,23 +245,10 @@ def curate_from_google_trends(geo_list=['US']):
         cta = select_best_cta(post_data["title"], final_summary, post_url=None)
         post_data["content"] = f"{final_summary}\n\n{cta}"
 
-        post_id, post_url = post_to_wp(
-            post_data=post_data,
-            category=category,
-            link=link,
-            author=author,
-            image_url=image_url,
-            original_source=original_source,
-            image_source=image_source,
-            uploader=uploader,
-            pixabay_url=pixabay_url,
-            interest_score=interest_score
-        )
-
-        if post_id:
-            cta = select_best_cta(post_data["title"], final_summary, post_url=post_url)
-            post_data["content"] = f"{final_summary}\n\n{cta}"
-            post_to_wp(
+        global is_posting
+        is_posting = True
+        try:
+            post_id, post_url = post_to_wp(
                 post_data=post_data,
                 category=category,
                 link=link,
@@ -257,9 +258,31 @@ def curate_from_google_trends(geo_list=['US']):
                 image_source=image_source,
                 uploader=uploader,
                 pixabay_url=pixabay_url,
-                interest_score=interest_score,
-                post_id=post_id
+                interest_score=interest_score
             )
+        finally:
+            is_posting = False
+
+        if post_id:
+            cta = select_best_cta(post_data["title"], final_summary, post_url=post_url)
+            post_data["content"] = f"{final_summary}\n\n{cta}"
+            is_posting = True
+            try:
+                post_to_wp(
+                    post_data=post_data,
+                    category=category,
+                    link=link,
+                    author=author,
+                    image_url=image_url,
+                    original_source=original_source,
+                    image_source=image_source,
+                    uploader=uploader,
+                    pixabay_url=pixabay_url,
+                    interest_score=interest_score,
+                    post_id=post_id
+                )
+            finally:
+                is_posting = False
 
         timestamp = datetime.now(timezone.utc).isoformat()
         save_json_file(POSTED_TITLES_FILE, title, timestamp)
diff --git a/foodie_automator_reddit.py b/foodie_automator_reddit.py
index eb587d7..ca547ee 100644
--- a/foodie_automator_reddit.py
+++ b/foodie_automator_reddit.py
@@ -23,6 +23,20 @@ from foodie_utils import (
 )
 from foodie_hooks import get_dynamic_hook, select_best_cta
 
+# Flag to indicate if we're in the middle of posting
+is_posting = False
+
+def signal_handler(sig, frame):
+    logging.info("Received termination signal, checking if safe to exit...")
+    if is_posting:
+        logging.info("Currently posting; ignoring signal to avoid a partial post.")
+    else:
+        logging.info("Safe to exit immediately.")
+        sys.exit(0)
+
+signal.signal(signal.SIGTERM, signal_handler)
+signal.signal(signal.SIGINT, signal_handler)
+
 LOG_FILE = "/home/shane/foodie_automator/foodie_automator_reddit.log"
 LOG_PRUNE_DAYS = 30
 
@@ -257,24 +271,10 @@ def curate_from_reddit():
 
         post_data["content"] = f"{final_summary}\n\n{cta}"
 
-        post_id, post_url = post_to_wp(
-            post_data=post_data,
-            category=category,
-            link=link,
-            author=author,
-            image_url=image_url,
-            original_source=original_source,
-            image_source=image_source,
-            uploader=uploader,
-            pixabay_url=pixabay_url,
-            interest_score=interest_score
-        )
-
-        if post_id:
-            cta = select_best_cta(post_data["title"], final_summary, post_url=post_url)
-            post_data["content"] = f"{final_summary}\n\n{cta}"
-
-            post_to_wp(
+        global is_posting
+        is_posting = True
+        try:
+            post_id, post_url = post_to_wp(
                 post_data=post_data,
                 category=category,
                 link=link,
@@ -284,9 +284,32 @@ def curate_from_reddit():
                 image_source=image_source,
                 uploader=uploader,
                 pixabay_url=pixabay_url,
-                interest_score=interest_score,
-                post_id=post_id
+                interest_score=interest_score
             )
+        finally:
+            is_posting = False
+
+        if post_id:
+            cta = select_best_cta(post_data["title"], final_summary, post_url=post_url)
+            post_data["content"] = f"{final_summary}\n\n{cta}"
+
+            is_posting = True
+            try:
+                post_to_wp(
+                    post_data=post_data,
+                    category=category,
+                    link=link,
+                    author=author,
+                    image_url=image_url,
+                    original_source=original_source,
+                    image_source=image_source,
+                    uploader=uploader,
+                    pixabay_url=pixabay_url,
+                    interest_score=interest_score,
+                    post_id=post_id
+                )
+            finally:
+                is_posting = False
 
         timestamp = datetime.now(timezone.utc).isoformat()
         save_json_file(POSTED_TITLES_FILE, title, timestamp)
diff --git a/foodie_automator_rss.py b/foodie_automator_rss.py
index b687d69..38f6e79 100644
--- a/foodie_automator_rss.py
+++ b/foodie_automator_rss.py
@@ -23,6 +23,20 @@ import feedparser
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from typing import List, Dict, Any, Optional
 
+# Flag to indicate if we're in the middle of posting
+is_posting = False
+
+def signal_handler(sig, frame):
+    logging.info("Received termination signal, checking if safe to exit...")
+    if is_posting:
+        logging.info("Currently posting; ignoring signal to avoid a partial post.")
+    else:
+        logging.info("Safe to exit immediately.")
+        sys.exit(0)
+
+signal.signal(signal.SIGTERM, signal_handler)
+signal.signal(signal.SIGINT, signal_handler)
+
 LOG_FILE = "/home/shane/foodie_automator/foodie_automator_rss.log"
 LOG_PRUNE_DAYS = 30
 MAX_WORKERS = 5  # Number of concurrent workers for parallel processing
@@ -266,23 +280,10 @@ def curate_from_rss():
         cta = select_best_cta(post_data["title"], final_summary, post_url=None)
         post_data["content"] = f"{final_summary}\n\n{cta}"
 
-        post_id, post_url = post_to_wp(
-            post_data=post_data,
-            category=category,
-            link=link,
-            author=author,
-            image_url=image_url,
-            original_source=original_source,
-            image_source=image_source,
-            uploader=uploader,
-            pixabay_url=pixabay_url,
-            interest_score=interest_score
-        )
-
-        if post_id:
-            cta = select_best_cta(post_data["title"], final_summary, post_url=post_url)
-            post_data["content"] = f"{final_summary}\n\n{cta}"
-            post_to_wp(
+        global is_posting
+        is_posting = True
+        try:
+            post_id, post_url = post_to_wp(
                 post_data=post_data,
                 category=category,
                 link=link,
@@ -292,9 +293,31 @@ def curate_from_rss():
                 image_source=image_source,
                 uploader=uploader,
                 pixabay_url=pixabay_url,
-                interest_score=interest_score,
-                post_id=post_id
+                interest_score=interest_score
             )
+        finally:
+            is_posting = False
+
+        if post_id:
+            cta = select_best_cta(post_data["title"], final_summary, post_url=post_url)
+            post_data["content"] = f"{final_summary}\n\n{cta}"
+            is_posting = True
+            try:
+                post_to_wp(
+                    post_data=post_data,
+                    category=category,
+                    link=link,
+                    author=author,
+                    image_url=image_url,
+                    original_source=original_source,
+                    image_source=image_source,
+                    uploader=uploader,
+                    pixabay_url=pixabay_url,
+                    interest_score=interest_score,
+                    post_id=post_id
+                )
+            finally:
+                is_posting = False
 
         timestamp = datetime.now(timezone.utc).isoformat()
         save_json_file(POSTED_TITLES_FILE, title, timestamp)