@@ -37,23 +37,12 @@ load_dotenv()
is_posting = False

LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_automator_rss.lock"

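# Graceful-shutdown handling: SIGTERM/SIGINT are logged, and the process exits
# on the spot only when no post is in flight; is_posting is presumably flipped
# by the posting code, which is outside the lines shown here.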
def signal_handler(sig, frame):
    logging.info("Received termination signal, checking if safe to exit...")
    if is_posting:
        logging.info("Currently posting, will exit after completion.")
    else:
        logging.info("Safe to exit immediately.")
        sys.exit(0)

signal.signal(signal.SIGTERM, signal_handler)
signal.signal(signal.SIGINT, signal_handler)

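# Feed/network tuning values; MAX_RETRIES and RETRY_BACKOFF presumably feed the
# Retry strategy built in create_http_session() further down.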
LOG_FILE = "/home/shane/foodie_automator/logs/foodie_automator_rss.log"
LOG_PRUNE_DAYS = 30
FEED_TIMEOUT = 15
MAX_RETRIES = 3
RETRY_BACKOFF = 2
IMAGE_UPLOAD_TIMEOUT = 30  # Added to match foodie_utils.py

POSTED_TITLES_FILE = '/home/shane/foodie_automator/posted_rss_titles.json'
USED_IMAGES_FILE = '/home/shane/foodie_automator/used_images.json'
@@ -65,6 +54,11 @@ posted_titles = set(entry["title"] for entry in posted_titles_data)
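# Previously posted titles and previously used images are loaded into sets for
# cheap duplicate checks; load_json_file() and the expiration windows are
# presumably defined in foodie_utils.py (they are not part of this diff).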
used_images = set(entry["title"] for entry in load_json_file(USED_IMAGES_FILE, IMAGE_EXPIRATION_DAYS) if "title" in entry)

def setup_logging():
    """Initialize logging with pruning of old logs."""
    try:
        os.makedirs(os.path.dirname(LOG_FILE), exist_ok=True)
        if not os.access(os.path.dirname(LOG_FILE), os.W_OK):
            raise PermissionError(f"No write permission for {os.path.dirname(LOG_FILE)}")
        if os.path.exists(LOG_FILE):
            with open(LOG_FILE, 'r') as f:
                lines = f.readlines()
@@ -97,7 +91,11 @@ def setup_logging():
        console_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
        logging.getLogger().addHandler(console_handler)
        logging.getLogger("requests").setLevel(logging.WARNING)
        logging.getLogger("openai").setLevel(logging.WARNING)
        logging.info("Logging initialized for foodie_automator_rss.py")
    except Exception as e:
        print(f"Failed to setup logging: {e}")
        sys.exit(1)

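# Lock-file guard so only one copy of the script posts at a time; the locking
# call itself sits in lines this diff does not show.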
def acquire_lock():
    os.makedirs(os.path.dirname(LOCK_FILE), exist_ok=True)
@@ -111,6 +109,17 @@ def acquire_lock():
        logging.info("Another instance of foodie_automator_rss.py is running")
        sys.exit(0)

def signal_handler(sig, frame):
    logging.info("Received termination signal, checking if safe to exit...")
    if is_posting:
        logging.info("Currently posting, will exit after completion.")
    else:
        logging.info("Safe to exit immediately.")
        sys.exit(0)

signal.signal(signal.SIGTERM, signal_handler)
signal.signal(signal.SIGINT, signal_handler)

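# Shared HTTP session: a requests.Session wired to a Retry policy (presumably
# urllib3's) so transient feed failures are retried with backoff rather than
# aborting the run; the Retry arguments are cut off at the hunk boundary.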
def create_http_session() -> requests.Session:
    session = requests.Session()
    retry_strategy = Retry(
@@ -209,7 +218,7 @@ def fetch_duckduckgo_news_context(title, hours=24):
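            # The result timestamps arrive in more than one ISO-8601 shape (an
            # explicit "+00:00" offset vs. a trailing "Z"); each is parsed into
            # an aware UTC datetime before the 24-hour freshness check. The
            # one-line change in this hunk appears to swap "%Z" for a literal
            # "Z", presumably because strptime's %Z does not accept a bare "Z".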
            if '+00:00' in date_str:
                dt = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S+00:00").replace(tzinfo=timezone.utc)
            else:
                dt = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S%Z").replace(tzinfo=timezone.utc)
                dt = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=timezone.utc)
            if dt > (datetime.now(timezone.utc) - timedelta(hours=24)):
                titles.append(r["title"].lower())
        except ValueError as e:
@@ -324,6 +333,10 @@ def curate_from_rss():
                interest_score=interest_score,
                should_post_tweet=True
            )
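            # Posting is treated as best-effort here: a falsy post_id or an
            # exception just bumps the attempt counter and moves on to the next
            # candidate article.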
            if not post_id:
                logging.warning(f"Failed to post to WordPress for '{title}', skipping")
                attempts += 1
                continue
        except Exception as e:
            logging.error(f"Failed to post to WordPress for '{title}': {e}", exc_info=True)
            attempts += 1
@@ -383,6 +396,7 @@ def run_rss_automator():
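    # One full run: take the lock, do a single curation pass, and always clean
    # up the lock in the finally block so a crashed run cannot block later ones.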
    lock_fd = None
    try:
        lock_fd = acquire_lock()
        setup_logging()
        logging.info("***** RSS Automator Launched *****")
        post_data, category, should_continue = curate_from_rss()
        if not post_data:
@@ -392,6 +406,7 @@ def run_rss_automator():
        return post_data, category, should_continue
    except Exception as e:
        logging.error(f"Fatal error in run_rss_automator: {e}", exc_info=True)
        print(f"Fatal error: {e}")
        return None, None, False
    finally:
        if lock_fd:
@@ -400,7 +415,4 @@ def run_rss_automator():
            os.remove(LOCK_FILE) if os.path.exists(LOCK_FILE) else None

if __name__ == "__main__":
    setup_logging()
    post_data, category, should_continue = run_rss_automator()
    # Remove sleep timer, let manage_scripts.sh control execution
    logging.info(f"Run completed, should_continue: {should_continue}")
    run_rss_automator()