diff --git a/foodie_automator_reddit.py b/foodie_automator_reddit.py
index 386a693..4f33b79 100644
--- a/foodie_automator_reddit.py
+++ b/foodie_automator_reddit.py
@@ -315,7 +315,8 @@ def curate_from_reddit():
                 image_source=image_source,
                 uploader=uploader,
                 pixabay_url=pixabay_url,
-                interest_score=interest_score
+                interest_score=interest_score,
+                should_post_tweet=True  # Post the X tweet on the first call
             )
         finally:
             is_posting = False
@@ -323,7 +324,6 @@ def curate_from_reddit():
     if post_id:
         cta = select_best_cta(post_data["title"], final_summary, post_url=post_url)
         post_data["content"] = f"{final_summary}\n\n{cta}"
-        is_posting = True

         try:
             post_to_wp(
@@ -337,7 +337,8 @@ def curate_from_reddit():
                 uploader=uploader,
                 pixabay_url=pixabay_url,
                 interest_score=interest_score,
-                post_id=post_id
+                post_id=post_id,
+                should_post_tweet=False  # Skip X tweet on the update call
             )
         finally:
             is_posting = False
diff --git a/foodie_automator_rss.py b/foodie_automator_rss.py
index 8525ded..f247366 100644
--- a/foodie_automator_rss.py
+++ b/foodie_automator_rss.py
@@ -307,7 +307,8 @@ def curate_from_rss():
                 image_source=image_source,
                 uploader=uploader,
                 pixabay_url=pixabay_url,
-                interest_score=interest_score
+                interest_score=interest_score,
+                should_post_tweet=True  # Post the X tweet on the first call
             )
         finally:
             is_posting = False
@@ -328,7 +329,8 @@ def curate_from_rss():
                 uploader=uploader,
                 pixabay_url=pixabay_url,
                 interest_score=interest_score,
-                post_id=post_id
+                post_id=post_id,
+                should_post_tweet=False  # Skip X tweet on the update call
             )
         finally:
             is_posting = False
diff --git a/foodie_utils.py b/foodie_utils.py
index e9f8a52..3dcd3fb 100644
--- a/foodie_utils.py
+++ b/foodie_utils.py
@@ -72,7 +72,28 @@ def save_json_file(filename, key, value):
         logging.error(f"Failed to save or prune {filename}: {e}")

 def load_post_counts():
-    counts = load_json_file('/home/shane/foodie_automator/x_post_counts.json')
+    counts = []
+    filename = '/home/shane/foodie_automator/x_post_counts.json'
+    if os.path.exists(filename):
+        try:
+            with open(filename, 'r') as f:
+                lines = f.readlines()
+            for i, line in enumerate(lines, 1):
+                if line.strip():
+                    try:
+                        entry = json.loads(line.strip())
+                        # Check for expected fields in x_post_counts.json
+                        if not isinstance(entry, dict) or "username" not in entry or "month" not in entry or "monthly_count" not in entry or "day" not in entry or "daily_count" not in entry:
+                            logging.warning(f"Skipping malformed entry in {filename} at line {i}: {entry}")
+                            continue
+                        counts.append(entry)
+                    except json.JSONDecodeError as e:
+                        logging.warning(f"Skipping invalid JSON line in {filename} at line {i}: {e}")
+            logging.info(f"Loaded {len(counts)} entries from {filename}")
+        except Exception as e:
+            logging.error(f"Failed to load {filename}: {e}")
+            counts = []  # Reset to empty on failure
+
     if not counts:
         counts = [{
             "username": author["username"],
@@ -81,6 +102,7 @@ def load_post_counts():
             "day": datetime.now(timezone.utc).strftime("%Y-%m-%d"),
             "daily_count": 0
         } for author in AUTHORS]
+
     current_month = datetime.now(timezone.utc).strftime("%Y-%m")
     current_day = datetime.now(timezone.utc).strftime("%Y-%m-%d")
     for entry in counts:
@@ -598,7 +620,7 @@ def get_wp_tag_id(tag_name, wp_base_url, wp_username, wp_password):
         logging.error(f"Failed to get WP tag ID for '{tag_name}': {e}")
         return None

-def post_to_wp(post_data, category, link, author, image_url, original_source, image_source="Pixabay", uploader=None, pixabay_url=None, interest_score=4, post_id=None):
+def post_to_wp(post_data, category, link, author, image_url, original_source, image_source="Pixabay", uploader=None, pixabay_url=None, interest_score=4, post_id=None, should_post_tweet=True):
     wp_base_url = "https://insiderfoodie.com/wp-json/wp/v2"
     logging.info(f"Starting post_to_wp for '{post_data['title']}', image_source: {image_source}")

@@ -703,16 +725,17 @@ def post_to_wp(post_data, category, link, author, image_url, original_source, im
     timestamp = datetime.now(timezone.utc).isoformat()
     save_post_to_recent(post_data["title"], post_url, author["username"], timestamp)

-    # Post article tweet to X
-    try:
-        post = {"title": post_data["title"], "url": post_url}
-        tweet = generate_article_tweet(author, post, author["persona"])
-        if post_tweet(author, tweet):
-            logging.info(f"Successfully posted article tweet for {author['username']} on X")
-        else:
-            logging.warning(f"Failed to post article tweet for {author['username']} on X")
-    except Exception as e:
-        logging.error(f"Error posting article tweet for {author['username']}: {e}")
+    # Post article tweet to X only if should_post_tweet is True
+    if should_post_tweet:
+        try:
+            post = {"title": post_data["title"], "url": post_url}
+            tweet = generate_article_tweet(author, post, author["persona"])
+            if post_tweet(author, tweet):
+                logging.info(f"Successfully posted article tweet for {author['username']} on X")
+            else:
+                logging.warning(f"Failed to post article tweet for {author['username']} on X")
+        except Exception as e:
+            logging.error(f"Error posting article tweet for {author['username']}: {e}")

     logging.info(f"Posted/Updated by {author['username']}: {post_data['title']} (ID: {post_id})")
     return post_id, post_url
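For reference, a minimal, self-contained sketch of the per-line JSONL validation that load_post_counts() now performs. The helper name read_post_counts and the example path argument are illustrative; the required field names mirror the diff.

import json
import logging

# Fields every x_post_counts.json entry must carry (per the diff).
REQUIRED_FIELDS = {"username", "month", "monthly_count", "day", "daily_count"}

def read_post_counts(path):
    """Read one JSON object per line, skipping blank, malformed, or incomplete entries."""
    counts = []
    try:
        with open(path, "r") as f:
            for i, line in enumerate(f, 1):
                if not line.strip():
                    continue  # ignore blank lines
                try:
                    entry = json.loads(line)
                except json.JSONDecodeError as e:
                    logging.warning(f"Skipping invalid JSON at line {i}: {e}")
                    continue
                if not isinstance(entry, dict) or not REQUIRED_FIELDS <= entry.keys():
                    logging.warning(f"Skipping malformed entry at line {i}: {entry}")
                    continue
                counts.append(entry)
    except FileNotFoundError:
        pass  # missing file: caller falls back to fresh per-author entries
    return counts

The version in the diff additionally resets counts to an empty list on unexpected errors and seeds default per-author entries whenever nothing valid was loaded.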