|
|
|
|
@@ -179,21 +179,21 @@ def load_recent_posts():
     return posts


 def filter_posts_for_week(posts, start_date, end_date):
-    """Filter posts within the specified week."""
+    """Filter posts within the given week range."""
     filtered_posts = []
     logging.debug(f"Filtering {len(posts)} posts for range {start_date} to {end_date}")
     for post in posts:
         try:
-            timestamp = datetime.fromisoformat(post["timestamp"])
-            logging.debug(f"Checking post '{post['title']}' with timestamp {timestamp}")
-            if start_date <= timestamp <= end_date:
+            post_date = datetime.fromisoformat(post["timestamp"])
+            logging.debug(f"Checking post: title={post['title']}, timestamp={post_date}, in range {start_date} to {end_date}")
+            if start_date <= post_date <= end_date:
                 filtered_posts.append(post)
                 logging.debug(f"Included post: {post['title']}")
             else:
-                logging.debug(f"Post '{post['title']}' timestamp {timestamp} outside range")
-        except ValueError as e:
-            logging.warning(f"Skipping post with invalid timestamp: {post.get('title', 'Unknown')} - {e}")
-    logging.info(f"Filtered to {len(filtered_posts)} posts within week range")
+                logging.debug(f"Excluded post: {post['title']} (timestamp {post_date} outside range)")
+        except (KeyError, ValueError) as e:
+            logging.warning(f"Skipping post due to invalid format: {e}")
+            continue
+    logging.info(f"Filtered to {len(filtered_posts)} posts for the week")
     return filtered_posts


 def generate_intro_tweet(author):
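
For context, a minimal sketch of how the updated filter is exercised with the seven-day window (start of seven days ago through the end of yesterday, UTC) that post_weekly_thread builds in the next hunk. This is an illustration only: the sample post and URL are hypothetical; the one assumption carried over from the diff is that posts store ISO-8601 strings under "timestamp".

    from datetime import datetime, timedelta, timezone

    today = datetime.now(timezone.utc)
    start_date = (today - timedelta(days=7)).replace(hour=0, minute=0, second=0, microsecond=0)
    end_date = (today - timedelta(days=1)).replace(hour=23, minute=59, second=59, microsecond=999999)

    sample_posts = [{
        "title": "Best ramen in town",                           # hypothetical post
        "url": "https://example.com/ramen",                      # hypothetical URL
        "author_username": "foodie1",                            # hypothetical author
        "timestamp": (today - timedelta(days=2)).isoformat(),    # ISO-8601, as the filter expects
    }]
    weekly = filter_posts_for_week(sample_posts, start_date, end_date)  # both bounds are inclusive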
|
|
|
|
@@ -291,126 +291,88 @@ def generate_final_cta(author):
     return fallback


 def post_weekly_thread():
-    """Post weekly threads for each author."""
-    try:
-        logging.info("Starting foodie_weekly_thread.py")
-        print("Starting foodie_weekly_thread.py")
-
-        valid_credentials = validate_twitter_credentials()
-        if not valid_credentials:
-            logging.error("No valid Twitter credentials found, exiting")
-            return
-
-        today = datetime.now(timezone.utc)
-        days_to_monday = today.weekday()
-        start_date = (today - timedelta(days=days_to_monday + 7)).replace(hour=0, minute=0, second=0, microsecond=0)
-        end_date = start_date + timedelta(days=6, hours=23, minutes=59, seconds=59)
-
-        logging.info(f"Fetching posts from {start_date} to {end_date}")
-        print(f"Fetching posts from {start_date} to {end_date}")
-
-        all_posts = load_recent_posts()
-        logging.info(f"Loaded {len(all_posts)} posts from recent_posts.json")
-        print(f"Loaded {len(all_posts)} posts from recent_posts.json")
-
-        if not all_posts:
-            logging.warning("No posts loaded, exiting post_weekly_thread")
-            print("No posts loaded, exiting post_weekly_thread")
-            return
-
-        weekly_posts = filter_posts_for_week(all_posts, start_date, end_date)
-        logging.info(f"Filtered to {len(weekly_posts)} posts for the week")
-        print(f"Filtered to {len(weekly_posts)} posts for the week")
-
-        if not weekly_posts:
-            logging.warning("No posts found within the week range, exiting post_weekly_thread")
-            print("No posts found within the week range, exiting post_weekly_thread")
-            return
-
-        posts_by_author = {}
-        for post in weekly_posts:
-            author = post["author_username"]
-            if author not in posts_by_author:
-                posts_by_author[author] = []
-            posts_by_author[author].append(post)
-        logging.debug(f"Grouped posts by author: {list(posts_by_author.keys())}")
-
-        for author in AUTHORS:
-            try:
-                author_posts = posts_by_author.get(author["username"], [])
-                logging.info(f"Processing author {author['username']} with {len(author_posts)} posts")
-                print(f"Processing author {author['username']} with {len(author_posts)} posts")
-
-                if not author_posts:
-                    logging.info(f"No posts found for {author['username']} this week")
-                    print(f"No posts found for {author['username']} this week")
-                    continue
-
-                author_posts.sort(key=lambda x: x.get("timestamp", ""), reverse=True)
-                top_posts = author_posts[:10]
-                logging.info(f"Selected {len(top_posts)} top posts for {author['username']}")
-                print(f"Selected {len(top_posts)} top posts for {author['username']}")
-
-                intro_tweet = generate_intro_tweet(author)
-                if not intro_tweet:
-                    logging.error(f"Failed to generate intro tweet for {author['username']}, skipping")
-                    continue
-                logging.info(f"Posting intro tweet for {author['username']}: {intro_tweet}")
-                print(f"Posting intro tweet for {author['username']}: {intro_tweet}")
-
-                intro_response = post_tweet(author, intro_tweet)
-                if not intro_response:
-                    logging.error(f"Failed to post intro tweet for {author['username']}, skipping thread")
-                    print(f"Failed to post intro tweet for {author['username']}")
-                    continue
-
-                intro_tweet_id = intro_response.get("id")
-                last_tweet_id = intro_tweet_id
-                logging.debug(f"Intro tweet posted with ID {intro_tweet_id}")
-
-                for i, post in enumerate(top_posts, 1):
-                    try:
-                        post_tweet_content = f"{i}. {post['title']} Link: {post['url']}"
-                        logging.info(f"Posting thread reply {i} for {author['username']}: {post_tweet_content}")
-                        print(f"Posting thread reply {i} for {author['username']}: {post_tweet_content}")
-                        reply_response = post_tweet(author, post_tweet_content, reply_to_id=last_tweet_id)
-                        if not reply_response:
-                            logging.error(f"Failed to post thread reply {i} for {author['username']}")
-                        else:
-                            last_tweet_id = reply_response.get("id")
-                            logging.debug(f"Thread reply {i} posted with ID {last_tweet_id}")
-                    except Exception as e:
-                        logging.error(f"Error posting thread reply {i} for {author['username']}: {e}", exc_info=True)
-                        continue
-
-                # Post final CTA tweet
-                if last_tweet_id and top_posts:  # Ensure there's a valid thread to reply to
-                    try:
-                        final_cta = generate_final_cta(author)
-                        if not final_cta:
-                            logging.error(f"Failed to generate final CTA tweet for {author['username']}, skipping")
-                            continue
-                        logging.info(f"Posting final CTA tweet for {author['username']}: {final_cta}")
-                        print(f"Posting final CTA tweet for {author['username']}: {final_cta}")
-                        cta_response = post_tweet(author, final_cta, reply_to_id=last_tweet_id)
-                        if not cta_response:
-                            logging.error(f"Failed to post final CTA tweet for {author['username']}")
-                        else:
-                            logging.debug(f"Final CTA tweet posted with ID {cta_response.get('id')}")
-                    except Exception as e:
-                        logging.error(f"Error posting final CTA tweet for {author['username']}: {e}", exc_info=True)
-
-                logging.info(f"Successfully posted weekly thread for {author['username']}")
-                print(f"Successfully posted weekly thread for {author['username']}")
-            except Exception as e:
-                logging.error(f"Error processing author {author['username']}: {e}", exc_info=True)
-
-        logging.info("Completed foodie_weekly_thread.py")
-        print("Completed foodie_weekly_thread.py")
-    except Exception as e:
-        logging.error(f"Unexpected error in post_weekly_thread: {e}", exc_info=True)
-        print(f"Error in post_weekly_thread: {e}")
+    """Generate and post a weekly thread of top posts for each author."""
+    logging.info("Starting foodie_weekly_thread.py")
+
+    # Calculate date range: 7 days prior to run date
+    today = datetime.now(timezone.utc)
+    start_date = (today - timedelta(days=7)).replace(hour=0, minute=0, second=0, microsecond=0)
+    end_date = (today - timedelta(days=1)).replace(hour=23, minute=59, second=59, microsecond=999999)
+    logging.info(f"Fetching posts from {start_date} to {end_date}")
+
+    # Load and filter posts
+    recent_posts = load_json_file(RECENT_POSTS_FILE)
+    logging.info(f"Loaded {len(recent_posts)} posts from {RECENT_POSTS_FILE}")
+
+    # Deduplicate posts
+    seen = set()
+    deduped_posts = []
+    for post in recent_posts:
+        key = (post["title"], post["url"], post["author_username"])
+        if key not in seen:
+            seen.add(key)
+            deduped_posts.append(post)
+    logging.info(f"Filtered to {len(deduped_posts)} unique posts after deduplication")
+
+    weekly_posts = filter_posts_for_week(deduped_posts, start_date, end_date)
+    if not weekly_posts:
+        logging.warning(f"No posts found within the week range {start_date} to {end_date}, exiting post_weekly_thread")
+        return
+
+    # Group posts by author
+    posts_by_author = {author["username"]: [] for author in AUTHORS}
+    for post in weekly_posts:
+        username = post["author_username"]
+        if username in posts_by_author:
+            posts_by_author[username].append(post)
+
+    # Post threads for each author
+    for author in AUTHORS:
+        username = author["username"]
+        author_posts = posts_by_author.get(username, [])
+        if not author_posts:
+            logging.info(f"No posts found for {username}, skipping")
+            continue
+
+        # Select top 10 posts (or fewer if less than 10)
+        author_posts = sorted(author_posts, key=lambda x: datetime.fromisoformat(x["timestamp"]), reverse=True)[:10]
+        logging.info(f"Selected {len(author_posts)} posts for {username}")
+
+        # Generate and post thread
+        try:
+            # Post lead tweet
+            lead_tweet = (
+                f"Top foodie finds this week from {author['name']} (@{author['x_username']})! "
+                f"Check out these {len(author_posts)} posts on InsiderFoodie.com 🍽️"
+            )
+            lead_response = post_tweet(author, lead_tweet)
+            if not lead_response:
+                logging.error(f"Failed to post lead tweet for {username}, skipping")
+                continue
+            lead_tweet_id = lead_response["id"]
+            logging.info(f"Posted lead tweet for {username}: {lead_tweet}")
+
+            # Post thread tweets
+            for i, post in enumerate(author_posts, 1):
+                thread_tweet = (
+                    f"{i}. {post['title']} "
+                    f"Read more: {post['url']} #FoodieThread"
+                )
+                thread_response = post_tweet(author, thread_tweet, reply_to_id=lead_tweet_id)
+                if thread_response:
+                    lead_tweet_id = thread_response["id"]
+                    logging.info(f"Posted thread tweet {i} for {username}: {thread_tweet}")
+                else:
+                    logging.warning(f"Failed to post thread tweet {i} for {username}")
+
+            # Post engagement tweet
+            engagement_tweet = generate_engagement_tweet(author)
+            if engagement_tweet:
+                post_tweet(author, engagement_tweet, reply_to_id=lead_tweet_id)
+                logging.info(f"Posted engagement tweet for {username}: {engagement_tweet}")
+        except Exception as e:
+            logging.error(f"Error posting thread for {username}: {e}", exc_info=True)
+            continue


 def main():
     """Main function to run the script."""
|
|
|
|
|