Shane 7 months ago
parent 8a24a93878
commit e974bd1262
      foodie_weekly_thread.py

@@ -8,18 +8,18 @@ import sys
 import fcntl
 import time
 from datetime import datetime, timedelta, timezone
 import tweepy
 from openai import OpenAI
-from foodie_utils import post_tweet, AUTHORS, SUMMARY_MODEL, load_json_file, check_author_rate_limit, update_system_activity
+from foodie_utils import AUTHORS, SUMMARY_MODEL, load_json_file, save_json_file, update_system_activity
 from foodie_config import X_API_CREDENTIALS, RECENT_POSTS_FILE
 from dotenv import load_dotenv
+import shutil
 load_dotenv()
+# Output file for weekly thread content
+WEEKLY_THREADS_FILE = "/home/shane/foodie_automator/weekly_threads.json"
 SCRIPT_NAME = "foodie_weekly_thread"
 LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_weekly_thread.lock"
 LOG_FILE = "/home/shane/foodie_automator/logs/foodie_weekly_thread.log"
-WEEKLY_THREADS_FILE = "/home/shane/foodie_automator/weekly_threads.json"
 LOG_PRUNE_DAYS = 30
 MAX_RETRIES = 3
 RETRY_BACKOFF = 2
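
The new import line swaps the posting helpers (post_tweet, check_author_rate_limit) for save_json_file, since the script now only writes thread content to disk instead of tweeting it. foodie_utils itself is not part of this diff, so the contracts below are assumptions; a minimal sketch of what load_json_file and save_json_file plausibly look like:

# Hypothetical sketch of the foodie_utils JSON helpers; the real
# implementations are not shown in this commit.
import json
import logging
import os

def load_json_file(path):
    """Assumed contract: return parsed JSON, or None if missing/corrupt."""
    if not os.path.exists(path):
        return None
    try:
        with open(path, 'r') as f:
            return json.load(f)
    except (json.JSONDecodeError, OSError) as e:
        logging.warning(f"Could not load {path}: {e}")
        return None

def save_json_file(path, data):
    """Assumed contract: write data as JSON atomically via a temp file."""
    tmp_path = f"{path}.tmp"
    with open(tmp_path, 'w') as f:
        json.dump(data, f, indent=2)
    os.replace(tmp_path, path)  # atomic rename on POSIX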
@@ -50,7 +50,7 @@ def setup_logging():
             with open(LOG_FILE, 'w') as f:
                 f.writelines(pruned_lines)
-        logging.basicBasic(
+        logging.basicConfig(
             filename=LOG_FILE,
             level=logging.INFO,
             format='%(asctime)s - %(levelname)s - %(message)s',
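
The pruning that produces pruned_lines happens just above this hunk and is outside the diff. A plausible reconstruction, assuming each log line starts with the '%(asctime)s' timestamp that basicConfig writes, and that lines older than LOG_PRUNE_DAYS are dropped:

# Hypothetical reconstruction of the log pruning; the committed code is not shown.
from datetime import datetime, timedelta

def prune_log_lines(lines, prune_days=30):
    """Keep log lines whose leading timestamp is within prune_days."""
    cutoff = datetime.now() - timedelta(days=prune_days)
    kept = []
    for line in lines:
        try:
            # '%(asctime)s' renders as e.g. '2025-01-06 08:00:03,123 - ...'
            stamp = datetime.strptime(line[:19], "%Y-%m-%d %H:%M:%S")
            if stamp >= cutoff:
                kept.append(line)
        except ValueError:
            kept.append(line)  # keep unparseable lines (tracebacks, etc.)
    return kept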
@@ -59,7 +59,7 @@ def setup_logging():
         console_handler = logging.StreamHandler()
         console_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
         logging.getLogger().addHandler(console_handler)
-        logging.getLogger("tweepy").setLevel(logging.WARNING)
+        logging.getLogger("openai").setLevel(logging.WARNING)
         logging.info("Logging initialized for foodie_weekly_thread.py")
     except Exception as e:
         print(f"Failed to setup logging: {e}")
@@ -81,7 +81,7 @@ def acquire_lock():
 def signal_handler(sig, frame):
     """Handle termination signals gracefully."""
     logging.info("Received termination signal, marking script as stopped...")
-    update_system_activity(SCRIPT_NAME, "stopped") # Added to mark as stopped
+    update_system_activity(SCRIPT_NAME, "stopped")
     sys.exit(0)
 signal.signal(signal.SIGTERM, signal_handler)
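
acquire_lock()'s body falls outside the shown hunks. Given the fcntl import and the LOCK_FILE constant, a typical non-blocking flock implementation would look like the sketch below (an assumption, not the committed code):

import fcntl
import sys

LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_weekly_thread.lock"

def acquire_lock():
    """Take an exclusive, non-blocking lock so only one instance runs."""
    lock_fd = open(LOCK_FILE, 'w')
    try:
        fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except BlockingIOError:
        print("Another instance is already running, exiting")
        sys.exit(0)
    return lock_fd  # keep the descriptor open for the life of the process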
@@ -97,82 +97,6 @@ except Exception as e:
     logging.error(f"Failed to initialize OpenAI client: {e}", exc_info=True)
     sys.exit(1)
-def validate_twitter_credentials():
-    """Validate Twitter API credentials for all authors."""
-    logging.info("Validating Twitter API credentials for all authors")
-    valid_credentials = []
-    for author in AUTHORS:
-        for attempt in range(MAX_RETRIES):
-            try:
-                remaining, reset = get_x_rate_limit_status(author)
-                if remaining is not None and reset is not None:
-                    logging.info(f"Credentials valid for {author['username']} (handle: {X_API_CREDENTIALS[author['username']]['x_username']})")
-                    valid_credentials.append(X_API_CREDENTIALS[author['username']])
-                    break
-                else:
-                    logging.error(f"Rate limit check failed for {author['username']} (attempt {attempt + 1})")
-                    if attempt < MAX_RETRIES - 1:
-                        time.sleep(RETRY_BACKOFF * (2 ** attempt))
-            except Exception as e:
-                logging.error(f"Failed to validate credentials for {author['username']} (attempt {attempt + 1}): {e}")
-                if attempt < MAX_RETRIES - 1:
-                    time.sleep(RETRY_BACKOFF * (2 ** attempt))
-                else:
-                    logging.error(f"Credentials invalid for {author['username']} after {MAX_RETRIES} attempts")
-    if not valid_credentials:
-        logging.error("No valid Twitter credentials found for any author")
-        raise ValueError("No valid Twitter credentials found")
-    return valid_credentials
-def load_recent_posts():
-    """Load and deduplicate posts from recent_posts.json."""
-    logging.debug(f"Attempting to load posts from {RECENT_POSTS_FILE}")
-    posts = load_json_file(RECENT_POSTS_FILE)
-    if not posts:
-        logging.warning(f"No valid posts loaded from {RECENT_POSTS_FILE}")
-        return []
-    # Deduplicate posts
-    unique_posts = {}
-    for post in posts:
-        try:
-            required_fields = ["title", "url", "author_username", "timestamp"]
-            if not all(key in post for key in required_fields):
-                logging.warning(f"Skipping invalid post: missing fields {post}")
-                continue
-            datetime.fromisoformat(post["timestamp"].replace('Z', '+00:00'))
-            key = (post["title"], post["url"], post["author_username"])
-            if key not in unique_posts:
-                unique_posts[key] = post
-            else:
-                logging.debug(f"Skipping duplicate post: {post['title']}")
-        except (KeyError, ValueError) as e:
-            logging.warning(f"Skipping post due to invalid format: {e}")
-            continue
-    deduped_posts = list(unique_posts.values())
-    logging.info(f"Loaded {len(deduped_posts)} unique posts from {RECENT_POSTS_FILE}")
-    return deduped_posts
-def filter_posts_for_week(posts, start_date, end_date):
-    """Filter posts within the given week range."""
-    filtered_posts = []
-    for post in posts:
-        try:
-            post_date = datetime.fromisoformat(post["timestamp"])
-            logging.debug(f"Checking post: title={post['title']}, timestamp={post_date}, in range {start_date} to {end_date}")
-            if start_date <= post_date <= end_date:
-                filtered_posts.append(post)
-                logging.debug(f"Included post: {post['title']}")
-            else:
-                logging.debug(f"Excluded post: {post['title']} (timestamp {post_date} outside range)")
-        except (KeyError, ValueError) as e:
-            logging.warning(f"Skipping post due to invalid format: {e}")
-            continue
-    logging.info(f"Filtered to {len(filtered_posts)} posts for the week")
-    return filtered_posts
 def generate_intro_tweet(author):
     """Generate an intro tweet for the weekly thread."""
     credentials = next((cred for cred in X_API_CREDENTIALS if cred["username"] == author["username"]), None)
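
The deleted validator retried each author with an exponential backoff of RETRY_BACKOFF * (2 ** attempt) seconds between attempts (2s, then 4s, with the constants defined above). The same pattern as a reusable helper, shown for illustration only:

# Generic retry-with-backoff helper mirroring the deleted loop; the name
# with_retries is hypothetical and does not exist in the codebase.
import logging
import time

def with_retries(fn, max_retries=3, backoff=2):
    """Call fn(), sleeping backoff * 2**attempt seconds between failures."""
    for attempt in range(max_retries):
        try:
            return fn()
        except Exception as e:
            logging.error(f"Attempt {attempt + 1} failed: {e}")
            if attempt < max_retries - 1:
                time.sleep(backoff * (2 ** attempt))
    raise RuntimeError(f"All {max_retries} attempts failed")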
@@ -267,7 +191,56 @@ def generate_final_cta(author):
         logging.info(f"Using fallback final CTA tweet: {fallback}")
         return fallback
-def post_weekly_thread():
+def load_recent_posts():
+    """Load and deduplicate posts from recent_posts.json."""
+    logging.debug(f"Attempting to load posts from {RECENT_POSTS_FILE}")
+    posts = load_json_file(RECENT_POSTS_FILE)
+    if not posts:
+        logging.warning(f"No valid posts loaded from {RECENT_POSTS_FILE}")
+        return []
+    # Deduplicate posts
+    unique_posts = {}
+    for post in posts:
+        try:
+            required_fields = ["title", "url", "author_username", "timestamp"]
+            if not all(key in post for key in required_fields):
+                logging.warning(f"Skipping invalid post: missing fields {post}")
+                continue
+            datetime.fromisoformat(post["timestamp"].replace('Z', '+00:00'))
+            key = (post["title"], post["url"], post["author_username"])
+            if key not in unique_posts:
+                unique_posts[key] = post
+            else:
+                logging.debug(f"Skipping duplicate post: {post['title']}")
+        except (KeyError, ValueError) as e:
+            logging.warning(f"Skipping post due to invalid format: {e}")
+            continue
+    deduped_posts = list(unique_posts.values())
+    logging.info(f"Loaded {len(deduped_posts)} unique posts from {RECENT_POSTS_FILE}")
+    return deduped_posts
+def filter_posts_for_week(posts, start_date, end_date):
+    """Filter posts within the given week range."""
+    filtered_posts = []
+    for post in posts:
+        try:
+            post_date = datetime.fromisoformat(post["timestamp"])
+            logging.debug(f"Checking post: title={post['title']}, timestamp={post_date}, in range {start_date} to {end_date}")
+            if start_date <= post_date <= end_date:
+                filtered_posts.append(post)
+                logging.debug(f"Included post: {post['title']}")
+            else:
+                logging.debug(f"Excluded post: {post['title']} (timestamp {post_date} outside range)")
+        except (KeyError, ValueError) as e:
+            logging.warning(f"Skipping post due to invalid format: {e}")
+            continue
+    logging.info(f"Filtered to {len(filtered_posts)} posts for the week")
+    return filtered_posts
+def generate_weekly_thread():
     """Generate weekly thread content for each author and save to file on Mondays."""
     logging.info("Starting foodie_weekly_thread.py")
@@ -298,7 +271,7 @@ def post_weekly_thread():
     weekly_posts = filter_posts_for_week(deduped_posts, start_date, end_date)
     if not weekly_posts:
-        logging.warning(f"No posts found within the week range {start_date} to {end_date}, exiting post_weekly_thread")
+        logging.warning(f"No posts found within the week range {start_date} to {end_date}, exiting generate_weekly_thread")
         return
     # Group posts by author
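
start_date and end_date are computed earlier in generate_weekly_thread, outside the shown hunks. A plausible version that spans the most recent full Monday-to-Sunday week in UTC (an assumption; the committed window logic is not visible in the diff):

from datetime import datetime, timedelta, timezone

def previous_week_range(now=None):
    """Return (start, end) covering the most recent complete week, UTC."""
    now = now or datetime.now(timezone.utc)
    this_monday = (now - timedelta(days=now.weekday())).replace(
        hour=0, minute=0, second=0, microsecond=0)
    start = this_monday - timedelta(days=7)   # last Monday, 00:00:00
    end = this_monday - timedelta(seconds=1)  # last Sunday, 23:59:59
    return start, end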
@@ -369,6 +342,14 @@ def post_weekly_thread():
     # Save thread content to file, overwriting any existing content
     if thread_content:
         try:
+            # Backup existing file before overwriting
+            if os.path.exists(WEEKLY_THREADS_FILE):
+                backup_dir = "/home/shane/foodie_automator/backups"
+                os.makedirs(backup_dir, exist_ok=True)
+                backup_file = f"{backup_dir}/weekly_threads_{timestamp.replace(':', '-')}.json"
+                shutil.copy(WEEKLY_THREADS_FILE, backup_file)
+                logging.info(f"Backed up existing {WEEKLY_THREADS_FILE} to {backup_file}")
+            # Save new thread content, overwriting the file
             thread_data = {
                 "week_start": start_date.isoformat(),
@@ -392,9 +373,7 @@ def main():
         lock_fd = acquire_lock()
         setup_logging()
         update_system_activity(SCRIPT_NAME, "running", os.getpid()) # Record start
-        # Skip Twitter credentials validation since we're not posting
-        # validate_twitter_credentials()
-        post_weekly_thread()
+        generate_weekly_thread()
         update_system_activity(SCRIPT_NAME, "stopped") # Record stop
     except Exception as e:
         logging.error(f"Fatal error in main: {e}", exc_info=True)
