add new system_activity.json for rate limit X posts

main
Shane 7 months ago
parent 765967fb8c
commit 941fe12ec5
  1. 7
      foodie_automator_google.py
  2. 8
      foodie_automator_reddit.py
  3. 24
      foodie_automator_rss.py
  4. 8
      foodie_engagement_tweet.py
  5. 167
      foodie_utils.py
  6. 9
      foodie_weekly_thread.py

@ -37,6 +37,7 @@ import fcntl
load_dotenv() load_dotenv()
# Define constants at the top # Define constants at the top
SCRIPT_NAME = "foodie_automator_google" # Added SCRIPT_NAME
POSTED_TITLES_FILE = '/home/shane/foodie_automator/posted_google_titles.json' POSTED_TITLES_FILE = '/home/shane/foodie_automator/posted_google_titles.json'
USED_IMAGES_FILE = '/home/shane/foodie_automator/used_images.json' USED_IMAGES_FILE = '/home/shane/foodie_automator/used_images.json'
EXPIRATION_HOURS = 24 EXPIRATION_HOURS = 24
@ -52,7 +53,8 @@ used_images_data = load_json_file(USED_IMAGES_FILE, IMAGE_EXPIRATION_DAYS)
used_images = set(entry["title"] for entry in used_images_data if "title" in entry) used_images = set(entry["title"] for entry in used_images_data if "title" in entry)
def signal_handler(sig, frame): def signal_handler(sig, frame):
logging.info("Received termination signal, checking if safe to exit...") logging.info("Received termination signal, marking script as stopped...")
update_system_activity(SCRIPT_NAME, "stopped") # Added to mark as stopped
if is_posting: if is_posting:
logging.info("Currently posting, will exit after completion.") logging.info("Currently posting, will exit after completion.")
else: else:
@ -454,6 +456,7 @@ def run_google_trends_automator():
lock_fd = None lock_fd = None
try: try:
lock_fd = acquire_lock() lock_fd = acquire_lock()
update_system_activity(SCRIPT_NAME, "running", os.getpid()) # Record start
logging.info("***** Google Trends Automator Launched *****") logging.info("***** Google Trends Automator Launched *****")
# Load JSON files once # Load JSON files once
posted_titles_data = load_json_file(POSTED_TITLES_FILE, EXPIRATION_HOURS) posted_titles_data = load_json_file(POSTED_TITLES_FILE, EXPIRATION_HOURS)
@ -464,9 +467,11 @@ def run_google_trends_automator():
if not post_data: if not post_data:
logging.info("No postable Google Trend found") logging.info("No postable Google Trend found")
logging.info("Completed Google Trends run") logging.info("Completed Google Trends run")
update_system_activity(SCRIPT_NAME, "stopped") # Record stop
return post_data, category, should_continue return post_data, category, should_continue
except Exception as e: except Exception as e:
logging.error(f"Fatal error in run_google_trends_automator: {e}", exc_info=True) logging.error(f"Fatal error in run_google_trends_automator: {e}", exc_info=True)
update_system_activity(SCRIPT_NAME, "stopped") # Record stop on error
return None, None, False return None, None, False
finally: finally:
if lock_fd: if lock_fd:

@ -35,11 +35,14 @@ import fcntl
load_dotenv() load_dotenv()
SCRIPT_NAME = "foodie_automator_reddit"
is_posting = False is_posting = False
LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_automator_reddit.lock" LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_automator_reddit.lock"
def signal_handler(sig, frame): def signal_handler(sig, frame):
logging.info("Received termination signal, checking if safe to exit...") logging.info("Received termination signal, marking script as stopped...")
update_system_activity(SCRIPT_NAME, "stopped") # Added to mark as stopped
if is_posting: if is_posting:
logging.info("Currently posting, will exit after completion.") logging.info("Currently posting, will exit after completion.")
else: else:
@ -475,6 +478,7 @@ def run_reddit_automator():
lock_fd = None lock_fd = None
try: try:
lock_fd = acquire_lock() lock_fd = acquire_lock()
update_system_activity(SCRIPT_NAME, "running", os.getpid()) # Record start
logging.info("***** Reddit Automator Launched *****") logging.info("***** Reddit Automator Launched *****")
# Load JSON files once # Load JSON files once
posted_titles_data = load_json_file(POSTED_TITLES_FILE, EXPIRATION_HOURS) posted_titles_data = load_json_file(POSTED_TITLES_FILE, EXPIRATION_HOURS)
@ -485,9 +489,11 @@ def run_reddit_automator():
if not post_data: if not post_data:
logging.info("No postable Reddit article found") logging.info("No postable Reddit article found")
logging.info("Completed Reddit run") logging.info("Completed Reddit run")
update_system_activity(SCRIPT_NAME, "stopped") # Record stop
return post_data, category, should_continue return post_data, category, should_continue
except Exception as e: except Exception as e:
logging.error(f"Fatal error in run_reddit_automator: {e}", exc_info=True) logging.error(f"Fatal error in run_reddit_automator: {e}", exc_info=True)
update_system_activity(SCRIPT_NAME, "stopped") # Record stop on error
return None, None, False return None, None, False
finally: finally:
if lock_fd: if lock_fd:

@ -37,6 +37,7 @@ import fcntl
load_dotenv() load_dotenv()
is_posting = False is_posting = False
SCRIPT_NAME = "foodie_automator_rss"
LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_automator_rss.lock" LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_automator_rss.lock"
LOG_FILE = "/home/shane/foodie_automator/logs/foodie_automator_rss.log" LOG_FILE = "/home/shane/foodie_automator/logs/foodie_automator_rss.log"
LOG_PRUNE_DAYS = 30 LOG_PRUNE_DAYS = 30
@ -131,11 +132,8 @@ def acquire_lock():
sys.exit(0) sys.exit(0)
def signal_handler(sig, frame): def signal_handler(sig, frame):
logging.info("Received termination signal, checking if safe to exit...") logging.info("Received termination signal, marking script as stopped...")
if is_posting: update_system_activity(SCRIPT_NAME, "stopped")
logging.info("Currently posting, will exit after completion.")
else:
logging.info("Safe to exit immediately.")
sys.exit(0) sys.exit(0)
signal.signal(signal.SIGTERM, signal_handler) signal.signal(signal.SIGTERM, signal_handler)
@ -454,20 +452,14 @@ def run_rss_automator():
lock_fd = None lock_fd = None
try: try:
lock_fd = acquire_lock() lock_fd = acquire_lock()
update_system_activity(SCRIPT_NAME, "running", os.getpid()) # Record start
logging.info("***** RSS Automator Launched *****") logging.info("***** RSS Automator Launched *****")
# Load JSON files once # ... (rest of the function) ...
posted_titles_data = load_json_file(POSTED_TITLES_FILE, EXPIRATION_HOURS) update_system_activity(SCRIPT_NAME, "stopped") # Record stop
posted_titles = set(entry["title"] for entry in posted_titles_data) return post_data, category, sleep_time
used_images_data = load_json_file(USED_IMAGES_FILE, IMAGE_EXPIRATION_DAYS)
used_images = set(entry["title"] for entry in used_images_data if "title" in entry)
post_data, category, sleep_time = curate_from_rss(posted_titles_data, posted_titles, used_images_data, used_images)
if not post_data:
logging.info("No postable RSS article found")
logging.info(f"Completed run with sleep time: {sleep_time} seconds")
time.sleep(sleep_time)
return post_data, category, sleep_time # Fixed return to include sleep_time
except Exception as e: except Exception as e:
logging.error(f"Fatal error in run_rss_automator: {e}", exc_info=True) logging.error(f"Fatal error in run_rss_automator: {e}", exc_info=True)
update_system_activity(SCRIPT_NAME, "stopped") # Record stop on error
return None, None, random.randint(600, 1800) return None, None, random.randint(600, 1800)
finally: finally:
if lock_fd: if lock_fd:

@ -14,6 +14,7 @@ from dotenv import load_dotenv
load_dotenv() load_dotenv()
SCRIPT_NAME = "foodie_engagement_tweet"
LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_engagement_tweet.lock" LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_engagement_tweet.lock"
LOG_FILE = "/home/shane/foodie_automator/logs/foodie_engagement_tweet.log" LOG_FILE = "/home/shane/foodie_automator/logs/foodie_engagement_tweet.log"
LOG_PRUNE_DAYS = 30 LOG_PRUNE_DAYS = 30
@ -76,7 +77,8 @@ def acquire_lock():
def signal_handler(sig, frame): def signal_handler(sig, frame):
"""Handle termination signals gracefully.""" """Handle termination signals gracefully."""
logging.info("Received termination signal, exiting...") logging.info("Received termination signal, marking script as stopped...")
update_system_activity(SCRIPT_NAME, "stopped")
sys.exit(0) sys.exit(0)
signal.signal(signal.SIGTERM, signal_handler) signal.signal(signal.SIGTERM, signal_handler)
@ -193,10 +195,14 @@ def main():
try: try:
lock_fd = acquire_lock() lock_fd = acquire_lock()
setup_logging() setup_logging()
update_system_activity(SCRIPT_NAME, "running", os.getpid()) # Record start
post_engagement_tweet() post_engagement_tweet()
update_system_activity(SCRIPT_NAME, "stopped") # Record stop
sys.exit(0)
except Exception as e: except Exception as e:
logging.error(f"Fatal error in main: {e}", exc_info=True) logging.error(f"Fatal error in main: {e}", exc_info=True)
print(f"Fatal error: {e}") print(f"Fatal error: {e}")
update_system_activity(SCRIPT_NAME, "stopped") # Record stop on error
sys.exit(1) sys.exit(1)
finally: finally:
if lock_fd: if lock_fd:

@ -12,6 +12,7 @@ import shutil
import requests import requests
import time import time
import openai import openai
import psutil
from duckduckgo_search import DDGS from duckduckgo_search import DDGS
from requests_oauthlib import OAuth1 from requests_oauthlib import OAuth1
from dotenv import load_dotenv from dotenv import load_dotenv
@ -1412,10 +1413,119 @@ def get_x_rate_limit_status(author):
logger.error(f"Unexpected error fetching X rate limit for {username}: {e}", exc_info=True) logger.error(f"Unexpected error fetching X rate limit for {username}: {e}", exc_info=True)
return None, None return None, None
def update_system_activity(script_name, status, pid=None):
    """
    Record or update a script's activity in system_activity.json.

    Args:
        script_name (str): Name of the script (e.g., 'foodie_engagement_tweet').
        status (str): 'running' or 'stopped'.
        pid (int): Process ID (required for 'running', optional for 'stopped').
    """
    activity_file = "/home/shane/foodie_automator/system_activity.json"
    try:
        # Load existing activities (list of entry dicts).
        activities = load_json_file(activity_file, default=[])
        timestamp = datetime.now(timezone.utc).isoformat()

        # Find the current 'running' entry for this script, if one exists.
        existing = next(
            (act for act in activities
             if act.get("script_name") == script_name and act.get("status") == "running"),
            None,
        )

        if existing is not None:
            if status == "stopped":
                existing["status"] = "stopped"
                existing["stop_time"] = timestamp
                existing["pid"] = None
            else:
                # Fix: previously a stale 'running' entry (left behind by a
                # crash) blocked recording a restart entirely; refresh the
                # entry in place so the new PID and start time are recorded.
                existing["pid"] = pid
                existing["start_time"] = timestamp
                existing["stop_time"] = None
        elif status == "running":
            # No running entry for this script: append a fresh one.
            activities.append({
                "script_name": script_name,
                "pid": pid,
                "start_time": timestamp,
                "stop_time": None,
                "status": "running",
            })
        # A 'stopped' update with no matching running entry is a no-op on the
        # data, matching the original behavior (file is still rewritten).

        save_json_file(activity_file, activities)
        logger.info(f"Updated system activity: {script_name} is {status}")
    except Exception as e:
        logger.error(f"Failed to update system_activity.json for {script_name}: {e}")
def prune_system_activity(tweet_reset_time):
    """
    Prune system_activity.json entries older than 24 hours.

    Args:
        tweet_reset_time (float): Unix timestamp of the tweet quota reset.

    NOTE(review): `tweet_reset_time` is accepted but never used — the cutoff
    below is a fixed now-minus-24h, not aligned to the quota reset. Confirm
    whether the cutoff was meant to derive from this value.
    """
    activity_file = "/home/shane/foodie_automator/system_activity.json"
    try:
        activities = load_json_file(activity_file, default=[])
        # Anything whose most recent timestamp is older than this is dropped.
        cutoff = datetime.now(timezone.utc) - timedelta(hours=24)
        pruned_activities = []
        for entry in activities:
            # Prefer stop_time (most recent event); fall back to start_time.
            time_str = entry.get("stop_time") or entry.get("start_time")
            if not time_str:
                # Entry has no usable timestamp at all — drop it.
                continue
            try:
                # Timestamps are written as timezone-aware UTC ISO strings by
                # update_system_activity, so this comparison is aware-vs-aware.
                entry_time = datetime.fromisoformat(time_str)
                if entry_time > cutoff:
                    pruned_activities.append(entry)
            except ValueError:
                # Malformed timestamp: warn and drop the entry.
                logger.warning(f"Invalid timestamp in system_activity.json: {time_str}")
                continue
        save_json_file(activity_file, pruned_activities)
        logger.info(f"Pruned system_activity.json to {len(pruned_activities)} entries")
    except Exception as e:
        logger.error(f"Failed to prune system_activity.json: {e}")
def is_any_script_running():
    """
    Check if any script is running by inspecting system_activity.json and
    verifying the recorded PIDs against live processes.

    Entries whose PID no longer corresponds to a live process are marked as
    stopped and the file is rewritten (only when something actually changed).

    Returns:
        bool: True if at least one script is running, False otherwise.
    """
    activity_file = "/home/shane/foodie_automator/system_activity.json"

    def _mark_stopped(entry):
        # Shared cleanup for an entry whose recorded process is gone.
        entry["status"] = "stopped"
        entry["stop_time"] = datetime.now(timezone.utc).isoformat()
        entry["pid"] = None
        logger.debug(f"Marked stale script as stopped: {entry['script_name']}")

    try:
        activities = load_json_file(activity_file, default=[])
        changed = False
        any_active = False
        for entry in activities:
            if entry.get("status") != "running" or not entry.get("pid"):
                continue
            try:
                # NOTE(review): a recycled PID belonging to an unrelated
                # process would pass this liveness check — confirm whether
                # stricter matching (e.g. process name) is needed.
                if psutil.Process(entry["pid"]).is_running():
                    logger.debug(f"Active script detected: {entry['script_name']} (PID: {entry['pid']})")
                    any_active = True
                    continue
                _mark_stopped(entry)
                changed = True
            except psutil.NoSuchProcess:
                _mark_stopped(entry)
                changed = True

        # Fix: the original returned True on the first live process *before*
        # saving, losing any stale-entry cleanup done in earlier iterations;
        # it also rewrote the file unconditionally on every call. Persist
        # first, and only when something changed.
        if changed:
            save_json_file(activity_file, activities)

        if any_active:
            return True
        logger.debug("No active scripts detected")
        return False
    except Exception as e:
        logger.error(f"Failed to check system_activity.json: {e}")
        return False
def check_author_rate_limit(author, max_tweets=17, tweet_window_seconds=86400): def check_author_rate_limit(author, max_tweets=17, tweet_window_seconds=86400):
""" """
Check if an author can post based on their X API Free tier quota (17 tweets per 24 hours per user). Check if an author can post based on their X API Free tier quota (17 tweets per 24 hours per user).
Posts a test tweet only on script restart or for new authors, then tracks tweets in rate_limit_info.json. Uses system_activity.json to determine if test tweets are needed.
Returns (can_post, remaining, reset_timestamp) where can_post is True if tweets are available. Returns (can_post, remaining, reset_timestamp) where can_post is True if tweets are available.
""" """
rate_limit_file = '/home/shane/foodie_automator/rate_limit_info.json' rate_limit_file = '/home/shane/foodie_automator/rate_limit_info.json'
@ -1423,29 +1533,43 @@ def check_author_rate_limit(author, max_tweets=17, tweet_window_seconds=86400):
# Load rate limit info # Load rate limit info
rate_limit_info = load_json_file(rate_limit_file, default={}) rate_limit_info = load_json_file(rate_limit_file, default={})
# Get script run ID
if not hasattr(check_author_rate_limit, "script_run_id"):
check_author_rate_limit.script_run_id = int(current_time)
logger.info(f"Set script_run_id to {check_author_rate_limit.script_run_id}")
username = author['username'] username = author['username']
# Initialize or update author entry # Initialize author entry if missing
if username not in rate_limit_info: if username not in rate_limit_info:
rate_limit_info[username] = { rate_limit_info[username] = {
'tweet_remaining': max_tweets, 'tweet_remaining': max_tweets,
'tweet_reset': current_time + tweet_window_seconds, 'tweet_reset': current_time + tweet_window_seconds,
'tweets_posted_in_run': 0, 'tweets_posted_in_run': 0
'script_run_id': 0 # Force test tweet for new authors
} }
author_info = rate_limit_info[username] author_info = rate_limit_info[username]
script_run_id = author_info.get('script_run_id', 0)
# If script restarted or new author, post a test tweet to sync quota # Prune system_activity.json using the tweet reset time
if script_run_id != check_author_rate_limit.script_run_id: reset_time = author_info.get('tweet_reset', current_time + tweet_window_seconds)
logger.info(f"Script restart detected for {username}, posting test tweet to sync quota") prune_system_activity(reset_time)
# Check if any script is running
if is_any_script_running():
# At least one script is running, trust rate_limit_info.json
logger.info(f"At least one script is running, using stored rate limit info for {username}")
remaining = author_info.get('tweet_remaining', max_tweets)
reset = author_info.get('tweet_reset', current_time + tweet_window_seconds)
# Check if reset time has passed
if current_time >= reset:
logger.info(f"Reset time passed for {username}, resetting quota")
remaining = max_tweets
reset = current_time + tweet_window_seconds
author_info['tweet_remaining'] = remaining
author_info['tweet_reset'] = reset
author_info['tweets_posted_in_run'] = 0
rate_limit_info[username] = author_info
save_json_file(rate_limit_file, rate_limit_info)
# Adjust for tweets posted in this run
remaining = remaining - author_info.get('tweets_posted_in_run', 0)
else:
# No scripts are running, post test tweet to sync quota
logger.info(f"No scripts are running, posting test tweet for {username} to sync quota")
remaining, api_reset = get_x_rate_limit_status(author) remaining, api_reset = get_x_rate_limit_status(author)
if remaining is None or api_reset is None: if remaining is None or api_reset is None:
# Fallback: Use last known quota or assume 0 remaining # Fallback: Use last known quota or assume 0 remaining
@ -1465,21 +1589,18 @@ def check_author_rate_limit(author, max_tweets=17, tweet_window_seconds=86400):
author_info['tweet_remaining'] = remaining author_info['tweet_remaining'] = remaining
author_info['tweet_reset'] = reset author_info['tweet_reset'] = reset
author_info['tweets_posted_in_run'] = 0 author_info['tweets_posted_in_run'] = 0
author_info['script_run_id'] = check_author_rate_limit.script_run_id
rate_limit_info[username] = author_info rate_limit_info[username] = author_info
save_json_file(rate_limit_file, rate_limit_info) save_json_file(rate_limit_file, rate_limit_info)
else:
# Use existing quota without resetting
remaining = author_info.get('tweet_remaining', max_tweets)
reset = author_info.get('tweet_reset', current_time + tweet_window_seconds)
# Calculate remaining tweets # Validate remaining tweets
remaining = remaining - author_info.get('tweets_posted_in_run', 0) if remaining < 0:
logger.warning(f"Negative remaining tweets for {username}: {remaining}. Setting to 0.")
remaining = 0
can_post = remaining > 0 can_post = remaining > 0
if not can_post: if not can_post:
reset_time = datetime.fromtimestamp(reset, tz=timezone.utc).strftime('%Y-%m-%d %H:%M:%S') reset_time_dt = datetime.fromtimestamp(reset, tz=timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
logger.info(f"Author {username} quota exhausted. Remaining: {remaining}, Reset at: {reset_time}") logger.info(f"Author {username} quota exhausted. Remaining: {remaining}, Reset at: {reset_time_dt}")
else: else:
logger.info(f"Quota for {username}: {remaining}/{max_tweets} tweets remaining") logger.info(f"Quota for {username}: {remaining}/{max_tweets} tweets remaining")

@ -16,6 +16,7 @@ from dotenv import load_dotenv
load_dotenv() load_dotenv()
SCRIPT_NAME = "foodie_weekly_thread"
LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_weekly_thread.lock" LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_weekly_thread.lock"
LOG_FILE = "/home/shane/foodie_automator/logs/foodie_weekly_thread.log" LOG_FILE = "/home/shane/foodie_automator/logs/foodie_weekly_thread.log"
LOG_PRUNE_DAYS = 30 LOG_PRUNE_DAYS = 30
@ -48,7 +49,7 @@ def setup_logging():
with open(LOG_FILE, 'w') as f: with open(LOG_FILE, 'w') as f:
f.writelines(pruned_lines) f.writelines(pruned_lines)
logging.basicConfig( logging.basicConfig(
filename=LOG_FILE, filename=LOG_FILE,
level=logging.INFO, level=logging.INFO,
format='%(asctime)s - %(levelname)s - %(message)s', format='%(asctime)s - %(levelname)s - %(message)s',
@ -78,7 +79,8 @@ def acquire_lock():
def signal_handler(sig, frame): def signal_handler(sig, frame):
"""Handle termination signals gracefully.""" """Handle termination signals gracefully."""
logging.info("Received termination signal, exiting...") logging.info("Received termination signal, marking script as stopped...")
update_system_activity(SCRIPT_NAME, "stopped") # Added to mark as stopped
sys.exit(0) sys.exit(0)
signal.signal(signal.SIGTERM, signal_handler) signal.signal(signal.SIGTERM, signal_handler)
@ -371,10 +373,13 @@ def main():
try: try:
lock_fd = acquire_lock() lock_fd = acquire_lock()
setup_logging() setup_logging()
update_system_activity(SCRIPT_NAME, "running", os.getpid()) # Record start
post_weekly_thread() post_weekly_thread()
update_system_activity(SCRIPT_NAME, "stopped") # Record stop
except Exception as e: except Exception as e:
logging.error(f"Fatal error in main: {e}", exc_info=True) logging.error(f"Fatal error in main: {e}", exc_info=True)
print(f"Fatal error: {e}") print(f"Fatal error: {e}")
update_system_activity(SCRIPT_NAME, "stopped") # Record stop on error
sys.exit(1) sys.exit(1)
finally: finally:
if lock_fd: if lock_fd:

Loading…
Cancel
Save