|
|
|
|
@@ -12,6 +12,7 @@ import shutil
import requests
import time
import openai
import psutil
from duckduckgo_search import DDGS
from requests_oauthlib import OAuth1
from dotenv import load_dotenv
@@ -1412,40 +1413,163 @@ def get_x_rate_limit_status(author):
logger.error(f"Unexpected error fetching X rate limit for {username}: {e}", exc_info=True) |
|
|
|
|
return None, None |
|
|
|
|
|
|
|
|
|
def update_system_activity(script_name, status, pid=None): |
|
|
|
|
""" |
|
|
|
|
Record or update a script's activity in system_activity.json. |
|
|
|
|
Args: |
|
|
|
|
script_name (str): Name of the script (e.g., 'foodie_engagement_tweet'). |
|
|
|
|
status (str): 'running' or 'stopped'. |
|
|
|
|
pid (int): Process ID (required for 'running', optional for 'stopped'). |
|
|
|
|
""" |
|
|
|
|
activity_file = "/home/shane/foodie_automator/system_activity.json" |
|
|
|
|
try: |
|
|
|
|
# Load existing activities |
|
|
|
|
activities = load_json_file(activity_file, default=[]) |
|
|
|
|
|
|
|
|
|
# Update or add entry |
|
|
|
|
timestamp = datetime.now(timezone.utc).isoformat() |
|
|
|
|
entry = { |
|
|
|
|
"script_name": script_name, |
|
|
|
|
"pid": pid if status == "running" else None, |
|
|
|
|
"start_time": timestamp if status == "running" else None, |
|
|
|
|
"stop_time": timestamp if status == "stopped" else None, |
|
|
|
|
"status": status |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
# Find existing entry for this script |
|
|
|
|
for i, act in enumerate(activities): |
|
|
|
|
if act["script_name"] == script_name and act["status"] == "running": |
|
|
|
|
if status == "stopped": |
|
|
|
|
activities[i]["status"] = "stopped" |
|
|
|
|
activities[i]["stop_time"] = timestamp |
|
|
|
|
activities[i]["pid"] = None |
|
|
|
|
break |
|
|
|
|
else: |
|
|
|
|
# No running entry found, append new entry |
|
|
|
|
if status == "running": |
|
|
|
|
activities.append(entry) |
|
|
|
|
|
|
|
|
|
# Save updated activities |
|
|
|
|
save_json_file(activity_file, activities) |
|
|
|
|
logger.info(f"Updated system activity: {script_name} is {status}") |
|
|
|
|
except Exception as e: |
|
|
|
|
logger.error(f"Failed to update system_activity.json for {script_name}: {e}") |
|
|
|
|
|
|
|
|
|
def prune_system_activity(tweet_reset_time):
    """
    Prune system_activity.json entries older than 24 hours, aligned with the tweet reset time.

    Args:
        tweet_reset_time (float): Unix timestamp of the tweet quota reset.
    """
    activity_file = "/home/shane/foodie_automator/system_activity.json"
    try:
        activities = load_json_file(activity_file, default=[])
        cutoff = datetime.now(timezone.utc) - timedelta(hours=24)
        pruned_activities = []

        for entry in activities:
            # Prefer stop_time, falling back to start_time, when deciding whether to prune
            time_str = entry.get("stop_time") or entry.get("start_time")
            if not time_str:
                continue
            try:
                entry_time = datetime.fromisoformat(time_str)
                if entry_time > cutoff:
                    pruned_activities.append(entry)
            except ValueError:
                logger.warning(f"Invalid timestamp in system_activity.json: {time_str}")
                continue

        save_json_file(activity_file, pruned_activities)
        logger.info(f"Pruned system_activity.json to {len(pruned_activities)} entries")
    except Exception as e:
        logger.error(f"Failed to prune system_activity.json: {e}")
|
|
|
|
|
|
|
|
|
def is_any_script_running():
    """
    Check if any script is running by inspecting system_activity.json and verifying PIDs.

    Returns True if at least one script is running, False otherwise.
    """
    activity_file = "/home/shane/foodie_automator/system_activity.json"
    try:
        activities = load_json_file(activity_file, default=[])
        for entry in activities:
            if entry.get("status") == "running" and entry.get("pid"):
                try:
                    # Verify the process is still running
                    process = psutil.Process(entry["pid"])
                    if process.is_running():
                        logger.debug(f"Active script detected: {entry['script_name']} (PID: {entry['pid']})")
                        return True
                    else:
                        # Process is dead, mark as stopped
                        entry["status"] = "stopped"
                        entry["stop_time"] = datetime.now(timezone.utc).isoformat()
                        entry["pid"] = None
                        logger.debug(f"Marked stale script as stopped: {entry['script_name']}")
                except psutil.NoSuchProcess:
                    # Process doesn't exist, mark as stopped
                    entry["status"] = "stopped"
                    entry["stop_time"] = datetime.now(timezone.utc).isoformat()
                    entry["pid"] = None
                    logger.debug(f"Marked stale script as stopped: {entry['script_name']}")

        # Save updated activities if any were marked as stopped
        save_json_file(activity_file, activities)
        logger.debug("No active scripts detected")
        return False
    except Exception as e:
        logger.error(f"Failed to check system_activity.json: {e}")
        return False
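
# Illustrative sketch (assumed caller, not taken from this patch): the check is intended
# to gate quota syncing, e.g.:
#
#     if not is_any_script_running():
#         # no other automator process is alive, so a test tweet can safely re-sync quota
#         remaining, reset = get_x_rate_limit_status(author)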
|
|
|
|
|
|
|
|
|
def check_author_rate_limit(author, max_tweets=17, tweet_window_seconds=86400):
    """
    Check if an author can post based on their X API Free tier quota (17 tweets per 24 hours per user).
    Posts a test tweet only on script restart or for new authors, then tracks tweets in rate_limit_info.json.
    Uses system_activity.json to determine if test tweets are needed.
    Returns (can_post, remaining, reset_timestamp) where can_post is True if tweets are available.
    """
    rate_limit_file = '/home/shane/foodie_automator/rate_limit_info.json'
    current_time = time.time()

    # Load rate limit info
    rate_limit_info = load_json_file(rate_limit_file, default={})

    # Get script run ID
    if not hasattr(check_author_rate_limit, "script_run_id"):
        check_author_rate_limit.script_run_id = int(current_time)
        logger.info(f"Set script_run_id to {check_author_rate_limit.script_run_id}")

    username = author['username']

    # Initialize author entry if missing
    if username not in rate_limit_info:
        rate_limit_info[username] = {
            'tweet_remaining': max_tweets,
            'tweet_reset': current_time + tweet_window_seconds,
            'tweets_posted_in_run': 0,
            'script_run_id': 0  # Force test tweet for new authors
        }

    author_info = rate_limit_info[username]
    script_run_id = author_info.get('script_run_id', 0)

    # If script restarted or new author, post a test tweet to sync quota
    if script_run_id != check_author_rate_limit.script_run_id:
        logger.info(f"Script restart detected for {username}, posting test tweet to sync quota")

        # Prune system_activity.json using the tweet reset time
        reset_time = author_info.get('tweet_reset', current_time + tweet_window_seconds)
        prune_system_activity(reset_time)

        # Check if any script is running
        if is_any_script_running():
            # At least one script is running, trust rate_limit_info.json
            logger.info(f"At least one script is running, using stored rate limit info for {username}")
            remaining = author_info.get('tweet_remaining', max_tweets)
            reset = author_info.get('tweet_reset', current_time + tweet_window_seconds)
            # Check if reset time has passed
            if current_time >= reset:
                logger.info(f"Reset time passed for {username}, resetting quota")
                remaining = max_tweets
                reset = current_time + tweet_window_seconds
                author_info['tweet_remaining'] = remaining
                author_info['tweet_reset'] = reset
                author_info['tweets_posted_in_run'] = 0
                rate_limit_info[username] = author_info
                save_json_file(rate_limit_file, rate_limit_info)
            # Adjust for tweets posted in this run
            remaining = remaining - author_info.get('tweets_posted_in_run', 0)
        else:
            # No scripts are running, post test tweet to sync quota
            logger.info(f"No scripts are running, posting test tweet for {username} to sync quota")
            remaining, api_reset = get_x_rate_limit_status(author)
            if remaining is None or api_reset is None:
                # Fallback: Use last known quota or assume 0 remaining
@@ -1460,29 +1584,26 @@ def check_author_rate_limit(author, max_tweets=17, tweet_window_seconds=86400):
            else:
                remaining = min(remaining, max_tweets)  # Ensure within Free tier limit
                reset = api_reset

        # Update author info
        author_info['tweet_remaining'] = remaining
        author_info['tweet_reset'] = reset
        author_info['tweets_posted_in_run'] = 0
        author_info['script_run_id'] = check_author_rate_limit.script_run_id
        rate_limit_info[username] = author_info
        save_json_file(rate_limit_file, rate_limit_info)
    else:
        # Use existing quota without resetting
        remaining = author_info.get('tweet_remaining', max_tweets)
        reset = author_info.get('tweet_reset', current_time + tweet_window_seconds)

        # Calculate remaining tweets
        remaining = remaining - author_info.get('tweets_posted_in_run', 0)

    # Validate remaining tweets
    if remaining < 0:
        logger.warning(f"Negative remaining tweets for {username}: {remaining}. Setting to 0.")
        remaining = 0

    can_post = remaining > 0
    if not can_post:
        reset_time = datetime.fromtimestamp(reset, tz=timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
        logger.info(f"Author {username} quota exhausted. Remaining: {remaining}, Reset at: {reset_time}")
    else:
        logger.info(f"Quota for {username}: {remaining}/{max_tweets} tweets remaining")

    return can_post, remaining, reset
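
# Illustrative sketch (assumed caller; post_tweet() is a hypothetical helper, not defined
# in this patch):
#
#     can_post, remaining, reset = check_author_rate_limit(author)
#     if can_post:
#         post_tweet(author, text)
#     else:
#         wait_seconds = max(0, reset - time.time())
#         logger.info(f"Sleeping {wait_seconds:.0f}s until quota reset for {author['username']}")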
|
|
|
|
|
|
|
|
|
def prepare_post_data(summary, title, main_topic=None): |
|
|
|
|
|