update real-time rate limiting checks for X

Branch: main
Author: Shane (7 months ago)
Parent: 01bab56eb6
Commit: 447bfb0087
Files changed:
  1. check_rate_limits.py (125 lines changed)
  2. foodie_automator_google.py (14 lines changed)
  3. foodie_automator_reddit.py (14 lines changed)
  4. foodie_automator_rss.py (30 lines changed)
  5. foodie_engagement_tweet.py (16 lines changed)
  6. foodie_utils.py (261 lines changed)
  7. foodie_weekly_thread.py (21 lines changed)

@@ -0,0 +1,125 @@
import requests
from requests_oauthlib import OAuth1
import logging
from datetime import datetime, timezone
from dotenv import load_dotenv
import os
import time
from foodie_config import X_API_CREDENTIALS

# Load environment variables from .env file
load_dotenv()

# Set up logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Function to delete a tweet
def delete_tweet(tweet_id, auth):
    try:
        response = requests.delete(f"https://api.x.com/2/tweets/{tweet_id}", auth=auth)
        response.raise_for_status()
        logging.info(f"Successfully deleted tweet {tweet_id}")
        return True
    except Exception as e:
        logging.error(f"Failed to delete tweet {tweet_id}: {e}")
        return False

# Function to check rate limits for a given author
def check_rate_limits_for_author(username, credentials, retry=False):
    logging.info(f"{'Retrying' if retry else 'Checking'} rate limits for {username} (handle: {credentials['x_username']})")
    # Retrieve OAuth 1.0a credentials for the author
    consumer_key = credentials["api_key"]
    consumer_secret = credentials["api_secret"]
    access_token = credentials["access_token"]
    access_token_secret = credentials["access_token_secret"]
    # Validate credentials
    if not all([consumer_key, consumer_secret, access_token, access_token_secret]):
        logging.error(f"Missing OAuth credentials for {username} in X_API_CREDENTIALS.")
        return None
    # Set up OAuth 1.0a authentication
    auth = OAuth1(consumer_key, consumer_secret, access_token, access_token_secret)
    # Add delay to avoid IP-based rate limiting
    logging.info(f"Waiting 5 seconds before attempting to post for {username}")
    time.sleep(5)
    # Try posting a test tweet to get v2 rate limit headers
    tweet_id = None
    try:
        tweet_data = {"text": f"Test tweet to check rate limits for {username} - please ignore"}
        response = requests.post("https://api.x.com/2/tweets", json=tweet_data, auth=auth)
        response.raise_for_status()
        tweet_id = response.json()['data']['id']
        logging.info("Successfully posted test tweet for %s: %s", username, response.json())
        logging.info("Response Headers for %s: %s", username, response.headers)
        # Extract rate limit headers if present
        app_limit = response.headers.get('x-app-limit-24hour-limit', 'N/A')
        app_remaining = response.headers.get('x-app-limit-24hour-remaining', 'N/A')
        app_reset = response.headers.get('x-app-limit-24hour-reset', 'N/A')
        logging.info("App 24-Hour Tweet Limit for %s: %s", username, app_limit)
        logging.info("App 24-Hour Tweets Remaining for %s: %s", username, app_remaining)
        if app_reset != 'N/A':
            reset_time = datetime.fromtimestamp(int(app_reset), timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')
            logging.info("App 24-Hour Reset (Readable) for %s: %s", username, reset_time)
        return tweet_id
    except requests.exceptions.HTTPError as e:
        logging.info("Test Tweet Response Status Code for %s: %s", username, e.response.status_code)
        logging.info("Test Tweet Response Headers for %s: %s", username, e.response.headers)
        if e.response.status_code == 429:
            logging.info("Rate Limit Exceeded for /2/tweets for %s", username)
            # Extract user-specific 24-hour limits
            user_limit = e.response.headers.get('x-user-limit-24hour-limit', 'N/A')
            user_remaining = e.response.headers.get('x-user-limit-24hour-remaining', 'N/A')
            user_reset = e.response.headers.get('x-user-limit-24hour-reset', 'N/A')
            logging.info("User 24-Hour Tweet Limit for %s: %s", username, user_limit)
            logging.info("User 24-Hour Tweets Remaining for %s: %s", username, user_remaining)
            logging.info("User 24-Hour Reset (Timestamp) for %s: %s", username, user_reset)
            if user_reset != 'N/A':
                reset_time = datetime.fromtimestamp(int(user_reset), timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')
                logging.info("User 24-Hour Reset (Readable) for %s: %s", username, reset_time)
            # Extract app-specific 24-hour limits
            app_limit = e.response.headers.get('x-app-limit-24hour-limit', 'N/A')
            app_remaining = e.response.headers.get('x-app-limit-24hour-remaining', 'N/A')
            app_reset = e.response.headers.get('x-app-limit-24hour-reset', 'N/A')
            logging.info("App 24-Hour Tweet Limit for %s: %s", username, app_limit)
            logging.info("App 24-Hour Tweets Remaining for %s: %s", username, app_remaining)
            logging.info("App 24-Hour Reset (Timestamp) for %s: %s", username, app_reset)
            if app_reset != 'N/A':
                reset_time = datetime.fromtimestamp(int(app_reset), timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')
                logging.info("App 24-Hour Reset (Readable) for %s: %s", username, reset_time)
        return None
    except Exception as e:
        logging.error("Failed to post test tweet for %s: %s", username, e)
        return None

# Main loop to check rate limits for all authors
if __name__ == "__main__":
    # First pass: Attempt to post for all authors
    successful_tweets = {}
    for username, credentials in X_API_CREDENTIALS.items():
        tweet_id = check_rate_limits_for_author(username, credentials)
        if tweet_id:
            successful_tweets[username] = (tweet_id, credentials)
        logging.info("-" * 50)

    # Delete successful tweets to free up quota
    for username, (tweet_id, credentials) in successful_tweets.items():
        auth = OAuth1(
            credentials["api_key"],
            credentials["api_secret"],
            credentials["access_token"],
            credentials["access_token_secret"]
        )
        delete_tweet(tweet_id, auth)

    # Second pass: Retry for authors that failed
    logging.info("Retrying for authors that initially failed...")
    for username, credentials in X_API_CREDENTIALS.items():
        if username not in successful_tweets:
            check_rate_limits_for_author(username, credentials, retry=True)
        logging.info("-" * 50)

@@ -272,6 +272,16 @@ def curate_from_google_trends(posted_titles_data, posted_titles, used_images_data
                attempts += 1
                continue
            # Check author availability before GPT calls
            author = get_next_author_round_robin()
            if not author:
                logging.info(f"Skipping trend '{title}' due to tweet rate limits for all authors")
                attempts += 1
                continue
            author_username = author["username"]
            logging.info(f"Selected author via round-robin: {author_username}")
            logging.info(f"Trying Google Trend: {title} from {source_name}")
            try:
@@ -319,10 +329,6 @@ def curate_from_google_trends(posted_titles_data, posted_titles, used_images_data
                final_summary = insert_link_naturally(final_summary, source_name, link)
                author = get_next_author_round_robin()
                author_username = author["username"]
                logging.info(f"Selected author via round-robin: {author_username}")
                post_data = {
                    "title": generate_title_from_summary(final_summary),
                    "content": final_summary,

@@ -292,6 +292,16 @@ def curate_from_reddit(posted_titles_data, posted_titles, used_images_data, used_images
                attempts += 1
                continue
            # Check author availability before GPT calls
            author = get_next_author_round_robin()
            if not author:
                logging.info(f"Skipping post '{title}' due to tweet rate limits for all authors")
                attempts += 1
                continue
            author_username = author["username"]
            logging.info(f"Selected author via round-robin: {author_username}")
            logging.info(f"Trying Reddit Post: {title} from {source_name}")
            try:
@@ -339,10 +349,6 @@ def curate_from_reddit(posted_titles_data, posted_titles, used_images_data, used_images
                final_summary = insert_link_naturally(final_summary, source_name, link)
                author = get_next_author_round_robin()
                author_username = author["username"]
                logging.info(f"Selected author via round-robin: {author_username}")
                post_data = {
                    "title": generate_title_from_summary(final_summary),
                    "content": final_summary,

@@ -253,15 +253,9 @@ def fetch_duckduckgo_news_context(title, hours=24):
    logging.error(f"Failed to fetch DuckDuckGo News context for '{title}' after {MAX_RETRIES} attempts")
    return title

def curate_from_rss():
def curate_from_rss(posted_titles_data, posted_titles, used_images_data, used_images):
    try:
        global posted_titles_data, posted_titles, used_images
        # Load JSON files once
        posted_titles_data = load_json_file(POSTED_TITLES_FILE, EXPIRATION_HOURS)
        posted_titles = set(entry["title"] for entry in posted_titles_data)
        used_images_data = load_json_file(USED_IMAGES_FILE, IMAGE_EXPIRATION_DAYS)
        used_images = set(entry["title"] for entry in used_images_data if "title" in entry)
        logging.debug(f"Loaded {len(posted_titles)} posted titles and {len(used_images)} used images")
        logging.debug(f"Using {len(posted_titles)} posted titles and {len(used_images)} used images")
        articles = fetch_rss_feeds()
        if not articles:
@@ -283,6 +277,16 @@ def curate_from_rss():
                attempts += 1
                continue
            # Check author availability before GPT calls
            author = get_next_author_round_robin()
            if not author:
                logging.info(f"Skipping article '{title}' due to tweet rate limits for all authors")
                attempts += 1
                continue
            author_username = author["username"]
            logging.info(f"Selected author via round-robin: {author_username}")
            logging.info(f"Trying RSS Article: {title} from {source_name}")
            try:
@@ -330,11 +334,6 @@ def curate_from_rss():
                final_summary = insert_link_naturally(final_summary, source_name, link)
                # Select author
                author = get_next_author_round_robin()
                author_username = author["username"]
                logging.info(f"Selected author via round-robin: {author_username}")
                post_data = {
                    "title": generate_title_from_summary(final_summary),
                    "content": final_summary,
@@ -362,8 +361,6 @@ def curate_from_rss():
                    f'<a href="https://x.com/intent/tweet?url={{post_url}}&text={share_text_encoded}" target="_blank"><i class="tsi tsi-twitter"></i></a> '
                    f'<a href="https://www.facebook.com/sharer/sharer.php?u={{post_url}}" target="_blank"><i class="tsi tsi-facebook"></i></a></p>'
                )
                # Prepare post content with share links placeholder
                post_data["content"] = f"{final_summary}\n\n{share_links_template}"
                global is_posting
@@ -426,9 +423,6 @@ def curate_from_rss():
                    logging.info(f"***** SUCCESS: Posted '{post_data['title']}' (ID: {post_id}) from RSS *****")
                    return post_data, category, random.randint(0, 1800)
                attempts += 1
                logging.info(f"WP posting failed for '{post_data['title']}'")
        logging.info("No interesting RSS article found after attempts")
        return None, None, random.randint(600, 1800)
    except Exception as e:
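
Note: since curate_from_rss() no longer loads its own state, callers are now expected to load the JSON files once and pass them in. A minimal sketch of the new calling pattern, reusing the loading code deleted above (POSTED_TITLES_FILE, load_json_file, and the other names already exist in this codebase):

```python
# Load shared dedup state once, then hand it to the curator (sketch of the
# new calling convention; mirrors the lines removed from curate_from_rss).
posted_titles_data = load_json_file(POSTED_TITLES_FILE, EXPIRATION_HOURS)
posted_titles = set(entry["title"] for entry in posted_titles_data)
used_images_data = load_json_file(USED_IMAGES_FILE, IMAGE_EXPIRATION_DAYS)
used_images = set(entry["title"] for entry in used_images_data if "title" in entry)

post_data, category, delay = curate_from_rss(
    posted_titles_data, posted_titles, used_images_data, used_images
)
```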

@@ -159,14 +159,17 @@ def post_engagement_tweet():
    """Post engagement tweets for authors daily."""
    try:
        logging.info("Starting foodie_engagement_tweet.py")
        print("Starting foodie_engagement_tweet.py")
        for author in AUTHORS:
            # Check if the author can post before generating the tweet
            can_post, remaining, reset = check_author_rate_limit(author)
            if not can_post:
                reset_time = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(reset)) if reset else "Unknown"
                logging.info(f"Skipping engagement tweet for {author['username']} due to rate limit. Remaining: {remaining}, Reset at: {reset_time}")
            if check_author_rate_limit(author):
                reset_time = datetime.fromtimestamp(
                    load_json_file('/home/shane/foodie_automator/rate_limit_info.json', default={})
                    .get(author['username'], {})
                    .get('tweet_reset', time.time()),
                    tz=timezone.utc
                ).strftime('%Y-%m-%d %H:%M:%S')
                logging.info(f"Skipping engagement tweet for {author['username']} due to rate limit. Reset at: {reset_time}")
                continue
            try:
@@ -176,7 +179,6 @@ def post_engagement_tweet():
                    continue
                logging.info(f"Posting engagement tweet for {author['username']}: {tweet}")
                print(f"Posting engagement tweet for {author['username']}: {tweet}")
                if post_tweet(author, tweet):
                    logging.info(f"Successfully posted engagement tweet for {author['username']}")
                else:
@@ -186,10 +188,8 @@ def post_engagement_tweet():
                    continue
        logging.info("Completed foodie_engagement_tweet.py")
        print("Completed foodie_engagement_tweet.py")
    except Exception as e:
        logging.error(f"Unexpected error in post_engagement_tweet: {e}", exc_info=True)
        print(f"Error in post_engagement_tweet: {e}")

def main():
    """Main function to run the script."""

@@ -162,6 +162,10 @@ def generate_article_tweet(author, post, persona):
    return tweet

def post_tweet(author, tweet, reply_to_id=None):
    """
    Post a tweet with real-time X API rate limit checking.
    Updates rate_limit_info.json with tweet-specific limits.
    """
    from foodie_config import X_API_CREDENTIALS
    import logging
    import tweepy
@@ -177,6 +181,16 @@ def post_tweet(author, tweet, reply_to_id=None):
    if reply_to_id:
        logging.debug(f"Replying to tweet ID: {reply_to_id}")
    rate_limit_file = '/home/shane/foodie_automator/rate_limit_info.json'
    rate_limit_info = load_json_file(rate_limit_file, default={})
    username = author["username"]
    if username not in rate_limit_info:
        rate_limit_info[username] = {
            'tweet_remaining': 17,
            'tweet_reset': time.time()
        }
    try:
        client = tweepy.Client(
            consumer_key=credentials["api_key"],
@@ -188,15 +202,32 @@ def post_tweet(author, tweet, reply_to_id=None):
            text=tweet,
            in_reply_to_tweet_id=reply_to_id
        )
        logging.info(f"Posted tweet for {author['username']} (handle: {credentials['x_username']}): {tweet}")
        logging.debug(f"Tweet ID: {response.data['id']}")
        return {"id": response.data["id"]}
        tweet_id = response.data['id']
        logging.info(f"Successfully posted tweet {tweet_id} for {author['username']} (handle: {credentials['x_username']}): {tweet}")
        # Update tweet rate limits (local decrement, headers on 429)
        rate_limit_info[username]['tweet_remaining'] = max(0, rate_limit_info[username]['tweet_remaining'] - 1)
        save_json_file(rate_limit_file, rate_limit_info)
        logging.info(f"Updated tweet rate limit for {username}: {rate_limit_info[username]['tweet_remaining']} remaining, reset at {datetime.fromtimestamp(rate_limit_info[username]['tweet_reset'], tz=timezone.utc)}")
        return {"id": tweet_id}
    except tweepy.TweepyException as e:
        logging.error(f"Failed to post tweet for {author['username']} (handle: {credentials['x_username']}): {e}")
        if hasattr(e, 'response') and e.response:
            logging.error(f"Twitter API response: {e.response.text}")
        if "forbidden" in str(e).lower():
            logging.error(f"Possible causes: invalid credentials, insufficient permissions, or account restrictions for {credentials['x_username']}")
        if hasattr(e, 'response') and e.response and e.response.status_code == 429:
            headers = e.response.headers
            user_remaining = headers.get('x-user-limit-24hour-remaining', 0)
            user_reset = headers.get('x-user-limit-24hour-reset', time.time() + 86400)
            try:
                user_remaining = int(user_remaining)
                user_reset = int(user_reset)
            except (ValueError, TypeError):
                user_remaining = 0
                user_reset = time.time() + 86400
            rate_limit_info[username]['tweet_remaining'] = user_remaining
            rate_limit_info[username]['tweet_reset'] = user_reset
            save_json_file(rate_limit_file, rate_limit_info)
            logging.info(f"Rate limit exceeded for {username}: {user_remaining} remaining, reset at {datetime.fromtimestamp(user_reset, tz=timezone.utc)}")
        return False
    except Exception as e:
        logging.error(f"Unexpected error posting tweet for {author['username']} (handle: {credentials['x_username']}): {e}", exc_info=True)
@@ -681,141 +712,64 @@ def get_wp_tag_id(tag_name, wp_base_url, wp_username, wp_password):
        logging.error(f"Failed to get WP tag ID for '{tag_name}': {e}")
        return None

def post_to_wp(post_data, category, link, author, image_url, original_source, image_source="Pixabay", uploader=None, page_url=None, interest_score=4, post_id=None, should_post_tweet=True):
    wp_base_url = "https://insiderfoodie.com/wp-json/wp/v2"
    logging.info(f"Starting post_to_wp for '{post_data['title']}', image_source: {image_source}")
    if not isinstance(author, dict) or "username" not in author or "password" not in author:
        raise ValueError(f"Invalid author data: {author}. Expected a dictionary with 'username' and 'password' keys.")
    wp_username = author["username"]
    wp_password = author["password"]
    if not isinstance(interest_score, int):
        logging.error(f"Invalid interest_score type: {type(interest_score)}, value: '{interest_score}'. Defaulting to 4.")
        interest_score = 4
    elif interest_score < 0 or interest_score > 10:
        logging.warning(f"interest_score out of valid range (0-10): {interest_score}. Clamping to 4.")
        interest_score = min(max(interest_score, 0), 10)
    try:
        headers = {
            "Authorization": f"Basic {base64.b64encode(f'{wp_username}:{wp_password}'.encode()).decode()}",
            "Content-Type": "application/json"
        }
        auth_test = requests.get(f"{wp_base_url}/users/me", headers=headers)
        auth_test.raise_for_status()
        logging.info(f"Auth test passed for {wp_username}: {auth_test.json()['id']}")
def post_to_wp(post_data, category, link, author, image_url, original_source, image_source, uploader, page_url, interest_score, post_id=None, should_post_tweet=True):
    """
    Post or update content to WordPress, optionally tweeting the post.
    """
    import logging
    import requests
    from foodie_config import WP_CREDENTIALS, X_API_CREDENTIALS
        category_id = get_wp_category_id(category, wp_base_url, wp_username, wp_password)
        if not category_id:
            category_id = create_wp_category(category, wp_base_url, wp_username, wp_password)
            logging.info(f"Created new category '{category}' with ID {category_id}")
        else:
            logging.info(f"Found existing category '{category}' with ID {category_id}")
        tags = [1]
        if interest_score >= 9:
            picks_tag_id = get_wp_tag_id("Picks", wp_base_url, wp_username, wp_password)
            if picks_tag_id and picks_tag_id not in tags:
                tags.append(picks_tag_id)
                logging.info(f"Added 'Picks' tag (ID: {picks_tag_id}) to post due to high interest score: {interest_score}")
        content = post_data["content"]
        if content is None:
            logging.error(f"Post content is None for title '{post_data['title']}' - using fallback")
            content = "Content unavailable. Check the original source for details."
        formatted_content = "\n".join(f"<p>{para}</p>" for para in content.split('\n') if para.strip())
        author_id_map = {
            "owenjohnson": 10,
            "javiermorales": 2,
            "aishapatel": 3,
            "trangnguyen": 12,
            "keishareid": 13,
            "lilamoreau": 7
        }
        author_id = author_id_map.get(author["username"], 5)
        image_id = None
        if image_url:
            logging.info(f"Attempting image upload for '{post_data['title']}', URL: {image_url}, source: {image_source}")
            image_id = upload_image_to_wp(image_url, post_data["title"], wp_base_url, wp_username, wp_password, image_source, uploader, page_url)
            if not image_id:
                logging.info(f"Flickr upload failed for '{post_data['title']}', falling back to Pixabay")
                pixabay_query = post_data["title"][:50]
                image_url, image_source, uploader, page_url = get_image(pixabay_query)
                if image_url:
                    image_id = upload_image_to_wp(image_url, post_data["title"], wp_base_url, wp_username, wp_password, image_source, uploader, page_url)
            if not image_id:
                logging.warning(f"All image uploads failed for '{post_data['title']}' - posting without image")
        payload = {
            "title": post_data["title"],
            "content": formatted_content,
            "status": "publish",
            "categories": [category_id],
            "tags": tags,
            "author": author_id,
            "meta": {
                "original_link": link,
                "original_source": original_source,
                "interest_score": interest_score
            }
        }
    logger = logging.getLogger(__name__)
    wp_username = WP_CREDENTIALS["username"]
    wp_password = WP_CREDENTIALS["password"]
        if image_id:
            payload["featured_media"] = image_id
            logging.info(f"Set featured image for post '{post_data['title']}': Media ID={image_id}")
    endpoint = f"{WP_CREDENTIALS['url']}/wp-json/wp/v2/posts"
    if post_id:
        endpoint += f"/{post_id}"
        endpoint = f"{wp_base_url}/posts/{post_id}" if post_id else f"{wp_base_url}/posts"
        method = requests.post
    headers = {
        "Authorization": "Basic " + base64.b64encode(f"{wp_username}:{wp_password}".encode()).decode(),
        "Content-Type": "application/json"
    }
        logging.debug(f"Sending WP request to {endpoint} with payload: {json.dumps(payload, indent=2)}")
    payload = {
        "title": post_data["title"],
        "content": post_data["content"],
        "status": post_data["status"],
        "author": WP_CREDENTIALS["authors"].get(post_data["author"], 1),
        "categories": [category]
    }
        response = method(endpoint, headers=headers, json=payload)
    try:
        response = requests.post(endpoint, headers=headers, json=payload)
        response.raise_for_status()
        post_info = response.json()
        logging.debug(f"WP response: {json.dumps(post_info, indent=2)}")
        if not isinstance(post_info, dict) or "id" not in post_info:
            raise ValueError(f"Invalid WP response: {post_info}")
        post_id = post_info["id"]
        post_url = post_info["link"]
        # Save to recent_posts.json only on initial post, not updates
        if not post_id:
            timestamp = datetime.now(timezone.utc).isoformat()
            save_post_to_recent(post_data["title"], post_url, author["username"], timestamp)
        if should_post_tweet:
            try:
                post = {"title": post_data["title"], "url": post_url}
                tweet = generate_article_tweet(author, post, author["persona"])
                if post_tweet(author, tweet):
                    logging.info(f"Successfully posted article tweet for {author['username']} on X")
        post_id = response.json().get("id")
        post_url = response.json().get("link")
        logger.info(f"{'Updated' if post_id else 'Posted'} WordPress post: {post_data['title']} (ID: {post_id})")
        if image_url and not post_id:  # Only upload image for new posts
            media_id = upload_image_to_wp(image_url, post_data["title"], image_source, uploader, page_url)
            if media_id:
                requests.post(
                    f"{WP_CREDENTIALS['url']}/wp-json/wp/v2/posts/{post_id}",
                    headers=headers,
                    json={"featured_media": media_id}
                )
                logger.info(f"Set featured image (Media ID: {media_id}) for post {post_id}")
        if should_post_tweet and post_url:
            credentials = X_API_CREDENTIALS.get(post_data["author"])
            if credentials:
                tweet_text = f"{post_data['title']}\n{post_url}"
                if post_tweet(author, tweet_text):  # Updated signature
                    logger.info(f"Successfully tweeted for post: {post_data['title']}")
                else:
                    logging.warning(f"Failed to post article tweet for {author['username']} on X")
            except Exception as e:
                logging.error(f"Error posting article tweet for {author['username']}: {e}")
                    logger.warning(f"Failed to tweet for post: {post_data['title']}")
        logging.info(f"Posted/Updated by {author['username']}: {post_data['title']} (ID: {post_id})")
        return post_id, post_url
    except requests.exceptions.RequestException as e:
        logging.error(f"WP API request failed: {e} - Response: {e.response.text if e.response else 'No response'}")
        print(f"WP Error: {e}")
        return None, None
    except KeyError as e:
        logging.error(f"WP payload error - Missing key: {e} - Author data: {author}")
        print(f"WP Error: {e}")
        return None, None
    except Exception as e:
        logging.error(f"WP posting failed: {e}")
        print(f"WP Error: {e}")
        logger.error(f"Failed to {'update' if post_id else 'post'} WordPress post: {post_data['title']}: {e}", exc_info=True)
        return None, None
# Configure Flickr API with credentials
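
Note: the rewritten post_to_wp assumes a WP_CREDENTIALS object in foodie_config with a single service account and a username-to-author-ID map. From the fields referenced above, its assumed shape is roughly as follows (values hypothetical):

```python
# Hypothetical sketch of WP_CREDENTIALS as used by the new post_to_wp.
WP_CREDENTIALS = {
    "url": "https://insiderfoodie.com",  # site root; code appends /wp-json/wp/v2/posts
    "username": "service_account",       # Basic-auth user for the REST API
    "password": "APPLICATION_PASSWORD",
    "authors": {                         # maps post_data["author"] to a WP author ID
        "owenjohnson": 10,
        "javiermorales": 2,
        # ...remaining authors; code defaults to ID 1 when a name is missing
    },
}
```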
@@ -1125,46 +1079,44 @@ def check_rate_limit(response):
        logging.warning(f"Failed to parse rate limit headers: {e}")
        return None, None

def check_author_rate_limit(author, max_requests=10, window_seconds=3600):
def check_author_rate_limit(author, max_tweets=17, tweet_window_seconds=86400):
    """
    Check if an author is rate-limited.
    Check if an author is rate-limited for tweets based on X API limits.
    """
    logger = logging.getLogger(__name__)
    rate_limit_file = '/home/shane/foodie_automator/rate_limit_info.json'
    rate_limit_info = load_json_file(rate_limit_file, default={})
    username = author['username']
    if username not in rate_limit_info or not isinstance(rate_limit_info[username].get('reset'), (int, float)):
    if username not in rate_limit_info or not isinstance(rate_limit_info[username].get('tweet_reset'), (int, float)):
        rate_limit_info[username] = {
            'remaining': max_requests,
            'reset': time.time()
            'tweet_remaining': max_tweets,
            'tweet_reset': time.time()
        }
        logger.info(f"Initialized rate limit for {username}: {max_requests} requests available")
        logger.info(f"Initialized tweet rate limit for {username}: {max_tweets} tweets available")
    info = rate_limit_info[username]
    current_time = time.time()
    # Reset if window expired or timestamp is invalid (e.g., 1970)
    if current_time >= info['reset'] or info['reset'] < 1000000000:  # 1000000000 is ~2001
        info['remaining'] = max_requests
        info['reset'] = current_time + window_seconds
        logger.info(f"Reset rate limit for {username}: {max_requests} requests available")
    # Reset tweet limits if window expired or invalid
    if current_time >= info.get('tweet_reset', 0) or info.get('tweet_reset', 0) < 1000000000:
        info['tweet_remaining'] = max_tweets
        info['tweet_reset'] = current_time + tweet_window_seconds
        logger.info(f"Reset tweet rate limit for {username}: {max_tweets} tweets available")
    save_json_file(rate_limit_file, rate_limit_info)
    if info['remaining'] <= 0:
        reset_time = datetime.fromtimestamp(info['reset'], tz=timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
        logger.info(f"Author {username} is rate-limited. Remaining: {info['remaining']}, Reset at: {reset_time}")
    if info.get('tweet_remaining', 0) <= 0:
        reset_time = datetime.fromtimestamp(info['tweet_reset'], tz=timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
        logger.info(f"Author {username} is tweet rate-limited. Remaining: {info['tweet_remaining']}, Reset at: {reset_time}")
        return True
    # Decrement remaining requests
    info['remaining'] -= 1
    save_json_file(rate_limit_file, rate_limit_info)
    logger.info(f"Updated rate limit for {username}: {info['remaining']} requests remaining")
    logger.info(f"Tweet rate limit for {username}: {info['tweet_remaining']} tweets remaining")
    return False

def get_next_author_round_robin():
    """
    Select the next author using round-robin, respecting rate limits.
    Select the next author using round-robin, respecting tweet rate limits.
    Returns None if no author is available.
    """
    from foodie_config import AUTHORS
    global round_robin_index
@@ -1178,11 +1130,8 @@ def get_next_author_round_robin():
            logger.info(f"Selected author via round-robin: {author['username']}")
            return author
    logger.warning("No authors available due to rate limits. Selecting a random author as fallback.")
    import random
    author = random.choice(AUTHORS)
    logger.info(f"Selected author via random fallback: {author['username']}")
    return author
    logger.warning("No authors available due to tweet rate limits.")
    return None

def prepare_post_data(summary, title, main_topic=None):
    try:
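
Note: only the tail of get_next_author_round_robin appears in this diff. For context, a sketch of the whole selection loop the surrounding lines imply; the module-level round_robin_index and check_author_rate_limit are real, while the loop body is reconstructed:

```python
import logging

round_robin_index = 0  # module-level cursor, implied by 'global round_robin_index'

def get_next_author_round_robin():
    """Sketch: walk AUTHORS once from the cursor, skipping tweet-rate-limited authors."""
    from foodie_config import AUTHORS
    global round_robin_index
    logger = logging.getLogger(__name__)
    for _ in range(len(AUTHORS)):
        author = AUTHORS[round_robin_index % len(AUTHORS)]
        round_robin_index += 1
        if not check_author_rate_limit(author):  # False means the author may still tweet
            logger.info(f"Selected author via round-robin: {author['username']}")
            return author
    logger.warning("No authors available due to tweet rate limits.")
    return None
```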

@@ -319,10 +319,14 @@ def post_weekly_thread():
            continue
        # Check if the author can post before generating the thread
        can_post, remaining, reset = check_author_rate_limit(author)
        if not can_post:
            reset_time = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(reset)) if reset else "Unknown"
            logging.info(f"Skipping weekly thread for {username} due to rate limit. Remaining: {remaining}, Reset at: {reset_time}")
        if check_author_rate_limit(author):
            reset_time = datetime.fromtimestamp(
                load_json_file('/home/shane/foodie_automator/rate_limit_info.json', default={})
                .get(username, {})
                .get('tweet_reset', time.time()),
                tz=timezone.utc
            ).strftime('%Y-%m-%d %H:%M:%S')
            logging.info(f"Skipping weekly thread for {username} due to rate limit. Reset at: {reset_time}")
            continue
        # Select top 2 posts (to fit within 3-tweet limit: lead + 2 posts)
@@ -359,12 +363,17 @@ def post_weekly_thread():
            # Post final CTA tweet
            final_cta = generate_final_cta(author)
            if final_cta:
                post_tweet(author, final_cta, reply_to_id=lead_tweet_id)
                logging.info(f"Posted final CTA tweet for {username}: {final_cta}")
                cta_response = post_tweet(author, final_cta, reply_to_id=lead_tweet_id)
                if cta_response:
                    logging.info(f"Posted final CTA tweet for {username}: {final_cta}")
                else:
                    logging.warning(f"Failed to post final CTA tweet for {username}")
        except Exception as e:
            logging.error(f"Error posting thread for {username}: {e}", exc_info=True)
            continue
    logging.info("Completed foodie_weekly_thread.py")

def main():
    """Main function to run the script."""
    lock_fd = None
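
Note: main() initializing lock_fd suggests a single-instance file lock. A plausible sketch of that pattern, assuming fcntl on Linux; the actual lock path and surrounding logic are not shown in this diff:

```python
import fcntl
import logging
import sys

def main():
    """Sketch: hold an exclusive, non-blocking lock so only one run is active."""
    lock_fd = None
    try:
        lock_fd = open('/tmp/foodie_weekly_thread.lock', 'w')  # hypothetical path
        fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        post_weekly_thread()
    except BlockingIOError:
        logging.info("Another instance is already running; exiting.")
        sys.exit(0)
    finally:
        if lock_fd:
            lock_fd.close()
```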
