Compare commits
142 Commits
my-fix-bra...main
| Author | SHA1 | Date |
|---|---|---|
| | 23c6c42c51 | 7 months ago |
| | e2e5adbff5 | 7 months ago |
| | 3fc99a8a28 | 7 months ago |
| | eb27a036c4 | 7 months ago |
| | 3cd0a9cfb6 | 7 months ago |
| | f28d529ac9 | 7 months ago |
| | b0f11666d3 | 7 months ago |
| | 889254d151 | 7 months ago |
| | e53f3abc1d | 7 months ago |
| | 055544b111 | 7 months ago |
| | e9913ab659 | 7 months ago |
| | 555fe4799f | 7 months ago |
| | 0a333f5be5 | 7 months ago |
| | f98340bff6 | 7 months ago |
| | fc47142a2c | 7 months ago |
| | 470c775d7a | 7 months ago |
| | a193dbacd7 | 7 months ago |
| | b95952563f | 7 months ago |
| | c30fa1108d | 7 months ago |
| | b77212d88f | 7 months ago |
| | 827adb4730 | 7 months ago |
| | f0c84f8660 | 7 months ago |
| | 4ffcebd288 | 7 months ago |
| | 491dcc8883 | 7 months ago |
| | 46c86fc82d | 7 months ago |
| | 04a219ed8a | 7 months ago |
| | b8ab6dded7 | 7 months ago |
| | 1010a8cb2a | 7 months ago |
| | 10f918ae24 | 7 months ago |
| | 49835f351c | 7 months ago |
| | e72a3673fa | 7 months ago |
| | 7c7c9a7b0a | 7 months ago |
| | eff6f585bb | 7 months ago |
| | 05f2dfed06 | 7 months ago |
| | c7ccf8aed4 | 7 months ago |
| | 54314609d8 | 7 months ago |
| | 66fab42c73 | 7 months ago |
| | 173897d6eb | 7 months ago |
| | 7833cf443a | 7 months ago |
| | d9da9af095 | 7 months ago |
| | 33287c8a4e | 7 months ago |
| | f7b84c5de8 | 7 months ago |
| | 69eaed4464 | 7 months ago |
| | 12b389fe2b | 7 months ago |
| | 9c15c1b658 | 7 months ago |
| | 12383c6d4e | 7 months ago |
| | 964e6d1816 | 7 months ago |
| | 599d352cbd | 7 months ago |
| | f47a9f1249 | 7 months ago |
| | a7e7a5dad4 | 7 months ago |
| | 7d2b4938d0 | 7 months ago |
| | 3edc8135f3 | 7 months ago |
| | 5f38374abd | 7 months ago |
| | 6e0f8b4759 | 7 months ago |
| | 9870d276a3 | 7 months ago |
| | 3b1b030025 | 7 months ago |
| | 5cd45cf67f | 7 months ago |
| | a130c65edf | 7 months ago |
| | c36eac7587 | 7 months ago |
| | d54e640644 | 7 months ago |
| | 2554693895 | 7 months ago |
| | c89a9df6e2 | 7 months ago |
| | 692811190e | 7 months ago |
| | 37f9fdcc44 | 7 months ago |
| | aa27d344af | 7 months ago |
| | e974bd1262 | 7 months ago |
| | 8a24a93878 | 7 months ago |
| | 6346e29b8f | 7 months ago |
| | ba8d54e0fe | 7 months ago |
| | 4be19ef116 | 7 months ago |
| | 68b2459da4 | 7 months ago |
| | 8c7049fa4c | 7 months ago |
| | e972714ada | 7 months ago |
| | 941fe12ec5 | 7 months ago |
| | 765967fb8c | 7 months ago |
| | eeff0d9861 | 7 months ago |
| | 07a68837a3 | 7 months ago |
| | b5417f3397 | 7 months ago |
| | 677c9b646d | 7 months ago |
| | ee21e5bf6b | 7 months ago |
| | 55d2cf81e4 | 7 months ago |
| | 071726f016 | 7 months ago |
| | 7c69b4a451 | 7 months ago |
| | 7dafac8615 | 7 months ago |
| | 903dbf21d0 | 7 months ago |
| | 9806ecfa25 | 7 months ago |
| | e2fec73a72 | 7 months ago |
| | 7950ddd0d8 | 7 months ago |
| | 1d4fe844c3 | 7 months ago |
| | 5561516481 | 7 months ago |
| | fb3adcdc4e | 7 months ago |
| | 99403e7cfe | 7 months ago |
| | aa7d3aacbd | 7 months ago |
| | d7593f7fa7 | 7 months ago |
| | 00e6354cff | 7 months ago |
| | 532dd30f65 | 7 months ago |
| | ac50299b94 | 7 months ago |
| | 83e69a35b7 | 7 months ago |
| | 5ea9f20dd8 | 7 months ago |
| | 82f4a1d8b1 | 7 months ago |
| | ae194b502f | 7 months ago |
| | c97425f5e2 | 7 months ago |
| | 447bfb0087 | 7 months ago |
| | 01bab56eb6 | 7 months ago |
| | bfeec7a560 | 7 months ago |
| | 5f03aabde4 | 7 months ago |
| | 753934db4f | 7 months ago |
| | 167506ef30 | 7 months ago |
| | 3405572ab0 | 7 months ago |
| | 2158c780ca | 7 months ago |
| | 4adaa3442c | 7 months ago |
| | e7a06e3375 | 7 months ago |
| | a407ece36b | 7 months ago |
| | dd4eeaed10 | 7 months ago |
| | 77743121b5 | 7 months ago |
| | 2041084962 | 7 months ago |
| | 8825d7a9f8 | 7 months ago |
| | f6ab7e78d3 | 7 months ago |
| | 4da83f1d4b | 7 months ago |
| | ad21bac601 | 7 months ago |
| | dbe76795c2 | 7 months ago |
| | 4368bf68a5 | 7 months ago |
| | 028dfc3fc8 | 7 months ago |
| | 331979ca9e | 7 months ago |
| | 1091ed34c2 | 7 months ago |
| | 9a091a4fa4 | 7 months ago |
| | 79f357269d | 7 months ago |
| | 256b6c8bad | 7 months ago |
| | 61b3de52a2 | 7 months ago |
| | 4116d5f742 | 7 months ago |
| | 2ecab209c5 | 7 months ago |
| | 3d0d320648 | 7 months ago |
| | 504d7f6349 | 7 months ago |
| | ccddefbc8b | 7 months ago |
| | d2022222c3 | 7 months ago |
| | 7fba0fe96a | 7 months ago |
| | 6be8493878 | 7 months ago |
| | e445b6ef33 | 7 months ago |
| | 5554abdc4a | 7 months ago |
| | 64d17d5599 | 7 months ago |
| | aa0f3364d5 | 7 months ago |
| | e5ebd000fe | 7 months ago |
13 changed files with 3904 additions and 1479 deletions
@@ -0,0 +1,129 @@
+import logging
+
+logging.basicConfig(
+    filename='/home/shane/foodie_automator/logs/check_x_capacity.log',
+    level=logging.DEBUG,
+    format='%(asctime)s - %(levelname)s - %(message)s'
+)
+
+import requests
+from requests_oauthlib import OAuth1
+from datetime import datetime, timezone
+from dotenv import load_dotenv
+import os
+import time
+from foodie_config import X_API_CREDENTIALS
+
+# Load environment variables from .env file
+load_dotenv()
+
+# Function to delete a tweet
+def delete_tweet(tweet_id, auth):
+    try:
+        response = requests.delete(f"https://api.x.com/2/tweets/{tweet_id}", auth=auth)
+        response.raise_for_status()
+        logging.info(f"Successfully deleted tweet {tweet_id}")
+        return True
+    except Exception as e:
+        logging.error(f"Failed to delete tweet {tweet_id}: {e}")
+        return False
+
+# Function to check rate limits for a given author
+def check_rate_limits_for_author(username, credentials, retry=False):
+    logging.info(f"{'Retrying' if retry else 'Checking'} rate limits for {username} (handle: {credentials['x_username']})")
+
+    # Retrieve OAuth 1.0a credentials for the author
+    consumer_key = credentials["api_key"]
+    consumer_secret = credentials["api_secret"]
+    access_token = credentials["access_token"]
+    access_token_secret = credentials["access_token_secret"]
+
+    # Validate credentials
+    if not all([consumer_key, consumer_secret, access_token, access_token_secret]):
+        logging.error(f"Missing OAuth credentials for {username} in X_API_CREDENTIALS.")
+        return None
+
+    # Set up OAuth 1.0a authentication
+    auth = OAuth1(consumer_key, consumer_secret, access_token, access_token_secret)
+
+    # Add delay to avoid IP-based rate limiting
+    logging.info(f"Waiting 5 seconds before attempting to post for {username}")
+    time.sleep(5)
+
+    # Try posting a test tweet to get v2 rate limit headers
+    tweet_id = None
+    try:
+        tweet_data = {"text": f"Test tweet to check rate limits for {username} - please ignore"}
+        response = requests.post("https://api.x.com/2/tweets", json=tweet_data, auth=auth)
+        response.raise_for_status()
+        tweet_id = response.json()['data']['id']
+        logging.info("Successfully posted test tweet for %s: %s", username, response.json())
+        logging.info("Response Headers for %s: %s", username, response.headers)
+        # Extract rate limit headers if present
+        app_limit = response.headers.get('x-app-limit-24hour-limit', 'N/A')
+        app_remaining = response.headers.get('x-app-limit-24hour-remaining', 'N/A')
+        app_reset = response.headers.get('x-app-limit-24hour-reset', 'N/A')
+        logging.info("App 24-Hour Tweet Limit for %s: %s", username, app_limit)
+        logging.info("App 24-Hour Tweets Remaining for %s: %s", username, app_remaining)
+        if app_reset != 'N/A':
+            reset_time = datetime.fromtimestamp(int(app_reset), timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')
+            logging.info("App 24-Hour Reset (Readable) for %s: %s", username, reset_time)
+        return tweet_id
+    except requests.exceptions.HTTPError as e:
+        logging.info("Test Tweet Response Status Code for %s: %s", username, e.response.status_code)
+        logging.info("Test Tweet Response Headers for %s: %s", username, e.response.headers)
+        if e.response.status_code == 429:
+            logging.info("Rate Limit Exceeded for /2/tweets for %s", username)
+
+        # Extract user-specific 24-hour limits
+        user_limit = e.response.headers.get('x-user-limit-24hour-limit', 'N/A')
+        user_remaining = e.response.headers.get('x-user-limit-24hour-remaining', 'N/A')
+        user_reset = e.response.headers.get('x-user-limit-24hour-reset', 'N/A')
+        logging.info("User 24-Hour Tweet Limit for %s: %s", username, user_limit)
+        logging.info("User 24-Hour Tweets Remaining for %s: %s", username, user_remaining)
+        logging.info("User 24-Hour Reset (Timestamp) for %s: %s", username, user_reset)
+        if user_reset != 'N/A':
+            reset_time = datetime.fromtimestamp(int(user_reset), timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')
+            logging.info("User 24-Hour Reset (Readable) for %s: %s", username, reset_time)
+
+        # Extract app-specific 24-hour limits
+        app_limit = e.response.headers.get('x-app-limit-24hour-limit', 'N/A')
+        app_remaining = e.response.headers.get('x-app-limit-24hour-remaining', 'N/A')
+        app_reset = e.response.headers.get('x-app-limit-24hour-reset', 'N/A')
+        logging.info("App 24-Hour Tweet Limit for %s: %s", username, app_limit)
+        logging.info("App 24-Hour Tweets Remaining for %s: %s", username, app_remaining)
+        logging.info("App 24-Hour Reset (Timestamp) for %s: %s", username, app_reset)
+        if app_reset != 'N/A':
+            reset_time = datetime.fromtimestamp(int(app_reset), timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')
+            logging.info("App 24-Hour Reset (Readable) for %s: %s", username, reset_time)
+        return None
+    except Exception as e:
+        logging.error("Failed to post test tweet for %s: %s", username, e)
+        return None
+
+# Main loop to check rate limits for all authors
+if __name__ == "__main__":
+    # First pass: Attempt to post for all authors
+    successful_tweets = {}
+    for username, credentials in X_API_CREDENTIALS.items():
+        tweet_id = check_rate_limits_for_author(username, credentials)
+        if tweet_id:
+            successful_tweets[username] = (tweet_id, credentials)
+        logging.info("-" * 50)
+
+    # Delete successful tweets to free up quota
+    for username, (tweet_id, credentials) in successful_tweets.items():
+        auth = OAuth1(
+            credentials["api_key"],
+            credentials["api_secret"],
+            credentials["access_token"],
+            credentials["access_token_secret"]
+        )
+        delete_tweet(tweet_id, auth)
+
+    # Second pass: Retry for authors that failed
+    logging.info("Retrying for authors that initially failed...")
+    for username, credentials in X_API_CREDENTIALS.items():
+        if username not in successful_tweets:
+            check_rate_limits_for_author(username, credentials, retry=True)
+        logging.info("-" * 50)
@@ -0,0 +1,236 @@
+# foodie_engagement_generator.py
+import json
+import logging
+import random
+import signal
+import sys
+import fcntl
+import os
+import time
+from datetime import datetime, timedelta, timezone
+from openai import OpenAI
+from foodie_utils import AUTHORS, SUMMARY_MODEL, load_json_file, save_json_file, update_system_activity
+from foodie_config import X_API_CREDENTIALS, AUTHOR_BACKGROUNDS_FILE
+from dotenv import load_dotenv
+
+load_dotenv()
+
+SCRIPT_NAME = "foodie_engagement_generator"
+LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_engagement_generator.lock"
+LOG_FILE = "/home/shane/foodie_automator/logs/foodie_engagement_generator.log"
+ENGAGEMENT_TWEETS_FILE = "/home/shane/foodie_automator/engagement_tweets.json"
+LOG_PRUNE_DAYS = 30
+MAX_RETRIES = 3
+RETRY_BACKOFF = 2
+
+def setup_logging():
+    """Initialize logging with pruning of old logs."""
+    try:
+        os.makedirs(os.path.dirname(LOG_FILE), exist_ok=True)
+        if os.path.exists(LOG_FILE):
+            with open(LOG_FILE, 'r') as f:
+                lines = f.readlines()
+            cutoff = datetime.now(timezone.utc) - timedelta(days=LOG_PRUNE_DAYS)
+            pruned_lines = []
+            malformed_count = 0
+            for line in lines:
+                if len(line) < 19 or not line[:19].replace('-', '').replace(':', '').replace(' ', '').isdigit():
+                    malformed_count += 1
+                    continue
+                try:
+                    timestamp = datetime.strptime(line[:19], '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone.utc)
+                    if timestamp > cutoff:
+                        pruned_lines.append(line)
+                except ValueError:
+                    malformed_count += 1
+                    continue
+            if malformed_count > 0:
+                logging.info(f"Skipped {malformed_count} malformed log lines during pruning")
+            with open(LOG_FILE, 'w') as f:
+                f.writelines(pruned_lines)
+
+        logging.basicConfig(
+            filename=LOG_FILE,
+            level=logging.INFO,
+            format='%(asctime)s - %(levelname)s - %(message)s',
+            datefmt='%Y-%m-%d %H:%M:%S'
+        )
+        console_handler = logging.StreamHandler()
+        console_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
+        logging.getLogger().addHandler(console_handler)
+        logging.getLogger("openai").setLevel(logging.WARNING)
+        logging.info("Logging initialized for foodie_engagement_generator.py")
+    except Exception as e:
+        print(f"Failed to setup logging: {e}")
+        sys.exit(1)
+
+def acquire_lock():
+    """Acquire a lock to prevent concurrent runs."""
+    os.makedirs(os.path.dirname(LOCK_FILE), exist_ok=True)
+    lock_fd = open(LOCK_FILE, 'w')
+    try:
+        fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+        lock_fd.write(str(os.getpid()))
+        lock_fd.flush()
+        return lock_fd
+    except IOError:
+        logging.info("Another instance of foodie_engagement_generator.py is running")
+        sys.exit(0)
+
+def signal_handler(sig, frame):
+    """Handle termination signals gracefully."""
+    logging.info("Received termination signal, marking script as stopped...")
+    update_system_activity(SCRIPT_NAME, "stopped")
+    sys.exit(0)
+
+signal.signal(signal.SIGTERM, signal_handler)
+signal.signal(signal.SIGINT, signal_handler)
+
+# Initialize OpenAI client
+try:
+    client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
+    if not os.getenv("OPENAI_API_KEY"):
+        logging.error("OPENAI_API_KEY is not set in environment variables")
+        raise ValueError("OPENAI_API_KEY is required")
+except Exception as e:
+    logging.error(f"Failed to initialize OpenAI client: {e}", exc_info=True)
+    sys.exit(1)
+
+# Load author backgrounds
+try:
+    with open(AUTHOR_BACKGROUNDS_FILE, 'r') as f:
+        AUTHOR_BACKGROUNDS = json.load(f)
+except Exception as e:
+    logging.error(f"Failed to load author_backgrounds.json: {e}", exc_info=True)
+    sys.exit(1)
+
+def generate_engagement_tweet(author):
+    """Generate an engagement tweet using author background themes."""
+    credentials = X_API_CREDENTIALS.get(author["username"])
+    if not credentials:
+        logging.error(f"No X credentials found for {author['username']}")
+        return None
+    author_handle = credentials["x_username"]
+
+    background = next((bg for bg in AUTHOR_BACKGROUNDS if bg["username"] == author["username"]), {})
+    if not background or "engagement_themes" not in background:
+        logging.warning(f"No background or engagement themes found for {author['username']}")
+        theme = "food trends"
+    else:
+        theme = random.choice(background["engagement_themes"])
+
+    prompt = (
+        f"Generate a concise tweet (under 230 characters) for {author_handle}. "
+        f"Create an engaging question or statement about {theme} to spark interaction. "
+        f"Include a call to action to follow {author_handle} or like the tweet, and mention InsiderFoodie.com with a link to https://insiderfoodie.com. "
+        f"Avoid using the word 'elevate'—use more humanized language like 'level up' or 'bring to life'. "
+        f"Do not include emojis, hashtags, or reward-driven incentives (e.g., giveaways)."
+    )
+
+    for attempt in range(MAX_RETRIES):
+        try:
+            response = client.chat.completions.create(
+                model=SUMMARY_MODEL,
+                messages=[
+                    {"role": "system", "content": "You are a social media expert crafting engaging tweets."},
+                    {"role": "user", "content": prompt}
+                ],
+                max_tokens=100,
+                temperature=0.7
+            )
+            tweet = response.choices[0].message.content.strip()
+            if len(tweet) > 280:
+                tweet = tweet[:277] + "..."
+            logging.debug(f"Generated engagement tweet: {tweet}")
+            return tweet
+        except Exception as e:
+            logging.warning(f"Failed to generate engagement tweet for {author['username']} (attempt {attempt + 1}): {e}")
+            if attempt < MAX_RETRIES - 1:
+                time.sleep(RETRY_BACKOFF * (2 ** attempt))
+            else:
+                logging.error(f"Failed to generate engagement tweet after {MAX_RETRIES} attempts")
+                engagement_templates = [
+                    f"What's the most mouthwatering {theme} you've seen this week? Share below and follow {author_handle} for more on InsiderFoodie.com! Link: https://insiderfoodie.com",
+                    f"{theme.capitalize()} lovers unite! What's your go-to pick? Tell us and like this tweet for more from {author_handle} on InsiderFoodie.com! Link: https://insiderfoodie.com",
+                    f"Ever tried a {theme} that blew your mind? Share your favorites and follow {author_handle} for more on InsiderFoodie.com! Link: https://insiderfoodie.com",
+                    f"What {theme} trend are you loving right now? Let us know and like this tweet to keep up with {author_handle} on InsiderFoodie.com! Link: https://insiderfoodie.com"
+                ]
+                template = random.choice(engagement_templates)
+                logging.info(f"Using fallback engagement tweet: {template}")
+                return template
+
+def generate_engagement_tweets():
+    """Generate engagement tweets for authors and save to file."""
+    try:
+        logging.info("Starting foodie_engagement_generator.py")
+        tweets = []
+        timestamp = datetime.now(timezone.utc).isoformat()
+
+        for author in AUTHORS:
+            try:
+                tweet = generate_engagement_tweet(author)
+                if not tweet:
+                    logging.error(f"Failed to generate engagement tweet for {author['username']}, skipping")
+                    continue
+
+                # Collect tweet data
+                tweet_data = {
+                    "username": author["username"],
+                    "x_handle": X_API_CREDENTIALS[author["username"]]["x_username"],
+                    "tweet": tweet,
+                    "timestamp": timestamp
+                }
+                tweets.append(tweet_data)
+                logging.info(f"Generated engagement tweet for {author['username']}: {tweet}")
+            except Exception as e:
+                logging.error(f"Error generating engagement tweet for {author['username']}: {e}", exc_info=True)
+                continue
+
+        # Save tweets to file, overwriting any existing content
+        if tweets:
+            try:
+                tweet_data = {
+                    "timestamp": timestamp,
+                    "tweets": tweets
+                }
+                save_json_file(ENGAGEMENT_TWEETS_FILE, tweet_data)
+                logging.info(f"Saved {len(tweets)} engagement tweets to {ENGAGEMENT_TWEETS_FILE}")
+            except Exception as e:
+                logging.error(f"Failed to save engagement tweets to {ENGAGEMENT_TWEETS_FILE}: {e}")
+        else:
+            logging.warning("No engagement tweets generated, nothing to save")
+
+        logging.info("Completed foodie_engagement_generator.py")
+        sleep_time = random.randint(82800, 86400)  # ~23–24 hours
+        return True, sleep_time
+    except Exception as e:
+        logging.error(f"Unexpected error in generate_engagement_tweets: {e}", exc_info=True)
+        sleep_time = random.randint(82800, 86400)  # ~23–24 hours
+        return False, sleep_time
+
+def main():
+    """Main function to run the script."""
+    lock_fd = None
+    try:
+        lock_fd = acquire_lock()
+        setup_logging()
+        update_system_activity(SCRIPT_NAME, "running", os.getpid())  # Record start
+        success, sleep_time = generate_engagement_tweets()
+        update_system_activity(SCRIPT_NAME, "stopped")  # Record stop
+        logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
+        return success, sleep_time
+    except Exception as e:
+        logging.error(f"Fatal error in main: {e}", exc_info=True)
+        print(f"Fatal error: {e}")
+        update_system_activity(SCRIPT_NAME, "stopped")  # Record stop on error
+        sleep_time = random.randint(82800, 86400)  # ~23–24 hours
+        logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
+        return False, sleep_time
+    finally:
+        if lock_fd:
+            fcntl.flock(lock_fd, fcntl.LOCK_UN)
+            lock_fd.close()
+            os.remove(LOCK_FILE) if os.path.exists(LOCK_FILE) else None
+
+if __name__ == "__main__":
+    success, sleep_time = main()
@@ -1,76 +1,324 @@
-import random
+# foodie_engagement_tweet.py
+import json
 import logging
+import random
+import signal
+import sys
+import fcntl
+import os
+import time
 from datetime import datetime, timedelta, timezone
-from openai import OpenAI  # Add this import
-from foodie_utils import post_tweet, AUTHORS, SUMMARY_MODEL
-from dotenv import load_dotenv  # Add this import
-
-# Setup logging
-logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+from openai import OpenAI
+from foodie_utils import (
+    post_tweet,
+    AUTHORS,
+    SUMMARY_MODEL,
+    check_author_rate_limit,
+    load_json_file,
+    save_json_file,  # Add this
+    update_system_activity,
+    get_next_author_round_robin
+)
+from foodie_config import X_API_CREDENTIALS, AUTHOR_BACKGROUNDS_FILE
+from dotenv import load_dotenv
 
-# Load environment variables
+print("Loading environment variables")
 load_dotenv()
+print(f"Environment variables loaded: OPENAI_API_KEY={bool(os.getenv('OPENAI_API_KEY'))}")
+
+SCRIPT_NAME = "foodie_engagement_tweet"
+LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_engagement_tweet.lock"
+LOG_FILE = "/home/shane/foodie_automator/logs/foodie_engagement_tweet.log"
+LOG_PRUNE_DAYS = 30
+MAX_RETRIES = 3
+RETRY_BACKOFF = 2
+
+def setup_logging():
+    """Initialize logging with pruning of old logs."""
+    print("Entering setup_logging")
+    try:
+        log_dir = os.path.dirname(LOG_FILE)
+        print(f"Ensuring log directory exists: {log_dir}")
+        os.makedirs(log_dir, exist_ok=True)
+        print(f"Log directory permissions: {os.stat(log_dir).st_mode & 0o777}, owner: {os.stat(log_dir).st_uid}")
+
+        if os.path.exists(LOG_FILE):
+            print(f"Pruning old logs in {LOG_FILE}")
+            with open(LOG_FILE, 'r') as f:
+                lines = f.readlines()
+            cutoff = datetime.now(timezone.utc) - timedelta(days=LOG_PRUNE_DAYS)
+            pruned_lines = []
+            malformed_count = 0
+            for line in lines:
+                if len(line) < 19 or not line[:19].replace('-', '').replace(':', '').replace(' ', '').isdigit():
+                    malformed_count += 1
+                    continue
+                try:
+                    timestamp = datetime.strptime(line[:19], '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone.utc)
+                    if timestamp > cutoff:
+                        pruned_lines.append(line)
+                except ValueError:
+                    malformed_count += 1
+                    continue
+            print(f"Skipped {malformed_count} malformed log lines during pruning")
+            with open(LOG_FILE, 'w') as f:
+                f.writelines(pruned_lines)
+            print(f"Log file pruned, new size: {os.path.getsize(LOG_FILE)} bytes")
+
+        print(f"Configuring logging to {LOG_FILE}")
+        logging.basicConfig(
+            filename=LOG_FILE,
+            level=logging.INFO,
+            format='%(asctime)s - %(levelname)s - %(message)s',
+            datefmt='%Y-%m-%d %H:%M:%S'
+        )
+        console_handler = logging.StreamHandler()
+        console_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
+        logging.getLogger().addHandler(console_handler)
+        logging.getLogger("openai").setLevel(logging.WARNING)
+        logging.info("Logging initialized for foodie_engagement_tweet.py")
+        print("Logging setup complete")
+    except Exception as e:
+        print(f"Failed to setup logging: {e}")
+        sys.exit(1)
+
+def acquire_lock():
+    """Acquire a lock to prevent concurrent runs."""
+    print("Entering acquire_lock")
+    try:
+        lock_dir = os.path.dirname(LOCK_FILE)
+        print(f"Ensuring lock directory exists: {lock_dir}")
+        os.makedirs(lock_dir, exist_ok=True)
+        print(f"Opening lock file: {LOCK_FILE}")
+        lock_fd = open(LOCK_FILE, 'w')
+        print(f"Attempting to acquire lock on {LOCK_FILE}")
+        fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+        lock_fd.write(str(os.getpid()))
+        lock_fd.flush()
+        print(f"Lock acquired, PID: {os.getpid()}")
+        return lock_fd
+    except IOError as e:
+        print(f"Failed to acquire lock, another instance is running: {e}")
+        logging.info("Another instance of foodie_engagement_tweet.py is running")
+        sys.exit(0)
+    except Exception as e:
+        print(f"Unexpected error in acquire_lock: {e}")
+        sys.exit(1)
+
+def signal_handler(sig, frame):
+    """Handle termination signals gracefully."""
+    print(f"Received signal: {sig}")
+    logging.info("Received termination signal, marking script as stopped...")
+    update_system_activity(SCRIPT_NAME, "stopped")
+    sys.exit(0)
+
+signal.signal(signal.SIGTERM, signal_handler)
+signal.signal(signal.SIGINT, signal_handler)
 
 # Initialize OpenAI client
-client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
+print("Initializing OpenAI client")
+try:
+    client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
+    if not os.getenv("OPENAI_API_KEY"):
+        print("OPENAI_API_KEY is not set")
+        logging.error("OPENAI_API_KEY is not set in environment variables")
+        raise ValueError("OPENAI_API_KEY is required")
+    print("OpenAI client initialized")
+except Exception as e:
+    print(f"Failed to initialize OpenAI client: {e}")
+    logging.error(f"Failed to initialize OpenAI client: {e}", exc_info=True)
+    sys.exit(1)
+
+# Load author backgrounds
+print(f"Loading author backgrounds from {AUTHOR_BACKGROUNDS_FILE}")
+try:
+    with open(AUTHOR_BACKGROUNDS_FILE, 'r') as f:
+        AUTHOR_BACKGROUNDS = json.load(f)
+    print(f"Author backgrounds loaded: {len(AUTHOR_BACKGROUNDS)} entries")
+except Exception as e:
+    print(f"Failed to load author_backgrounds.json: {e}")
+    logging.error(f"Failed to load author_backgrounds.json: {e}", exc_info=True)
+    sys.exit(1)
 
 def generate_engagement_tweet(author):
-    author_handle = author["x_username"]  # Updated to use x_username from X_API_CREDENTIALS
-    prompt = (
-        f"Generate a concise tweet (under 280 characters) for {author_handle}. "
-        f"Create an engaging food-related question or statement to spark interaction. "
-        f"Include a call to action to follow {author_handle} or like the tweet, and mention InsiderFoodie.com with a link to https://insiderfoodie.com. "
-        f"Avoid using the word 'elevate'—use more humanized language like 'level up' or 'bring to life'. "
-        f"Do not include emojis, hashtags, or reward-driven incentives (e.g., giveaways)."
-    )
+    """Generate an engagement tweet using author background themes and persona."""
+    print(f"Generating tweet for author: {author['username']}")
     try:
-        response = client.chat.completions.create(
-            model=SUMMARY_MODEL,
-            messages=[
-                {"role": "system", "content": "You are a social media expert crafting engaging tweets."},
-                {"role": "user", "content": prompt}
-            ],
-            max_tokens=100,
-            temperature=0.7
-        )
-        tweet = response.choices[0].message.content.strip()
-        if len(tweet) > 280:
-            tweet = tweet[:277] + "..."
-        return tweet
+        credentials = X_API_CREDENTIALS.get(author["username"])
+        if not credentials:
+            print(f"No X credentials found for {author['username']}")
+            logging.error(f"No X credentials found for {author['username']}")
+            return None
+        author_handle = credentials["x_username"]
+        print(f"Author handle: {author_handle}")
+
+        background = next((bg for bg in AUTHOR_BACKGROUNDS if bg["username"] == author["username"]), {})
+        if not background or "engagement_themes" not in background:
+            print(f"No background or themes for {author['username']}, using default theme")
+            logging.warning(f"No background or engagement themes found for {author['username']}")
+            theme = "food trends"
+        else:
+            theme = random.choice(background["engagement_themes"])
+        print(f"Selected theme: {theme}")
+
+        # Get the author's persona from AUTHORS
+        persona = next((a["persona"] for a in AUTHORS if a["username"] == author["username"]), "Unknown")
+
+        prompt = (
+            f"Generate a concise tweet (under 230 characters) for {author_handle} as a {persona}. "
+            f"Create an engaging, specific question about {theme} to spark interaction (e.g., 'What's your go-to sushi spot in Tokyo?'). "
+            f"Include a call to action to follow {author_handle} or like the tweet, and mention InsiderFoodie.com with a link to https://insiderfoodie.com. "
+            f"Avoid using the word 'elevate'—use more humanized language like 'level up' or 'bring to life'. "
+            f"Do not include emojis, hashtags, or reward-driven incentives (e.g., giveaways)."
+        )
+        print(f"OpenAI prompt: {prompt}")
+
+        for attempt in range(MAX_RETRIES):
+            print(f"Attempt {attempt + 1} to generate tweet")
+            try:
+                response = client.chat.completions.create(
+                    model=SUMMARY_MODEL,
+                    messages=[
+                        {"role": "system", "content": "You are a social media expert crafting engaging tweets."},
+                        {"role": "user", "content": prompt}
+                    ],
+                    max_tokens=100,
+                    temperature=0.7
+                )
+                tweet = response.choices[0].message.content.strip()
+                if len(tweet) > 280:
+                    tweet = tweet[:277] + "..."
+                print(f"Generated tweet: {tweet}")
+                logging.debug(f"Generated engagement tweet: {tweet}")
+                return tweet
+            except Exception as e:
+                print(f"Failed to generate tweet (attempt {attempt + 1}): {e}")
+                logging.warning(f"Failed to generate engagement tweet for {author['username']} (attempt {attempt + 1}): {e}")
+                if attempt < MAX_RETRIES - 1:
+                    time.sleep(RETRY_BACKOFF * (2 ** attempt))
+                else:
+                    print(f"Exhausted retries for {author['username']}")
+                    logging.error(f"Failed to generate engagement tweet after {MAX_RETRIES} attempts")
+                    engagement_templates = [
+                        f"What's your favorite {theme} dish? Share below and follow {author_handle} for more on InsiderFoodie.com! Link: https://insiderfoodie.com",
+                        f"Which {theme} spot is a must-visit? Tell us and like this tweet for more from {author_handle} on InsiderFoodie.com! Link: https://insiderfoodie.com",
+                        f"Got a {theme} hidden gem? Share it and follow {author_handle} for more on InsiderFoodie.com! Link: https://insiderfoodie.com",
+                        f"What's the best {theme} you've tried? Let us know and like this tweet to keep up with {author_handle} on InsiderFoodie.com! Link: https://insiderfoodie.com"
+                    ]
+                    template = random.choice(engagement_templates)
+                    print(f"Using fallback tweet: {template}")
+                    logging.info(f"Using fallback engagement tweet: {template}")
+                    return template
     except Exception as e:
-        logging.warning(f"Failed to generate engagement tweet for {author['username']}: {e}")
-        # Fallback templates
-        engagement_templates = [
-            f"Whats the most mouthwatering dish youve seen this week Share below and follow {author_handle} for more foodie ideas on InsiderFoodie.com Link: https://insiderfoodie.com",
-            f"Food lovers unite Whats your go to comfort food Tell us and like this tweet for more tasty ideas from {author_handle} on InsiderFoodie.com Link: https://insiderfoodie.com",
-            f"Ever tried a dish that looked too good to eat Share your favorites and follow {author_handle} for more culinary trends on InsiderFoodie.com Link: https://insiderfoodie.com",
-            f"What food trend are you loving right now Let us know and like this tweet to keep up with {author_handle} on InsiderFoodie.com Link: https://insiderfoodie.com"
-        ]
-        template = random.choice(engagement_templates)
-        return template
+        print(f"Error in generate_engagement_tweet for {author['username']}: {e}")
+        logging.error(f"Error in generate_engagement_tweet for {author['username']}: {e}", exc_info=True)
+        return None
 
 def post_engagement_tweet():
-    # Reference date for calculating the 2-day interval
-    reference_date = datetime(2025, 4, 29, tzinfo=timezone.utc)  # Starting from April 29, 2025
-    current_date = datetime.now(timezone.utc)
-
-    # Calculate the number of days since the reference date
-    days_since_reference = (current_date - reference_date).days
-
-    # Post only if the number of days since the reference date is divisible by 2
-    if days_since_reference % 2 == 0:
-        logging.info("Today is an engagement tweet day (every 2 days). Posting...")
-        for author in AUTHORS:
-            tweet = generate_engagement_tweet(author)
-            logging.info(f"Posting engagement tweet for {author['username']}: {tweet}")
-            if post_tweet(author, tweet):
-                logging.info(f"Successfully posted engagement tweet for {author['username']}")
-            else:
-                logging.warning(f"Failed to post engagement tweet for {author['username']}")
-    else:
-        logging.info("Today is not an engagement tweet day (every 2 days). Skipping...")
+    """Post engagement tweets for all authors with a delay between posts."""
+    print("Entering post_engagement_tweet")
+    try:
+        logging.info("Starting foodie_engagement_tweet.py")
+        posted = False
+        state_file = '/home/shane/foodie_automator/author_state.json'
+        state = load_json_file(state_file, default={'last_author_index': -1})
+        delay_seconds = 30  # Delay between posts to avoid rate limits and spread engagement
+
+        # Iterate through all authors
+        for index, author in enumerate(AUTHORS):
+            username = author['username']
+            print(f"Processing author: {username}")
+            logging.info(f"Processing author: {username}")
+
+            try:
+                print("Checking rate limit")
+                if not check_author_rate_limit(author):
+                    print(f"Rate limit exceeded for {username}, skipping")
+                    logging.info(f"Rate limit exceeded for {username}, skipping")
+                    continue
+
+                print("Generating tweet")
+                tweet = generate_engagement_tweet(author)
+                if not tweet:
+                    print(f"Failed to generate tweet for {username}, skipping")
+                    logging.error(f"Failed to generate engagement tweet for {username}, skipping")
+                    continue
+
+                print(f"Posting tweet: {tweet}")
+                logging.info(f"Posting engagement tweet for {username}: {tweet}")
+                if post_tweet(author, tweet):
+                    print(f"Successfully posted tweet for {username}")
+                    logging.info(f"Successfully posted engagement tweet for {username}")
+                    posted = True
+                    # Update last_author_index to maintain round-robin consistency
+                    state['last_author_index'] = index
+                    save_json_file(state_file, state)
+                else:
+                    print(f"Failed to post tweet for {username}")
+                    logging.warning(f"Failed to post tweet for {username}")
+
+                # Add delay between posts (except for the last author)
+                if index < len(AUTHORS) - 1:
+                    print(f"Waiting {delay_seconds} seconds before next post")
+                    logging.info(f"Waiting {delay_seconds} seconds before next post")
+                    time.sleep(delay_seconds)
+
+            except Exception as e:
+                print(f"Error posting tweet for {username}: {e}")
+                logging.error(f"Error posting tweet for {username}: {e}", exc_info=True)
+                continue
+
+        print("Completed post_engagement_tweet")
+        logging.info("Completed foodie_engagement_tweet.py")
+        sleep_time = 86400  # 1 day for cron
+        return posted, sleep_time
+    except Exception as e:
+        print(f"Unexpected error in post_engagement_tweet: {e}")
+        logging.error(f"Unexpected error in post_engagement_tweet: {e}", exc_info=True)
+        sleep_time = 86400  # 1 day
+        return False, sleep_time
+
+def main():
+    """Main function to run the script."""
+    print("Starting main")
+    lock_fd = None
+    try:
+        print("Acquiring lock")
+        lock_fd = acquire_lock()
+        print("Setting up logging")
+        setup_logging()
+        print("Updating system activity to running")
+        update_system_activity(SCRIPT_NAME, "running", os.getpid())
+        print("Checking author state file")
+        author_state_file = "/home/shane/foodie_automator/author_state.json"
+        if not os.path.exists(author_state_file):
+            print(f"Author state file not found: {author_state_file}")
+            logging.error(f"Author state file not found: {author_state_file}")
+            raise FileNotFoundError(f"Author state file not found: {author_state_file}")
+        print(f"Author state file exists: {author_state_file}")
+        print("Posting engagement tweet")
+        posted, sleep_time = post_engagement_tweet()
+        print("Updating system activity to stopped")
+        update_system_activity(SCRIPT_NAME, "stopped")
+        print(f"Run completed, posted: {posted}, sleep_time: {sleep_time}")
+        logging.info(f"Run completed, posted: {posted}, sleep_time: {sleep_time} seconds")
+        return posted, sleep_time
+    except Exception as e:
+        print(f"Exception in main: {e}")
+        logging.error(f"Fatal error in main: {e}", exc_info=True)
+        print(f"Fatal error: {e}")
+        update_system_activity(SCRIPT_NAME, "stopped")
+        sleep_time = 86400  # 1 day for cron
+        print(f"Run completed, sleep_time: {sleep_time}")
+        logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
+        return False, sleep_time
+    finally:
+        if lock_fd:
+            print("Releasing lock")
+            fcntl.flock(lock_fd, fcntl.LOCK_UN)
+            lock_fd.close()
+            os.remove(LOCK_FILE) if os.path.exists(LOCK_FILE) else None
+            print(f"Lock file removed: {LOCK_FILE}")
 
 if __name__ == "__main__":
-    post_engagement_tweet()
+    posted, sleep_time = main()
File diff suppressed because it is too large
@ -1,133 +1,395 @@ |
|||||||
|
# foodie_weekly_thread.py |
||||||
import json |
import json |
||||||
from datetime import datetime, timedelta |
import os |
||||||
import logging |
import logging |
||||||
import random |
import random |
||||||
from openai import OpenAI # Add this import |
import signal |
||||||
from foodie_utils import post_tweet, AUTHORS, SUMMARY_MODEL |
import sys |
||||||
|
import fcntl |
||||||
|
import time |
||||||
|
import re |
||||||
|
from datetime import datetime, timedelta, timezone |
||||||
|
from openai import OpenAI |
||||||
|
from foodie_utils import AUTHORS, SUMMARY_MODEL, load_json_file, save_json_file, update_system_activity |
||||||
|
from foodie_config import X_API_CREDENTIALS, RECENT_POSTS_FILE |
||||||
|
from dotenv import load_dotenv |
||||||
|
import shutil |
||||||
|
|
||||||
# Setup logging |
load_dotenv() |
||||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') |
|
||||||
|
SCRIPT_NAME = "foodie_weekly_thread" |
||||||
|
LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_weekly_thread.lock" |
||||||
|
LOG_FILE = "/home/shane/foodie_automator/logs/foodie_weekly_thread.log" |
||||||
|
WEEKLY_THREADS_FILE = "/home/shane/foodie_automator/weekly_threads.json" |
||||||
|
LOG_PRUNE_DAYS = 30 |
||||||
|
MAX_RETRIES = 3 |
||||||
|
RETRY_BACKOFF = 2 |
||||||
|
|
||||||
|
def setup_logging(): |
||||||
|
"""Initialize logging with pruning of old logs.""" |
||||||
|
try: |
||||||
|
os.makedirs(os.path.dirname(LOG_FILE), exist_ok=True) |
||||||
|
if os.path.exists(LOG_FILE): |
||||||
|
with open(LOG_FILE, 'r') as f: |
||||||
|
lines = f.readlines() |
||||||
|
cutoff = datetime.now(timezone.utc) - timedelta(days=LOG_PRUNE_DAYS) |
||||||
|
pruned_lines = [] |
||||||
|
malformed_count = 0 |
||||||
|
for line in lines: |
||||||
|
if len(line) < 19 or not line[:19].replace('-', '').replace(':', '').replace(' ', '').isdigit(): |
||||||
|
malformed_count += 1 |
||||||
|
continue |
||||||
|
try: |
||||||
|
timestamp = datetime.strptime(line[:19], '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone.utc) |
||||||
|
if timestamp > cutoff: |
||||||
|
pruned_lines.append(line) |
||||||
|
except ValueError: |
||||||
|
malformed_count += 1 |
||||||
|
continue |
||||||
|
if malformed_count > 0: |
||||||
|
logging.info(f"Skipped {malformed_count} malformed log lines during pruning") |
||||||
|
with open(LOG_FILE, 'w') as f: |
||||||
|
f.writelines(pruned_lines) |
||||||
|
|
||||||
|
logging.basicConfig( |
||||||
|
filename=LOG_FILE, |
||||||
|
level=logging.INFO, |
||||||
|
format='%(asctime)s - %(levelname)s - %(message)s', |
||||||
|
datefmt='%Y-%m-%d %H:%M:%S' |
||||||
|
) |
||||||
|
console_handler = logging.StreamHandler() |
||||||
|
console_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')) |
||||||
|
logging.getLogger().addHandler(console_handler) |
||||||
|
logging.getLogger("openai").setLevel(logging.WARNING) |
||||||
|
logging.info("Logging initialized for foodie_weekly_thread.py") |
||||||
|
except Exception as e: |
||||||
|
print(f"Failed to setup logging: {e}") |
||||||
|
sys.exit(1) |
||||||
|
|
||||||
|
def acquire_lock(): |
||||||
|
"""Acquire a lock to prevent concurrent runs.""" |
||||||
|
os.makedirs(os.path.dirname(LOCK_FILE), exist_ok=True) |
||||||
|
lock_fd = open(LOCK_FILE, 'w') |
||||||
|
try: |
||||||
|
fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB) |
||||||
|
lock_fd.write(str(os.getpid())) |
||||||
|
lock_fd.flush() |
||||||
|
return lock_fd |
||||||
|
except IOError: |
||||||
|
logging.info("Another instance of foodie_weekly_thread.py is running") |
||||||
|
sys.exit(0) |
||||||
|
|
||||||
|
def signal_handler(sig, frame): |
||||||
|
"""Handle termination signals gracefully.""" |
||||||
|
logging.info("Received termination signal, marking script as stopped...") |
||||||
|
update_system_activity(SCRIPT_NAME, "stopped") |
||||||
|
sys.exit(0) |
||||||
|
|
||||||
|
signal.signal(signal.SIGTERM, signal_handler) |
||||||
|
signal.signal(signal.SIGINT, signal_handler) |
||||||
|
|
||||||
# Initialize OpenAI client |
# Initialize OpenAI client |
||||||
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY")) |
try: |
||||||
|
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY")) |
||||||
|
if not os.getenv("OPENAI_API_KEY"): |
||||||
|
logging.error("OPENAI_API_KEY is not set in environment variables") |
||||||
|
raise ValueError("OPENAI_API_KEY is required") |
||||||
|
except Exception as e: |
||||||
|
logging.error(f"Failed to initialize OpenAI client: {e}", exc_info=True) |
||||||
|
sys.exit(1) |
||||||
|
|
||||||
RECENT_POSTS_FILE = "/home/shane/foodie_automator/recent_posts.json" |
def generate_intro_tweet(author): |
||||||
|
"""Generate an intro tweet for the weekly thread.""" |
||||||
|
credentials = X_API_CREDENTIALS.get(author["username"]) |
||||||
|
if not credentials: |
||||||
|
logging.error(f"No X credentials found for {author['username']}") |
||||||
|
return None |
||||||
|
author_handle = credentials["x_username"] |
||||||
|
logging.debug(f"Generating intro tweet for {author_handle}") |
||||||
|
|
||||||
|
prompt = ( |
||||||
|
f"Generate a concise tweet (under 200 characters) for {author_handle}. " |
||||||
|
f"Introduce a thread of their top 10 foodie posts of the week on InsiderFoodie.com. " |
||||||
|
f"Make it engaging, create curiosity, and include a call to action to visit InsiderFoodie.com or follow {author_handle}. " |
||||||
|
f"Avoid using the word 'elevate'—use humanized language like 'level up' or 'bring to life'. " |
||||||
|
f"Strictly exclude emojis, hashtags, or reward-driven incentives (e.g., giveaways)." |
||||||
|
) |
||||||
|
|
||||||
|
for attempt in range(MAX_RETRIES): |
||||||
|
try: |
||||||
|
response = client.chat.completions.create( |
||||||
|
model=SUMMARY_MODEL, |
||||||
|
messages=[ |
||||||
|
{"role": "system", "content": "You are a social media expert crafting engaging tweets."}, |
||||||
|
{"role": "user", "content": prompt} |
||||||
|
], |
||||||
|
max_tokens=150, |
||||||
|
temperature=0.7 |
||||||
|
) |
||||||
|
tweet = response.choices[0].message.content.strip() |
||||||
|
tweet = re.sub(r'[\U0001F000-\U0001FFFF]', '', tweet) # Remove emojis |
||||||
|
if len(tweet) > 280: |
||||||
|
tweet = tweet[:277] + "..." |
||||||
|
logging.debug(f"Generated intro tweet: {tweet}") |
||||||
|
return tweet |
||||||
|
except Exception as e: |
||||||
|
logging.warning(f"Failed to generate intro tweet for {author['username']} (attempt {attempt + 1}): {e}") |
||||||
|
if attempt < MAX_RETRIES - 1: |
||||||
|
time.sleep(RETRY_BACKOFF * (2 ** attempt)) |
||||||
|
else: |
||||||
|
logging.error(f"Failed to generate intro tweet after {MAX_RETRIES} attempts") |
||||||
|
fallback = ( |
||||||
|
f"Top 10 foodie posts this week by {author_handle}! Visit InsiderFoodie.com and follow {author_handle} for more." |
||||||
|
) |
||||||
|
logging.info(f"Using fallback intro tweet: {fallback}") |
||||||
|
return fallback |
||||||
|
|
||||||
|
def generate_final_cta(author): |
||||||
|
"""Generate a final CTA tweet for the weekly thread using GPT.""" |
||||||
|
credentials = X_API_CREDENTIALS.get(author["username"]) |
||||||
|
if not credentials: |
||||||
|
logging.error(f"No X credentials found for {author['username']}") |
||||||
|
return None |
||||||
|
author_handle = credentials["x_username"] |
||||||
|
logging.debug(f"Generating final CTA tweet for {author_handle}") |
||||||
|
|
||||||
|
prompt = ( |
||||||
|
f"Generate a concise tweet (under 200 characters) for {author_handle}. " |
||||||
|
f"Conclude a thread of their top 10 foodie posts of the week on InsiderFoodie.com. " |
||||||
|
f"Make it engaging, value-driven, in the style of Neil Patel. " |
||||||
|
f"Include a call to action to visit InsiderFoodie.com and follow {author_handle}. " |
||||||
|
f"Mention that top 10 foodie trends are shared every Monday. " |
||||||
|
f"Avoid using the word 'elevate'—use humanized language like 'level up' or 'bring to life'. " |
||||||
|
f"Strictly exclude emojis, hashtags, or reward-driven incentives (e.g., giveaways)." |
||||||
|
) |
||||||
|
|
||||||
|
for attempt in range(MAX_RETRIES): |
||||||
|
try: |
||||||
|
response = client.chat.completions.create( |
||||||
|
model=SUMMARY_MODEL, |
||||||
|
messages=[ |
||||||
|
{"role": "system", "content": "You are a social media expert crafting engaging tweets."}, |
||||||
|
{"role": "user", "content": prompt} |
||||||
|
], |
||||||
|
max_tokens=150, |
||||||
|
temperature=0.7 |
||||||
|
) |
||||||
|
tweet = response.choices[0].message.content.strip() |
||||||
|
tweet = re.sub(r'[\U0001F000-\U0001FFFF]', '', tweet) # Remove emojis |
||||||
|
if len(tweet) > 280: |
||||||
|
tweet = tweet[:277] + "..." |
||||||
|
logging.debug(f"Generated final CTA tweet: {tweet}") |
||||||
|
return tweet |
||||||
|
except Exception as e: |
||||||
|
logging.warning(f"Failed to generate final CTA tweet for {author['username']} (attempt {attempt + 1}): {e}") |
||||||
|
if attempt < MAX_RETRIES - 1: |
||||||
|
time.sleep(RETRY_BACKOFF * (2 ** attempt)) |
||||||
|
else: |
||||||
|
logging.error(f"Failed to generate final CTA tweet after {MAX_RETRIES} attempts") |
||||||
|
fallback = ( |
||||||
|
f"Want more foodie insights? Visit insiderfoodie.com and follow {author_handle} " |
||||||
|
f"for top 10 foodie trends every Monday." |
||||||
|
) |
||||||
|
logging.info(f"Using fallback final CTA tweet: {fallback}") |
||||||
|
return fallback |
||||||
|
|
||||||
def load_recent_posts(): |
def load_recent_posts(): |
||||||
posts = [] |
"""Load and deduplicate posts from recent_posts.json.""" |
||||||
if not os.path.exists(RECENT_POSTS_FILE): |
logging.debug(f"Attempting to load posts from {RECENT_POSTS_FILE}") |
||||||
return posts |
posts = load_json_file(RECENT_POSTS_FILE) |
||||||
|
|
||||||
with open(RECENT_POSTS_FILE, 'r') as f: |
if not posts: |
||||||
for line in f: |
logging.warning(f"No valid posts loaded from {RECENT_POSTS_FILE}") |
||||||
if line.strip(): |
return [] |
||||||
try: |
|
||||||
entry = json.loads(line.strip()) |
|
||||||
posts.append(entry) |
|
||||||
except json.JSONDecodeError as e: |
|
||||||
logging.warning(f"Skipping invalid JSON line in {RECENT_POSTS_FILE}: {e}") |
|
||||||
|
|
||||||
return posts |
# Deduplicate posts |
||||||
|
unique_posts = {} |
||||||
|
for post in posts: |
||||||
|
try: |
||||||
|
required_fields = ["title", "url", "author_username", "timestamp"] |
||||||
|
if not all(key in post for key in required_fields): |
||||||
|
logging.warning(f"Skipping invalid post: missing fields {post}") |
||||||
|
continue |
||||||
|
datetime.fromisoformat(post["timestamp"].replace('Z', '+00:00')) |
||||||
|
key = (post["title"], post["url"], post["author_username"]) |
||||||
|
if key not in unique_posts: |
||||||
|
unique_posts[key] = post |
||||||
|
else: |
||||||
|
logging.debug(f"Skipping duplicate post: {post['title']}") |
||||||
|
except (KeyError, ValueError) as e: |
||||||
|
logging.warning(f"Skipping post due to invalid format: {e}") |
||||||
|
continue |
||||||
|
|
||||||
|
deduped_posts = list(unique_posts.values()) |
||||||
|
logging.info(f"Loaded {len(deduped_posts)} unique posts from {RECENT_POSTS_FILE}") |
||||||
|
return deduped_posts |
||||||
|
|
||||||
def filter_posts_for_week(posts, start_date, end_date): |
def filter_posts_for_week(posts, start_date, end_date): |
||||||
|
"""Filter posts within the given week range.""" |
||||||
filtered_posts = [] |
filtered_posts = [] |
||||||
for post in posts: |
for post in posts: |
||||||
timestamp = datetime.fromisoformat(post["timestamp"]) |
try: |
||||||
if start_date <= timestamp <= end_date: |
post_date = datetime.fromisoformat(post["timestamp"]) |
||||||
filtered_posts.append(post) |
logging.debug(f"Checking post: title={post['title']}, timestamp={post_date}, in range {start_date} to {end_date}") |
||||||
|
if start_date <= post_date <= end_date: |
||||||
|
filtered_posts.append(post) |
||||||
|
logging.debug(f"Included post: {post['title']}") |
||||||
|
else: |
||||||
|
logging.debug(f"Excluded post: {post['title']} (timestamp {post_date} outside range)") |
||||||
|
except (KeyError, ValueError) as e: |
||||||
|
logging.warning(f"Skipping post due to invalid format: {e}") |
||||||
|
continue |
||||||
|
logging.info(f"Filtered to {len(filtered_posts)} posts for the week") |
||||||
return filtered_posts |
return filtered_posts |
||||||
|
|
||||||
def generate_intro_tweet(author): |
def generate_weekly_thread(): |
||||||
author_handle = author["handle"] |
"""Generate weekly thread content for each author and save to file on Mondays.""" |
||||||
prompt = ( |
logging.info("Starting foodie_weekly_thread.py") |
||||||
f"Generate a concise tweet (under 280 characters) for {author_handle}. " |
|
||||||
f"Introduce a thread of their top 10 foodie posts of the week on InsiderFoodie.com. " |
|
||||||
f"Make it engaging, create curiosity, and include a call to action to visit InsiderFoodie.com, follow {author_handle}, or like the thread. " |
|
||||||
f"Avoid using the word 'elevate'—use more humanized language like 'level up' or 'bring to life'. " |
|
||||||
f"Do not include emojis, hashtags, or reward-driven incentives (e.g., giveaways)." |
|
||||||
) |
|
||||||
|
|
||||||
try: |
|
||||||
response = client.chat.completions.create( |
|
||||||
model=SUMMARY_MODEL, |
|
||||||
messages=[ |
|
||||||
{"role": "system", "content": "You are a social media expert crafting engaging tweets."}, |
|
||||||
{"role": "user", "content": prompt} |
|
||||||
], |
|
||||||
max_tokens=100, |
|
||||||
temperature=0.7 |
|
||||||
) |
|
||||||
tweet = response.choices[0].message.content.strip() |
|
||||||
if len(tweet) > 280: |
|
||||||
tweet = tweet[:277] + "..." |
|
||||||
return tweet |
|
||||||
except Exception as e: |
|
||||||
logging.warning(f"Failed to generate intro tweet for {author['username']}: {e}") |
|
||||||
# Fallback template |
|
||||||
return ( |
|
||||||
f"This weeks top 10 foodie finds by {author_handle} Check out the best on InsiderFoodie.com " |
|
||||||
f"Follow {author_handle} for more and like this thread to stay in the loop Visit us at https://insiderfoodie.com" |
|
||||||
) |
|
||||||
|
|
||||||
-def post_weekly_thread():
-    # Determine the date range (Monday to Sunday of the past week)
+    # Check if today is Monday
     today = datetime.now(timezone.utc)
-    days_since_monday = (today.weekday() + 1) % 7 + 7  # Go back to previous Monday
-    start_date = (today - timedelta(days=days_since_monday)).replace(hour=0, minute=0, second=0, microsecond=0)
-    end_date = start_date + timedelta(days=6, hours=23, minutes=59, seconds=59)
+    if today.weekday() != 0:  # 0 = Monday
+        logging.info(f"Today is not Monday (weekday: {today.weekday()}), skipping weekly thread")
+        return
+
+    # Calculate date range: 7 days prior to run date
+    start_date = (today - timedelta(days=7)).replace(hour=0, minute=0, second=0, microsecond=0)
+    end_date = (today - timedelta(days=1)).replace(hour=23, minute=59, second=59, microsecond=999999)
     logging.info(f"Fetching posts from {start_date} to {end_date}")
     # Load and filter posts
-    all_posts = load_recent_posts()
-    weekly_posts = filter_posts_for_week(all_posts, start_date, end_date)
+    recent_posts = load_json_file(RECENT_POSTS_FILE)
+    logging.info(f"Loaded {len(recent_posts)} posts from {RECENT_POSTS_FILE}")
+
+    # Deduplicate posts
+    seen = set()
+    deduped_posts = []
+    for post in recent_posts:
+        key = (post["title"], post["url"], post["author_username"])
+        if key not in seen:
+            seen.add(key)
+            deduped_posts.append(post)
+    logging.info(f"Filtered to {len(deduped_posts)} unique posts after deduplication")
+
+    weekly_posts = filter_posts_for_week(deduped_posts, start_date, end_date)
+    if not weekly_posts:
+        logging.warning(f"No posts found within the week range {start_date} to {end_date}, exiting generate_weekly_thread")
+        return
     # Group posts by author
-    posts_by_author = {}
+    posts_by_author = {author["username"]: [] for author in AUTHORS}
     for post in weekly_posts:
-        author = post["author_username"]  # Updated to match the key in recent_posts.json
-        if author not in posts_by_author:
-            posts_by_author[author] = []
-        posts_by_author[author].append(post)
+        username = post["author_username"]
+        if username in posts_by_author:
+            posts_by_author[username].append(post)
+
+    # Generate thread content for each author
+    thread_content = []
+    timestamp = datetime.now(timezone.utc).isoformat()
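One reading of the new grouping worth flagging: pre-seeding `posts_by_author` from AUTHORS and then gating on `if username in posts_by_author` means any post whose `author_username` is not in AUTHORS is dropped silently. If that filtering is deliberate, the dict comprehension is fine; if not, a `defaultdict` keeps every author that actually appears in the data. A sketch of that alternative, offered only as a possible variant:

from collections import defaultdict

posts_by_author = defaultdict(list)
for post in weekly_posts:
    # Every author present in the data gets a bucket, known or not.
    posts_by_author[post["author_username"]].append(post)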
-    # For each author, post a thread
     for author in AUTHORS:
-        author_posts = posts_by_author.get(author["username"], [])
-        if not author_posts:
-            logging.info(f"No posts found for {author['username']} this week")
-            continue
-        # Sort by timestamp (as a proxy for interest_score) and take top 10
-        author_posts.sort(key=lambda x: x.get("timestamp", ""), reverse=True)
-        top_posts = author_posts[:10]
-        if not top_posts:
-            continue
-        # First tweet: Intro with CTA (generated by GPT)
-        intro_tweet = generate_intro_tweet(author)
-        logging.info(f"Posting intro tweet for {author['username']}: {intro_tweet}")
-        intro_response = post_tweet(author, intro_tweet)
-        if not intro_response:
-            logging.warning(f"Failed to post intro tweet for {author['username']}")
-            continue
+        username = author["username"]
+        author_posts = posts_by_author.get(username, [])
+        if not author_posts:
+            logging.info(f"No posts found for {username}, skipping")
+            continue
+
+        # Select top 2 posts (to fit within 3-tweet limit: lead + 2 posts)
+        author_posts = sorted(author_posts, key=lambda x: datetime.fromisoformat(x["timestamp"]), reverse=True)
+        selected_posts = author_posts[:2]
+        logging.info(f"Found {len(author_posts)} posts for {username}, selected {len(selected_posts)}")
+
+        # Generate thread content
+        try:
+            # Generate intro tweet
+            intro_tweet = generate_intro_tweet(author)
+            if not intro_tweet:
+                logging.error(f"Failed to generate intro tweet for {username}, skipping")
+                continue
+
+            # Generate thread tweets (up to 2)
+            thread_tweets = []
+            for i, post in enumerate(selected_posts, 1):
+                thread_tweet = (
+                    f"{i}. {post['title']} "
+                    f"Read more: {post['url']}"
+                )
+                if len(thread_tweet) > 280:
+                    thread_tweet = f"{i}. {post['title'][:200]}... Read more: {post['url']}"
+                thread_tweets.append(thread_tweet)
+                logging.info(f"Generated thread tweet {i} for {username}: {thread_tweet}")
+
+            # Generate final CTA tweet
+            final_cta = generate_final_cta(author)
+            if not final_cta:
+                logging.error(f"Failed to generate final CTA tweet for {username}, using fallback")
+                final_cta = (
+                    f"Want more foodie insights? Visit insiderfoodie.com and follow {X_API_CREDENTIALS[username]['x_username']} "
+                    f"for top 10 foodie trends every Monday."
+                )
+
+            # Collect thread content for this author
+            author_thread = {
+                "username": username,
+                "x_handle": X_API_CREDENTIALS[username]["x_username"],
+                "intro_tweet": intro_tweet,
+                "thread_tweets": thread_tweets,
+                "final_cta": final_cta,
+                "timestamp": timestamp
+            }
+            thread_content.append(author_thread)
+            logging.info(f"Generated thread content for {username}")
+
+        except Exception as e:
+            logging.error(f"Error generating thread content for {username}: {e}", exc_info=True)
+            continue
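The fallback truncation is not a hard guarantee: `f"{i}. {title[:200]}... Read more: {url}"` is roughly 218 characters plus the URL, so any link longer than about 62 characters still overflows 280 (X's t.co link wrapping usually rescues this in practice, but the raw string can exceed the limit). A length-aware trim, offered as one possible tightening rather than the project's approach:

def fit_tweet(index: int, title: str, url: str, limit: int = 280) -> str:
    """Build a numbered tweet, trimming the title so the whole string fits."""
    prefix = f"{index}. "
    suffix = f" Read more: {url}"
    room = limit - len(prefix) - len(suffix)
    if len(title) > room:
        # Reserve three characters for the ellipsis marker.
        title = title[:max(room - 3, 0)] + "..."
    return f"{prefix}{title}{suffix}"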
-        intro_tweet_id = intro_response.get("id")
-        # Post each top post as a reply in the thread
-        for i, post in enumerate(top_posts, 1):
-            post_tweet_content = (
-                f"{i}. {post['title']} Link: {post['url']}"
-            )
-            logging.info(f"Posting thread reply {i} for {author['username']}: {post_tweet_content}")
-            post_tweet(author, post_tweet_content, reply_to_id=intro_tweet_id)
-        logging.info(f"Successfully posted weekly thread for {author['username']}")
+    # Save thread content to file, overwriting any existing content
+    if thread_content:
+        try:
+            # Backup existing file before overwriting
+            if os.path.exists(WEEKLY_THREADS_FILE):
+                backup_dir = "/home/shane/foodie_automator/backups"
+                os.makedirs(backup_dir, exist_ok=True)
+                backup_file = f"{backup_dir}/weekly_threads_{timestamp.replace(':', '-')}.json"
+                shutil.copy(WEEKLY_THREADS_FILE, backup_file)
+                logging.info(f"Backed up existing {WEEKLY_THREADS_FILE} to {backup_file}")
+
+            # Save new thread content, overwriting the file
+            thread_data = {
+                "week_start": start_date.isoformat(),
+                "week_end": end_date.isoformat(),
+                "timestamp": timestamp,
+                "threads": thread_content
+            }
+            save_json_file(WEEKLY_THREADS_FILE, thread_data)
+            logging.info(f"Saved thread content for {len(thread_content)} authors to {WEEKLY_THREADS_FILE}")
+        except Exception as e:
+            logging.error(f"Failed to save thread content to {WEEKLY_THREADS_FILE}: {e}")
+    else:
+        logging.warning("No thread content generated, nothing to save")
+
+    logging.info("Completed foodie_weekly_thread.py")
+def main():
+    """Main function to run the script."""
+    lock_fd = None
+    try:
+        lock_fd = acquire_lock()
+        setup_logging()
+        update_system_activity(SCRIPT_NAME, "running", os.getpid())  # Record start
+        generate_weekly_thread()
+        update_system_activity(SCRIPT_NAME, "stopped")  # Record stop
+    except Exception as e:
+        logging.error(f"Fatal error in main: {e}", exc_info=True)
+        print(f"Fatal error: {e}")
+        update_system_activity(SCRIPT_NAME, "stopped")  # Record stop on error
+        sys.exit(1)
+    finally:
+        if lock_fd:
+            fcntl.flock(lock_fd, fcntl.LOCK_UN)
+            lock_fd.close()
+            if os.path.exists(LOCK_FILE):
+                os.remove(LOCK_FILE)
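`acquire_lock` is also defined outside this hunk, but the `finally` block pins down its contract: it returns an open file object whose descriptor holds an `fcntl` lock on LOCK_FILE. A sketch consistent with that usage (inferred from the call sites, not the actual implementation):

import fcntl
import sys

def acquire_lock():
    """Take an exclusive, non-blocking flock on LOCK_FILE; exit if already held."""
    lock_fd = open(LOCK_FILE, "w")
    try:
        fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except BlockingIOError:
        # Another instance holds the lock; bail out quietly.
        print("Another instance is running, exiting")
        sys.exit(0)
    return lock_fd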
 if __name__ == "__main__":
-    # Run only on Sundays
-    if datetime.now(timezone.utc).weekday() == 6:  # Sunday (0 = Monday, 6 = Sunday)
-        post_weekly_thread()
-    else:
-        logging.info("Not Sunday - skipping weekly thread posting")
+    main()
@@ -1,95 +1,195 @@
 #!/bin/bash

-# Directory to monitor
 BASE_DIR="/home/shane/foodie_automator"
+LOG_DIR="$BASE_DIR/logs"
+LOCK_DIR="$BASE_DIR/locks"
+LOG_FILE="$LOG_DIR/manage_scripts.log"
+VENV_PYTHON="$BASE_DIR/venv/bin/python"
 CHECKSUM_FILE="$BASE_DIR/.file_checksum"
-LOG_FILE="$BASE_DIR/manage_scripts.log"
-# Log function
+mkdir -p "$LOG_DIR" "$LOCK_DIR" || { echo "Error: Failed to create directories"; exit 1; }
+
 log() {
     echo "$(date '+%Y-%m-%d %H:%M:%S') - $1" >> "$LOG_FILE"
+    echo "$1"
 }
-# Calculate checksum of files (excluding logs, JSON files, and venv)
 calculate_checksum() {
     find "$BASE_DIR" -type f \
-        -not -path "$BASE_DIR/*.log" \
+        -not -path "$BASE_DIR/logs/*" \
         -not -path "$BASE_DIR/*.json" \
         -not -path "$BASE_DIR/.file_checksum" \
        -not -path "$BASE_DIR/venv/*" \
+        -not -path "$BASE_DIR/locks/*" \
         -exec sha256sum {} \; | sort | sha256sum | awk '{print $1}'
 }
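A note on what `calculate_checksum` is sensitive to: each `sha256sum` line contains the file's path as well as its digest, so the fingerprint changes on renames and moves, not just on content edits, and the `sort` makes the result independent of `find`'s traversal order. An equivalent two-step form, useful when debugging which file actually changed (the temp path is illustrative):

# Step 1: per-file digests, order-normalized; keep the manifest for inspection.
find "$BASE_DIR" -type f -exec sha256sum {} \; | sort > /tmp/foodie_manifest.txt
# Step 2: collapse the manifest into the single fingerprint the script stores.
sha256sum /tmp/foodie_manifest.txt | awk '{print $1}'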
-# Check if scripts are running
 check_running() {
-    pgrep -f "python3.*foodie_automator" > /dev/null
+    local script_name="$1"
+    local lock_file="$LOCK_DIR/${script_name}.lock"
+    if [ -f "$lock_file" ]; then
+        local pid=$(cat "$lock_file")
+        if ps -p "$pid" > /dev/null; then
+            log "$script_name is already running (PID: $pid)"
+            return 0
+        else
+            log "Stale lock file for $script_name, removing"
+            rm -f "$lock_file"
+        fi
+    fi
+    return 1
 }
-# Stop scripts
-stop_scripts() {
-    log "Stopping scripts..."
-    pkill -TERM -f "python3.*foodie_automator" || true
-    sleep 10
-    pkill -9 -f "python3.*foodie_automator" || true
-    log "Scripts stopped."
+run_script() {
+    local script="$1"
+    local script_name="${script%.py}"
+    local script_log="$LOG_DIR/${script_name}.log"
+    if check_running "$script_name"; then
+        echo "0"  # Skip sleep
+        return 1
+    fi
+    log "Running $script..."
+    "$VENV_PYTHON" "$BASE_DIR/$script" >> "$script_log" 2>&1 &
+    local pid=$!
+    echo "$pid" > "$LOCK_DIR/${script_name}.lock"
+    wait "$pid"
+    local exit_code=$?
+    if [ $exit_code -eq 0 ]; then
+        log "$script completed successfully"
+    else
+        log "$script failed with exit code $exit_code"
+    fi
+    sleep_time=$(grep "sleep_time:" "$script_log" | tail -n 1 | grep -oP 'sleep_time: \K[0-9]+' || echo $((RANDOM % 601 + 1200)))
+    log "$script completed, sleep_time: $sleep_time seconds"
+    rm -f "$LOCK_DIR/${script_name}.lock"
+    echo "$sleep_time"
 }
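Two things about `run_script` are easy to miss in review. First, `log` now echoes to stdout as well as the log file, so the function's stdout can carry several lines; that is why every caller pipes through `tail -n 1` to pick off the final `echo "$sleep_time"`. Second, when no `sleep_time:` line is found in the script's log, the fallback `$((RANDOM % 601 + 1200))` yields a value in the 1200 to 1800 second range. A call site in the shape the later sections use:

# Capture only the last stdout line (the sleep duration); log echoes are discarded.
sleep_time=$(run_script "foodie_automator_rss.py" | tail -n 1)
[ "$sleep_time" != "0" ] && sleep "$sleep_time"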
-# Start scripts
-start_scripts() {
-    log "Starting scripts..."
-    cd "$BASE_DIR"
-    source venv/bin/activate
-    # Find all foodie_automator_*.py scripts and start them
-    for script in foodie_automator_*.py; do
+stop_scripts() {
+    log "Stopping scripts..."
+    for script in foodie_automator_rss.py foodie_automator_reddit.py foodie_automator_google.py; do
         if [ -f "$script" ]; then
-            log "Starting $script..."
-            nohup python3 "$script" >> "${script%.py}.log" 2>&1 &
+            local script_name="${script%.py}"
+            if pkill -TERM -f "$VENV_PYTHON.*$script_name"; then
+                log "Sent TERM to $script_name"
+                sleep 2
+                pkill -9 -f "$VENV_PYTHON.*$script_name" || true
+            else
+                log "No running $script_name found"
+            fi
+            rm -f "$LOCK_DIR/${script_name}.lock"
+            log "Removed lock file for $script_name"
         fi
     done
-    log "All scripts started."
+    log "Scripts stopped."
 }
-# Update dependencies
 update_dependencies() {
     log "Updating dependencies..."
-    cd "$BASE_DIR"
-    # Create venv if it doesn't exist
+    cd "$BASE_DIR" || { log "Failed to change to $BASE_DIR"; exit 1; }
     if [ ! -d "venv" ]; then
         python3 -m venv venv
+        log "Created new virtual environment"
     fi
-    source venv/bin/activate
-    pip install --upgrade pip
-    pip install -r requirements.txt || (pip install requests openai beautifulsoup4 feedparser praw duckduckgo_search selenium Pillow pytesseract webdriver-manager && log "Fallback: Installed core dependencies")
+    source "$BASE_DIR/venv/bin/activate"
     log "Dependencies updated."
 }
-# Main logic
+if [ "$1" == "stop" ]; then
+    log "Received stop command, stopping all scripts..."
+    stop_scripts
+    for script in foodie_engagement_generator.py foodie_weekly_thread.py; do
+        script_name="${script%.py}"  # plain assignment: `local` is only valid inside a function
+        if pkill -TERM -f "$VENV_PYTHON.*$script_name"; then
+            log "Sent TERM to $script_name"
+            sleep 2
+            pkill -9 -f "$VENV_PYTHON.*$script_name" || true
+        else
+            log "No running $script_name found"
+        fi
+        rm -f "$LOCK_DIR/$script_name.lock"
+        log "Stopped $script_name"
+    done
+    log "All scripts stopped. Reminder: Disable cron jobs (crontab -e)."
+    exit 0
+fi
+if [ "$1" == "start" ]; then
+    log "Received start command, starting all scripts..."
+    cd "$BASE_DIR" || { log "Failed to change to $BASE_DIR"; exit 1; }
+    source "$BASE_DIR/venv/bin/activate"
+    if [ -f "$BASE_DIR/.env" ]; then
+        while IFS='=' read -r key value; do
+            if [[ ! -z "$key" && ! "$key" =~ ^# ]]; then
+                export "$key=$value"
+            fi
+        done < <(grep -v '^#' "$BASE_DIR/.env")
+        log ".env variables loaded"
+    else
+        log "Error: .env file not found"
+        exit 1
+    fi
+    for script in foodie_automator_rss.py foodie_automator_reddit.py foodie_automator_google.py; do
+        if [ -f "$script" ]; then
+            sleep_time=$(run_script "$script" | tail -n 1)
+            if [ "$sleep_time" != "0" ]; then
+                log "Sleeping for $sleep_time seconds after $script"
+                sleep "$sleep_time"
+            fi
+        else
+            log "Script $script not found"
+        fi
+    done
+    if [ -f "foodie_engagement_generator.py" ]; then
+        if ! check_running "foodie_engagement_generator"; then
+            log "Running foodie_engagement_generator.py..."
+            "$VENV_PYTHON" "foodie_engagement_generator.py" >> "$LOG_DIR/foodie_engagement_generator.log" 2>&1
+            log "foodie_engagement_generator.py completed"
+        fi
+    fi
+    log "All scripts started. Ensure cron jobs are enabled (crontab -l)."
+    exit 0
+fi
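On the `.env` loader: the `while IFS='=' read -r key value` loop splits each line on the first `=` only (everything after it lands in `value`, so values containing `=` survive), but it exports values literally, meaning quote characters are kept as-is and `export` fails on malformed names. If the file is plain `KEY=value` lines this is fine; a common stricter alternative, assuming the file is trusted and shell-syntax clean, is to let bash parse it:

# Export everything the file defines, letting the shell handle quoting.
set -a                      # auto-export every variable assigned below
source "$BASE_DIR/.env"
set +a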
 log "Checking for file changes..."
 CURRENT_CHECKSUM=$(calculate_checksum)

 if [ -f "$CHECKSUM_FILE" ]; then
     PREVIOUS_CHECKSUM=$(cat "$CHECKSUM_FILE")
 else
     PREVIOUS_CHECKSUM=""
 fi

 if [ "$CURRENT_CHECKSUM" != "$PREVIOUS_CHECKSUM" ]; then
     log "File changes detected. Previous checksum: $PREVIOUS_CHECKSUM, Current checksum: $CURRENT_CHECKSUM"
-    # Stop scripts if running
-    if check_running; then
+    if pgrep -f "$VENV_PYTHON.*foodie_automator" > /dev/null; then
         stop_scripts
     fi
-    # Update dependencies
     update_dependencies
-    # Start scripts
-    start_scripts
-    # Update checksum
     echo "$CURRENT_CHECKSUM" > "$CHECKSUM_FILE"
     log "Checksum updated."
+fi
+cd "$BASE_DIR"
+source "$BASE_DIR/venv/bin/activate"
+if [ -f "$BASE_DIR/.env" ]; then
+    while IFS='=' read -r key value; do
+        if [[ ! -z "$key" && ! "$key" =~ ^# ]]; then
+            export "$key=$value"
+        fi
+    done < <(grep -v '^#' "$BASE_DIR/.env")
+    log ".env variables loaded"
 else
-    log "No file changes detected."
+    log "Error: .env file not found"
+    exit 1
 fi
+for script in foodie_automator_rss.py foodie_automator_reddit.py foodie_automator_google.py; do
+    if [ -f "$script" ]; then
+        sleep_time=$(run_script "$script" | tail -n 1)
+        if [ "$sleep_time" != "0" ]; then
+            log "Sleeping for $sleep_time seconds after $script"
+            sleep "$sleep_time"
+        fi
+    else
+        log "Script $script not found"
+    fi
+done
+log "All scripts processed."
+exit 0
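The start/stop messages reference cron, but the schedule itself is not part of this diff. For completeness, entries of roughly this shape would drive the manager and the Monday thread generator; the times, script name, and paths here are placeholders, not the project's actual crontab:

# m h dom mon dow  command  (illustrative only)
*/30 * * * * /home/shane/foodie_automator/manage_scripts.sh >> /home/shane/foodie_automator/logs/cron.log 2>&1
0 6 * * 1    /home/shane/foodie_automator/venv/bin/python /home/shane/foodie_automator/foodie_weekly_thread.py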