Compare commits
No commits in common. 'main' and 'my-fix-branch' have entirely different histories.
main...my-fix-branch
13 changed files with 1479 additions and 3904 deletions
@@ -1,129 +0,0 @@ (file deleted)
import logging

logging.basicConfig(
    filename='/home/shane/foodie_automator/logs/check_x_capacity.log',
    level=logging.DEBUG,
    format='%(asctime)s - %(levelname)s - %(message)s'
)

import requests
from requests_oauthlib import OAuth1
from datetime import datetime, timezone
from dotenv import load_dotenv
import os
import time
from foodie_config import X_API_CREDENTIALS

# Load environment variables from .env file
load_dotenv()

# Function to delete a tweet
def delete_tweet(tweet_id, auth):
    try:
        response = requests.delete(f"https://api.x.com/2/tweets/{tweet_id}", auth=auth)
        response.raise_for_status()
        logging.info(f"Successfully deleted tweet {tweet_id}")
        return True
    except Exception as e:
        logging.error(f"Failed to delete tweet {tweet_id}: {e}")
        return False

# Function to check rate limits for a given author
def check_rate_limits_for_author(username, credentials, retry=False):
    logging.info(f"{'Retrying' if retry else 'Checking'} rate limits for {username} (handle: {credentials['x_username']})")

    # Retrieve OAuth 1.0a credentials for the author
    consumer_key = credentials["api_key"]
    consumer_secret = credentials["api_secret"]
    access_token = credentials["access_token"]
    access_token_secret = credentials["access_token_secret"]

    # Validate credentials
    if not all([consumer_key, consumer_secret, access_token, access_token_secret]):
        logging.error(f"Missing OAuth credentials for {username} in X_API_CREDENTIALS.")
        return None

    # Set up OAuth 1.0a authentication
    auth = OAuth1(consumer_key, consumer_secret, access_token, access_token_secret)

    # Add delay to avoid IP-based rate limiting
    logging.info(f"Waiting 5 seconds before attempting to post for {username}")
    time.sleep(5)

    # Try posting a test tweet to get v2 rate limit headers
    tweet_id = None
    try:
        tweet_data = {"text": f"Test tweet to check rate limits for {username} - please ignore"}
        response = requests.post("https://api.x.com/2/tweets", json=tweet_data, auth=auth)
        response.raise_for_status()
        tweet_id = response.json()['data']['id']
        logging.info("Successfully posted test tweet for %s: %s", username, response.json())
        logging.info("Response Headers for %s: %s", username, response.headers)
        # Extract rate limit headers if present
        app_limit = response.headers.get('x-app-limit-24hour-limit', 'N/A')
        app_remaining = response.headers.get('x-app-limit-24hour-remaining', 'N/A')
        app_reset = response.headers.get('x-app-limit-24hour-reset', 'N/A')
        logging.info("App 24-Hour Tweet Limit for %s: %s", username, app_limit)
        logging.info("App 24-Hour Tweets Remaining for %s: %s", username, app_remaining)
        if app_reset != 'N/A':
            reset_time = datetime.fromtimestamp(int(app_reset), timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')
            logging.info("App 24-Hour Reset (Readable) for %s: %s", username, reset_time)
        return tweet_id
    except requests.exceptions.HTTPError as e:
        logging.info("Test Tweet Response Status Code for %s: %s", username, e.response.status_code)
        logging.info("Test Tweet Response Headers for %s: %s", username, e.response.headers)
        if e.response.status_code == 429:
            logging.info("Rate Limit Exceeded for /2/tweets for %s", username)

        # Extract user-specific 24-hour limits
        user_limit = e.response.headers.get('x-user-limit-24hour-limit', 'N/A')
        user_remaining = e.response.headers.get('x-user-limit-24hour-remaining', 'N/A')
        user_reset = e.response.headers.get('x-user-limit-24hour-reset', 'N/A')
        logging.info("User 24-Hour Tweet Limit for %s: %s", username, user_limit)
        logging.info("User 24-Hour Tweets Remaining for %s: %s", username, user_remaining)
        logging.info("User 24-Hour Reset (Timestamp) for %s: %s", username, user_reset)
        if user_reset != 'N/A':
            reset_time = datetime.fromtimestamp(int(user_reset), timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')
            logging.info("User 24-Hour Reset (Readable) for %s: %s", username, reset_time)

        # Extract app-specific 24-hour limits
        app_limit = e.response.headers.get('x-app-limit-24hour-limit', 'N/A')
        app_remaining = e.response.headers.get('x-app-limit-24hour-remaining', 'N/A')
        app_reset = e.response.headers.get('x-app-limit-24hour-reset', 'N/A')
        logging.info("App 24-Hour Tweet Limit for %s: %s", username, app_limit)
        logging.info("App 24-Hour Tweets Remaining for %s: %s", username, app_remaining)
        logging.info("App 24-Hour Reset (Timestamp) for %s: %s", username, app_reset)
        if app_reset != 'N/A':
            reset_time = datetime.fromtimestamp(int(app_reset), timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')
            logging.info("App 24-Hour Reset (Readable) for %s: %s", username, reset_time)
        return None
    except Exception as e:
        logging.error("Failed to post test tweet for %s: %s", username, e)
        return None

# Main loop to check rate limits for all authors
if __name__ == "__main__":
    # First pass: Attempt to post for all authors
    successful_tweets = {}
    for username, credentials in X_API_CREDENTIALS.items():
        tweet_id = check_rate_limits_for_author(username, credentials)
        if tweet_id:
            successful_tweets[username] = (tweet_id, credentials)
        logging.info("-" * 50)

    # Delete successful tweets to free up quota
    for username, (tweet_id, credentials) in successful_tweets.items():
        auth = OAuth1(
            credentials["api_key"],
            credentials["api_secret"],
            credentials["access_token"],
            credentials["access_token_secret"]
        )
        delete_tweet(tweet_id, auth)

    # Second pass: Retry for authors that failed
    logging.info("Retrying for authors that initially failed...")
    for username, credentials in X_API_CREDENTIALS.items():
        if username not in successful_tweets:
            check_rate_limits_for_author(username, credentials, retry=True)
        logging.info("-" * 50)
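Note: the deleted script above infers daily posting capacity from the X API v2
x-app-limit-24hour-* response headers. A minimal standalone sketch of that header
parsing, assuming only the header names used above (the parse_daily_limit helper
and the sample values are hypothetical, not part of the repo):

from datetime import datetime, timezone

def parse_daily_limit(headers):
    """Return (limit, remaining, reset_datetime) from x-app-limit-24hour-* headers."""
    limit = headers.get('x-app-limit-24hour-limit')
    remaining = headers.get('x-app-limit-24hour-remaining')
    reset = headers.get('x-app-limit-24hour-reset')
    # The reset header is a Unix timestamp; convert it to an aware UTC datetime
    reset_dt = datetime.fromtimestamp(int(reset), timezone.utc) if reset else None
    return limit, remaining, reset_dt

# Usage with hypothetical header values:
print(parse_daily_limit({
    'x-app-limit-24hour-limit': '17',
    'x-app-limit-24hour-remaining': '3',
    'x-app-limit-24hour-reset': '1735689600',
}))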
@@ -1,236 +0,0 @@ (file deleted)
# foodie_engagement_generator.py

import json
import logging
import random
import signal
import sys
import fcntl
import os
import time
from datetime import datetime, timedelta, timezone
from openai import OpenAI
from foodie_utils import AUTHORS, SUMMARY_MODEL, load_json_file, save_json_file, update_system_activity
from foodie_config import X_API_CREDENTIALS, AUTHOR_BACKGROUNDS_FILE
from dotenv import load_dotenv

load_dotenv()

SCRIPT_NAME = "foodie_engagement_generator"
LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_engagement_generator.lock"
LOG_FILE = "/home/shane/foodie_automator/logs/foodie_engagement_generator.log"
ENGAGEMENT_TWEETS_FILE = "/home/shane/foodie_automator/engagement_tweets.json"
LOG_PRUNE_DAYS = 30
MAX_RETRIES = 3
RETRY_BACKOFF = 2

def setup_logging():
    """Initialize logging with pruning of old logs."""
    try:
        os.makedirs(os.path.dirname(LOG_FILE), exist_ok=True)
        if os.path.exists(LOG_FILE):
            with open(LOG_FILE, 'r') as f:
                lines = f.readlines()
            cutoff = datetime.now(timezone.utc) - timedelta(days=LOG_PRUNE_DAYS)
            pruned_lines = []
            malformed_count = 0
            for line in lines:
                if len(line) < 19 or not line[:19].replace('-', '').replace(':', '').replace(' ', '').isdigit():
                    malformed_count += 1
                    continue
                try:
                    timestamp = datetime.strptime(line[:19], '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone.utc)
                    if timestamp > cutoff:
                        pruned_lines.append(line)
                except ValueError:
                    malformed_count += 1
                    continue
            if malformed_count > 0:
                logging.info(f"Skipped {malformed_count} malformed log lines during pruning")
            with open(LOG_FILE, 'w') as f:
                f.writelines(pruned_lines)

        logging.basicConfig(
            filename=LOG_FILE,
            level=logging.INFO,
            format='%(asctime)s - %(levelname)s - %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
        logging.getLogger().addHandler(console_handler)
        logging.getLogger("openai").setLevel(logging.WARNING)
        logging.info("Logging initialized for foodie_engagement_generator.py")
    except Exception as e:
        print(f"Failed to setup logging: {e}")
        sys.exit(1)

def acquire_lock():
    """Acquire a lock to prevent concurrent runs."""
    os.makedirs(os.path.dirname(LOCK_FILE), exist_ok=True)
    lock_fd = open(LOCK_FILE, 'w')
    try:
        fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        lock_fd.write(str(os.getpid()))
        lock_fd.flush()
        return lock_fd
    except IOError:
        logging.info("Another instance of foodie_engagement_generator.py is running")
        sys.exit(0)

def signal_handler(sig, frame):
    """Handle termination signals gracefully."""
    logging.info("Received termination signal, marking script as stopped...")
    update_system_activity(SCRIPT_NAME, "stopped")
    sys.exit(0)

signal.signal(signal.SIGTERM, signal_handler)
signal.signal(signal.SIGINT, signal_handler)

# Initialize OpenAI client
try:
    client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
    if not os.getenv("OPENAI_API_KEY"):
        logging.error("OPENAI_API_KEY is not set in environment variables")
        raise ValueError("OPENAI_API_KEY is required")
except Exception as e:
    logging.error(f"Failed to initialize OpenAI client: {e}", exc_info=True)
    sys.exit(1)

# Load author backgrounds
try:
    with open(AUTHOR_BACKGROUNDS_FILE, 'r') as f:
        AUTHOR_BACKGROUNDS = json.load(f)
except Exception as e:
    logging.error(f"Failed to load author_backgrounds.json: {e}", exc_info=True)
    sys.exit(1)

def generate_engagement_tweet(author):
    """Generate an engagement tweet using author background themes."""
    credentials = X_API_CREDENTIALS.get(author["username"])
    if not credentials:
        logging.error(f"No X credentials found for {author['username']}")
        return None
    author_handle = credentials["x_username"]

    background = next((bg for bg in AUTHOR_BACKGROUNDS if bg["username"] == author["username"]), {})
    if not background or "engagement_themes" not in background:
        logging.warning(f"No background or engagement themes found for {author['username']}")
        theme = "food trends"
    else:
        theme = random.choice(background["engagement_themes"])

    prompt = (
        f"Generate a concise tweet (under 230 characters) for {author_handle}. "
        f"Create an engaging question or statement about {theme} to spark interaction. "
        f"Include a call to action to follow {author_handle} or like the tweet, and mention InsiderFoodie.com with a link to https://insiderfoodie.com. "
        f"Avoid using the word 'elevate'—use more humanized language like 'level up' or 'bring to life'. "
        f"Do not include emojis, hashtags, or reward-driven incentives (e.g., giveaways)."
    )

    for attempt in range(MAX_RETRIES):
        try:
            response = client.chat.completions.create(
                model=SUMMARY_MODEL,
                messages=[
                    {"role": "system", "content": "You are a social media expert crafting engaging tweets."},
                    {"role": "user", "content": prompt}
                ],
                max_tokens=100,
                temperature=0.7
            )
            tweet = response.choices[0].message.content.strip()
            if len(tweet) > 280:
                tweet = tweet[:277] + "..."
            logging.debug(f"Generated engagement tweet: {tweet}")
            return tweet
        except Exception as e:
            logging.warning(f"Failed to generate engagement tweet for {author['username']} (attempt {attempt + 1}): {e}")
            if attempt < MAX_RETRIES - 1:
                time.sleep(RETRY_BACKOFF * (2 ** attempt))
            else:
                logging.error(f"Failed to generate engagement tweet after {MAX_RETRIES} attempts")
                engagement_templates = [
                    f"What's the most mouthwatering {theme} you've seen this week? Share below and follow {author_handle} for more on InsiderFoodie.com! Link: https://insiderfoodie.com",
                    f"{theme.capitalize()} lovers unite! What's your go-to pick? Tell us and like this tweet for more from {author_handle} on InsiderFoodie.com! Link: https://insiderfoodie.com",
                    f"Ever tried a {theme} that blew your mind? Share your favorites and follow {author_handle} for more on InsiderFoodie.com! Link: https://insiderfoodie.com",
                    f"What {theme} trend are you loving right now? Let us know and like this tweet to keep up with {author_handle} on InsiderFoodie.com! Link: https://insiderfoodie.com"
                ]
                template = random.choice(engagement_templates)
                logging.info(f"Using fallback engagement tweet: {template}")
                return template

def generate_engagement_tweets():
    """Generate engagement tweets for authors and save to file."""
    try:
        logging.info("Starting foodie_engagement_generator.py")
        tweets = []
        timestamp = datetime.now(timezone.utc).isoformat()

        for author in AUTHORS:
            try:
                tweet = generate_engagement_tweet(author)
                if not tweet:
                    logging.error(f"Failed to generate engagement tweet for {author['username']}, skipping")
                    continue

                # Collect tweet data
                tweet_data = {
                    "username": author["username"],
                    "x_handle": X_API_CREDENTIALS[author["username"]]["x_username"],
                    "tweet": tweet,
                    "timestamp": timestamp
                }
                tweets.append(tweet_data)
                logging.info(f"Generated engagement tweet for {author['username']}: {tweet}")
            except Exception as e:
                logging.error(f"Error generating engagement tweet for {author['username']}: {e}", exc_info=True)
                continue

        # Save tweets to file, overwriting any existing content
        if tweets:
            try:
                tweet_data = {
                    "timestamp": timestamp,
                    "tweets": tweets
                }
                save_json_file(ENGAGEMENT_TWEETS_FILE, tweet_data)
                logging.info(f"Saved {len(tweets)} engagement tweets to {ENGAGEMENT_TWEETS_FILE}")
            except Exception as e:
                logging.error(f"Failed to save engagement tweets to {ENGAGEMENT_TWEETS_FILE}: {e}")
        else:
            logging.warning("No engagement tweets generated, nothing to save")

        logging.info("Completed foodie_engagement_generator.py")
        sleep_time = random.randint(82800, 86400)  # ~23–24 hours
        return True, sleep_time
    except Exception as e:
        logging.error(f"Unexpected error in generate_engagement_tweets: {e}", exc_info=True)
        sleep_time = random.randint(82800, 86400)  # ~23–24 hours
        return False, sleep_time

def main():
    """Main function to run the script."""
    lock_fd = None
    try:
        lock_fd = acquire_lock()
        setup_logging()
        update_system_activity(SCRIPT_NAME, "running", os.getpid())  # Record start
        success, sleep_time = generate_engagement_tweets()
        update_system_activity(SCRIPT_NAME, "stopped")  # Record stop
        logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
        return success, sleep_time
    except Exception as e:
        logging.error(f"Fatal error in main: {e}", exc_info=True)
        print(f"Fatal error: {e}")
        update_system_activity(SCRIPT_NAME, "stopped")  # Record stop on error
        sleep_time = random.randint(82800, 86400)  # ~23–24 hours
        logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
        return False, sleep_time
    finally:
        if lock_fd:
            fcntl.flock(lock_fd, fcntl.LOCK_UN)
            lock_fd.close()
            if os.path.exists(LOCK_FILE):
                os.remove(LOCK_FILE)

if __name__ == "__main__":
    success, sleep_time = main()
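Note: acquire_lock/main above implement a single-instance guard with an exclusive,
non-blocking fcntl.flock plus a PID file. A minimal sketch of the same pattern as a
reusable context manager (single_instance and the lock path are illustrative, not
taken from the repo):

import fcntl
import os
import sys
from contextlib import contextmanager

@contextmanager
def single_instance(lock_path):
    """Hold an exclusive, non-blocking flock for the duration of the with-block."""
    lock_dir = os.path.dirname(lock_path)
    if lock_dir:
        os.makedirs(lock_dir, exist_ok=True)
    fd = open(lock_path, 'w')
    try:
        fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        # Another process already holds the lock; mirror the script's behavior
        fd.close()
        sys.exit(0)
    try:
        fd.write(str(os.getpid()))
        fd.flush()
        yield fd
    finally:
        fcntl.flock(fd, fcntl.LOCK_UN)
        fd.close()

# Usage (hypothetical path):
# with single_instance('/tmp/example.lock'):
#     ...  # at most one process runs this block at a time

The flock is tied to the open file descriptor, so the kernel releases it automatically
if the process dies, which is why the stale-PID check in the shell wrapper becomes
optional under this pattern.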
File diff suppressed because it is too large
@@ -1,195 +1,95 @@
 #!/bin/bash
 
+# Directory to monitor
 BASE_DIR="/home/shane/foodie_automator"
-LOG_DIR="$BASE_DIR/logs"
-LOCK_DIR="$BASE_DIR/locks"
-LOG_FILE="$LOG_DIR/manage_scripts.log"
-VENV_PYTHON="$BASE_DIR/venv/bin/python"
 CHECKSUM_FILE="$BASE_DIR/.file_checksum"
-
-mkdir -p "$LOG_DIR" "$LOCK_DIR" || { echo "Error: Failed to create directories"; exit 1; }
+LOG_FILE="$BASE_DIR/manage_scripts.log"
 
+# Log function
 log() {
     echo "$(date '+%Y-%m-%d %H:%M:%S') - $1" >> "$LOG_FILE"
-    echo "$1"
 }
 
+# Calculate checksum of files (excluding logs, JSON files, and venv)
 calculate_checksum() {
     find "$BASE_DIR" -type f \
-        -not -path "$BASE_DIR/logs/*" \
+        -not -path "$BASE_DIR/*.log" \
         -not -path "$BASE_DIR/*.json" \
         -not -path "$BASE_DIR/.file_checksum" \
         -not -path "$BASE_DIR/venv/*" \
-        -not -path "$BASE_DIR/locks/*" \
         -exec sha256sum {} \; | sort | sha256sum | awk '{print $1}'
 }
 
+# Check if scripts are running
 check_running() {
-    local script_name="$1"
-    local lock_file="$LOCK_DIR/${script_name}.lock"
-    if [ -f "$lock_file" ]; then
-        local pid=$(cat "$lock_file")
-        if ps -p "$pid" > /dev/null; then
-            log "$script_name is already running (PID: $pid)"
-            return 0
-        else
-            log "Stale lock file for $script_name, removing"
-            rm -f "$lock_file"
-        fi
-    fi
-    return 1
-}
-
-run_script() {
-    local script="$1"
-    local script_name="${script%.py}"
-    local script_log="$LOG_DIR/${script_name}.log"
-    if check_running "$script_name"; then
-        echo "0" # Skip sleep
-        return 1
-    fi
-    log "Running $script..."
-    "$VENV_PYTHON" "$BASE_DIR/$script" >> "$script_log" 2>&1 &
-    local pid=$!
-    echo "$pid" > "$LOCK_DIR/${script_name}.lock"
-    wait "$pid"
-    local exit_code=$?
-    if [ $exit_code -eq 0 ]; then
-        log "$script completed successfully"
-    else
-        log "$script failed with exit code $exit_code"
-    fi
-    sleep_time=$(grep "sleep_time:" "$script_log" | tail -n 1 | grep -oP 'sleep_time: \K[0-9]+' || echo $((RANDOM % 601 + 1200)))
-    log "$script completed, sleep_time: $sleep_time seconds"
-    rm -f "$LOCK_DIR/${script_name}.lock"
-    echo "$sleep_time"
+    pgrep -f "python3.*foodie_automator" > /dev/null
 }
 
+# Stop scripts
 stop_scripts() {
     log "Stopping scripts..."
-    for script in foodie_automator_rss.py foodie_automator_reddit.py foodie_automator_google.py; do
-        if [ -f "$script" ]; then
-            local script_name="${script%.py}"
-            if pkill -TERM -f "$VENV_PYTHON.*$script_name"; then
-                log "Sent TERM to $script_name"
-                sleep 2
-                pkill -9 -f "$VENV_PYTHON.*$script_name" || true
-            else
-                log "No running $script_name found"
-            fi
-            rm -f "$LOCK_DIR/${script_name}.lock"
-            log "Removed lock file for $script_name"
-        fi
-    done
-    log "Scripts stopped."
+    pkill -TERM -f "python3.*foodie_automator" || true
+    sleep 10
+    pkill -9 -f "python3.*foodie_automator" || true
+    log "Scripts stopped."
 }
 
+# Start scripts
+start_scripts() {
+    log "Starting scripts..."
+    cd "$BASE_DIR"
+    source venv/bin/activate
+    # Find all foodie_automator_*.py scripts and start them
+    for script in foodie_automator_*.py; do
+        if [ -f "$script" ]; then
+            log "Starting $script..."
+            nohup python3 "$script" >> "${script%.py}.log" 2>&1 &
+        fi
+    done
+    log "All scripts started."
+}
+
+# Update dependencies
 update_dependencies() {
     log "Updating dependencies..."
-    cd "$BASE_DIR" || { log "Failed to change to $BASE_DIR"; exit 1; }
+    cd "$BASE_DIR"
+    # Create venv if it doesn't exist
     if [ ! -d "venv" ]; then
         python3 -m venv venv
-        log "Created new virtual environment"
     fi
-    source "$BASE_DIR/venv/bin/activate"
+    source venv/bin/activate
+    pip install --upgrade pip
+    pip install -r requirements.txt || (pip install requests openai beautifulsoup4 feedparser praw duckduckgo_search selenium Pillow pytesseract webdriver-manager && log "Fallback: Installed core dependencies")
     log "Dependencies updated."
 }
 
-if [ "$1" == "stop" ]; then
-    log "Received stop command, stopping all scripts..."
-    stop_scripts
-    for script in foodie_engagement_generator.py foodie_weekly_thread.py; do
-        local script_name="${script%.py}"
-        if pkill -TERM -f "$VENV_PYTHON.*$script_name"; then
-            log "Sent TERM to $script_name"
-            sleep 2
-            pkill -9 -f "$VENV_PYTHON.*$script_name" || true
-        else
-            log "No running $script_name found"
-        fi
-        rm -f "$LOCK_DIR/$script_name.lock"
-        log "Stopped $script_name"
-    done
-    log "All scripts stopped. Reminder: Disable cron jobs (crontab -e)."
-    exit 0
-fi
-
-if [ "$1" == "start" ]; then
-    log "Received start command, starting all scripts..."
-    cd "$BASE_DIR" || { log "Failed to change to $BASE_DIR"; exit 1; }
-    source "$BASE_DIR/venv/bin/activate"
-    if [ -f "$BASE_DIR/.env" ]; then
-        while IFS='=' read -r key value; do
-            if [[ ! -z "$key" && ! "$key" =~ ^# ]]; then
-                export "$key=$value"
-            fi
-        done < <(grep -v '^#' "$BASE_DIR/.env")
-        log ".env variables loaded"
-    else
-        log "Error: .env file not found"
-        exit 1
-    fi
-    for script in foodie_automator_rss.py foodie_automator_reddit.py foodie_automator_google.py; do
-        if [ -f "$script" ]; then
-            sleep_time=$(run_script "$script" | tail -n 1)
-            if [ "$sleep_time" != "0" ]; then
-                log "Sleeping for $sleep_time seconds after $script"
-                sleep "$sleep_time"
-            fi
-        else
-            log "Script $script not found"
-        fi
-    done
-    if [ -f "foodie_engagement_generator.py" ]; then
-        if ! check_running "foodie_engagement_generator"; then
-            log "Running foodie_engagement_generator.py..."
-            "$VENV_PYTHON" "foodie_engagement_generator.py" >> "$LOG_DIR/foodie_engagement_generator.log" 2>&1
-            log "foodie_engagement_generator.py completed"
-        fi
-    fi
-    log "All scripts started. Ensure cron jobs are enabled (crontab -l)."
-    exit 0
-fi
-
+# Main logic
 log "Checking for file changes..."
 CURRENT_CHECKSUM=$(calculate_checksum)
 
 if [ -f "$CHECKSUM_FILE" ]; then
     PREVIOUS_CHECKSUM=$(cat "$CHECKSUM_FILE")
 else
     PREVIOUS_CHECKSUM=""
 fi
 
 if [ "$CURRENT_CHECKSUM" != "$PREVIOUS_CHECKSUM" ]; then
     log "File changes detected. Previous checksum: $PREVIOUS_CHECKSUM, Current checksum: $CURRENT_CHECKSUM"
-    if pgrep -f "$VENV_PYTHON.*foodie_automator" > /dev/null; then
+    # Stop scripts if running
+    if check_running; then
         stop_scripts
     fi
 
+    # Update dependencies
     update_dependencies
 
+    # Start scripts
+    start_scripts
+
+    # Update checksum
     echo "$CURRENT_CHECKSUM" > "$CHECKSUM_FILE"
     log "Checksum updated."
-fi
-
-cd "$BASE_DIR"
-source "$BASE_DIR/venv/bin/activate"
-if [ -f "$BASE_DIR/.env" ]; then
-    while IFS='=' read -r key value; do
-        if [[ ! -z "$key" && ! "$key" =~ ^# ]]; then
-            export "$key=$value"
-        fi
-    done < <(grep -v '^#' "$BASE_DIR/.env")
-    log ".env variables loaded"
 else
-    log "Error: .env file not found"
-    exit 1
+    log "No file changes detected."
 fi
-for script in foodie_automator_rss.py foodie_automator_reddit.py foodie_automator_google.py; do
-    if [ -f "$script" ]; then
-        sleep_time=$(run_script "$script" | tail -n 1)
-        if [ "$sleep_time" != "0" ]; then
-            log "Sleeping for $sleep_time seconds after $script"
-            sleep "$sleep_time"
-        fi
-    else
-        log "Script $script not found"
-    fi
-done
-log "All scripts processed."
-exit 0
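Note: both versions of the watcher decide whether to restart by comparing a checksum
of the whole tree (calculate_checksum), skipping logs, JSON state, the checksum file
itself, and the venv. A rough Python rendering of the same idea, for illustration only
(tree_checksum is hypothetical, and the exclusion list is assumed from the two shell
versions above):

import hashlib
import os

def tree_checksum(base_dir, skip_dirs=('venv', 'locks', 'logs'),
                  skip_suffixes=('.log', '.json')):
    """Hash all file paths and contents under base_dir in a deterministic order."""
    digest = hashlib.sha256()
    for root, dirs, files in os.walk(base_dir):
        # Prune excluded directories in place so os.walk never descends into them
        dirs[:] = sorted(d for d in dirs if d not in skip_dirs)
        for name in sorted(files):
            if name.endswith(skip_suffixes) or name == '.file_checksum':
                continue
            path = os.path.join(root, name)
            digest.update(path.encode())  # include the path, like sha256sum's output lines
            with open(path, 'rb') as f:
                digest.update(f.read())
    return digest.hexdigest()

# Usage: restart only when the tree actually changed
# if tree_checksum('/home/shane/foodie_automator') != previous_checksum: ...

Hashing paths as well as contents means a rename counts as a change, which matches
the shell version's behavior of hashing sha256sum's "digest filename" output lines.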