Shane 7 months ago
parent e972714ada
commit 8c7049fa4c
1. foodie_automator_google.py (79)
2. foodie_automator_reddit.py (39)
3. foodie_automator_rss.py (29)
4. foodie_engagement_tweet.py (20)
5. manage_scripts.sh (115)

foodie_automator_google.py

@@ -37,7 +37,7 @@ import fcntl
 load_dotenv()
 # Define constants at the top
-SCRIPT_NAME = "foodie_automator_google"  # Added SCRIPT_NAME
+SCRIPT_NAME = "foodie_automator_google"
 POSTED_TITLES_FILE = '/home/shane/foodie_automator/posted_google_titles.json'
 USED_IMAGES_FILE = '/home/shane/foodie_automator/used_images.json'
 EXPIRATION_HOURS = 24
@@ -54,7 +54,7 @@ used_images = set(entry["title"] for entry in used_images_data if "title" in entry)
 def signal_handler(sig, frame):
     logging.info("Received termination signal, marking script as stopped...")
-    update_system_activity(SCRIPT_NAME, "stopped")  # Added to mark as stopped
+    update_system_activity(SCRIPT_NAME, "stopped")
     if is_posting:
         logging.info("Currently posting, will exit after completion.")
     else:
@@ -228,6 +228,7 @@ def fetch_duckduckgo_news_context(trend_title, hours=24):
     for r in results:
         try:
             date_str = r["date"]
+            # Handle both ISO formats with and without timezone
             if '+00:00' in date_str:
                 dt = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S+00:00").replace(tzinfo=timezone.utc)
             else:
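The comment added in this hunk documents a real parsing quirk: DuckDuckGo result dates arrive either with an explicit "+00:00" offset or as a bare ISO timestamp, and both must become timezone-aware UTC datetimes. A minimal sketch of that normalization; the helper name is illustrative and the bare-ISO format is an assumption, since the hunk only shows the offset branch:

from datetime import datetime, timezone

def parse_result_date(date_str):
    # Illustrative helper; the else-branch format is assumed, as the
    # hunk above only shows the '+00:00' branch.
    if '+00:00' in date_str:
        dt = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S+00:00")
    else:
        dt = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S")
    return dt.replace(tzinfo=timezone.utc)

print(parse_result_date("2025-03-01T08:30:00+00:00"))  # 2025-03-01 08:30:00+00:00
print(parse_result_date("2025-03-01T08:30:00"))        # 2025-03-01 08:30:00+00:00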
@@ -276,7 +277,8 @@ def curate_from_google_trends(posted_titles_data, posted_titles, used_images_data, used_images):
         if not unique_trends:
             logging.info("No Google Trends data available across regions")
-            return None, None, False
+            sleep_time = random.randint(1200, 1800)  # 20–30 minutes
+            return None, None, sleep_time
         # Sort trends by search volume in descending order
         unique_trends.sort(key=lambda x: x["search_volume"], reverse=True)
@@ -444,62 +446,16 @@ def curate_from_google_trends(posted_titles_data, posted_titles, used_images_data, used_images):
                 logging.info(f"Saved image '{image_url}' to {USED_IMAGES_FILE}")
                 logging.info(f"***** SUCCESS: Posted '{post_data['title']}' (ID: {post_id or 'N/A'}) from Google Trends *****")
-                return post_data, category, True
+                sleep_time = random.randint(1200, 1800)  # 20–30 minutes
+                return post_data, category, sleep_time
         logging.info("No interesting Google Trend found after attempts")
-        return None, None, False
+        sleep_time = random.randint(1200, 1800)  # 20–30 minutes
+        return None, None, sleep_time
     except Exception as e:
         logging.error(f"Unexpected error in curate_from_google_trends: {e}", exc_info=True)
-        return None, None, False
+        sleep_time = random.randint(1200, 1800)  # 20–30 minutes
+        return None, None, sleep_time
-
-# System Activity Tracking
-def update_system_activity(script_name, status, pid=None):
-    """Update the system activity JSON file with the script's status."""
-    activity_file = "/home/shane/foodie_automator/system_activity.json"
-    activity_data = []
-    # Load existing data
-    if os.path.exists(activity_file):
-        try:
-            with open(activity_file, 'r') as f:
-                activity_data = json.load(f)
-        except json.JSONDecodeError:
-            logging.error("Corrupted system_activity.json, resetting to empty list")
-    # Find or create entry for the script
-    script_entry = next((entry for entry in activity_data if entry["script_name"] == script_name), None)
-    if not script_entry:
-        script_entry = {
-            "script_name": script_name,
-            "pid": None,
-            "start_time": None,
-            "stop_time": None,
-            "status": "stopped"
-        }
-        activity_data.append(script_entry)
-    # Update the entry
-    if status == "running":
-        script_entry.update({
-            "pid": pid,
-            "start_time": datetime.now(timezone.utc).isoformat(),
-            "stop_time": None,
-            "status": "running"
-        })
-    elif status == "stopped":
-        script_entry.update({
-            "pid": None,
-            "stop_time": datetime.now(timezone.utc).isoformat(),
-            "status": "stopped"
-        })
-    # Save updated data
-    try:
-        with open(activity_file, 'w') as f:
-            json.dump(activity_data, f, indent=2)
-        logging.info(f"Updated system activity: {script_name} is {status}")
-    except Exception as e:
-        logging.error(f"Failed to update system_activity.json: {e}")

 def run_google_trends_automator():
     lock_fd = None
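The update_system_activity helper deleted above is already imported from foodie_utils in foodie_engagement_tweet.py (see that file's import line further down), so the calls that remain here presumably resolve to that shared copy. For reference, a sketch of the record shape the deleted body maintained in system_activity.json; the values are illustrative:

import json
from datetime import datetime, timezone

# One entry per script; system_activity.json holds a list of these.
entry = {
    "script_name": "foodie_automator_google",
    "pid": 12345,                                        # set while "running", None when stopped
    "start_time": datetime.now(timezone.utc).isoformat(),
    "stop_time": None,                                   # filled when status flips to "stopped"
    "status": "running",
}
print(json.dumps([entry], indent=2))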
@@ -512,16 +468,19 @@ def run_google_trends_automator():
         posted_titles = set(entry["title"] for entry in posted_titles_data)
         used_images_data = load_json_file(USED_IMAGES_FILE, IMAGE_EXPIRATION_DAYS)
         used_images = set(entry["title"] for entry in used_images_data if "title" in entry)
-        post_data, category, should_continue = curate_from_google_trends(posted_titles_data, posted_titles, used_images_data, used_images)
+        post_data, category, sleep_time = curate_from_google_trends(posted_titles_data, posted_titles, used_images_data, used_images)
         if not post_data:
             logging.info("No postable Google Trend found")
         logging.info("Completed Google Trends run")
         update_system_activity(SCRIPT_NAME, "stopped")  # Record stop
-        return post_data, category, should_continue
+        logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
+        return post_data, category, sleep_time
     except Exception as e:
         logging.error(f"Fatal error in run_google_trends_automator: {e}", exc_info=True)
         update_system_activity(SCRIPT_NAME, "stopped")  # Record stop on error
-        return None, None, False
+        sleep_time = random.randint(1200, 1800)  # 20–30 minutes
+        logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
+        return None, None, sleep_time
     finally:
         if lock_fd:
             fcntl.flock(lock_fd, fcntl.LOCK_UN)
@@ -530,5 +489,5 @@ def run_google_trends_automator():
 if __name__ == "__main__":
     setup_logging()
-    post_data, category, should_continue = run_google_trends_automator()
-    logging.info(f"Run completed, should_continue: {should_continue}")
+    post_data, category, sleep_time = run_google_trends_automator()
+    logging.info(f"Run completed, sleep_time: {sleep_time} seconds")

foodie_automator_reddit.py

@@ -42,7 +42,7 @@ LOCK_FILE = "/home/shane/foodie_automator/locks/foodie_automator_reddit.lock"
 def signal_handler(sig, frame):
     logging.info("Received termination signal, marking script as stopped...")
-    update_system_activity(SCRIPT_NAME, "stopped")  # Added to mark as stopped
+    update_system_activity(SCRIPT_NAME, "stopped")
     if is_posting:
         logging.info("Currently posting, will exit after completion.")
     else:
@@ -242,7 +242,17 @@ def fetch_reddit_posts():
         client_secret=REDDIT_CLIENT_SECRET,
         user_agent=REDDIT_USER_AGENT
     )
-    feeds = ['FoodPorn', 'restaurant', 'FoodIndustry', 'food']
+    feeds = [
+        "food",
+        "FoodPorn",
+        "spicy",
+        "KoreanFood",
+        "JapaneseFood",
+        "DessertPorn",
+        "ChineseFood",
+        "IndianFood"
+    ]
     articles = []
     cutoff_date = datetime.now(timezone.utc) - timedelta(hours=EXPIRATION_HOURS)
@@ -298,7 +308,8 @@ def curate_from_reddit(posted_titles_data, posted_titles, used_images_data, used_images):
         posts = fetch_reddit_posts()
         if not posts:
             logging.info("No Reddit posts available")
-            return None, None, False
+            sleep_time = random.randint(1200, 1800)  # 20–30 minutes
+            return None, None, sleep_time
         attempts = 0
         max_attempts = 10
@@ -466,13 +477,16 @@ def curate_from_reddit(posted_titles_data, posted_titles, used_images_data, used_images):
                 logging.info(f"Saved image '{image_url}' to {USED_IMAGES_FILE}")
                 logging.info(f"***** SUCCESS: Posted '{post_data['title']}' (ID: {post_id or 'N/A'}) from Reddit *****")
-                return post_data, category, True
+                sleep_time = random.randint(1200, 1800)  # 20–30 minutes
+                return post_data, category, sleep_time
         logging.info("No interesting Reddit post found after attempts")
-        return None, None, False
+        sleep_time = random.randint(1200, 1800)  # 20–30 minutes
+        return None, None, sleep_time
     except Exception as e:
         logging.error(f"Unexpected error in curate_from_reddit: {e}", exc_info=True)
-        return None, None, False
+        sleep_time = random.randint(1200, 1800)  # 20–30 minutes
+        return None, None, sleep_time

 def run_reddit_automator():
     lock_fd = None
@@ -485,16 +499,19 @@ def run_reddit_automator():
         posted_titles = set(entry["title"] for entry in posted_titles_data)
         used_images_data = load_json_file(USED_IMAGES_FILE, IMAGE_EXPIRATION_DAYS)
         used_images = set(entry["title"] for entry in used_images_data if "title" in entry)
-        post_data, category, should_continue = curate_from_reddit(posted_titles_data, posted_titles, used_images_data, used_images)
+        post_data, category, sleep_time = curate_from_reddit(posted_titles_data, posted_titles, used_images_data, used_images)
         if not post_data:
             logging.info("No postable Reddit article found")
         logging.info("Completed Reddit run")
         update_system_activity(SCRIPT_NAME, "stopped")  # Record stop
-        return post_data, category, should_continue
+        logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
+        return post_data, category, sleep_time
     except Exception as e:
         logging.error(f"Fatal error in run_reddit_automator: {e}", exc_info=True)
         update_system_activity(SCRIPT_NAME, "stopped")  # Record stop on error
-        return None, None, False
+        sleep_time = random.randint(1200, 1800)  # 20–30 minutes
+        logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
+        return None, None, sleep_time
     finally:
         if lock_fd:
             fcntl.flock(lock_fd, fcntl.LOCK_UN)
@@ -503,5 +520,5 @@ def run_reddit_automator():
 if __name__ == "__main__":
     setup_logging()
-    post_data, category, should_continue = run_reddit_automator()
-    logging.info(f"Run completed, should_continue: {should_continue}")
+    post_data, category, sleep_time = run_reddit_automator()
+    logging.info(f"Run completed, sleep_time: {sleep_time} seconds")

foodie_automator_rss.py

@@ -261,7 +261,8 @@ def curate_from_rss(posted_titles_data, posted_titles, used_images_data, used_images):
         articles = fetch_rss_feeds()
         if not articles:
             logging.info("No RSS articles available")
-            return None, None, random.randint(600, 1800)
+            sleep_time = random.randint(1200, 1800)  # 20–30 minutes
+            return None, None, sleep_time
         attempts = 0
         max_attempts = 10
@@ -419,8 +420,7 @@ def curate_from_rss(posted_titles_data, posted_titles, used_images_data, used_images):
                 logging.info(f"Saved image '{image_url}' to {USED_IMAGES_FILE}")
                 logging.info(f"***** SUCCESS: Posted '{post_data['title']}' (ID: {post_id or 'N/A'}) from RSS *****")
-                # Sleep for 20 to 30 minutes (1200 to 1800 seconds)
-                sleep_time = random.randint(1200, 1800)
+                sleep_time = random.randint(1200, 1800)  # 20–30 minutes
                 return post_data, category, sleep_time
     except Exception as e:
@@ -439,13 +439,11 @@ def curate_from_rss(posted_titles_data, posted_titles, used_images_data, used_images):
                 is_posting = False
         logging.info("No interesting RSS article found after attempts")
-        # Sleep for 20 to 30 minutes (1200 to 1800 seconds)
-        sleep_time = random.randint(1200, 1800)
+        sleep_time = random.randint(1200, 1800)  # 20–30 minutes
         return None, None, sleep_time
     except Exception as e:
         logging.error(f"Unexpected error in curate_from_rss: {e}", exc_info=True)
-        # Sleep for 20 to 30 minutes (1200 to 1800 seconds)
-        sleep_time = random.randint(1200, 1800)
+        sleep_time = random.randint(1200, 1800)  # 20–30 minutes
         return None, None, sleep_time

 def run_rss_automator():
@@ -454,13 +452,23 @@ def run_rss_automator():
         lock_fd = acquire_lock()
         update_system_activity(SCRIPT_NAME, "running", os.getpid())  # Record start
         logging.info("***** RSS Automator Launched *****")
-        # ... (rest of the function) ...
+        posted_titles_data = load_json_file(POSTED_TITLES_FILE, EXPIRATION_HOURS)
+        posted_titles = set(entry["title"] for entry in posted_titles_data)
+        used_images_data = load_json_file(USED_IMAGES_FILE, IMAGE_EXPIRATION_DAYS)
+        used_images = set(entry["title"] for entry in used_images_data if "title" in entry)
+        post_data, category, sleep_time = curate_from_rss(posted_titles_data, posted_titles, used_images_data, used_images)
+        if not post_data:
+            logging.info("No postable RSS article found")
+        logging.info("Completed RSS run")
         update_system_activity(SCRIPT_NAME, "stopped")  # Record stop
+        logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
         return post_data, category, sleep_time
     except Exception as e:
         logging.error(f"Fatal error in run_rss_automator: {e}", exc_info=True)
         update_system_activity(SCRIPT_NAME, "stopped")  # Record stop on error
-        return None, None, random.randint(600, 1800)
+        sleep_time = random.randint(1200, 1800)  # Fixed to 20–30 minutes
+        logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
+        return None, None, sleep_time
     finally:
         if lock_fd:
             fcntl.flock(lock_fd, fcntl.LOCK_UN)
@@ -468,4 +476,5 @@ def run_rss_automator():
         os.remove(LOCK_FILE) if os.path.exists(LOCK_FILE) else None

 if __name__ == "__main__":
-    run_rss_automator()
+    post_data, category, sleep_time = run_rss_automator()
+    logging.info(f"Run completed, sleep_time: {sleep_time} seconds")

foodie_engagement_tweet.py

@@ -6,6 +6,7 @@ import signal
 import sys
 import fcntl
 import os
+import time
 from datetime import datetime, timedelta, timezone
 from openai import OpenAI
 from foodie_utils import post_tweet, AUTHORS, SUMMARY_MODEL, check_author_rate_limit, load_json_file, update_system_activity
@@ -118,7 +119,7 @@ def generate_engagement_tweet(author):
     theme = random.choice(background["engagement_themes"])
     prompt = (
-        f"Generate a concise tweet (under 280 characters) for {author_handle}. "
+        f"Generate a concise tweet (under 230 characters) for {author_handle}. "
         f"Create an engaging question or statement about {theme} to spark interaction. "
         f"Include a call to action to follow {author_handle} or like the tweet, and mention InsiderFoodie.com with a link to https://insiderfoodie.com. "
         f"Avoid using the word 'elevate'—use more humanized language like 'level up' or 'bring to life'. "
@@ -161,6 +162,7 @@ def post_engagement_tweet():
     """Post engagement tweets for authors daily."""
     try:
         logging.info("Starting foodie_engagement_tweet.py")
+        posted = False
         for author in AUTHORS:
             # Check if the author can post before generating the tweet
@@ -179,6 +181,7 @@ def post_engagement_tweet():
                 logging.info(f"Posting engagement tweet for {author['username']}: {tweet}")
                 if post_tweet(author, tweet):
                     logging.info(f"Successfully posted engagement tweet for {author['username']}")
+                    posted = True
                 else:
                     logging.warning(f"Failed to post engagement tweet for {author['username']}")
             except Exception as e:
@@ -186,8 +189,12 @@ def post_engagement_tweet():
                 continue
         logging.info("Completed foodie_engagement_tweet.py")
+        sleep_time = random.randint(1200, 1800)  # 20–30 minutes
+        return posted, sleep_time
     except Exception as e:
         logging.error(f"Unexpected error in post_engagement_tweet: {e}", exc_info=True)
+        sleep_time = random.randint(1200, 1800)  # 20–30 minutes
+        return False, sleep_time

 def main():
     """Main function to run the script."""
@@ -196,14 +203,17 @@ def main():
         lock_fd = acquire_lock()
         setup_logging()
         update_system_activity(SCRIPT_NAME, "running", os.getpid())  # Record start
-        post_engagement_tweet()
+        posted, sleep_time = post_engagement_tweet()
         update_system_activity(SCRIPT_NAME, "stopped")  # Record stop
-        sys.exit(0)
+        logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
+        return posted, sleep_time
     except Exception as e:
         logging.error(f"Fatal error in main: {e}", exc_info=True)
         print(f"Fatal error: {e}")
         update_system_activity(SCRIPT_NAME, "stopped")  # Record stop on error
-        sys.exit(1)
+        sleep_time = random.randint(1200, 1800)  # 20–30 minutes
+        logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
+        return False, sleep_time
     finally:
         if lock_fd:
             fcntl.flock(lock_fd, fcntl.LOCK_UN)
@@ -211,4 +221,4 @@ def main():
         os.remove(LOCK_FILE) if os.path.exists(LOCK_FILE) else None

 if __name__ == "__main__":
-    main()
+    posted, sleep_time = main()

manage_scripts.sh

@@ -40,21 +40,21 @@ check_running() {
     return 1
 }

-# Create lock file
-create_lock() {
-    local script_name="$1"
-    local lock_file="$LOCK_DIR/${script_name}.lock"
-    mkdir -p "$LOCK_DIR"
-    echo $$ > "$lock_file"
-    log "Created lock file for $script_name (PID: $$)"
-}
-
-# Remove lock file
-remove_lock() {
-    local script_name="$1"
-    local lock_file="$LOCK_DIR/${script_name}.lock"
-    rm -f "$lock_file"
-    log "Removed lock file for $script_name"
-}
+# Run a script and extract sleep_time
+run_script() {
+    local script="$1"
+    local script_name="${script%.py}"
+    local script_log="$BASE_DIR/logs/${script_name}.log"
+    if check_running "$script_name"; then
+        return 1
+    fi
+    log "Running $script..."
+    # Run script and capture output
+    "$VENV_PYTHON" "$script" >> "$script_log" 2>&1
+    # Extract sleep_time from the last log line
+    sleep_time=$(tail -n 1 "$script_log" | grep -oP 'sleep_time: \K[0-9]+' || echo $((RANDOM % 601 + 1200)))
+    log "$script completed, sleep_time: $sleep_time seconds"
+    echo "$sleep_time"
+}

 # Stop scripts
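run_script() couples the shell manager to the Python scripts through their final log line, "Run completed, sleep_time: N seconds", which each automator now emits before returning. A sketch of that extraction expressed in Python, assuming only that log contract; the fallback range mirrors the script's $((RANDOM % 601 + 1200)), i.e. 1200-1800 seconds:

import random
import re

def extract_sleep_time(last_log_line):
    # Equivalent of: grep -oP 'sleep_time: \K[0-9]+' on the tail line
    m = re.search(r"sleep_time: (\d+)", last_log_line)
    return int(m.group(1)) if m else random.randint(1200, 1800)

print(extract_sleep_time("Run completed, sleep_time: 1423 seconds"))  # 1423
print(1200 <= extract_sleep_time("garbled line") <= 1800)             # True (fallback path)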
@@ -71,74 +71,22 @@ stop_scripts() {
         if [ -f "$script" ] && [ "$script" != "foodie_weekly_thread.py" ] && [ "$script" != "foodie_engagement_tweet.py" ]; then
             local script_name="${script%.py}"
             pkill -9 -f "$VENV_PYTHON.*$script_name" || true
-            remove_lock "$script_name"
+            rm -f "$LOCK_DIR/${script_name}.lock"
+            log "Removed lock file for $script_name"
         fi
     done
     log "Scripts stopped."
 }
-# Start scripts
-start_scripts() {
-    log "Starting scripts..."
-    cd "$BASE_DIR" || { log "Failed to change to $BASE_DIR"; exit 1; }
-    # Source virtual environment
-    if [ -f "$BASE_DIR/venv/bin/activate" ]; then
-        source "$BASE_DIR/venv/bin/activate"
-    else
-        log "Error: Virtual environment not found at $BASE_DIR/venv"
-        exit 1
-    fi
-    # Load .env variables
-    if [ -f "$BASE_DIR/.env" ]; then
-        export $(grep -v '^#' "$BASE_DIR/.env" | xargs)
-        log ".env variables loaded"
-    else
-        log "Error: .env file not found at $BASE_DIR/.env"
-        exit 1
-    fi
-    # Find and start all foodie_automator_*.py scripts (excluding weekly/engagement)
-    for script in foodie_automator_*.py; do
-        if [ -f "$script" ] && [ "$script" != "foodie_weekly_thread.py" ] && [ "$script" != "foodie_engagement_tweet.py" ]; then
-            local script_name="${script%.py}"
-            if ! check_running "$script_name"; then
-                log "Starting $script..."
-                create_lock "$script_name"
-                nohup "$VENV_PYTHON" "$script" >> "$BASE_DIR/logs/${script_name}.log" 2>&1 &
-                if [ $? -eq 0 ]; then
-                    log "$script started successfully"
-                else
-                    log "Failed to start $script"
-                    remove_lock "$script_name"
-                fi
-            fi
-        fi
-    done
-    log "All scripts started."
-}

 # Update dependencies
 update_dependencies() {
     log "Updating dependencies..."
     cd "$BASE_DIR" || { log "Failed to change to $BASE_DIR"; exit 1; }
-    # Create venv if it doesn't exist
     if [ ! -d "venv" ]; then
         python3 -m venv venv
         log "Created new virtual environment"
     fi
-
-    # Source virtual environment
-    if [ -f "$BASE_DIR/venv/bin/activate" ]; then
-        source "$BASE_DIR/venv/bin/activate"
-    else
-        log "Error: Virtual environment not found at $BASE_DIR/venv"
-        exit 1
-    fi
-
-    # Update pip and install requirements
+    source "$BASE_DIR/venv/bin/activate"
     "$VENV_PYTHON" -m pip install --upgrade pip
     if [ -f "requirements.txt" ]; then
         "$VENV_PYTHON" -m pip install -r requirements.txt || {
@@ -174,14 +122,33 @@ if [ "$CURRENT_CHECKSUM" != "$PREVIOUS_CHECKSUM" ]; then
     # Update dependencies
     update_dependencies

-    # Start scripts
-    start_scripts

     # Update checksum
     echo "$CURRENT_CHECKSUM" > "$CHECKSUM_FILE"
     log "Checksum updated."
-else
-    log "No file changes detected."
 fi
+
+# Run scripts sequentially if not running
+cd "$BASE_DIR" || { log "Failed to change to $BASE_DIR"; exit 1; }
+source "$BASE_DIR/venv/bin/activate"
+if [ -f "$BASE_DIR/.env" ]; then
+    export $(grep -v '^#' "$BASE_DIR/.env" | xargs)
+    log ".env variables loaded"
+else
+    log "Error: .env file not found at $BASE_DIR/.env"
+    exit 1
+fi
+
+for script in foodie_automator_rss.py foodie_automator_reddit.py foodie_automator_google.py; do
+    if [ -f "$script" ]; then
+        sleep_time=$(run_script "$script")
+        if [ -n "$sleep_time" ]; then
+            log "Sleeping for $sleep_time seconds after $script"
+            sleep "$sleep_time"
+        fi
+    else
+        log "Script $script not found"
+    fi
+done
+log "All scripts processed."
 exit 0
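Taken together, the commit replaces the boolean should_continue contract with a uniform (post_data, category, sleep_time) return across the automators, and the manager now runs them one after another instead of backgrounding them with nohup. A minimal sketch of that control flow under those assumptions; run_one is a hypothetical stand-in for launching a real script:

import random
import time

def run_one(script):
    # Stand-in for "$VENV_PYTHON" "$script"; a real run ends by logging
    # "Run completed, sleep_time: N seconds", which the manager parses.
    print(f"Running {script}...")
    return random.randint(1200, 1800)

for script in ["foodie_automator_rss.py",
               "foodie_automator_reddit.py",
               "foodie_automator_google.py"]:
    sleep_time = run_one(script)
    print(f"Sleeping for {sleep_time} seconds after {script}")
    time.sleep(min(sleep_time, 1))  # capped here for demo; the manager sleeps the full interval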