@@ -37,7 +37,7 @@ import fcntl
 load_dotenv()
 
 # Define constants at the top
-SCRIPT_NAME = "foodie_automator_google" # Added SCRIPT_NAME
+SCRIPT_NAME = "foodie_automator_google"
 POSTED_TITLES_FILE = '/home/shane/foodie_automator/posted_google_titles.json'
 USED_IMAGES_FILE = '/home/shane/foodie_automator/used_images.json'
 EXPIRATION_HOURS = 24
@@ -54,7 +54,7 @@ used_images = set(entry["title"] for entry in used_images_data if "title" in ent
 
 def signal_handler(sig, frame):
     logging.info("Received termination signal, marking script as stopped...")
-    update_system_activity(SCRIPT_NAME, "stopped") # Added to mark as stopped
+    update_system_activity(SCRIPT_NAME, "stopped")
     if is_posting:
         logging.info("Currently posting, will exit after completion.")
     else:
@@ -228,6 +228,7 @@ def fetch_duckduckgo_news_context(trend_title, hours=24):
     for r in results:
         try:
             date_str = r["date"]
+            # Handle both ISO formats with and without timezone
             if '+00:00' in date_str:
                 dt = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S+00:00").replace(tzinfo=timezone.utc)
             else:
@@ -276,7 +277,8 @@ def curate_from_google_trends(posted_titles_data, posted_titles, used_images_dat
 
         if not unique_trends:
             logging.info("No Google Trends data available across regions")
-            return None, None, False
+            sleep_time = random.randint(1200, 1800) # 20–30 minutes
+            return None, None, sleep_time
 
         # Sort trends by search volume in descending order
         unique_trends.sort(key=lambda x: x["search_volume"], reverse=True)
@@ -444,62 +446,16 @@ def curate_from_google_trends(posted_titles_data, posted_titles, used_images_dat
             logging.info(f"Saved image '{image_url}' to {USED_IMAGES_FILE}")
 
             logging.info(f"***** SUCCESS: Posted '{post_data['title']}' (ID: {post_id or 'N/A'}) from Google Trends *****")
-            return post_data, category, True
+            sleep_time = random.randint(1200, 1800) # 20–30 minutes
+            return post_data, category, sleep_time
 
         logging.info("No interesting Google Trend found after attempts")
-        return None, None, False
+        sleep_time = random.randint(1200, 1800) # 20–30 minutes
+        return None, None, sleep_time
     except Exception as e:
         logging.error(f"Unexpected error in curate_from_google_trends: {e}", exc_info=True)
-        return None, None, False
+        sleep_time = random.randint(1200, 1800) # 20–30 minutes
+        return None, None, sleep_time
-
-# System Activity Tracking
-def update_system_activity(script_name, status, pid=None):
-    """Update the system activity JSON file with the script's status."""
-    activity_file = "/home/shane/foodie_automator/system_activity.json"
-    activity_data = []
-
-    # Load existing data
-    if os.path.exists(activity_file):
-        try:
-            with open(activity_file, 'r') as f:
-                activity_data = json.load(f)
-        except json.JSONDecodeError:
-            logging.error("Corrupted system_activity.json, resetting to empty list")
-
-    # Find or create entry for the script
-    script_entry = next((entry for entry in activity_data if entry["script_name"] == script_name), None)
-    if not script_entry:
-        script_entry = {
-            "script_name": script_name,
-            "pid": None,
-            "start_time": None,
-            "stop_time": None,
-            "status": "stopped"
-        }
-        activity_data.append(script_entry)
-
-    # Update the entry
-    if status == "running":
-        script_entry.update({
-            "pid": pid,
-            "start_time": datetime.now(timezone.utc).isoformat(),
-            "stop_time": None,
-            "status": "running"
-        })
-    elif status == "stopped":
-        script_entry.update({
-            "pid": None,
-            "stop_time": datetime.now(timezone.utc).isoformat(),
-            "status": "stopped"
-        })
-
-    # Save updated data
-    try:
-        with open(activity_file, 'w') as f:
-            json.dump(activity_data, f, indent=2)
-        logging.info(f"Updated system activity: {script_name} is {status}")
-    except Exception as e:
-        logging.error(f"Failed to update system_activity.json: {e}")
 
 def run_google_trends_automator():
     lock_fd = None
@@ -512,16 +468,19 @@ def run_google_trends_automator():
         posted_titles = set(entry["title"] for entry in posted_titles_data)
         used_images_data = load_json_file(USED_IMAGES_FILE, IMAGE_EXPIRATION_DAYS)
         used_images = set(entry["title"] for entry in used_images_data if "title" in entry)
-        post_data, category, should_continue = curate_from_google_trends(posted_titles_data, posted_titles, used_images_data, used_images)
+        post_data, category, sleep_time = curate_from_google_trends(posted_titles_data, posted_titles, used_images_data, used_images)
         if not post_data:
             logging.info("No postable Google Trend found")
         logging.info("Completed Google Trends run")
         update_system_activity(SCRIPT_NAME, "stopped") # Record stop
-        return post_data, category, should_continue
+        logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
+        return post_data, category, sleep_time
     except Exception as e:
         logging.error(f"Fatal error in run_google_trends_automator: {e}", exc_info=True)
         update_system_activity(SCRIPT_NAME, "stopped") # Record stop on error
-        return None, None, False
+        sleep_time = random.randint(1200, 1800) # 20–30 minutes
+        logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
+        return None, None, sleep_time
     finally:
         if lock_fd:
             fcntl.flock(lock_fd, fcntl.LOCK_UN)
@@ -530,5 +489,5 @@ def run_google_trends_automator():
 
 if __name__ == "__main__":
     setup_logging()
-    post_data, category, should_continue = run_google_trends_automator()
-    logging.info(f"Run completed, should_continue: {should_continue}")
+    post_data, category, sleep_time = run_google_trends_automator()
+    logging.info(f"Run completed, sleep_time: {sleep_time} seconds")
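
Usage note (not part of the diff): with this change, curate_from_google_trends and run_google_trends_automator return a sleep_time in seconds instead of a should_continue flag. Below is a minimal sketch of a supervising loop that consumes that value; the run_forever name and the foodie_automator_google import path are assumptions for illustration, not code from this change.

# Hypothetical wrapper, assuming the script above is importable as foodie_automator_google
import logging
import time

from foodie_automator_google import run_google_trends_automator, setup_logging

def run_forever():
    # Run one curation pass, then wait for the returned sleep_time (seconds) before the next pass
    setup_logging()
    while True:
        post_data, category, sleep_time = run_google_trends_automator()
        logging.info(f"Sleeping {sleep_time} seconds before the next Google Trends run")
        time.sleep(sleep_time)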