|
|
|
|
@@ -20,21 +20,20 @@ from selenium.common.exceptions import TimeoutException |
|
|
|
|
from duckduckgo_search import DDGS |
|
|
|
|
from foodie_config import ( |
|
|
|
|
AUTHORS, RECIPE_KEYWORDS, PROMO_KEYWORDS, HOME_KEYWORDS, PRODUCT_KEYWORDS, |
|
|
|
|
PERSONA_CONFIGS, CATEGORIES, CTAS, get_clean_source_name, X_API_CREDENTIALS |
|
|
|
|
PERSONA_CONFIGS, CATEGORIES, get_clean_source_name, X_API_CREDENTIALS |
|
|
|
|
) |
|
|
|
|
from foodie_utils import ( |
|
|
|
|
load_json_file, save_json_file, get_image, generate_image_query, |
|
|
|
|
upload_image_to_wp, select_best_persona, determine_paragraph_count, |
|
|
|
|
is_interesting, generate_title_from_summary, summarize_with_gpt4o, |
|
|
|
|
generate_category_from_summary, post_to_wp, prepare_post_data, |
|
|
|
|
smart_image_and_filter, insert_link_naturally, get_flickr_image # Updated function name |
|
|
|
|
smart_image_and_filter, insert_link_naturally, get_flickr_image |
|
|
|
|
) |
|
|
|
|
from foodie_hooks import get_dynamic_hook, select_best_cta |
|
|
|
|
from foodie_hooks import get_dynamic_hook, select_best_cta, get_viral_share_prompt # Added import |
|
|
|
|
from dotenv import load_dotenv |
|
|
|
|
|
|
|
|
|
load_dotenv() |
|
|
|
|
|
|
|
|
|
# Flag to indicate if we're in the middle of posting |
|
|
|
|
is_posting = False |
|
|
|
|
|
|
|
|
|
def signal_handler(sig, frame): |
|
|
|
|
@@ -179,7 +178,6 @@ def fetch_duckduckgo_news_context(trend_title, hours=24): |
|
|
|
|
return trend_title |
|
|
|
|
|
|
|
|
|
def curate_from_google_trends(geo_list=['US']): |
|
|
|
|
# Fetch Google Trends data for each geo |
|
|
|
|
all_trends = [] |
|
|
|
|
for geo in geo_list: |
|
|
|
|
trends = scrape_google_trends(geo=geo) |
|
|
|
|
@@ -189,7 +187,7 @@ def curate_from_google_trends(geo_list=['US']): |
|
|
|
|
if not all_trends: |
|
|
|
|
print("No Google Trends data available") |
|
|
|
|
logging.info("No Google Trends data available") |
|
|
|
|
return None, None, random.randint(600, 1800) # Return a default sleep time |
|
|
|
|
return None, None, random.randint(600, 1800) |
|
|
|
|
|
|
|
|
|
attempts = 0 |
|
|
|
|
max_attempts = 10 |
|
|
|
|
@@ -210,7 +208,6 @@ def curate_from_google_trends(geo_list=['US']): |
|
|
|
|
print(f"Trying Google Trend: {title} from {source_name}") |
|
|
|
|
logging.info(f"Trying Google Trend: {title} from {source_name}") |
|
|
|
|
|
|
|
|
|
# Check if the trend should be filtered out |
|
|
|
|
image_query, relevance_keywords, skip = smart_image_and_filter(title, summary) |
|
|
|
|
if skip: |
|
|
|
|
print(f"Skipping filtered Google Trend: {title}") |
|
|
|
|
@@ -218,7 +215,6 @@ def curate_from_google_trends(geo_list=['US']): |
|
|
|
|
attempts += 1 |
|
|
|
|
continue |
|
|
|
|
|
|
|
|
|
# Calculate interest score |
|
|
|
|
scoring_content = f"{title}\n\n{summary}" |
|
|
|
|
interest_score = is_interesting(scoring_content) |
|
|
|
|
logging.info(f"Interest score for '{title}': {interest_score}") |
|
|
|
|
@@ -228,7 +224,6 @@ def curate_from_google_trends(geo_list=['US']): |
|
|
|
|
attempts += 1 |
|
|
|
|
continue |
|
|
|
|
|
|
|
|
|
# Summarize the trend |
|
|
|
|
num_paragraphs = determine_paragraph_count(interest_score) |
|
|
|
|
extra_prompt = ( |
|
|
|
|
f"Generate exactly {num_paragraphs} paragraphs.\n" |
|
|
|
|
@@ -252,24 +247,27 @@ |
|
|
|
|
|
|
|
|
|
final_summary = insert_link_naturally(final_summary, source_name, link) |
|
|
|
|
|
|
|
|
|
# Prepare post data |
|
|
|
|
post_data, author, category, image_url, image_source, uploader, pixabay_url = prepare_post_data(final_summary, title) |
|
|
|
|
if not post_data: |
|
|
|
|
attempts += 1 |
|
|
|
|
continue |
|
|
|
|
|
|
|
|
|
# Fetch image |
|
|
|
|
image_url, image_source, uploader, page_url = get_flickr_image(image_query, relevance_keywords) |
|
|
|
|
if not image_url: |
|
|
|
|
image_url, image_source, uploader, page_url = get_image(image_query) |
|
|
|
|
|
|
|
|
|
# Generate hooks and initial CTA |
|
|
|
|
hook = get_dynamic_hook(post_data["title"]).strip() |
|
|
|
|
cta = select_best_cta(post_data["title"], final_summary, post_url=None) |
|
|
|
|
|
|
|
|
|
post_data["content"] = f"{final_summary}\n\n{cta}" |
|
|
|
|
# Generate viral share prompt |
|
|
|
|
share_prompt = get_viral_share_prompt(post_data["title"], final_summary) |
|
|
|
|
share_links_template = ( |
|
|
|
|
f'<p>{share_prompt} ' |
|
|
|
|
f'<a href="https://x.com/intent/tweet?url={{post_url}}&text={{share_text}}" target="_blank"><i class="tsi tsi-twitter"></i></a> ' |
|
|
|
|
f'<a href="https://www.facebook.com/sharer/sharer.php?u={{post_url}}" target="_blank"><i class="tsi tsi-facebook"></i></a></p>' |
|
|
|
|
) |
|
|
|
|
post_data["content"] = f"{final_summary}\n\n{cta}\n\n{share_links_template}" |
|
|
|
|
|
|
|
|
|
# Post to WordPress and tweet |
|
|
|
|
global is_posting |
|
|
|
|
is_posting = True |
|
|
|
|
try: |
|
|
|
|
@@ -284,14 +282,18 @@ def curate_from_google_trends(geo_list=['US']): |
|
|
|
|
uploader=uploader, |
|
|
|
|
pixabay_url=pixabay_url, |
|
|
|
|
interest_score=interest_score, |
|
|
|
|
should_post_tweet=True # Post the X tweet on the first call |
|
|
|
|
should_post_tweet=True |
|
|
|
|
) |
|
|
|
|
finally: |
|
|
|
|
is_posting = False |
|
|
|
|
|
|
|
|
|
if post_id: |
|
|
|
|
share_text = f"Check out this foodie gem! {post_data['title']}" |
|
|
|
|
share_text_encoded = quote(share_text) |
|
|
|
|
post_url_encoded = quote(post_url) |
|
|
|
|
share_links = share_links_template.format(post_url=post_url_encoded, share_text=share_text_encoded) |
|
|
|
|
cta = select_best_cta(post_data["title"], final_summary, post_url=post_url) |
|
|
|
|
post_data["content"] = f"{final_summary}\n\n{cta}" |
|
|
|
|
post_data["content"] = f"{final_summary}\n\n{cta}\n\n{share_links}" |
|
|
|
|
is_posting = True |
|
|
|
|
try: |
|
|
|
|
post_to_wp( |
|
|
|
|
@@ -306,7 +308,7 @@ def curate_from_google_trends(geo_list=['US']): |
|
|
|
|
pixabay_url=pixabay_url, |
|
|
|
|
interest_score=interest_score, |
|
|
|
|
post_id=post_id, |
|
|
|
|
should_post_tweet=False # Skip X tweet on the update call |
|
|
|
|
should_post_tweet=False |
|
|
|
|
) |
|
|
|
|
finally: |
|
|
|
|
is_posting = False |
|
|
|
|
@@ -337,7 +339,7 @@ def run_google_trends_automator(): |
|
|
|
|
geo_list = ['US', 'GB', 'AU'] |
|
|
|
|
post_data, category, sleep_time = curate_from_google_trends(geo_list=geo_list) |
|
|
|
|
if sleep_time is None: |
|
|
|
|
sleep_time = random.randint(600, 1800) # Fallback sleep time |
|
|
|
|
sleep_time = random.randint(600, 1800) |
|
|
|
|
print(f"Sleeping for {sleep_time}s") |
|
|
|
|
logging.info(f"Completed run with sleep time: {sleep_time} seconds") |
|
|
|
|
time.sleep(sleep_time) |
|
|
|
|
|