Shane 7 months ago
parent 64d17d5599
commit 5554abdc4a
1. foodie_automator_google.py (4)
2. foodie_automator_reddit.py (2)
3. foodie_automator_rss.py (3)
4. foodie_utils.py (89)

@@ -208,7 +208,7 @@ def curate_from_google_trends(geo_list=['US']):
 print(f"Trying Google Trend: {title} from {source_name}")
 logging.info(f"Trying Google Trend: {title} from {source_name}")
-image_query, relevance_keywords, skip = smart_image_and_filter(title, summary)
+image_query, relevance_keywords, main_topic, skip = smart_image_and_filter(title, summary)
 if skip:
 print(f"Skipping filtered Google Trend: {title}")
 logging.info(f"Skipping filtered Google Trend: {title}")
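The smart_image_and_filter change itself is not part of this view, so how the new main_topic value is derived is not visible here; the curator call sites now unpack four items instead of three, with main_topic in the third position. A minimal sketch of the return shape these call sites expect (the body is hypothetical placeholder logic; note that the prepare_post_data hunk later in this commit still unpacks three items from the same function, so this reflects only what the curator side assumes):

    # Assumed shape only; the real smart_image_and_filter lives in foodie_utils.py and is not shown in this diff.
    def smart_image_and_filter(title, summary):
        text = f"{title} {summary}".lower()
        skip = "recipe" in text                                            # placeholder filter rule
        image_query = title.strip()                                        # placeholder query derivation
        relevance_keywords = [w for w in text.split() if len(w) > 3][:5]
        main_topic = relevance_keywords[0] if relevance_keywords else ""   # the new third value
        return image_query, relevance_keywords, main_topic, skip
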
@@ -250,7 +250,7 @@ def curate_from_google_trends(geo_list=['US']):
 final_summary = insert_link_naturally(final_summary, source_name, link)
-post_data, author, category, image_url, image_source, uploader, pixabay_url = prepare_post_data(final_summary, title)
+post_data, author, category, image_url, image_source, uploader, pixabay_url = prepare_post_data(final_summary, title, main_topic)
 if not post_data:
 attempts += 1
 continue

@@ -266,7 +266,7 @@ def curate_from_reddit():
 print(f"Trying Reddit Post: {title} from {source_name}")
 logging.info(f"Trying Reddit Post: {title} from {source_name}")
-image_query, relevance_keywords, skip = smart_image_and_filter(title, summary)
+image_query, relevance_keywords, main_topic, skip = smart_image_and_filter(title, summary)
 if skip or any(keyword in title.lower() or keyword in raw_title.lower() for keyword in RECIPE_KEYWORDS + ["homemade"]):
 print(f"Skipping filtered Reddit post: {title}")
 logging.info(f"Skipping filtered Reddit post: {title}")
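The Reddit path combines two checks: the skip flag returned by smart_image_and_filter and a keyword scan over both the cleaned and raw titles. The same condition can be read as a small standalone predicate; RECIPE_KEYWORDS is stubbed with example values here because the real list is defined elsewhere in the source:

    # Equivalent reading of the skip condition above; the RECIPE_KEYWORDS values are placeholders.
    RECIPE_KEYWORDS = ["recipe", "how to make", "ingredients"]

    def should_skip_reddit_post(skip: bool, title: str, raw_title: str) -> bool:
        blocked = RECIPE_KEYWORDS + ["homemade"]
        return skip or any(k in title.lower() or k in raw_title.lower() for k in blocked)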

@@ -282,8 +282,9 @@ def curate_from_rss():
 continue
 final_summary = insert_link_naturally(final_summary, source_name, link)
-post_data, author, category, image_url, image_source, uploader, pixabay_url = prepare_post_data(final_summary, title)
+post_data, author, category, image_url, image_source, uploader, pixabay_url = prepare_post_data(final_summary, title, main_topic)
 if not post_data:
+logging.info(f"Post data preparation failed for '{title}'")
 attempts += 1
 continue
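All three curators now pass main_topic as a third positional argument. Because the new prepare_post_data signature in foodie_utils.py (shown in the next hunk) defaults that parameter to None, call sites that omit it keep working. A minimal illustration; the import path and example values are assumptions:

    # Illustrative only: both call forms are valid against prepare_post_data(summary, title, main_topic=None).
    from foodie_utils import prepare_post_data   # assumed import path

    summary, title, main_topic = "Example summary text.", "Example title", "street food"
    updated = prepare_post_data(summary, title, main_topic)   # new call sites in this commit
    legacy = prepare_post_data(summary, title)                # still accepted; main_topic defaults to None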

@@ -1158,42 +1158,63 @@ def select_best_author(summary):
 logging.error(f"Author selection failed: {e}")
 return "owenjohnson"
-def prepare_post_data(final_summary, original_title, context_info=""):
-innovative_title = generate_title_from_summary(final_summary)
-if not innovative_title:
-logging.info(f"Title generation failed for '{original_title}' {context_info}")
-return None, None, None, None, None, None, None
-# Pass innovative_title and final_summary as separate arguments
-search_query, relevance_keywords, _ = generate_image_query(innovative_title, final_summary)
-if not search_query:
-logging.info(f"Image query generation failed for '{innovative_title}' {context_info}")
-return None, None, None, None, None, None, None
-logging.info(f"Fetching Flickr image for query: '{search_query}' {context_info}")
-image_url, image_source, uploader, page_url = get_flickr_image(search_query, relevance_keywords)
-if not image_url:
-logging.info(f"Flickr fetch failed for '{search_query}' - falling back to Pixabay {context_info}")
-# Use the same title and summary for fallback
-image_query, _, _ = generate_image_query(innovative_title, final_summary)
-image_url, image_source, uploader, page_url = get_image(image_query)
+def prepare_post_data(summary, title, main_topic=None):
+try:
+logging.info(f"Preparing post data for summary: {summary[:100]}...")
+prompt = (
+"Generate a concise, engaging title (5-15 words) for this food-related article summary. "
+"The title should be catchy, avoid emojis, and not reproduce the original title verbatim. "
+"Return the title as plain text."
+)
+response = client.chat.completions.create(
+model=LIGHT_TASK_MODEL,
+messages=[
+{"role": "system", "content": prompt},
+{"role": "user", "content": summary}
+],
+max_tokens=50,
+temperature=0.7
+)
+new_title = response.choices[0].message.content.strip()
+logging.info(f"Generated new title: '{new_title}'")
+search_query, relevance_keywords, skip_flag = smart_image_and_filter(new_title, summary)
+if skip_flag:
+logging.info("Summary filtered out during post preparation")
+return None, None, None, None, None, None, None
+image_url, image_source, uploader, page_url = get_flickr_image(search_query, relevance_keywords, main_topic)
 if not image_url:
-logging.info(f"Pixabay fetch failed for title '{innovative_title}' - falling back to summary {context_info}")
-image_query, _, _ = generate_image_query(final_summary, final_summary) # Using summary as both title and summary for fallback
-image_url, image_source, uploader, page_url = get_image(image_query)
-if not image_url:
-logging.info(f"Image fetch failed again for '{original_title}' - proceeding without image {context_info}")
-post_data = {"title": innovative_title, "content": final_summary}
-selected_username = select_best_author(final_summary)
-author = next((a for a in AUTHORS if a["username"] == selected_username), None)
-if not author:
-logging.error(f"Author '{selected_username}' not found in AUTHORS, defaulting to owenjohnson")
-author = {"username": "owenjohnson", "password": "rfjk xhn6 2RPy FuQ9 cGlU K8mC"}
-category = generate_category_from_summary(final_summary)
+image_url, image_source, uploader, page_url = get_image(search_query)
+if not image_url:
+logging.warning("No image found for post, skipping")
+return None, None, None, None, None, None, None
+pixabay_url = page_url if image_source == "Pixabay" else None
+authors = ["Aisha Patel", "Ravi Sharma", "Mei Lin", "Carlos Rivera"]
+author = random.choice(authors)
+categories = ["Food", "Trends", "Eats", "Culture"]
+category = random.choice(categories)
+post_data = {
+"title": new_title,
+"content": summary,
+"status": "publish",
+"author": author,
+"categories": [category]
+}
+logging.info(f"Post data prepared: Title: '{new_title}', Category: {category}, Author: {author}")
-return post_data, author, category, image_url, image_source, uploader, pixabay_url
+return post_data, author, category, image_url, image_source, uploader, page_url
+except Exception as e:
+logging.error(f"Failed to prepare post data: {e}")
+return None, None, None, None, None, None, None
 def save_post_to_recent(post_title, post_url, author_username, timestamp):
 try:
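One detail visible in this hunk: the old return statement hands back pixabay_url, while the new one returns page_url, even though the new code still computes pixabay_url and the curator call sites above still bind the last tuple element to a variable named pixabay_url. A caller-side guard that keeps the old meaning could look like the sketch below; it is an illustration with assumed import path and example values, not something present in the commit:

    # Illustrative guard only; not part of this commit.
    from foodie_utils import prepare_post_data   # assumed import path

    post_data, author, category, image_url, image_source, uploader, page_url = prepare_post_data(
        "Example summary text.", "Example title", "street food"
    )
    pixabay_url = page_url if image_source == "Pixabay" else None   # the rule the old function applied before returning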
