|
|
|
|
@ -236,43 +236,62 @@ def select_best_persona(interest_score, content=""): |
|
|
|
|
return random.choice(personas) |
|
|
|
|
|
|
|
|
|
def _select_pixabay_hit(query, headers):
    """Query the Pixabay API for *query* and return the first unused hit.

    Returns a tuple ``(img_url, uploader, page_url)`` for the first hit whose
    ``webformatURL`` is present and not already in ``used_images``, or ``None``
    when no such hit exists.  The chosen URL is added to ``used_images`` and
    persisted via ``save_used_images()`` before returning.  Network/HTTP
    errors from ``requests`` propagate to the caller.
    """
    pixabay_url = f"https://pixabay.com/api/?key={PIXABAY_API_KEY}&q={quote(query)}&image_type=photo&per_page=10"
    response = requests.get(pixabay_url, headers=headers, timeout=10)
    response.raise_for_status()
    data = response.json()

    for hit in data.get('hits', []):
        img_url = hit.get('webformatURL')
        if not img_url or img_url in used_images:
            continue
        uploader = hit.get('user', 'Unknown')
        page_url = hit.get('pageURL', img_url)

        # Record the image as used before handing it out so it is never
        # selected twice, even if the caller aborts afterwards.
        used_images.add(img_url)
        save_used_images()
        return img_url, uploader, page_url

    return None


def get_image(search_query):
    """Find a not-yet-used photo for *search_query*.

    Tries Pixabay with the original query first, then retries with the
    generic fallback query ``"food dining"``.  Returns a 4-tuple
    ``(img_url, source, uploader, page_url)`` on success, or
    ``(None, None, None, None)`` when every attempt fails.
    """
    global last_flickr_request_time, flickr_request_count

    headers = {'User-Agent': 'InsiderFoodieBot/1.0 (https://insiderfoodie.com; contact@insiderfoodie.com)'}

    # Flickr rate-limit bookkeeping.
    # NOTE(review): the counter is incremented even though this flow only
    # queries Pixabay — confirm whether it should move next to an actual
    # Flickr call instead.
    reset_flickr_request_count()
    flickr_request_count += 1
    logging.info(f"Flickr request count: {flickr_request_count}/3600")

    # Try Pixabay with the original query
    try:
        found = _select_pixabay_hit(search_query, headers)
        if found is not None:
            img_url, uploader, page_url = found
            logging.info(f"Selected Pixabay image: {img_url} by {uploader} for query '{search_query}'")
            return img_url, "Pixabay", uploader, page_url

        logging.info(f"No valid Pixabay image found for query '{search_query}'. Trying fallback query.")

        # Enforce a 10-second spacing relative to the last Flickr request.
        # NOTE(review): this sleep sits inside the Pixabay path — confirm it
        # belongs here rather than ahead of a real Flickr request.
        time_since_last_request = time.time() - last_flickr_request_time
        if time_since_last_request < 10:
            time.sleep(10 - time_since_last_request)
    except Exception as e:
        logging.warning(f"Pixabay image fetch failed for query '{search_query}': {e}")

    last_flickr_request_time = time.time()

    # Fallback to a generic query
    fallback_query = "food dining"
    try:
        found = _select_pixabay_hit(fallback_query, headers)
        if found is not None:
            img_url, uploader, page_url = found
            logging.info(f"Selected Pixabay fallback image: {img_url} by {uploader} for query '{fallback_query}'")
            return img_url, "Pixabay", uploader, page_url

        logging.warning(f"No valid Pixabay image found for fallback query '{fallback_query}'.")
        # (A redundant re-assignment of `headers` that followed this warning
        # in the original was removed; nothing below reads it.)
    except Exception as e:
        logging.warning(f"Pixabay fallback image fetch failed for query '{fallback_query}': {e}")

    def search_flickr(query, per_page=5):
        """Search Flickr for CC-licensed photos; returns [] on any API error."""
        try:
            photos = flickr_api.Photo.search(
                text=query,
                per_page=per_page,
                sort='relevance',
                safe_search=1,
                media='photos',
                license='4,5,9,10'
            )
            return photos
        except Exception as e:
            logging.warning(f"Flickr API error for query '{query}': {e}")
            return []

    def fetch_photo_by_id(photo_id):
        """Look up a single Flickr photo by id; returns None on failure."""
        try:
            photo = flickr_api.Photo(id=photo_id)
            return photo
        except Exception as e:
            logging.warning(f"Failed to fetch Flickr photo ID {photo_id}: {e}")
            return None

    # NOTE(review): search_flickr / fetch_photo_by_id above are defined but
    # never called anywhere in this function — dead code unless a Flickr
    # branch is planned; consider hoisting them to module level or removing.

    # Ultimate fallback: return None but log clearly
    logging.error(f"All image fetch attempts failed for query '{search_query}'. Returning None.")
    return None, None, None, None
|
|
|
|
|
|
|
|
|
def process_photo(photo): |
|
|
|
|
tags = [tag.text.lower() for tag in photo.getTags()] |
|
|
|
|
|