|
|
|
@@ -355,6 +355,7 @@ def generate_image_query(title, summary):
 
 def smart_image_and_filter(title, summary):
     try:
+        logging.info(f"Processing title: raw_title='{title}', summary='{summary[:100]}...'")
         content = f"{title}\n\n{summary}"
 
         prompt = (
@@ -375,15 +376,18 @@ def smart_image_and_filter(title, summary):
             max_tokens=100
         )
 
         raw_result = response.choices[0].message.content.strip()
-        logging.debug(f"Raw GPT response: '{raw_result}'")
+        logging.debug(f"Raw GPT smart image/filter response: '{raw_result}'")
         cleaned_result = re.sub(r'```json\s*|\s*```', '', raw_result).strip()
         fixed_result = re.sub(r"(?<!\\)'(?=\s*[\w\s]*\])|(?<=\[|\{|\s)'|'(?=\s*[\]\},:])|(?<=\w)'(?=\s*:)", '"', cleaned_result)
 
         try:
             result = json.loads(fixed_result)
-        except json.JSONDecodeError as e:
-            logging.warning(f"JSON parsing failed: {e}, raw: '{fixed_result}'. Using fallback.")
+            if not isinstance(result, dict) or "image_query" not in result or "relevance" not in result or "action" not in result:
+                logging.warning(f"Invalid GPT response format: {result}, checking action before fallback")
+                if isinstance(result, dict) and result.get("action") == "SKIP":
+                    logging.info(f"Respecting AI SKIP action for '{title}'")
+                    return extract_main_topic(title.lower() + " " + summary.lower()), ["food"], "food", True
                 main_topic = extract_main_topic(title.lower() + " " + summary.lower())
                 skip_flag = (
                     "homemade" in title.lower() or
@@ -395,9 +399,8 @@ def smart_image_and_filter(title, summary):
                     f"homemade_in_summary={'homemade' in summary.lower()}, "
                     f"recipe_keywords={any(kw in title.lower() or kw in summary.lower() for kw in RECIPE_KEYWORDS)}")
                 return main_topic, [main_topic, "food"], main_topic, skip_flag
-
-        if not isinstance(result, dict) or "image_query" not in result or "relevance" not in result or "action" not in result:
-            logging.warning(f"Invalid GPT response format: {result}, using fallback")
+        except json.JSONDecodeError as e:
+            logging.warning(f"JSON parsing failed: {e}, raw: '{fixed_result}'. Using fallback.")
             main_topic = extract_main_topic(title.lower() + " " + summary.lower())
             skip_flag = (
                 "homemade" in title.lower() or
|
|
|
|