@@ -52,7 +52,7 @@ def load_json_file(file_path, expiration_hours=None):
             if not line:
                 continue
             try:
-                entry = json.loads(line)
+                entry = json.loads(line.strip())
                 data.append(entry)
             except json.JSONDecodeError as e:
                 logging.warning(f"Skipping invalid JSON line in {file_path} at line {line_number}: {e}")
@@ -81,13 +81,14 @@ def load_json_file(file_path, expiration_hours=None):
         logging.info(f"Loaded {len(valid_entries)} entries from {file_path}, {len(valid_entries)} valid after expiration check")
         return valid_entries
     except Exception as e:
         logging.error(f"Failed to load JSON file {file_path}: {e}")
         return []
 
 
 def save_json_file(file_path, title, timestamp):
     try:
-        entries = load_json_file(file_path, 24 if "posted_" in file_path else 7 * 24)  # 24 hours for titles, 7 days for images
+        entries = load_json_file(file_path, 24 if "posted_" in file_path else 7 * 24)
         entry = {"title": title, "timestamp": timestamp}
         entries.append(entry)
 
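
The retention window passed to load_json_file is keyed off the file name. Restated in isolation (the helper name and the example file names are illustrative, not from the source):

def retention_hours(file_path):
    # Hypothetical helper mirroring the inline conditional above:
    # "posted_" files keep 24 hours of history, everything else 7 days.
    return 24 if "posted_" in file_path else 7 * 24

print(retention_hours("posted_titles.json"))  # 24
print(retention_hours("image_history.json"))  # 168 (7 * 24)
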
@@ -96,9 +97,15 @@ def save_json_file(file_path, title, timestamp):
         cutoff = datetime.now(timezone.utc) - timedelta(hours=expiration_hours)
         pruned_entries = [e for e in entries if datetime.fromisoformat(e["timestamp"]) > cutoff]
 
         # Write as a JSON list with each entry on a new line
         with open(file_path, 'w') as f:
-            for entry in pruned_entries:
-                f.write(json.dumps(entry) + '\n')
+            f.write('[\n')
+            for i, entry in enumerate(pruned_entries):
+                f.write(' ' + json.dumps(entry))
+                if i < len(pruned_entries) - 1:
+                    f.write(',')
+                f.write('\n')
+            f.write(']')
 
         logging.info(f"Saved '{title}' to {file_path}")
+        logging.info(f"Pruned {file_path} to {len(pruned_entries)} entries (older than {expiration_hours//24} days removed)")