poster_cache.py
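"""Pickle-backed cache for TMDB poster URLs and media metadata.

Entries are stored as (value, timestamp) tuples keyed by TMDB id and media
type, and expire after CACHE_EXPIRY_DAYS. The cache file lives under the
directory given by the USER_DB_CONTENT environment variable.
"""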
import os
import pickle
from datetime import datetime, timedelta
import logging

# Get the db_content directory from an environment variable, with a fallback
DB_CONTENT_DIR = os.environ.get('USER_DB_CONTENT', '/user/db_content')

# Cache location, expiry window, and placeholder image for missing posters
CACHE_FILE = os.path.join(DB_CONTENT_DIR, 'poster_cache.pkl')
CACHE_EXPIRY_DAYS = 7  # Cache entries expire after 7 days
UNAVAILABLE_POSTER = "/static/images/placeholder.png"

def is_cache_healthy():
    """Check if the cache file is valid and can be loaded."""
    if not os.path.exists(CACHE_FILE):
        return False
    try:
        with open(CACHE_FILE, 'rb') as f:
            # Attempt to unpickle the file; any failure means it is unusable
            pickle.load(f)
        return True
    except (EOFError, pickle.UnpicklingError, UnicodeDecodeError, FileNotFoundError) as e:
        logging.error(f"Cache file is corrupted: {e}")
        try:
            # If corrupted, attempt to remove the file
            os.remove(CACHE_FILE)
            logging.info("Removed corrupted cache file")
        except Exception as e:
            logging.error(f"Failed to remove corrupted cache file: {e}")
        return False

def load_cache():
    """Load the cache, performing a health check first."""
    if not is_cache_healthy():
        logging.info("Creating new cache due to health check failure")
        return {}
    try:
        with open(CACHE_FILE, 'rb') as f:
            return pickle.load(f)
    except Exception as e:
        logging.warning(f"Error loading cache: {e}. Creating a new cache.")
        return {}

def save_cache(cache):
    """Save the cache to disk with validation and atomic writing."""
    if not isinstance(cache, dict):
        logging.error("Invalid cache format: cache must be a dictionary")
        return False

    # Write to a temporary file first, then move it into place
    temp_file = f"{CACHE_FILE}.tmp"
    try:
        os.makedirs(os.path.dirname(CACHE_FILE), exist_ok=True)

        with open(temp_file, 'wb') as f:
            pickle.dump(cache, f)

        # Validate that the temporary file can be read back
        try:
            with open(temp_file, 'rb') as f:
                pickle.load(f)
        except Exception as e:
            logging.error(f"Validation of temporary cache file failed: {e}")
            os.remove(temp_file)
            return False

        # Validation passed: move the temporary file to the real location.
        # os.replace overwrites an existing file and is atomic on most systems.
        os.replace(temp_file, CACHE_FILE)
        return True
    except Exception as e:
        logging.error(f"Error saving cache: {e}")
        if os.path.exists(temp_file):
            try:
                os.remove(temp_file)
            except OSError:
                pass
        return False

def normalize_media_type(media_type):
    """Normalize media type to either 'tv' or 'movie'."""
    return 'tv' if media_type.lower() in ['tv', 'show', 'series'] else 'movie'

def get_cached_poster_url(tmdb_id, media_type):
    """Return the cached poster URL for a title, or None if missing or expired."""
    if not tmdb_id:
        return UNAVAILABLE_POSTER
    cache = load_cache()
    normalized_type = normalize_media_type(media_type)
    cache_key = f"{tmdb_id}_{normalized_type}"
    cache_item = cache.get(cache_key)
    if cache_item:
        url, timestamp = cache_item
        if datetime.now() - timestamp < timedelta(days=CACHE_EXPIRY_DAYS):
            return url
        else:
            logging.info(f"Cache expired for {cache_key}")
    return None

def cache_poster_url(tmdb_id, media_type, url):
    """Store a poster URL in the cache with the current timestamp."""
    if not tmdb_id:
        return
    cache = load_cache()
    normalized_type = normalize_media_type(media_type)
    cache_key = f"{tmdb_id}_{normalized_type}"
    cache[cache_key] = (url, datetime.now())
    save_cache(cache)
    logging.info(f"Cached poster URL for {cache_key}: {url}")

def clean_expired_cache():
    """Remove all cache entries older than CACHE_EXPIRY_DAYS."""
    cache = load_cache()
    current_time = datetime.now()
    expired_keys = [
        key for key, (_, timestamp) in cache.items()
        if current_time - timestamp > timedelta(days=CACHE_EXPIRY_DAYS)
    ]
    for key in expired_keys:
        del cache[key]
    save_cache(cache)

def get_cached_media_meta(tmdb_id, media_type):
    """Return cached media metadata for a title, or None if missing or expired."""
    cache = load_cache()
    cache_key = f"{tmdb_id}_{media_type}_meta"
    cache_item = cache.get(cache_key)
    if cache_item:
        media_meta, timestamp = cache_item
        if datetime.now() - timestamp < timedelta(days=CACHE_EXPIRY_DAYS):
            return media_meta
        else:
            logging.info(f"Cache expired for media meta {cache_key}")
    else:
        logging.info(f"Cache miss for media meta {cache_key}")
    return None

def cache_media_meta(tmdb_id, media_type, media_meta):
    """Store media metadata in the cache with the current timestamp."""
    cache = load_cache()
    cache_key = f"{tmdb_id}_{media_type}_meta"
    cache[cache_key] = (media_meta, datetime.now())
    save_cache(cache)
    logging.info(f"Cached media meta for {cache_key}")

def cache_unavailable_poster(tmdb_id, media_type):
    """Cache the placeholder image for a title whose poster is unavailable."""
    cache_poster_url(tmdb_id, media_type, UNAVAILABLE_POSTER)
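

if __name__ == "__main__":
    # Illustrative usage sketch, not part of the module's public surface.
    # Assumes USER_DB_CONTENT points to a writable directory; the TMDB id and
    # poster URL below are hypothetical placeholders.
    logging.basicConfig(level=logging.INFO)

    example_id = "12345"  # hypothetical TMDB id
    example_url = "https://image.tmdb.org/t/p/w500/example.jpg"  # hypothetical poster URL

    cache_poster_url(example_id, "movie", example_url)
    print(get_cached_poster_url(example_id, "movie"))  # freshly cached URL
    print(get_cached_poster_url(None, "movie"))        # falls back to UNAVAILABLE_POSTER

    # Drop any entries older than CACHE_EXPIRY_DAYS
    clean_expired_cache()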