fix caching

parent b8713fcc7e
commit 036ab856eb

app.py (24 changes)
@@ -767,6 +767,9 @@ def serve_file(subpath):
         app.logger.error(f"File not found: {full_path}")
         return "File not found", 404
 
+    filesize = os.path.getsize(full_path)
+    filename = os.path.basename(full_path)
+
     # 2) Prep request info
     mime, _ = mimetypes.guess_type(full_path)
     mime = mime or 'application/octet-stream'
@@ -871,7 +874,12 @@ def serve_file(subpath):
     cache_key = hashlib.md5(subpath.encode('utf-8')).hexdigest()
     cache_dir = os.path.join(cache.directory, cache_key[:2])
     os.makedirs(cache_dir, exist_ok=True)
-    cache_file_path = os.path.join(cache_dir, f"{cache_key}.tmp")
+    fd, cache_file_path = tempfile.mkstemp(
+        prefix=f"{cache_key}_",
+        suffix=".tmp",
+        dir=cache_dir
+    )
+    os.close(fd)
 
     # Start copying to our cache file in chunks
     def copy_to_cache_chunked():
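Note on the hunk above: switching from a fixed `{cache_key}.tmp` name to `tempfile.mkstemp` gives each concurrent request its own private temp file instead of letting two writers clobber the same path. A minimal sketch of the pattern, assuming the same `cache_dir`/`cache_key` naming as in the diff (the helper name is illustrative, not from the commit):

```python
import os
import tempfile

def make_private_temp(cache_dir: str, cache_key: str) -> str:
    """Create a uniquely named temp file in cache_dir and return its path."""
    # mkstemp() creates the file atomically with a random component in the
    # name, so two workers caching the same cache_key never share a file.
    fd, path = tempfile.mkstemp(prefix=f"{cache_key}_", suffix=".tmp", dir=cache_dir)
    os.close(fd)  # close the low-level handle; the writer reopens the path with open()
    return path
```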
@@ -886,6 +894,11 @@ def serve_file(subpath):
 
         # Once complete, register with diskcache for proper management
         try:
+            if subpath in cache:
+                if os.path.exists(cache_file_path):
+                    os.remove(cache_file_path)
+                app.logger.info(f"Cache already populated for {subpath}, skipped duplicate registration")
+                return
             with open(cache_file_path, 'rb') as f:
                 cache.set(subpath, f, read=True)
             # Remove our temp file since diskcache now has it
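For context on the new guard: `diskcache.Cache.set(key, file_obj, read=True)` copies the contents of an open file object into the cache, so re-registering an already cached `subpath` would only duplicate work. A hedged sketch of the register-or-discard flow with placeholder names (`register_temp_file` is not in the commit):

```python
import os
import diskcache

def register_temp_file(cache: diskcache.Cache, key: str, temp_path: str) -> None:
    """Hand a fully written temp file over to diskcache, skipping duplicates."""
    if key in cache:
        # Another worker finished first; drop our private copy and bail out.
        if os.path.exists(temp_path):
            os.remove(temp_path)
        return
    with open(temp_path, 'rb') as f:
        cache.set(key, f, read=True)  # diskcache stores the file's contents
    os.remove(temp_path)              # the temp copy is no longer needed
```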
@@ -924,9 +937,6 @@ def serve_file(subpath):
         abort(503, description="Service temporarily unavailable - cache initialization failed")
 
     # 6) Build response for non-image
-    filesize = os.path.getsize(full_path)
-    filename = os.path.basename(full_path)
-
     if as_attachment:
         download_name = filename
         mimetype = 'application/octet-stream'
@@ -952,6 +962,9 @@ def serve_file(subpath):
                 # No data available yet, wait a bit
                 time.sleep(0.1)
 
+    if request.method == 'HEAD':
+        response = make_response('', 200)
+    else:
         response = make_response(generate())
     response.headers['Content-Type'] = mimetype
     response.headers['Content-Length'] = str(filesize)
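The `request.method == 'HEAD'` branch added here returns the response headers without calling `generate()`, so a HEAD request no longer has to wait on the chunked cache copy. A small self-contained Flask sketch of the same pattern (the route and sizes are made up for illustration):

```python
from flask import Flask, make_response, request

app = Flask(__name__)

@app.route('/demo', methods=['GET', 'HEAD'])
def demo():
    def generate():
        # Stand-in for the chunked reader that streams the cached file.
        yield b'x' * 1024

    if request.method == 'HEAD':
        response = make_response('', 200)      # headers only, no body generated
    else:
        response = make_response(generate())   # streamed body for GET
    response.headers['Content-Type'] = 'application/octet-stream'
    response.headers['Content-Length'] = '1024'
    return response
```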
@@ -979,6 +992,9 @@ def serve_file(subpath):
 
     response.headers['Cache-Control'] = 'public, max-age=86400'
 
+    if request.method == 'HEAD':
+        response.set_data(b'')
+
     # 7) Logging
     if do_log:
         a.log_file_access(
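A quick way to sanity-check both new HEAD paths is to compare a HEAD against a GET for the same file and confirm the headers match while the HEAD body stays empty; a sketch with `requests` and a made-up URL:

```python
import requests

url = "http://localhost:5000/files/example.bin"  # hypothetical endpoint and path

head = requests.head(url)
get = requests.get(url, stream=True)

assert head.status_code == get.status_code == 200
assert head.headers.get("Content-Length") == get.headers.get("Content-Length")
assert head.content == b""  # a HEAD response must not carry a body
get.close()
```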
auth.py (8 changes)

@@ -152,17 +152,17 @@ def require_secret(f):
         for token_in_session in session.get('valid_tokens', []):
             try:
                 token_item = decode_token(token_in_session)
-                print(f"DEBUG: Decoded token: {token_item}")
+                # print(f"DEBUG: Decoded token: {token_item}")
                 for folder_info in token_item.get('folders', []):
-                    print(f"DEBUG: Adding folder '{folder_info['foldername']}' -> '{folder_info['folderpath']}'")
+                    # print(f"DEBUG: Adding folder '{folder_info['foldername']}' -> '{folder_info['folderpath']}'")
                     session['folders'][folder_info['foldername']] = folder_info['folderpath']
             except Exception as e:
                 print(f"ERROR: Failed to process token: {e}")
 
         # Mark session as modified to ensure it's saved
         session.modified = True
-        print(f"DEBUG: Final session['folders'] keys: {list(session['folders'].keys())}")
-        print(f"DEBUG: session['valid_tokens']: {session.get('valid_tokens', [])}")
+        # print(f"DEBUG: Final session['folders'] keys: {list(session['folders'].keys())}")
+        # print(f"DEBUG: session['valid_tokens']: {session.get('valid_tokens', [])}")
 
         # 6) If we have folders, proceed; otherwise show index
         if session['folders']:
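Side note on auth.py: the DEBUG prints are commented out rather than removed. If they are still wanted occasionally, routing them through Flask's logger keeps them switchable by log level; a minimal sketch (the helper and its name are assumptions, not part of this commit):

```python
from flask import current_app

def log_token_debug(token_item: dict) -> None:
    """Emit the former print() diagnostics at DEBUG level."""
    current_app.logger.debug("Decoded token: %s", token_item)
    for folder_info in token_item.get('folders', []):
        current_app.logger.debug(
            "Adding folder %r -> %r",
            folder_info.get('foldername'),
            folder_info.get('folderpath'),
        )
```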