fix caching with gunicorn

lelo 2025-12-22 15:37:49 +00:00
parent 55a0a2dce1
commit cd2592a4b1
2 changed files with 36 additions and 24 deletions

app.py

@@ -705,7 +705,10 @@ def api_browse(subpath):
     root, *relative_parts = subpath.split('/')
-    base_path = session['folders'][root]
+    base_path = session.get('folders', {}).get(root)
+    if not base_path:
+        app.logger.warning(f"Requested root '{root}' not found in session folders")
+        return jsonify({'error': 'Directory not found'}), 404
     directory = os.path.join(base_path, *relative_parts)
     playfile = None
@@ -783,6 +786,7 @@ def serve_file(subpath):
     do_log = (
         not is_cache_request  # skip if upstream CDN asked us to cache
         and not is_audio_get  # skip audio GETs
+        and request.method != 'HEAD'
     )
     # 3) Pick cache
@@ -882,10 +886,29 @@ def serve_file(subpath):
     )
     os.close(fd)
+    # Write an initial chunk synchronously so the temp file exists with data
+    initial_bytes = 0
+    try:
+        with open(full_path, 'rb') as source, open(cache_file_path, 'wb') as dest:
+            chunk = source.read(1024 * 1024)  # 1MB
+            if chunk:
+                dest.write(chunk)
+                dest.flush()
+                initial_bytes = len(chunk)
+    except Exception as e:
+        app.logger.error(f"Failed to prime cache file for {subpath}: {e}")
+        if os.path.exists(cache_file_path):
+            try:
+                os.remove(cache_file_path)
+            except OSError:
+                pass
+        abort(503, description="Service temporarily unavailable - cache initialization failed")
     # Start copying to our cache file in chunks
-    def copy_to_cache_chunked():
+    def copy_to_cache_chunked(start_offset):
         try:
-            with open(full_path, 'rb') as source, open(cache_file_path, 'wb') as dest:
+            with open(full_path, 'rb') as source, open(cache_file_path, 'ab') as dest:
+                source.seek(start_offset)
                 while True:
                     chunk = source.read(1024 * 1024)  # 1MB chunks
                     if not chunk:
@@ -918,24 +941,9 @@ def serve_file(subpath):
             pass
     # Start the background copy
-    cache_thread = threading.Thread(target=copy_to_cache_chunked, daemon=True)
+    cache_thread = threading.Thread(target=copy_to_cache_chunked, args=(initial_bytes,), daemon=True)
     cache_thread.start()
-    # Wait for initial data to be written
-    max_wait = 5.0  # Maximum 5 seconds
-    wait_interval = 0.05  # Check every 50ms
-    elapsed = 0
-    while elapsed < max_wait:
-        if os.path.exists(cache_file_path) and os.path.getsize(cache_file_path) > 0:
-            file_path = cache_file_path
-            break
-        time.sleep(wait_interval)
-        elapsed += wait_interval
-    else:
-        # Cache file not ready - abort
-        app.logger.error(f"Cache file not created in time for {subpath}")
-        abort(503, description="Service temporarily unavailable - cache initialization failed")
+    file_path = cache_file_path
     # 6) Build response for non-image
     if as_attachment:
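
Taken together, the app.py hunks replace a poll-and-wait handoff with a prime-then-append pattern: write the first chunk synchronously so the cache file is guaranteed to exist before the response is built (or the request aborts with a 503), then let a daemon thread continue the copy from that offset. A minimal standalone sketch of the same pattern, with hypothetical source_path/cache_path arguments and none of the Flask error handling:

import threading

CHUNK = 1024 * 1024  # 1 MB, matching the chunk size used in the diff

def prime_and_copy(source_path, cache_path):
    """Prime the cache file synchronously, then finish the copy in the background."""
    # 1) Synchronous prime: after this block cache_path exists and holds real
    #    bytes, so the caller can build its response immediately instead of
    #    polling for the file to appear.
    with open(source_path, 'rb') as src, open(cache_path, 'wb') as dst:
        first = src.read(CHUNK)
        dst.write(first)
        dst.flush()
    offset = len(first)

    # 2) Background copy: append mode plus a seek to the recorded offset
    #    ensures no bytes are duplicated or skipped at the handoff point.
    def finish():
        with open(source_path, 'rb') as src, open(cache_path, 'ab') as dst:
            src.seek(offset)
            while True:
                chunk = src.read(CHUNK)
                if not chunk:
                    break
                dst.write(chunk)

    threading.Thread(target=finish, daemon=True).start()
    return cache_path

The payoff is determinism: the deleted code slept in 50ms steps for up to 5 seconds waiting for the background thread to create the file, exactly the kind of scheduling assumption that stops holding once the app moves from the Flask dev server to gunicorn's eventlet workers, which is what this commit targets.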

docker-compose.yml

@@ -16,8 +16,7 @@ services:
         propagation: rshared
     environment:
       - FLASK_APP=app.py
-      - FLASK_ENV=development
-      - FLASK_DEBUG=1
+      - APP_ENV=${APP_ENV:-production}
     networks:
       - traefik
     labels:
@@ -38,10 +37,15 @@ services:
       # Internal port
       - "traefik.http.services.${CONTAINER_NAME}.loadbalancer.server.port=5000"
-    # Dev server with autoreload for live code changes
     command: >
       sh -c "pip install -r requirements.txt &&
-             flask run --host=0.0.0.0 --port=5000 --reload"
+             if [ \"$APP_ENV\" = \"development\" ]; then
+               export FLASK_ENV=development FLASK_DEBUG=1 &&
+               flask run --host=0.0.0.0 --port=5000 --reload;
+             else
+               export FLASK_ENV=production FLASK_DEBUG=0 &&
+               gunicorn -w 1 -k eventlet -b 0.0.0.0:5000 app:app;
+             fi"
     networks:
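
With the compose change, production becomes the default: docker compose up installs requirements and starts gunicorn, while APP_ENV=development docker compose up (assuming APP_ENV is exported in the host shell or set in an .env file, since compose substitutes ${APP_ENV:-production} at config time) takes the autoreloading flask run branch instead. The single worker (-w 1) appears deliberate: the cache priming above relies on in-process daemon threads and per-process state, which would not be shared across multiple gunicorn workers.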