implement multisecret function

parent 5ace803cc0
commit 663d5b50f9

.gitignore (vendored, 2 changes)
@@ -2,4 +2,4 @@
 /filecache
 /__pycache__
 /access_log.db
-/allowed_secrets.json
+/folder_config.json
@@ -1,6 +0,0 @@
-{
-    "dev_key_f83745ft0g5rg3": {
-        "expiry" : "31.12.2000",
-        "file_root": "\\\\path\\if\\using\\windows"
-    }
-}
app.py (194 changes)
@@ -1,4 +1,4 @@
-from flask import Flask, render_template, send_file, url_for, jsonify, request, session, send_from_directory
+from flask import Flask, render_template, send_file, url_for, jsonify, request, session, send_from_directory, abort
 import os
 from PIL import Image
 import io
@@ -12,7 +12,7 @@ import geoip2.database
 from functools import lru_cache
 from urllib.parse import urlparse, unquote
 from werkzeug.middleware.proxy_fix import ProxyFix
-cache = diskcache.Cache('./filecache', size_limit= 48 * 1024**3) # 32 GB limit
+cache = diskcache.Cache('./filecache', size_limit= 48 * 1024**3) # 48 GB limit

 app = Flask(__name__)
 app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1)
@@ -23,57 +23,73 @@ if os.environ.get('FLASK_ENV') == 'production':
     app.config['SESSION_COOKIE_SAMESITE'] = 'None'
     app.config['SESSION_COOKIE_SECURE'] = True

-def load_allowed_secrets(filename='allowed_secrets.json'):
-    with open(filename) as f:
-        secrets = json.load(f)
-    for key, value in secrets.items():
-        if 'expiry' in value:
-            value['expiry'] = datetime.strptime(value['expiry'], '%d.%m.%Y').date()
-    return secrets
+with open('folder_config.json') as file:
+    app.config['folder_config'] = json.load(file)

 def require_secret(f):
     @wraps(f)
     def decorated_function(*args, **kwargs):
-        allowed_secrets = load_allowed_secrets()
-        today = date.today()
+        # Your config list:
+        folder_config = app.config['folder_config']

-        def is_valid(secret_data):
-            expiry_date = secret_data.get('expiry')
-            is_valid = expiry_date and today <= expiry_date
-            return is_valid
+        def is_valid(config_item, provided_secret):
+            """
+            Checks if today's date is <= validity date
+            AND if the provided secret matches config_item['secret'].
+            """
+            folder_validity = config_item['validity']
+            # Convert string to a date if necessary:
+            if isinstance(folder_validity, str):
+                folder_validity = datetime.strptime(folder_validity, '%d.%m.%Y').date()

-        # Check if a secret was provided via GET parameter
-        get_secret = request.args.get('secret')
-        if get_secret is not None:
-            secret_data = allowed_secrets.get(get_secret)
-            if secret_data:
-                if is_valid(secret_data):
-                    # Valid secret provided in URL: update session and config
-                    session['secret'] = get_secret
-                    session.permanent = True
-                    app.config['FILE_ROOT'] = secret_data.get('file_root')
-                    print("session:", session['secret'])
+            # Return whether it's still valid and secrets match:
+            return (
+                date.today() <= folder_validity and
+                provided_secret == config_item['secret']
+            )
+
+        # 1) Get secret from query params (if any)
+        args_secret = request.args.get('secret')
+
+        # 2) Initialize 'allowed_secrets' in the session if missing
+        if 'allowed_secrets' not in session:
+            session['allowed_secrets'] = []
+
+        # 3) If a new secret is provided, check if it's valid, and add to session if so
+        if args_secret:
+            for config_item in folder_config:
+                if is_valid(config_item, args_secret):
+                    if args_secret not in session['allowed_secrets']:
+                        session['allowed_secrets'].append(args_secret)
+                    session.permanent = True  # Make the session permanent
+
+        # 4) Re-check validity of each secret in session['allowed_secrets']
+        #    If a secret is no longer valid (or not in config), remove it.
+        for secret_in_session in session['allowed_secrets'][:]:
+            # Find the current config item with matching secret
+            config_item = next(
+                (c for c in folder_config if c['secret'] == secret_in_session),
+                None
+            )
+            # If the config item doesn't exist or is invalid, remove secret
+            if config_item is None or not is_valid(config_item, secret_in_session):
+                session['allowed_secrets'].remove(secret_in_session)
+
+        # 5) Build session['folders'] fresh from the valid secrets
+        session['folders'] = {}
+        for secret_in_session in session.get('allowed_secrets', []):
+            config_item = next(
+                (c for c in folder_config if c['secret'] == secret_in_session),
+                None
+            )
+            if config_item:
+                for folder_info in config_item['folders']:
+                    session['folders'][folder_info['foldername']] = folder_info['folderpath']
+
+        # 6) If we have folders, proceed; otherwise show index
+        if session['folders']:
+            return f(*args, **kwargs)
+        else:
+            # Secret provided via URL is expired or invalid
+            return render_template('error.html', message="Invalid or expired secret."), 403

-        # If no secret provided via GET, check the session
-        session_secret = session.get('secret')
-        if session_secret is not None:
-            secret_data = allowed_secrets.get(session_secret)
-            if secret_data:
-                if is_valid(secret_data):
-                    session.permanent = True
-                    app.config['FILE_ROOT'] = secret_data.get('file_root')
-                    return f(*args, **kwargs)
-                else:
-                    # Session secret exists but is expired
-                    return render_template('error.html', message="Invalid or expired secret."), 403
-
-        # No secret provided at all; show the public index page
-        return render_template('index.html')

     return decorated_function
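Note: pulled out of Flask, the decorator's validity check and step-4 pruning reduce to the standalone sketch below. The config entry mirrors folder_config.json.example.json at the end of this commit; the /srv/media path is a made-up placeholder.

    # Minimal sketch of the same check, no Flask session involved.
    from datetime import datetime, date

    folder_config = [
        {"secret": "dev_key_f83745ft0g5rg3", "validity": "31.12.2030",
         "folders": [{"foldername": "My Folder", "folderpath": "/srv/media/my-folder"}]},
    ]

    def is_valid(config_item, provided_secret):
        validity = config_item["validity"]
        if isinstance(validity, str):
            validity = datetime.strptime(validity, "%d.%m.%Y").date()
        return date.today() <= validity and provided_secret == config_item["secret"]

    def prune(secrets):
        # Step 4 in the decorator: keep only secrets that still match a valid entry.
        return [s for s in secrets
                if any(c["secret"] == s and is_valid(c, s) for c in folder_config)]

    print(prune(["dev_key_f83745ft0g5rg3", "stale_or_unknown_key"]))
    # -> ['dev_key_f83745ft0g5rg3']  (until 31.12.2030)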
@@ -165,7 +181,7 @@ def list_directory_contents(directory, subpath):
             file_entry['has_transcript'] = True
             transcript_rel_path = os.path.join(subpath, "Transkription", transcript_filename) if subpath else os.path.join("Transkription", transcript_filename)
             transcript_rel_path = transcript_rel_path.replace(os.sep, '/')
-            file_entry['transcript_url'] = url_for('get_transcript', filename=transcript_rel_path)
+            file_entry['transcript_url'] = url_for('get_transcript', subpath=transcript_rel_path)
         else:
             file_entry['has_transcript'] = False
     else:
@@ -176,7 +192,7 @@ def list_directory_contents(directory, subpath):
     return directories, files


-def generate_breadcrumbs(subpath):
+def generate_breadcrumbs(subpath=None):
     breadcrumbs = [{'name': 'Home', 'path': ''}]
     if subpath:
         parts = subpath.split('/')
@@ -191,8 +207,20 @@ def generate_breadcrumbs(subpath):
 @app.route('/api/path/<path:subpath>')
 @require_secret
 def api_browse(subpath):
-    file_root = app.config['FILE_ROOT']
-    directory = os.path.join(file_root, subpath.replace('/', os.sep))
+    if subpath == '': # root directory
+        foldernames = []
+        for foldername, folderpath in session['folders'].items():
+            foldernames.append({'name': foldername, 'path': foldername})
+
+        return jsonify({
+            'breadcrumbs': generate_breadcrumbs(),
+            'directories': foldernames,
+            'files': []
+        })
+
+    root, *relative_parts = subpath.split('/')
+    base_path = session['folders'][root]
+    directory = os.path.join(base_path, *relative_parts)

     if not os.path.isdir(directory):
         return jsonify({'error': 'Directory not found'}), 404
@@ -399,11 +427,12 @@ def log_file_access(full_path):
     conn.commit()
     conn.close()

-@app.route("/media/<path:filename>")
+@app.route("/media/<path:subpath>")
 @require_secret
-def serve_file(filename):
-    decoded_filename = unquote(filename).replace('/', os.sep)
-    full_path = os.path.normpath(os.path.join(app.config['FILE_ROOT'], decoded_filename))
+def serve_file(subpath):
+    root, *relative_parts = subpath.split('/')
+    base_path = session['folders'][root]
+    full_path = os.path.join(base_path, *relative_parts)

     if not os.path.isfile(full_path):
         app.logger.error(f"File not found: {full_path}")
@@ -424,7 +453,7 @@ def serve_file(filename):

     # Check cache first (using diskcache)
     response = None
-    cached = cache.get(filename)
+    cached = cache.get(subpath)
     if cached:
         cached_file_bytes, mime = cached
         cached_file = io.BytesIO(cached_file_bytes)
@@ -438,20 +467,20 @@ def serve_file(filename):
             img_bytes = io.BytesIO()
             img.save(img_bytes, format='PNG', quality=85)
             img_bytes = img_bytes.getvalue()
-            cache.set(filename, (img_bytes, mime))
+            cache.set(subpath, (img_bytes, mime))
             response = send_file(io.BytesIO(img_bytes), mimetype=mime)
         except Exception as e:
-            app.logger.error(f"Image processing failed for {filename}: {e}")
+            app.logger.error(f"Image processing failed for {subpath}: {e}")
             abort(500)
     else:
         # Cache non-image files: read bytes and cache
         try:
             with open(full_path, 'rb') as f:
                 file_bytes = f.read()
-                cache.set(filename, (file_bytes, mime))
+                cache.set(subpath, (file_bytes, mime))
                 response = send_file(io.BytesIO(file_bytes), mimetype=mime)
         except Exception as e:
-            app.logger.error(f"Failed to read file {filename}: {e}")
+            app.logger.error(f"Failed to read file {subpath}: {e}")
             abort(500)

     # Set Cache-Control header (browser caching for 1 day)
@@ -459,11 +488,13 @@ def serve_file(filename):
     return response


-@app.route("/transcript/<path:filename>")
+@app.route("/transcript/<path:subpath>")
 @require_secret
-def get_transcript(filename):
-    fs_filename = filename.replace('/', os.sep)
-    full_path = os.path.join(app.config['FILE_ROOT'], fs_filename)
+def get_transcript(subpath):
+
+    root, *relative_parts = subpath.split('/')
+    base_path = session['folders'][root]
+    full_path = os.path.join(base_path, *relative_parts)

     if not os.path.isfile(full_path):
         return "Transcription not found", 404
@@ -472,34 +503,27 @@ def get_transcript(filename):
         content = f.read()
     return content, 200, {'Content-Type': 'text/markdown; charset=utf-8'}

-@app.route("/crawl/<path:start_relative_path>")
+@app.route("/crawl/<path:subpath>")
 @require_secret
-def crawl_and_cache(start_relative_path):
+def crawl_and_cache(subpath):
     """
-    Crawls through a directory (relative to app.config['FILE_ROOT']) and caches each file.
+    Crawls through a directory and caches each file.
     For images, it creates a thumbnail (max 1200x1200) and caches the processed image.
     For non-images, it simply reads and caches the file bytes.
-
-    :param start_relative_path: The folder (relative to FILE_ROOT) to start crawling.
     """
-    # Compute the absolute path for the starting directory
-    base_dir = os.path.normpath(os.path.join(app.config['FILE_ROOT'], start_relative_path))
-
-    # Check that base_dir is under FILE_ROOT to prevent directory traversal
-    if not base_dir.startswith(os.path.abspath(app.config['FILE_ROOT'])):
-        return jsonify({"error": "Invalid path"}), 400
+    root, *relative_parts = subpath.split('/')
+    base_path = session['folders'][root]
+    full_path = os.path.join(base_path, *relative_parts)

     cached_files = []  # List to hold cached file relative paths

     # Walk through all subdirectories and files
-    for root, dirs, files in os.walk(base_dir):
+    for root, dirs, files in os.walk(full_path):
         for filename in files:
-            full_path = os.path.join(root, filename)
-            # Compute the relative key used for caching
-            rel_key = os.path.relpath(full_path, app.config['FILE_ROOT'])
+            full_path_file = os.path.join(root, filename)

             # Skip if this file is already in the cache
-            if cache.get(rel_key):
+            if cache.get(full_path_file):
                 continue

             # Determine the MIME type
@@ -517,19 +541,19 @@ def crawl_and_cache(start_relative_path):
                     img.save(img_bytes_io, format='PNG', quality=85)
                     img_bytes = img_bytes_io.getvalue()
                     # Cache the processed image bytes along with its mime type
-                    cache.set(rel_key, (img_bytes, mime))
-                    cached_files.append(rel_key)
+                    cache.set(full_path_file, (img_bytes, mime))
+                    cached_files.append(full_path_file)
                 except Exception as e:
-                    app.logger.error(f"Image processing failed for {rel_key}: {e}")
+                    app.logger.error(f"Image processing failed for {full_path_file}: {e}")
             else:
                 # Process non-image files
                 try:
-                    with open(full_path, 'rb') as f:
+                    with open(full_path_file, 'rb') as f:
                         file_bytes = f.read()
-                        cache.set(rel_key, (file_bytes, mime))
-                        cached_files.append(rel_key)
+                        cache.set(full_path_file, (file_bytes, mime))
+                        cached_files.append(full_path_file)
                 except Exception as e:
-                    app.logger.error(f"Failed to read file {rel_key}: {e}")
+                    app.logger.error(f"Failed to read file {full_path_file}: {e}")

     # Return the list of cached files as a JSON response
     return json.dumps({"cached_files": cached_files}, indent=4), 200
folder_config.json.example.json (new file, 13 lines)
@@ -0,0 +1,13 @@
+[
+    {
+        "secret" : "dev_key_f83745ft0g5rg3",
+        "validity" : "31.12.2030",
+        "folders": [
+            {
+                "foldername": "My Folder",
+                "folderpath": "\\\\path\\if\\using\\windows"
+            }
+        ]
+
+    }
+]
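A quick sketch of how the routes above resolve a URL subpath against this config once the secret has been accepted; the request path "My Folder/scans/page1.png" is invented.

    import os

    # What session['folders'] would contain for the example entry above.
    folders = {"My Folder": r"\\path\if\using\windows"}

    subpath = "My Folder/scans/page1.png"
    root, *relative_parts = subpath.split("/")
    full_path = os.path.join(folders[root], *relative_parts)
    print(full_path)  # on Windows: \\path\if\using\windows\scans\page1.png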