Compare commits

3 Commits

Author SHA1 Message Date
4411a9e96e Merge branch 'development' of gitea.centx.de:lelo/bethaus-app into development 2025-06-01 09:36:07 +00:00
199e4003a0 collapsable cards 2025-06-01 09:36:04 +00:00
05cbd5efa1 cuda with cpu fallback 2025-06-01 09:35:42 +00:00
5 changed files with 85 additions and 8 deletions

View File

@@ -347,3 +347,24 @@ footer {
 @keyframes spin {
   to { transform: rotate(360deg); }
 }
+
+.card-body.collapsable {
+  display: none;
+  transition: max-height 0.3s ease;
+  overflow: hidden;
+}
+
+.card-body.collapsable.show {
+  display: block;
+  max-height: 1000px;
+}
+
+.toggle-icon {
+  font-size: 1.25rem;
+  user-select: none;
+}
+
+.toggle-icon:hover {
+  color: #007bff;
+}

View File

@@ -499,3 +499,55 @@ function syncThemeColor() {
 }
 document.addEventListener('DOMContentLoaded', syncThemeColor);
+
+// toggle card visibility
+document.addEventListener('DOMContentLoaded', function() {
+  // 1. Select all plus/minus icons
+  var icons = document.querySelectorAll('.card .toggle-icon');
+  icons.forEach(function(icon) {
+    // 2. For each icon, find its corresponding collapsable body
+    var card = icon.closest('.card');
+    if (!card) return;
+    var body = card.querySelector('.card-body.collapsable');
+    if (!body) return;
+
+    // 3. Initialize aria-expanded & icon text based on whether .show is present
+    if (body.classList.contains('show')) {
+      icon.textContent = '−';
+      icon.setAttribute('aria-expanded', 'true');
+    } else {
+      icon.textContent = '+';
+      icon.setAttribute('aria-expanded', 'false');
+    }
+
+    function toggleSection() {
+      // Toggle the 'show' class
+      if (body.classList.contains('show')) {
+        body.classList.remove('show');
+        icon.textContent = '+';
+        icon.setAttribute('aria-expanded', 'false');
+      } else {
+        body.classList.add('show');
+        icon.textContent = '−';
+        icon.setAttribute('aria-expanded', 'true');
+      }
+    }
+
+    // 4. Click listener
+    icon.addEventListener('click', function(e) {
+      e.stopPropagation();
+      toggleSection();
+    });
+
+    // 5. Keydown listener (Enter/Space for accessibility)
+    icon.addEventListener('keydown', function(e) {
+      if (e.key === 'Enter' || e.key === ' ') {
+        e.preventDefault();
+        toggleSection();
+      }
+    });
+  });
+});

View File

@@ -26,7 +26,7 @@
 {% block content %}
 <div class="container-fluid">
-  <h2>Übersicht deiner gültigen Links</h2>
+  <h2>Verbindungen der letzten 10 Minuten</h2>
   <span class="ms-3">
     Anzahl Verbindungen: <strong id="totalConnections">0</strong>
   </span>

View File

@@ -62,10 +62,11 @@
 <!-- Detailed Table of Top File Accesses -->
 {% for top20_item in top20 %}
 <div class="card mb-4">
-  <div class="card-header">
-    Top 20 Dateizugriffe ({{ top20_item['category'] }})
+  <div class="card-header d-flex justify-content-between align-items-center">
+    <span>Top 20 Dateizugriffe ({{ top20_item['category'] }})</span>
+    <span class="toggle-icon" aria-label="collapse" role="button" tabindex="0">+</span>
   </div>
-  <div class="card-body">
+  <div class="card-body collapsable">
     <div class="table-responsive">
       <table class="table table-striped">
         <thead>

View File

@@ -1,6 +1,7 @@
 import os
 import sys
 import time
+import torch
 import whisper
 import concurrent.futures
 import json
@@ -18,7 +19,6 @@ with open("transcription_config.yml", "r", encoding="utf-8") as file:
     settings = yaml.safe_load(file)
 folder_list = settings.get("folder_list")
 model_name = settings.get("model_name")
-device = settings.get("device")

 def load_audio_librosa(path: str, sr: int = 16_000) -> np.ndarray:
     audio, orig_sr = librosa.load(path, sr=sr) # load + resample to 16 kHz
@@ -222,7 +222,10 @@ def process_folder(root_folder):
     else:
         print(f"Checked {checked_files} files. Start to transcribe {len(valid_files)} files.")

-    print("Loading Whisper model...")
+    # Choose "cuda" if available, otherwise "cpu"
+    device = "cuda" if torch.cuda.is_available() else "cpu"
+    print(f"Loading Whisper model on {device}")
     model = whisper.load_model(model_name, device=device)

     # Use a thread pool to pre-load files concurrently.
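
Not part of the diff above: a minimal standalone sketch (assuming PyTorch is installed, as the new import torch line requires) to check which device the CUDA-with-CPU-fallback logic will pick before starting a long transcription run.

# Quick check of the device the transcription script would select.
import torch

device = "cuda" if torch.cuda.is_available() else "cpu"
print(f"Whisper would load on: {device}")
if device == "cuda":
    # Name of the first visible GPU, helpful when several are installed.
    print(f"GPU: {torch.cuda.get_device_name(0)}")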