Update log

MacRimi
2026-02-16 11:43:12 +01:00
parent f27c7fdf31
commit 92b0a1478a
2 changed files with 1 addition and 25 deletions


@@ -162,10 +162,8 @@ export function SystemLogs() {
       const clampedDays = Math.max(1, Math.min(daysAgo || 1, 90))
       const apiUrl = `/api/logs?since_days=${clampedDays}`
-      console.log(`[v0] Fetching logs for ${clampedDays} days...`)
       const data = await fetchApi(apiUrl)
       const logsArray = Array.isArray(data) ? data : data.logs || []
-      console.log(`[v0] Logs: parsed=${logsArray.length}, journal_total=${data.journal_total || 'N/A'}, skipped=${data.skipped || 0} for ${clampedDays} day(s)`)
       return logsArray
     } catch {
       setError("Failed to load logs. Please try again.")
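
For reference, the fetch above clamps the requested window to 1-90 days, queries /api/logs?since_days=N, and accepts either a bare array or an object with a logs key. A minimal Python sketch of the same client-side contract; the base URL, the requests dependency, and the helper name are illustrative assumptions, not part of this commit:

import requests

def fetch_logs(base_url, days_ago=1):
    # Clamp the window to 1-90 days, mirroring the frontend logic above.
    clamped_days = max(1, min(days_ago or 1, 90))
    resp = requests.get(f"{base_url}/api/logs",
                        params={"since_days": clamped_days},
                        timeout=130)  # slightly above the server's 120 s query timeout
    resp.raise_for_status()
    data = resp.json()
    # The endpoint may return a bare list or an object with a 'logs' key.
    return data if isinstance(data, list) else data.get("logs", [])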


@@ -5418,30 +5418,10 @@ def api_logs():
     # Longer timeout for date-range queries which may return many entries
     query_timeout = 120 if since_days else 30
-    # First, get a quick count of how many lines the journal has for this period
-    # This helps diagnose if the journal itself has fewer entries than expected
-    real_count = 0
-    try:
-        count_cmd = cmd[:]  # clone
-        # Replace --output json with a simpler format for counting
-        if '--output' in count_cmd:
-            idx = count_cmd.index('--output')
-            count_cmd[idx + 1] = 'cat'
-        count_result = subprocess.run(
-            count_cmd, capture_output=True, text=True, timeout=30
-        )
-        if count_result.returncode == 0:
-            real_count = count_result.stdout.count('\n')
-    except Exception:
-        pass  # counting is optional, continue with the real fetch
-    app.logger.info(f"[Logs API] Fetching logs: cmd={' '.join(cmd)}, journal_real_count={real_count}")
     result = subprocess.run(cmd, capture_output=True, text=True, timeout=query_timeout)
     if result.returncode == 0:
         logs = []
-        skipped = 0
         priority_map = {
             '0': 'emergency', '1': 'alert', '2': 'critical', '3': 'error',
             '4': 'warning', '5': 'notice', '6': 'info', '7': 'debug'
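
The block removed above was a diagnostic: it cloned the journalctl command, swapped the json output format for the plain cat format, and counted lines to see how many entries the journal itself reported for the period. A standalone sketch of that idea, assuming cmd is a journalctl argument list like the one api_logs() builds (the helper name below is hypothetical):

import subprocess

def count_journal_lines(cmd):
    # Best-effort entry count for the same query; diagnostic only.
    count_cmd = list(cmd)
    if '--output' in count_cmd:
        # 'cat' prints one plain line per entry, which is cheap to count.
        count_cmd[count_cmd.index('--output') + 1] = 'cat'
    try:
        result = subprocess.run(count_cmd, capture_output=True, text=True, timeout=30)
    except Exception:
        return 0  # counting is optional; never block the real fetch
    return result.stdout.count('\n') if result.returncode == 0 else 0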
@@ -5474,11 +5454,9 @@ def api_logs():
                         'hostname': log_entry.get('_HOSTNAME', '')
                     })
                 except (json.JSONDecodeError, ValueError):
-                    skipped += 1
                     continue
-        app.logger.info(f"[Logs API] Parsed {len(logs)} logs, skipped {skipped} unparseable, journal_real={real_count}")
-        return jsonify({'logs': logs, 'total': len(logs), 'journal_total': real_count, 'skipped': skipped})
+        return jsonify({'logs': logs, 'total': len(logs)})
     else:
         return jsonify({
             'error': 'journalctl not available or failed',
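
For context, the handler parses journalctl --output json, which emits one JSON object per line, and maps the numeric PRIORITY field to a severity name via the priority_map shown above; lines that fail to parse are simply skipped (this commit drops the counter that used to track them). A minimal sketch of that parsing loop, reduced to the fields visible in this diff plus the standard journald MESSAGE and PRIORITY names; the exact fields in the real handler may differ:

import json

PRIORITY_MAP = {
    '0': 'emergency', '1': 'alert', '2': 'critical', '3': 'error',
    '4': 'warning', '5': 'notice', '6': 'info', '7': 'debug',
}

def parse_journal_json(output):
    logs = []
    for line in output.splitlines():
        if not line.strip():
            continue
        try:
            entry = json.loads(line)
        except (json.JSONDecodeError, ValueError):
            continue  # unparseable line; previously counted as 'skipped'
        logs.append({
            'severity': PRIORITY_MAP.get(str(entry.get('PRIORITY', '6')), 'info'),
            'message': entry.get('MESSAGE', ''),
            'hostname': entry.get('_HOSTNAME', ''),
        })
    return logs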