diff --git a/client/src/js/logger.js b/client/src/js/logger.js new file mode 100644 index 00000000..5cfbf58f --- /dev/null +++ b/client/src/js/logger.js @@ -0,0 +1,127 @@ +import log from 'loglevel'; +import { makeURL } from '@/js/utils'; +import store from '@/store/store'; + +let isInitialized = false; + +const FLUSH_INTERVAL_MS = 1000; +const MAX_QUEUE_SIZE = 50; + +let logQueue = []; +let flushTimer = null; + +/** + * Buffer a log entry and schedule a debounced flush. + * Flushes immediately if the queue reaches MAX_QUEUE_SIZE. + */ +function enqueueLog(level, message, extra) { + logQueue.push({ level, message, extra }); + if (logQueue.length >= MAX_QUEUE_SIZE) { + flushQueue(); + return; + } + clearTimeout(flushTimer); + flushTimer = setTimeout(flushQueue, FLUSH_INTERVAL_MS); +} + +/** + * Send all queued log entries to the server as a single batch request. + * Uses fetch directly to avoid potential infinite loops with HTTP interceptors. + */ +function flushQueue() { + clearTimeout(flushTimer); + flushTimer = null; + if (logQueue.length === 0) return; + + const batch = logQueue.splice(0); + + const token = store.getters.AUTH_TOKEN; + const headers = { 'Content-Type': 'application/json' }; + if (token) headers['Authorization'] = `Bearer ${token}`; + + fetch(makeURL('/api/v1/logs/batch'), { + method: 'POST', + headers, + body: JSON.stringify({ batch: batch }), + }).catch(() => { + // Intentionally ignore errors to prevent log flooding or infinite loops + }); +} + +/** + * Initialize remote logging. + * This hooks into the loglevel library to send logs to the server. + */ +export function initRemoteLogging() { + if (isInitialized) return; + isInitialized = true; + + const originalFactory = log.methodFactory; + + const levels = { TRACE: 0, DEBUG: 1, INFO: 2, WARN: 3, ERROR: 4, SILENT: 5 }; + + log.methodFactory = function (methodName, logLevel, loggerName) { + const rawMethod = originalFactory(methodName, logLevel, loggerName); + + return function (message, ...args) { + // Always call the standard log method first + rawMethod(message, ...args); + + const settings = store.getters.SETTINGS; + if (!settings || !settings.client_log_enabled) return; + + const currentLevelName = methodName.toUpperCase(); + const currentLevel = levels[currentLevelName] ?? 2; + const minLevel = levels[(settings.client_log_level || 'INFO').toUpperCase()] ?? 2; + if (currentLevel < minLevel) return; + + let finalMessage = message; + if (typeof message !== 'string') { + try { + finalMessage = JSON.stringify(message); + } catch { + finalMessage = String(message); + } + } + + const extra = {}; + if (args.length > 0) { + extra.args = args.map((arg) => + arg instanceof Error ? { message: arg.message, stack: arg.stack, name: arg.name } : arg + ); + } + + enqueueLog(currentLevelName, finalMessage, extra); + }; + }; + + // Re-apply the level to trigger the factory update + log.setLevel(log.getLevel()); + + // Keep browser console level in sync with the user's per-account preference. + // The false arg prevents loglevel from persisting this to localStorage. + store.watch( + (state, getters) => getters.USER_SETTINGS?.console_log_level, + (newLevel) => { + if (newLevel) log.setLevel(newLevel.toLowerCase(), false); + }, + { immediate: true } + ); + + // Intercept global errors — enqueue directly instead of routing through + // log.error() so the factory is never re-entered from here. 
+ window.addEventListener('error', (event) => { + enqueueLog('ERROR', `Unhandled Error: ${event.message}`, { + filename: event.filename, + lineno: event.lineno, + colno: event.colno, + stack: event.error ? event.error.stack : null, + }); + }); + + window.addEventListener('unhandledrejection', (event) => { + enqueueLog('ERROR', `Unhandled Promise Rejection: ${event.reason}`, {}); + }); + + enqueueLog('INFO', 'Remote logging initialized', {}); +} diff --git a/client/src/main.js b/client/src/main.js index 9463ed85..c3e1d153 100644 --- a/client/src/main.js +++ b/client/src/main.js @@ -12,6 +12,7 @@ import App from './App.vue'; import router from './router'; import setupHttpInterceptor from './js/http-interceptor'; import { getWebSocketURL, isElectron } from '@/js/platform'; +import { initRemoteLogging } from '@/js/logger'; import './assets/styles/dark.scss'; import 'vue-toast-notification/dist/theme-sugar.css'; @@ -19,6 +20,7 @@ import 'vue-multiselect/dist/vue-multiselect.min.css'; import 'splitpanes/dist/splitpanes.css'; setupHttpInterceptor(); +initRemoteLogging(); Vue.use(BootstrapVue); Vue.use(IconsPlugin); diff --git a/client/src/views/config/ConfigView.vue b/client/src/views/config/ConfigView.vue index 351d7461..f3a55154 100644 --- a/client/src/views/config/ConfigView.vue +++ b/client/src/views/config/ConfigView.vue @@ -3,7 +3,7 @@

[ConfigView.vue template markup stripped during extraction; what survives: the hunk around the "DigiScript Config" heading changes a single line, and hunk @@ -16,6 +16,9 @@ adds the three template lines that mount the new Logs tab via <config-logs />.]
@@ -27,9 +30,10 @@ import ConfigSystem from '@/vue_components/config/ConfigSystem.vue'; import ConfigSettings from '@/vue_components/config/ConfigSettings.vue'; import ConfigUsers from '@/vue_components/config/ConfigUsers.vue'; import ConfigShows from '@/vue_components/config/ConfigShows.vue'; +import ConfigLogs from '@/vue_components/config/ConfigLogs.vue'; export default { name: 'ConfigView', - components: { ConfigShows, ConfigUsers, ConfigSettings, ConfigSystem }, + components: { ConfigShows, ConfigUsers, ConfigSettings, ConfigSystem, ConfigLogs }, }; diff --git a/client/src/vue_components/config/ConfigLogs.vue b/client/src/vue_components/config/ConfigLogs.vue new file mode 100644 index 00000000..e91cda29 --- /dev/null +++ b/client/src/vue_components/config/ConfigLogs.vue @@ -0,0 +1,403 @@ + + + diff --git a/client/src/vue_components/user/settings/Settings.vue b/client/src/vue_components/user/settings/Settings.vue index e9253e31..747de65e 100644 --- a/client/src/vue_components/user/settings/Settings.vue +++ b/client/src/vue_components/user/settings/Settings.vue @@ -60,6 +60,24 @@ :state="validateState('script_text_alignment')" /> + + + + Reset @@ -95,12 +113,21 @@ export default { script_auto_save_interval: 10, cue_position_right: false, script_text_alignment: TEXT_ALIGNMENT.CENTER, + console_log_level: 'WARN', }, textAlignmentOptions: [ { value: TEXT_ALIGNMENT.LEFT, text: 'Left' }, { value: TEXT_ALIGNMENT.CENTER, text: 'Center' }, { value: TEXT_ALIGNMENT.RIGHT, text: 'Right' }, ], + consoleLogLevelOptions: [ + { value: 'TRACE', text: 'TRACE' }, + { value: 'DEBUG', text: 'DEBUG' }, + { value: 'INFO', text: 'INFO' }, + { value: 'WARN', text: 'WARN' }, + { value: 'ERROR', text: 'ERROR' }, + { value: 'SILENT', text: 'SILENT' }, + ], toggle: 0, }; }, @@ -127,6 +154,7 @@ export default { required, integer, }, + console_log_level: { required }, }, }, mounted() { diff --git a/server/alembic_config/versions/c1db8c1f4e37_add_console_log_level_to_user_settings.py b/server/alembic_config/versions/c1db8c1f4e37_add_console_log_level_to_user_settings.py new file mode 100644 index 00000000..b79a73cc --- /dev/null +++ b/server/alembic_config/versions/c1db8c1f4e37_add_console_log_level_to_user_settings.py @@ -0,0 +1,47 @@ +"""Add console_log_level to user_settings + +Revision ID: c1db8c1f4e37 +Revises: fbb1b6bd8707 +Create Date: 2026-02-20 22:51:22.081714 + +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op + + +# revision identifiers, used by Alembic. +revision: str = "c1db8c1f4e37" +down_revision: Union[str, None] = "fbb1b6bd8707" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table("user_settings", schema=None) as batch_op: + batch_op.add_column( + sa.Column( + "console_log_level", + sa.String(), + nullable=False, + server_default="WARN", + ) + ) + batch_op.create_check_constraint( + "ck_user_settings_console_log_level", + "console_log_level IN ('TRACE', 'DEBUG', 'INFO', 'WARN', 'ERROR', 'SILENT')", + ) + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table("user_settings", schema=None) as batch_op: + batch_op.drop_constraint("ck_user_settings_console_log_level", type_="check") + batch_op.drop_column("console_log_level") + + # ### end Alembic commands ### diff --git a/server/controllers/api/logging.py b/server/controllers/api/logging.py new file mode 100644 index 00000000..75856952 --- /dev/null +++ b/server/controllers/api/logging.py @@ -0,0 +1,109 @@ +from typing import Any, Dict, List + +from tornado import escape, httputil + +from digi_server.app_server import DigiScriptServer +from digi_server.logger import get_logger, map_client_level +from utils.web.base_controller import BaseAPIController +from utils.web.route import ApiRoute, ApiVersion + + +class ClientLoggingBase(BaseAPIController): + def __init__( + self, + application: DigiScriptServer, + request: httputil.HTTPServerRequest, + **kwargs: Any, + ): + super().__init__(application, request, **kwargs) + self.client_logger = get_logger("Client") + + def process_logs(self, entries: List[Dict]): + # Build request-level context added to every entry's extra + request_extra = { + "remote_ip": self.request.remote_ip, + "agent": self.request.headers.get("User-Agent", "Unknown"), + } + if self.current_user: + request_extra["user_id"] = self.current_user.get("id") + request_extra["username"] = self.current_user.get("username") + + for entry in entries: + level = entry.get("level", "INFO").upper() + message = entry.get("message", "") + extra = {**entry.get("extra", {}), **request_extra} + + log_level = map_client_level(level) + + log_msg = f"[Client] {message}" + if extra: + log_msg += f" | Extra: {extra}" + + # Pass structured fields via extra so LogBufferHandler can read them. + # Tornado's LogFormatter ignores unknown extra fields, so file output + # is unaffected. 
+ self.client_logger.log( + log_level, + log_msg, + extra={ + "user_id": request_extra.get("user_id"), + "username": request_extra.get("username"), + "remote_ip": request_extra.get("remote_ip"), + }, + ) + + self.set_status(200) + self.write({"status": "OK"}) + + +@ApiRoute("logs/batch", ApiVersion.V1, ignore_logging=True) +class ClientLoggingBatchController(ClientLoggingBase): + async def post(self): + client_log_enabled = await self.application.digi_settings.get( + "client_log_enabled" + ) + if not client_log_enabled: + self.set_status(403) + self.write({"message": "Client logging is disabled"}) + return + + try: + data = escape.json_decode(self.request.body) + except ValueError: + self.set_status(400) + self.write({"message": "Invalid JSON"}) + return + + if not isinstance(data, dict): + self.set_status(400) + self.write({"message": "Expected JSON object with 'batch' key"}) + return + + batch = data.get("batch") + if not isinstance(batch, list): + self.set_status(400) + self.write({"message": "Expected 'batch' to be a list of log entries"}) + return + + self.process_logs(batch) + + +@ApiRoute("logs", ApiVersion.V1, ignore_logging=True) +class ClientLoggingController(ClientLoggingBase): + async def post(self): + client_log_enabled = await self.application.digi_settings.get( + "client_log_enabled" + ) + if not client_log_enabled: + self.set_status(403) + self.write({"message": "Client logging is disabled"}) + return + + try: + data = escape.json_decode(self.request.body) + except ValueError: + self.set_status(400) + self.write({"message": "Invalid JSON"}) + return + + self.process_logs([data]) diff --git a/server/controllers/api/logs_viewer.py b/server/controllers/api/logs_viewer.py new file mode 100644 index 00000000..d934548b --- /dev/null +++ b/server/controllers/api/logs_viewer.py @@ -0,0 +1,241 @@ +"""Log viewer API endpoints. + +Provides two admin-only endpoints backed by the in-memory log buffers +populated by :class:`~digi_server.log_buffer.LogBufferHandler`: + +* ``GET /api/v1/logs/view`` — paginated one-shot snapshot +* ``GET /api/v1/logs/stream`` — Server-Sent Events (SSE) live stream with + backfill of existing entries +""" + +import asyncio +import json +import logging + +from digi_server.log_buffer import get_client_buffer, get_server_buffer +from utils.web.base_controller import BaseAPIController +from utils.web.route import ApiRoute, ApiVersion +from utils.web.web_decorators import require_admin + + +# Map level name → minimum level_no for "greater-than-or-equal" filtering. +# WARN is accepted as an alias for WARNING (mirrors loglevel npm behaviour). +_LEVEL_ALIASES = { + "WARN": logging.WARNING, + "WARNING": logging.WARNING, + "DEBUG": logging.DEBUG, + "INFO": logging.INFO, + "ERROR": logging.ERROR, + "CRITICAL": logging.CRITICAL, + "TRACE": 5, # Custom level registered in main.py +} + +_MAX_LIMIT = 1000 + +# Sentinel placed on the SSE queue when the client disconnects, so the +# awaiting coroutine can exit cleanly without waiting for the next timeout. +_STREAM_CLOSED = object() + + +def _parse_source(raw: str) -> str: + """Normalise the ``source`` query parameter. + + :param raw: Raw value from the query string. + :returns: ``"client"`` or ``"server"``. + """ + return "client" if raw.lower() == "client" else "server" + + +def _filter_entries( + entries: list, + level_name: str, + search: str, + username_filter: str, + source: str, +) -> list: + """Apply all active filters to *entries* and return the matching subset. + + :param entries: List of entry dicts to filter. 
+ :param level_name: Uppercase level name (e.g. ``"ERROR"``); empty means + no level filter. + :param search: Lowercase search string; empty means no search filter. + :param username_filter: Lowercase username substring; only applied when + *source* is ``"client"``; empty means no filter. + :param source: ``"server"`` or ``"client"``. + :returns: Filtered list of entry dicts. + """ + if level_name: + min_level_no = _LEVEL_ALIASES.get(level_name) + if min_level_no is not None: + entries = [e for e in entries if e["level_no"] >= min_level_no] + + if search: + entries = [e for e in entries if search in e["message"].lower()] + + if username_filter and source == "client": + entries = [ + e + for e in entries + if e.get("username") and username_filter in e["username"].lower() + ] + + return entries + + +@ApiRoute("logs/view", ApiVersion.V1, ignore_logging=True) +class LogViewerController(BaseAPIController): + """Return a filtered snapshot of the in-memory log buffer. + + Query parameters + ---------------- + source : str + ``"server"`` (default) or ``"client"``. + level : str + Minimum level name. Empty string (default) means all levels. + Accepts ``TRACE``, ``DEBUG``, ``INFO``, ``WARN``/``WARNING``, + ``ERROR``, ``CRITICAL``. + search : str + Case-insensitive substring match on the ``message`` field. + username : str + (Client source only) case-insensitive substring match on the + ``username`` field. + limit : int + Maximum number of entries to return (capped at 1000, default 500). + offset : int + Number of entries to skip before returning (default 0). + """ + + @require_admin + async def get(self): + """Handle ``GET /api/v1/logs/view``. + + :raises tornado.web.HTTPError: 401 if not authenticated, + 403 if not admin. + """ + source = _parse_source(self.get_argument("source", "server")) + level_name = self.get_argument("level", "").upper() + search = self.get_argument("search", "").lower() + username_filter = self.get_argument("username", "").lower() + + try: + limit = min(int(self.get_argument("limit", "500")), _MAX_LIMIT) + except ValueError: + limit = 500 + + try: + offset = max(int(self.get_argument("offset", "0")), 0) + except ValueError: + offset = 0 + + if source == "client": + entries = get_client_buffer().get_entries() + else: + entries = get_server_buffer().get_entries() + + entries = _filter_entries(entries, level_name, search, username_filter, source) + + total = len(entries) + page = entries[offset : offset + limit] + + self.write( + { + "entries": page, + "total": total, + "returned": len(page), + "source": source, + } + ) + + +@ApiRoute("logs/stream", ApiVersion.V1, ignore_logging=True) +class LogStreamController(BaseAPIController): + """Stream log entries to the client using Server-Sent Events (SSE). + + On connection the handler first sends all existing (backfill) entries from + the buffer that match the active filters, then emits new entries in + real-time as they arrive. A ``: keepalive`` comment is written every 20 s + to prevent proxies and browsers from closing an idle connection. + + Query parameters + ---------------- + Same as :class:`LogViewerController` except ``limit`` and ``offset`` which + are not applicable to a live stream. + + SSE event format + ---------------- + Each event is a single ``data:`` line containing a JSON-encoded entry dict, + followed by a blank line:: + + data: {"ts": "...", "level": "INFO", ...} + + """ + + def on_connection_close(self): + """Called by Tornado when the client closes the connection. 
+ + Sets a flag and pushes the :data:`_STREAM_CLOSED` sentinel onto the + queue so the suspended ``get()`` coroutine wakes up and exits. + """ + self._sse_closed = True + if hasattr(self, "_sse_queue"): + self._sse_queue.put_nowait(_STREAM_CLOSED) + + @require_admin + async def get(self): + """Handle ``GET /api/v1/logs/stream``. + + :raises tornado.web.HTTPError: 401 if not authenticated, + 403 if not admin. + """ + source = _parse_source(self.get_argument("source", "server")) + level_name = self.get_argument("level", "").upper() + search = self.get_argument("search", "").lower() + username_filter = self.get_argument("username", "").lower() + + self._sse_closed = False + + self.set_header("Content-Type", "text/event-stream; charset=utf-8") + self.set_header("Cache-Control", "no-cache") + # Instruct nginx / other reverse proxies not to buffer this response. + self.set_header("X-Accel-Buffering", "no") + + buffer = get_client_buffer() if source == "client" else get_server_buffer() + + # Send backfill — all existing entries that pass the current filters. + for entry in _filter_entries( + buffer.get_entries(), level_name, search, username_filter, source + ): + self.write(f"data: {json.dumps(entry)}\n\n") + await self.flush() + + # Subscribe to future entries. + queue: asyncio.Queue = asyncio.Queue() + self._sse_queue = queue + + unsubscribe = buffer.subscribe(queue.put_nowait) + + try: + while True: + try: + entry = await asyncio.wait_for(queue.get(), timeout=20.0) + except asyncio.TimeoutError: + # Send a keepalive comment so proxies don't time out. + if self._sse_closed: + break + self.write(": keepalive\n\n") + await self.flush() + continue + + if entry is _STREAM_CLOSED: + break + + matched = _filter_entries( + [entry], level_name, search, username_filter, source + ) + if matched: + self.write(f"data: {json.dumps(matched[0])}\n\n") + await self.flush() + except Exception: # noqa: BLE001 + pass + finally: + unsubscribe() diff --git a/server/digi_server/app_server.py b/server/digi_server/app_server.py index 9e283908..6e6a9c54 100644 --- a/server/digi_server/app_server.py +++ b/server/digi_server/app_server.py @@ -18,9 +18,12 @@ from controllers import controllers from controllers.ws_controller import WebSocketController from digi_server.logger import ( + configure_client_buffer, + configure_client_logging, configure_db_logging, configure_file_logging, configure_log_level, + configure_server_buffer, get_logger, ) from digi_server.settings import Settings @@ -55,6 +58,9 @@ def __init__( self.digi_settings: Settings = Settings(self, settings_path) self.app_log_handler = None self.db_file_handler = None + self.client_file_handler = None + self.server_buffer = None + self.client_buffer = None # Controller imports (needed to trigger the decorator) controllers.import_all_controllers() @@ -377,6 +383,24 @@ async def _configure_logging(self): ) configure_log_level(log_level) + # Client logging + client_log_path = await self.digi_settings.get("client_log_path") + client_file_size = await self.digi_settings.get("client_max_log_mb") + client_backups = await self.digi_settings.get("client_log_backups") + client_log_level = await self.digi_settings.get("client_log_level") + self.client_file_handler = configure_client_logging( + log_path=client_log_path, + max_size_mb=client_file_size, + log_backups=client_backups, + handler=self.client_file_handler, + log_level=client_log_level, + ) + + # In-memory log buffers (for the log viewer UI) + buffer_maxlen = await self.digi_settings.get("log_buffer_size") + 
self.server_buffer = configure_server_buffer(buffer_maxlen) + self.client_buffer = configure_client_buffer(buffer_maxlen) + # Database logging use_db_logging = await self.digi_settings.get("db_log_enabled") db_log_path = await self.digi_settings.get("db_log_path") diff --git a/server/digi_server/log_buffer.py b/server/digi_server/log_buffer.py new file mode 100644 index 00000000..3f238978 --- /dev/null +++ b/server/digi_server/log_buffer.py @@ -0,0 +1,133 @@ +"""In-memory circular log buffer. + +Provides a :class:`LogBufferHandler` that stores the last N log records as +structured dicts in a :class:`collections.deque`. Two module-level singletons +are maintained — one for server-side logs and one for client-side logs — so +the log viewer endpoint can read from either without constructing new objects. + +Design rationale +---------------- +* Tornado is single-threaded; ``emit()`` is always called from the IOLoop + thread, so no mutex is required. +* ``deque(maxlen=N)`` provides O(1) append with automatic eviction of the + oldest entry when the buffer is full. +* ``list(deque)`` is GIL-atomic in CPython, making snapshot reads safe even + if a background thread were to emit a record concurrently. +""" + +import logging +from collections import deque +from datetime import datetime, timezone +from typing import Optional + + +class LogBufferHandler(logging.Handler): + """A logging handler that stores records in an in-memory circular buffer. + + :param maxlen: Maximum number of entries to keep. When full, the oldest + entry is evicted automatically. + """ + + def __init__(self, maxlen: int = 2000): + super().__init__() + self._buffer: deque = deque(maxlen=maxlen) + self._subscribers: set = set() + + def emit(self, record: logging.LogRecord) -> None: + """Build a structured dict from *record* and append it to the buffer. + + Extra attributes ``user_id``, ``username``, and ``remote_ip`` are read + via :func:`getattr` so they are optional; missing attributes become + ``None`` in the entry dict. + + :param record: The log record to store. + """ + try: + ts = datetime.fromtimestamp(record.created, tz=timezone.utc).isoformat( + timespec="milliseconds" + ) + entry = { + "ts": ts, + "level": record.levelname, + "level_no": record.levelno, + "logger": record.name, + "message": record.getMessage(), + "filename": record.filename, + "lineno": record.lineno, + "user_id": getattr(record, "user_id", None), + "username": getattr(record, "username", None), + "remote_ip": getattr(record, "remote_ip", None), + } + self._buffer.append(entry) + for cb in list(self._subscribers): + try: + cb(entry) + except Exception: # noqa: BLE001 + pass + except Exception: # noqa: BLE001 + self.handleError(record) + + def get_entries(self) -> list: + """Return a snapshot of all buffered entries as a plain list. + + The returned list is independent of the internal deque; subsequent + ``emit()`` calls do not affect it. + + :returns: List of entry dicts ordered oldest-first. + """ + return list(self._buffer) + + def subscribe(self, callback) -> "callable": + """Register *callback* to be called with each new entry dict. + + The callback is invoked synchronously inside :meth:`emit`, on the + Tornado IOLoop thread, immediately after the entry is appended to the + buffer. Callbacks must be non-blocking. + + :param callback: A callable that accepts one positional argument (the + entry dict). + :returns: An unsubscribe callable. Call it to deregister *callback*. 
+ """ + self._subscribers.add(callback) + + def _unsubscribe(): + self._subscribers.discard(callback) + + return _unsubscribe + + def resize(self, maxlen: int) -> None: + """Resize the buffer, preserving as many recent entries as possible. + + When shrinking, the *newest* ``maxlen`` entries are kept. + + :param maxlen: New maximum number of entries. + """ + new_buf: deque = deque(self._buffer, maxlen=maxlen) + self._buffer = new_buf + + +# Module-level singletons — one per log source. +_server_buffer: Optional[LogBufferHandler] = None +_client_buffer: Optional[LogBufferHandler] = None + + +def get_server_buffer() -> LogBufferHandler: + """Return (creating if necessary) the server-side log buffer singleton. + + :returns: The server :class:`LogBufferHandler` instance. + """ + global _server_buffer # noqa: PLW0603 + if _server_buffer is None: + _server_buffer = LogBufferHandler() + return _server_buffer + + +def get_client_buffer() -> LogBufferHandler: + """Return (creating if necessary) the client-side log buffer singleton. + + :returns: The client :class:`LogBufferHandler` instance. + """ + global _client_buffer # noqa: PLW0603 + if _client_buffer is None: + _client_buffer = LogBufferHandler() + return _client_buffer diff --git a/server/digi_server/logger.py b/server/digi_server/logger.py index d63a69e6..a5bd38a1 100644 --- a/server/digi_server/logger.py +++ b/server/digi_server/logger.py @@ -4,6 +4,12 @@ from tornado.log import LogFormatter +from digi_server.log_buffer import ( + LogBufferHandler, + get_client_buffer, + get_server_buffer, +) + logger = logging.getLogger("DigiScript") ALL_LOGGERS = [ @@ -78,6 +84,54 @@ def configure_db_logging( return file_handler +CLIENT_LEVEL_MAP = { + "TRACE": 5, # Registered via add_logging_level("TRACE", logging.DEBUG - 5) in main.py + "DEBUG": logging.DEBUG, + "INFO": logging.INFO, + "WARN": logging.WARNING, # loglevel npm uses WARN; Python uses WARNING + "ERROR": logging.ERROR, + "SILENT": logging.CRITICAL + 1, # No Python equivalent; suppress all +} + + +def map_client_level(level_name: str) -> int: + """Map a loglevel npm level name to a Python logging integer. + + :param level_name: Level name from the loglevel npm package (e.g. TRACE, WARN). + :returns: The corresponding Python logging integer level. + """ + return CLIENT_LEVEL_MAP.get(level_name.upper(), logging.INFO) + + +def configure_client_logging( + log_path, + max_size_mb=100, + log_backups=5, + handler=None, + log_level=logging.DEBUG, +): + size_bytes = max_size_mb * 1024 * 1024 + client_logger = get_logger("Client") + + if handler: + client_logger.removeHandler(handler) + + if isinstance(log_level, str): + log_level = map_client_level(log_level) + + client_logger.setLevel(log_level) + file_handler = None + if log_path: + file_handler = RotatingFileHandler( + log_path, maxBytes=size_bytes, backupCount=log_backups + ) + file_handler.setFormatter(LogFormatter(color=False)) + client_logger.addHandler(file_handler) + # Prevent propagation to avoid polluting the server console + client_logger.propagate = False + return file_handler + + def add_logging_level(level_name, level_num, method_name=None): if not method_name: method_name = level_name.lower() @@ -102,6 +156,40 @@ def log_to_root(message, *args, **kwargs): setattr(logging, method_name, log_to_root) +def configure_server_buffer(maxlen: int) -> LogBufferHandler: + """Attach (or resize) the server log buffer to all server loggers. + + Idempotent: if the handler is already attached, only the buffer size is + updated. 
+ + :param maxlen: Maximum number of entries to keep in the buffer. + :returns: The :class:`LogBufferHandler` singleton for server logs. + """ + handler = get_server_buffer() + handler.resize(maxlen) + for _logger in ALL_LOGGERS: + if handler not in _logger.handlers: + _logger.addHandler(handler) + return handler + + +def configure_client_buffer(maxlen: int) -> LogBufferHandler: + """Attach (or resize) the client log buffer to the DigiScript.Client logger. + + Idempotent: if the handler is already attached, only the buffer size is + updated. + + :param maxlen: Maximum number of entries to keep in the buffer. + :returns: The :class:`LogBufferHandler` singleton for client logs. + """ + handler = get_client_buffer() + handler.resize(maxlen) + client_logger = get_logger("Client") + if handler not in client_logger.handlers: + client_logger.addHandler(handler) + return handler + + def get_level_names_by_order(): levels = logging.getLevelNamesMapping() sorted_levels = sorted(levels.items(), key=lambda x: x[1]) diff --git a/server/digi_server/settings.py b/server/digi_server/settings.py index dfe386d4..b23b49d6 100644 --- a/server/digi_server/settings.py +++ b/server/digi_server/settings.py @@ -279,6 +279,61 @@ def __init__(self, application: DigiScriptServer, settings_path=None): display_name="Enable Network Discovery (mDNS)", help_text="Advertise this server on the local network for automatic discovery by desktop clients.", ) + self.define( + "client_log_enabled", + bool, + True, + True, + display_name="Enable Client Log Forwarding", + help_text="When enabled, client browsers will forward their logs to the server.", + ) + self.define( + "client_log_level", + str, + "INFO", + True, + self._application.regen_logging, + display_name="Client Log Level", + help_text="Minimum log level that clients will forward to the server.", + choice_options=["TRACE", "DEBUG", "INFO", "WARN", "ERROR"], + ) + self.define( + "client_log_path", + str, + os.path.join(self._base_path, "digiscript_client.log"), + True, + self._application.regen_logging, + display_name="Client Log Path", + help_text="Path to the log file for client-side log messages.", + ) + self.define( + "client_max_log_mb", + int, + 100, + True, + self._application.regen_logging, + display_name="Max Client Log Size (MB)", + help_text="Maximum size in MB of the client log file before it is rotated.", + ) + self.define( + "client_log_backups", + int, + 5, + True, + self._application.regen_logging, + display_name="Client Log Backups", + help_text="Number of rotated client log file backups to retain.", + ) + self.define( + "log_buffer_size", + int, + 2000, + True, + self._application.regen_logging, + display_name="Log Buffer Size", + help_text="Number of recent log entries to keep in memory for the log viewer. " + "Larger values use more memory. 
Changes take effect after restart.", + ) self._load(spawn_callbacks=False) diff --git a/server/models/user.py b/server/models/user.py index aae2b45f..258655c9 100644 --- a/server/models/user.py +++ b/server/models/user.py @@ -6,7 +6,7 @@ from functools import partial from typing import TYPE_CHECKING, List, Union -from sqlalchemy import ForeignKey, Integer, Text, TypeDecorator, select +from sqlalchemy import CheckConstraint, ForeignKey, Integer, Text, TypeDecorator, select from sqlalchemy.orm import Mapped, mapped_column, relationship from models.models import db @@ -86,6 +86,14 @@ class UserSettings(db.Model): script_text_alignment: Mapped[TextAlignment] = mapped_column( TextAlignmentCol, default=TextAlignment.CENTER ) + console_log_level: Mapped[str] = mapped_column(default="WARN") + + __table_args__ = ( + CheckConstraint( + "console_log_level IN ('TRACE', 'DEBUG', 'INFO', 'WARN', 'ERROR', 'SILENT')", + name="ck_user_settings_console_log_level", + ), + ) # Hidden Properties (None user editable, marked with _) # Make sure to also mark these as hidden in the Schema for this in schemas/schemas.py diff --git a/server/test/controllers/api/test_logging.py b/server/test/controllers/api/test_logging.py new file mode 100644 index 00000000..905ed3ed --- /dev/null +++ b/server/test/controllers/api/test_logging.py @@ -0,0 +1,72 @@ +import json + +from tornado import escape +from tornado.testing import gen_test + +from test.conftest import DigiScriptTestCase + + +class TestLoggingController(DigiScriptTestCase): + @gen_test + async def test_logging_endpoint_success(self): + """Test that the logging endpoint successfully receives logs.""" + payload = { + "level": "INFO", + "message": "Test message from client", + "extra": {"source": "unit-test"}, + } + response = await self.http_client.fetch( + self.get_url("/api/v1/logs"), + method="POST", + body=json.dumps(payload), + headers={"Content-Type": "application/json"}, + ) + self.assertEqual(200, response.code) + data = escape.json_decode(response.body) + self.assertEqual("OK", data["status"]) + + @gen_test + async def test_logging_endpoint_disabled(self): + """Test that the logging endpoint returns 403 when disabled in settings.""" + # Disable client logging in settings + await self._app.digi_settings.set("client_log_enabled", False) + + payload = {"level": "INFO", "message": "Test message from client"} + response = await self.http_client.fetch( + self.get_url("/api/v1/logs"), + method="POST", + body=json.dumps(payload), + headers={"Content-Type": "application/json"}, + raise_error=False, + ) + self.assertEqual(403, response.code) + data = escape.json_decode(response.body) + self.assertEqual("Client logging is disabled", data["message"]) + + # Re-enable for other tests + await self._app.digi_settings.set("client_log_enabled", True) + + @gen_test + async def test_logging_endpoint_invalid_json(self): + """Test that the logging endpoint returns 400 for invalid JSON.""" + response = await self.http_client.fetch( + self.get_url("/api/v1/logs"), + method="POST", + body="invalid json", + headers={"Content-Type": "application/json"}, + raise_error=False, + ) + self.assertEqual(400, response.code) + + @gen_test + async def test_logging_endpoint_different_levels(self): + """Test that the logging endpoint accepts various log levels.""" + for level in ["DEBUG", "INFO", "WARN", "ERROR"]: + payload = {"level": level, "message": f"Test level {level}"} + response = await self.http_client.fetch( + self.get_url("/api/v1/logs"), + method="POST", + body=json.dumps(payload), + 
headers={"Content-Type": "application/json"}, + ) + self.assertEqual(200, response.code) diff --git a/server/test/controllers/api/test_logs_viewer.py b/server/test/controllers/api/test_logs_viewer.py new file mode 100644 index 00000000..bf68508a --- /dev/null +++ b/server/test/controllers/api/test_logs_viewer.py @@ -0,0 +1,396 @@ +"""Integration tests for GET /api/v1/logs/view.""" + +import logging + +from tornado import escape + +from digi_server.log_buffer import get_client_buffer, get_server_buffer +from test.conftest import DigiScriptTestCase + + +def _make_record(msg, level=logging.INFO, name="test", **extra_attrs): + record = logging.LogRecord( + name=name, + level=level, + pathname="fake.py", + lineno=1, + msg=msg, + args=(), + exc_info=None, + ) + for k, v in extra_attrs.items(): + setattr(record, k, v) + return record + + +class TestLogViewerController(DigiScriptTestCase): + """Tests for the log viewer endpoint.""" + + # ------------------------------------------------------------------ + # Helpers + # ------------------------------------------------------------------ + + def _create_and_login_admin(self, username="admin", password="adminpass"): + self.fetch( + "/api/v1/auth/create", + method="POST", + body=escape.json_encode( + {"username": username, "password": password, "is_admin": True} + ), + ) + resp = self.fetch( + "/api/v1/auth/login", + method="POST", + body=escape.json_encode({"username": username, "password": password}), + ) + return escape.json_decode(resp.body)["access_token"] + + def _create_and_login_user(self, admin_token, username="user", password="userpass"): + self.fetch( + "/api/v1/auth/create", + method="POST", + body=escape.json_encode( + {"username": username, "password": password, "is_admin": False} + ), + headers={"Authorization": f"Bearer {admin_token}"}, + ) + resp = self.fetch( + "/api/v1/auth/login", + method="POST", + body=escape.json_encode({"username": username, "password": password}), + ) + return escape.json_decode(resp.body)["access_token"] + + def _inject_server_entry(self, msg, level=logging.INFO): + get_server_buffer().emit(_make_record(msg, level=level)) + + def _inject_client_entry(self, msg, level=logging.INFO, **extra): + get_client_buffer().emit(_make_record(msg, level=level, **extra)) + + def _fetch_view(self, token=None, **params): + qs = "&".join(f"{k}={v}" for k, v in params.items()) + url = f"/api/v1/logs/view?{qs}" if qs else "/api/v1/logs/view" + headers = {} + if token: + headers["Authorization"] = f"Bearer {token}" + return self.fetch(url, method="GET", headers=headers, raise_error=False) + + # ------------------------------------------------------------------ + # Authentication / authorisation + # ------------------------------------------------------------------ + + def test_unauthenticated_returns_401(self): + resp = self._fetch_view() + self.assertEqual(401, resp.code) + + def test_non_admin_returns_401(self): + admin_token = self._create_and_login_admin() + user_token = self._create_and_login_user(admin_token) + resp = self._fetch_view(token=user_token) + self.assertEqual(401, resp.code) + + def test_admin_returns_200(self): + token = self._create_and_login_admin() + resp = self._fetch_view(token=token) + self.assertEqual(200, resp.code) + + def test_admin_response_structure(self): + """Response must contain entries, total, returned, source.""" + token = self._create_and_login_admin() + resp = self._fetch_view(token=token) + body = escape.json_decode(resp.body) + for key in ("entries", "total", "returned", "source"): + 
self.assertIn(key, body, f"Missing key: {key}") + + # ------------------------------------------------------------------ + # Source selection + # ------------------------------------------------------------------ + + def test_source_defaults_to_server(self): + token = self._create_and_login_admin() + resp = self._fetch_view(token=token) + body = escape.json_decode(resp.body) + self.assertEqual("server", body["source"]) + + def test_source_server_explicit(self): + token = self._create_and_login_admin() + resp = self._fetch_view(token=token, source="server") + body = escape.json_decode(resp.body) + self.assertEqual("server", body["source"]) + + def test_source_client(self): + token = self._create_and_login_admin() + resp = self._fetch_view(token=token, source="client") + body = escape.json_decode(resp.body) + self.assertEqual("client", body["source"]) + + def test_server_and_client_buffers_are_independent(self): + """Entries injected into server buffer must not appear in client view.""" + self._inject_server_entry("server_only_msg") + token = self._create_and_login_admin() + + server_resp = escape.json_decode( + self._fetch_view(token=token, source="server").body + ) + client_resp = escape.json_decode( + self._fetch_view(token=token, source="client").body + ) + + server_msgs = [e["message"] for e in server_resp["entries"]] + client_msgs = [e["message"] for e in client_resp["entries"]] + + self.assertIn("server_only_msg", server_msgs) + self.assertNotIn("server_only_msg", client_msgs) + + # ------------------------------------------------------------------ + # Level filter + # ------------------------------------------------------------------ + + def test_level_filter_excludes_lower_levels(self): + """Requesting ERROR+ should hide INFO entries.""" + self._inject_server_entry("info_entry", level=logging.INFO) + self._inject_server_entry("error_entry", level=logging.ERROR) + token = self._create_and_login_admin() + + resp = escape.json_decode( + self._fetch_view(token=token, source="server", level="ERROR").body + ) + messages = [e["message"] for e in resp["entries"]] + self.assertIn("error_entry", messages) + self.assertNotIn("info_entry", messages) + + def test_level_filter_empty_returns_all(self): + """Empty level param returns entries at all levels.""" + self._inject_server_entry("debug_entry", level=logging.DEBUG) + self._inject_server_entry("critical_entry", level=logging.CRITICAL) + token = self._create_and_login_admin() + + resp = escape.json_decode( + self._fetch_view(token=token, source="server", level="").body + ) + messages = [e["message"] for e in resp["entries"]] + self.assertIn("debug_entry", messages) + self.assertIn("critical_entry", messages) + + def test_warn_alias(self): + """level=WARN should behave identically to level=WARNING.""" + self._inject_server_entry("warn_entry", level=logging.WARNING) + self._inject_server_entry("debug_entry_warn_alias", level=logging.DEBUG) + token = self._create_and_login_admin() + + warn_resp = escape.json_decode( + self._fetch_view(token=token, source="server", level="WARN").body + ) + warning_resp = escape.json_decode( + self._fetch_view(token=token, source="server", level="WARNING").body + ) + + warn_msgs = [e["message"] for e in warn_resp["entries"]] + warning_msgs = [e["message"] for e in warning_resp["entries"]] + + self.assertIn("warn_entry", warn_msgs) + self.assertNotIn("debug_entry_warn_alias", warn_msgs) + self.assertEqual(set(warn_msgs), set(warning_msgs)) + + # ------------------------------------------------------------------ + # 
Search filter + # ------------------------------------------------------------------ + + def test_search_filter(self): + self._inject_server_entry("unique_search_term_xyz found here") + self._inject_server_entry("unrelated log entry") + token = self._create_and_login_admin() + + resp = escape.json_decode( + self._fetch_view( + token=token, source="server", search="unique_search_term_xyz" + ).body + ) + messages = [e["message"] for e in resp["entries"]] + self.assertTrue(any("unique_search_term_xyz" in m for m in messages)) + self.assertFalse(any("unrelated log entry" in m for m in messages)) + + def test_search_filter_case_insensitive(self): + self._inject_server_entry("CaseSensitiveTest message") + token = self._create_and_login_admin() + + resp = escape.json_decode( + self._fetch_view( + token=token, source="server", search="casesensitivetest" + ).body + ) + messages = [e["message"] for e in resp["entries"]] + self.assertTrue(any("CaseSensitiveTest" in m for m in messages)) + + # ------------------------------------------------------------------ + # Username filter (client source only) + # ------------------------------------------------------------------ + + def test_username_filter_client_source(self): + self._inject_client_entry("alice log", username="alice") + self._inject_client_entry("bob log", username="bob") + token = self._create_and_login_admin() + + resp = escape.json_decode( + self._fetch_view(token=token, source="client", username="alice").body + ) + messages = [e["message"] for e in resp["entries"]] + self.assertIn("alice log", messages) + self.assertNotIn("bob log", messages) + + def test_username_filter_ignored_for_server_source(self): + """username param should have no effect on the server source.""" + self._inject_server_entry("server_entry_for_username_test") + token = self._create_and_login_admin() + + resp_no_filter = escape.json_decode( + self._fetch_view(token=token, source="server").body + ) + resp_with_filter = escape.json_decode( + self._fetch_view(token=token, source="server", username="alice").body + ) + self.assertEqual(resp_no_filter["total"], resp_with_filter["total"]) + + # ------------------------------------------------------------------ + # Pagination + # ------------------------------------------------------------------ + + def test_limit(self): + for i in range(20): + self._inject_server_entry(f"limit_test_entry {i}") + token = self._create_and_login_admin() + + resp = escape.json_decode( + self._fetch_view(token=token, source="server", limit=5).body + ) + self.assertLessEqual(len(resp["entries"]), 5) + self.assertEqual(len(resp["entries"]), resp["returned"]) + + def test_offset(self): + """offset should skip entries.""" + # Inject 10 entries that are easy to identify + for i in range(10): + self._inject_server_entry(f"offset_test_msg {i}") + token = self._create_and_login_admin() + + resp_all = escape.json_decode( + self._fetch_view( + token=token, source="server", search="offset_test_msg", limit=10 + ).body + ) + resp_offset = escape.json_decode( + self._fetch_view( + token=token, + source="server", + search="offset_test_msg", + limit=10, + offset=5, + ).body + ) + # total should be the same regardless of offset + self.assertEqual(resp_all["total"], resp_offset["total"]) + # offset result should have 5 fewer entries + self.assertEqual(resp_all["returned"] - 5, resp_offset["returned"]) + + def test_limit_capped_at_1000(self): + """limit > 1000 should be silently capped, not error.""" + token = self._create_and_login_admin() + # Just verify a large 
limit doesn't cause an error + self.assertEqual( + 200, + self.fetch( + "/api/v1/logs/view?limit=9999", + method="GET", + headers={"Authorization": f"Bearer {token}"}, + raise_error=False, + ).code, + ) + + # ------------------------------------------------------------------ + # total vs returned + # ------------------------------------------------------------------ + + def test_total_reflects_filtered_count(self): + """total should count all matching entries, not just the page.""" + for i in range(15): + self._inject_server_entry(f"total_test_entry {i}") + token = self._create_and_login_admin() + + resp = escape.json_decode( + self._fetch_view( + token=token, source="server", search="total_test_entry", limit=5 + ).body + ) + self.assertEqual(15, resp["total"]) + self.assertEqual(5, resp["returned"]) + + # ------------------------------------------------------------------ + # SSE stream endpoint — auth and header checks + # ------------------------------------------------------------------ + # Full streaming content tests require async tooling; these tests verify + # the auth layer before any streaming begins. + + def _fetch_stream_headers(self, token=None, **params): + """Perform a GET /api/v1/logs/stream with a very short request timeout. + + The stream never sends a final Content-Length, so self.fetch() would + hang indefinitely without a timeout. We use request_timeout=0.5 s; + the server returns an HTTP 401/403 immediately for auth failures, but + for a valid admin request the timeout fires after headers are received + and some initial data may have been sent. + """ + qs = "&".join(f"{k}={v}" for k, v in params.items()) + url = f"/api/v1/logs/stream?{qs}" if qs else "/api/v1/logs/stream" + headers = {} + if token: + headers["Authorization"] = f"Bearer {token}" + return self.fetch( + url, + method="GET", + headers=headers, + raise_error=False, + request_timeout=0.5, + ) + + def test_stream_unauthenticated_returns_401(self): + resp = self._fetch_stream_headers() + self.assertEqual(401, resp.code) + + def test_stream_non_admin_returns_401(self): + admin_token = self._create_and_login_admin() + user_token = self._create_and_login_user(admin_token) + resp = self._fetch_stream_headers(token=user_token) + self.assertEqual(401, resp.code) + + def test_stream_backfill_contains_existing_entries(self): + """Entries injected before connection must appear in the first chunk. + + SSE connections never send a terminal response, so self.fetch() raises + HTTPTimeoutError when the request_timeout expires. We use + streaming_callback to collect chunks as they arrive and inspect the + accumulated body in the except handler. + """ + self._inject_server_entry("sse_backfill_unique_xyz") + token = self._create_and_login_admin() + + received_chunks = [] + + try: + self.fetch( + "/api/v1/logs/stream?source=server", + method="GET", + headers={"Authorization": f"Bearer {token}"}, + raise_error=False, + request_timeout=1.0, + streaming_callback=received_chunks.append, + ) + except Exception: + # HTTPTimeoutError is expected — the stream stays open indefinitely. 
+ pass + + self.assertTrue(len(received_chunks) > 0, "No data received from SSE stream") + full_body = b"".join(received_chunks).decode() + self.assertIn("sse_backfill_unique_xyz", full_body) + # Verify SSE wire format: events must begin with "data: " + data_lines = [ln for ln in full_body.splitlines() if ln.startswith("data: ")] + self.assertTrue(len(data_lines) > 0) diff --git a/server/test/digi_server/test_log_buffer.py b/server/test/digi_server/test_log_buffer.py new file mode 100644 index 00000000..08c94a23 --- /dev/null +++ b/server/test/digi_server/test_log_buffer.py @@ -0,0 +1,238 @@ +import logging +import unittest +from datetime import datetime + +from digi_server.log_buffer import ( + LogBufferHandler, + get_client_buffer, + get_server_buffer, +) + + +def _make_record(msg="hello", level=logging.INFO, name="test", **extra_attrs): + """Helper: create a :class:`logging.LogRecord` with optional extra attrs.""" + record = logging.LogRecord( + name=name, + level=level, + pathname="test_file.py", + lineno=42, + msg=msg, + args=(), + exc_info=None, + ) + for key, value in extra_attrs.items(): + setattr(record, key, value) + return record + + +class TestLogBufferHandler(unittest.TestCase): + def setUp(self): + self.handler = LogBufferHandler(maxlen=100) + + # ------------------------------------------------------------------ + # Basic emit / retrieval + # ------------------------------------------------------------------ + + def test_captures_entry(self): + """An emitted record should appear in get_entries().""" + self.handler.emit(_make_record("test message")) + entries = self.handler.get_entries() + self.assertEqual(1, len(entries)) + self.assertEqual("test message", entries[0]["message"]) + + def test_entry_schema(self): + """Every required key must be present in an emitted entry.""" + self.handler.emit( + _make_record("schema check", level=logging.WARNING, name="DigiScript") + ) + entry = self.handler.get_entries()[0] + for key in ( + "ts", + "level", + "level_no", + "logger", + "message", + "filename", + "lineno", + ): + self.assertIn(key, entry, f"Missing key: {key}") + self.assertEqual("WARNING", entry["level"]) + self.assertEqual(logging.WARNING, entry["level_no"]) + self.assertEqual("DigiScript", entry["logger"]) + self.assertEqual("schema check", entry["message"]) + + def test_empty_buffer_returns_empty_list(self): + self.assertEqual([], self.handler.get_entries()) + + # ------------------------------------------------------------------ + # Circular buffer (maxlen eviction) + # ------------------------------------------------------------------ + + def test_maxlen_eviction(self): + """When the buffer is full, the oldest entry is dropped.""" + handler = LogBufferHandler(maxlen=5) + for i in range(6): + handler.emit(_make_record(f"msg {i}")) + entries = handler.get_entries() + self.assertEqual(5, len(entries)) + # Oldest entry (msg 0) must have been evicted + messages = [e["message"] for e in entries] + self.assertNotIn("msg 0", messages) + self.assertIn("msg 5", messages) + + def test_maxlen_not_exceeded(self): + handler = LogBufferHandler(maxlen=10) + for i in range(10): + handler.emit(_make_record(f"msg {i}")) + self.assertEqual(10, len(handler.get_entries())) + + # ------------------------------------------------------------------ + # Resize + # ------------------------------------------------------------------ + + def test_resize_shrink_keeps_newest(self): + """Shrinking retains the newest entries.""" + handler = LogBufferHandler(maxlen=100) + for i in range(100): + 
handler.emit(_make_record(f"msg {i}")) + handler.resize(50) + entries = handler.get_entries() + self.assertEqual(50, len(entries)) + # Newest entry must still be present + self.assertEqual("msg 99", entries[-1]["message"]) + + def test_resize_grow_retains_all(self): + """Growing preserves all existing entries.""" + handler = LogBufferHandler(maxlen=10) + for i in range(10): + handler.emit(_make_record(f"msg {i}")) + handler.resize(200) + self.assertEqual(10, len(handler.get_entries())) + + def test_resize_same_size(self): + """Resizing to the current size should be a no-op for content.""" + handler = LogBufferHandler(maxlen=5) + for i in range(5): + handler.emit(_make_record(f"msg {i}")) + handler.resize(5) + self.assertEqual(5, len(handler.get_entries())) + + # ------------------------------------------------------------------ + # Snapshot isolation + # ------------------------------------------------------------------ + + def test_get_entries_snapshot(self): + """The returned list is independent of subsequent emits.""" + self.handler.emit(_make_record("first")) + snapshot = self.handler.get_entries() + self.handler.emit(_make_record("second")) + # Snapshot should still only contain the first entry + self.assertEqual(1, len(snapshot)) + + # ------------------------------------------------------------------ + # Client extra fields + # ------------------------------------------------------------------ + + def test_client_extra_fields_present(self): + """Extra attrs user_id / username / remote_ip are stored in the entry.""" + record = _make_record( + "client log", + user_id=7, + username="alice", + remote_ip="192.168.1.1", + ) + self.handler.emit(record) + entry = self.handler.get_entries()[0] + self.assertEqual(7, entry["user_id"]) + self.assertEqual("alice", entry["username"]) + self.assertEqual("192.168.1.1", entry["remote_ip"]) + + def test_missing_extra_fields_are_none(self): + """Records without client extra fields should store None, not raise.""" + self.handler.emit(_make_record("plain record")) + entry = self.handler.get_entries()[0] + self.assertIsNone(entry["user_id"]) + self.assertIsNone(entry["username"]) + self.assertIsNone(entry["remote_ip"]) + + # ------------------------------------------------------------------ + # Singletons + # ------------------------------------------------------------------ + + def test_get_server_buffer_singleton(self): + """get_server_buffer() returns the same instance on repeated calls.""" + a = get_server_buffer() + b = get_server_buffer() + self.assertIs(a, b) + + def test_get_client_buffer_singleton(self): + """get_client_buffer() returns the same instance on repeated calls.""" + a = get_client_buffer() + b = get_client_buffer() + self.assertIs(a, b) + + def test_server_and_client_buffers_are_distinct(self): + """Server and client buffers must be separate objects.""" + self.assertIsNot(get_server_buffer(), get_client_buffer()) + + # ------------------------------------------------------------------ + # Pub/sub (subscribe / unsubscribe) + # ------------------------------------------------------------------ + + def test_subscribe_called_on_emit(self): + """A registered callback is invoked when a record is emitted.""" + received = [] + self.handler.subscribe(received.append) + self.handler.emit(_make_record("sub test")) + self.assertEqual(1, len(received)) + self.assertEqual("sub test", received[0]["message"]) + + def test_subscribe_receives_correct_entry(self): + """The callback receives the same dict that appears in get_entries().""" + received = 
[] + self.handler.subscribe(received.append) + self.handler.emit(_make_record("match test", level=logging.ERROR)) + self.assertEqual(received[0], self.handler.get_entries()[-1]) + + def test_unsubscribe_stops_callbacks(self): + """Calling the returned unsubscribe callable stops future callbacks.""" + received = [] + unsubscribe = self.handler.subscribe(received.append) + self.handler.emit(_make_record("before")) + unsubscribe() + self.handler.emit(_make_record("after")) + # Only the first emit should have reached the callback. + self.assertEqual(1, len(received)) + self.assertEqual("before", received[0]["message"]) + + def test_multiple_subscribers_all_notified(self): + """All registered callbacks receive each emitted entry.""" + received_a, received_b = [], [] + self.handler.subscribe(received_a.append) + self.handler.subscribe(received_b.append) + self.handler.emit(_make_record("multi")) + self.assertEqual(1, len(received_a)) + self.assertEqual(1, len(received_b)) + + def test_subscriber_exception_does_not_break_emit(self): + """A callback that raises must not prevent the entry being buffered.""" + + def bad_callback(_entry): + raise RuntimeError("boom") + + self.handler.subscribe(bad_callback) + # Should not raise — the exception is caught internally. + self.handler.emit(_make_record("resilient")) + self.assertEqual(1, len(self.handler.get_entries())) + + # ------------------------------------------------------------------ + # Timestamp format + # ------------------------------------------------------------------ + + def test_timestamp_is_iso8601(self): + """The ts field should be parseable as an ISO-8601 UTC timestamp.""" + self.handler.emit(_make_record("ts test")) + ts = self.handler.get_entries()[0]["ts"] + # Should not raise + parsed = datetime.fromisoformat(ts) + self.assertIsNotNone(parsed) diff --git a/server/test/digi_server/test_logger.py b/server/test/digi_server/test_logger.py new file mode 100644 index 00000000..617be374 --- /dev/null +++ b/server/test/digi_server/test_logger.py @@ -0,0 +1,40 @@ +import logging +import unittest + +from digi_server.logger import CLIENT_LEVEL_MAP, map_client_level + + +class TestMapClientLevel(unittest.TestCase): + def test_trace_maps_to_level_5(self): + self.assertEqual(5, map_client_level("TRACE")) + + def test_debug_maps_to_logging_debug(self): + self.assertEqual(logging.DEBUG, map_client_level("DEBUG")) + + def test_info_maps_to_logging_info(self): + self.assertEqual(logging.INFO, map_client_level("INFO")) + + def test_warn_maps_to_logging_warning(self): + """loglevel npm uses WARN; Python uses WARNING (30).""" + self.assertEqual(logging.WARNING, map_client_level("WARN")) + + def test_error_maps_to_logging_error(self): + self.assertEqual(logging.ERROR, map_client_level("ERROR")) + + def test_silent_maps_above_critical(self): + """SILENT has no Python equivalent; should suppress all output.""" + self.assertEqual(logging.CRITICAL + 1, map_client_level("SILENT")) + + def test_unknown_level_falls_back_to_info(self): + self.assertEqual(logging.INFO, map_client_level("UNKNOWN")) + self.assertEqual(logging.INFO, map_client_level("")) + self.assertEqual(logging.INFO, map_client_level("NOTSET")) + + def test_case_insensitive(self): + self.assertEqual(logging.WARNING, map_client_level("warn")) + self.assertEqual(logging.INFO, map_client_level("info")) + self.assertEqual(5, map_client_level("trace")) + + def test_client_level_map_contains_all_expected_keys(self): + expected = {"TRACE", "DEBUG", "INFO", "WARN", "ERROR", "SILENT"} + 
self.assertEqual(expected, set(CLIENT_LEVEL_MAP.keys())) diff --git a/server/utils/web/base_controller.py b/server/utils/web/base_controller.py index 22a04e2f..8fff65c8 100644 --- a/server/utils/web/base_controller.py +++ b/server/utils/web/base_controller.py @@ -171,9 +171,16 @@ def _unimplemented_method(self, *args: str, **kwargs: str) -> None: self.write({"message": "405 not allowed"}) def on_finish(self): + from utils.web.route import Route # noqa: PLC0415 + + if self.request.path in Route.ignored_logging_routes(): + log_method = get_logger().trace + else: + log_method = get_logger().debug + if self.request.body: try: - get_logger().debug( + log_method( f"{self.request.method} " f"{self.request.path} " f"{escape.json_decode(self.request.body)}" diff --git a/server/utils/web/route.py b/server/utils/web/route.py index 2bdb7bfd..464b455d 100644 --- a/server/utils/web/route.py +++ b/server/utils/web/route.py @@ -56,9 +56,15 @@ class ApiVersion(Enum): class ApiRoute(Route): - def __init__(self, route: str, api_version: ApiVersion, name: str = None): + def __init__( + self, + route: str, + api_version: ApiVersion, + name: str = None, + ignore_logging: bool = False, + ): route = f"/api/v{api_version.value}/{route.removeprefix('/')}" - super().__init__(route, name) + super().__init__(route, name, ignore_logging) def __call__(self, controller): if not issubclass(controller, (BaseAPIController, WebSocketHandler)):
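
A note on the wire format: `flushQueue()` in `logger.js` posts a single JSON object with a `batch` key to `/api/v1/logs/batch`, and `ClientLoggingBatchController` validates exactly that shape. A minimal Python sketch that exercises the same contract — the base URL and token below are placeholders, not values from this patch (the endpoint only requires `client_log_enabled`; a token just lets the server attach `user_id`/`username` to each entry):

```python
import json
import urllib.request

BASE_URL = "http://localhost:8080"  # placeholder — use your server's address
TOKEN = None  # optional JWT; when present the server records user_id/username

payload = {
    "batch": [
        {"level": "INFO", "message": "page loaded", "extra": {}},
        {
            "level": "ERROR",
            "message": "Unhandled Error: boom",
            "extra": {"filename": "app.js", "lineno": 12, "colno": 3, "stack": None},
        },
    ]
}

headers = {"Content-Type": "application/json"}
if TOKEN:
    headers["Authorization"] = f"Bearer {TOKEN}"

req = urllib.request.Request(
    f"{BASE_URL}/api/v1/logs/batch",
    data=json.dumps(payload).encode("utf-8"),
    headers=headers,
    method="POST",
)
with urllib.request.urlopen(req) as resp:
    print(resp.status, resp.read())  # expect: 200 b'{"status": "OK"}'
```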
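
On the level mapping in `digi_server/logger.py`: `CLIENT_LEVEL_MAP` folds the loglevel npm names into Python's numeric hierarchy, with `TRACE` relying on the custom level 5 that the diff's comments say is registered in `main.py`. A quick illustration of the resulting values (assuming that registration has run):

```python
from digi_server.logger import map_client_level

for name in ("TRACE", "DEBUG", "INFO", "WARN", "ERROR", "SILENT", "bogus"):
    print(f"{name:7s} -> {map_client_level(name)}")

# TRACE   -> 5    custom level (logging.DEBUG - 5), registered in main.py
# DEBUG   -> 10
# INFO    -> 20
# WARN    -> 30   loglevel's WARN is Python's WARNING
# ERROR   -> 40
# SILENT  -> 51   CRITICAL + 1: as a logger threshold it suppresses everything
# bogus   -> 20   unknown names fall back to INFO
```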
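
`LogBufferHandler` behaves like any other `logging.Handler`, so its eviction, snapshot, and pub/sub semantics are easy to see in isolation — a usage sketch based on the class as defined in `digi_server/log_buffer.py`:

```python
import logging
from digi_server.log_buffer import LogBufferHandler

buf = LogBufferHandler(maxlen=3)
logger = logging.getLogger("demo")
logger.setLevel(logging.INFO)
logger.addHandler(buf)

seen = []
unsubscribe = buf.subscribe(seen.append)  # invoked synchronously on each emit

for i in range(5):
    logger.info("msg %d", i)

print(len(buf.get_entries()))            # 3 — the two oldest entries were evicted
print(buf.get_entries()[-1]["message"])  # 'msg 4'
print(len(seen))                         # 5 — subscribers see every entry, evicted or not

buf.resize(2)   # shrink in place; the newest 2 entries survive
unsubscribe()   # deregister; later emits no longer reach `seen`
```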
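
`/logs/view` applies all of its filters before paginating, and `total` counts the filtered set rather than the page, so a client can walk the buffer by advancing `offset` by `returned`. A sketch of that loop — base URL and token are placeholders, and the endpoint requires an admin JWT:

```python
import json
import urllib.parse
import urllib.request

BASE_URL = "http://localhost:8080"  # placeholder
TOKEN = "..."                       # placeholder admin JWT — the endpoint is admin-only

def fetch_page(offset: int, limit: int = 100) -> dict:
    qs = urllib.parse.urlencode({
        "source": "client",   # read the client buffer instead of the server one
        "level": "WARN",      # WARN is accepted as an alias for WARNING
        "search": "timeout",  # case-insensitive substring match on message
        "limit": limit,
        "offset": offset,
    })
    req = urllib.request.Request(
        f"{BASE_URL}/api/v1/logs/view?{qs}",
        headers={"Authorization": f"Bearer {TOKEN}"},
    )
    with urllib.request.urlopen(req) as resp:
        return json.loads(resp.read())

offset = 0
while True:
    page = fetch_page(offset)
    for entry in page["entries"]:
        print(entry["ts"], entry["level"], entry["message"])
    offset += page["returned"]
    if page["returned"] == 0 or offset >= page["total"]:
        break
```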
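
The `/logs/stream` handler emits plain SSE — one `data:` line of JSON per entry, blank-line separated, with `: keepalive` comments every 20 s — so any line-oriented reader works. Browsers would use `EventSource`; a stdlib-only Python sketch looks like this (placeholders as above; note that `urllib` reads line by line, so delivery can lag slightly behind the server's flushes):

```python
import json
import urllib.request

BASE_URL = "http://localhost:8080"  # placeholder
TOKEN = "..."                       # placeholder admin JWT

req = urllib.request.Request(
    f"{BASE_URL}/api/v1/logs/stream?source=server&level=ERROR",
    headers={"Authorization": f"Bearer {TOKEN}", "Accept": "text/event-stream"},
)
with urllib.request.urlopen(req) as resp:
    for raw in resp:  # backfill first, then live entries as they arrive
        line = raw.decode("utf-8").strip()
        if line.startswith(": "):
            continue  # keepalive comment — safe to ignore
        if line.startswith("data: "):
            entry = json.loads(line[len("data: "):])
            print(entry["ts"], entry["level"], entry["message"])
```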