Merged
6 changes: 3 additions & 3 deletions nodescraper/interfaces/dataplugin.py
@@ -252,6 +252,9 @@ def analyze(
             )
             return self.analysis_result
 
+        if data:
+            self.data = data
+
         if self.data is None:
             self.analysis_result = TaskResult(
                 task=self.ANALYZER.__name__,
@@ -261,9 +264,6 @@
             )
             return self.analysis_result
 
-        if data:
-            self.data = data
-
         analyzer_task = self.ANALYZER(
             self.system_info,
             logger=self.logger,
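
In short, the fix stores caller-supplied data before the `self.data is None` check, so `analyze()` no longer reports that tasks were not run when data is passed in directly (for example via the CLI `--data` flag) with collection disabled. A minimal, self-contained sketch of the ordering (a stand-in class for illustration, not the real plugin interface):

# Stand-in illustrating why the assignment must precede the None check (sketch only).
class _SketchPlugin:
    def __init__(self):
        self.data = None

    def analyze(self, data=None):
        # Caller-supplied data is stored first ...
        if data:
            self.data = data
        # ... so the "no data" early return only fires when nothing was provided at all
        if self.data is None:
            return "NOT_RAN"
        return "OK"

assert _SketchPlugin().analyze(data={"value": "from_file"}) == "OK"  # fixed ordering
assert _SketchPlugin().analyze() == "NOT_RAN"  # the empty case is still guarded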
43 changes: 43 additions & 0 deletions test/functional/fixtures/dmesg_sample.log
@@ -0,0 +1,43 @@
kern :info : 2026-01-07T10:00:00,123456-06:00 Linux version 5.15.0-91-generic (buildd@amd64-builder) (gcc version 11.4.0) #101-Ubuntu SMP
kern :info : 2026-01-07T10:00:00,234567-06:00 Command line: BOOT_IMAGE=/boot/vmlinuz-5.15.0-91-generic root=UUID=a1b2c3d4 ro quiet splash vt.handoff=7
kern :info : 2026-01-07T10:00:01,345678-06:00 KERNEL supported cpus:
kern :info : 2026-01-07T10:00:01,456789-06:00 Intel GenuineIntel
kern :info : 2026-01-07T10:00:01,567890-06:00 AMD AuthenticAMD
kern :info : 2026-01-07T10:00:02,678901-06:00 x86/fpu: Supporting XSAVE feature 0x001: 'x87 floating point registers'
kern :info : 2026-01-07T10:00:02,789012-06:00 x86/fpu: Supporting XSAVE feature 0x002: 'SSE registers'
kern :info : 2026-01-07T10:00:03,890123-06:00 Memory: 32823616K/33554432K available
kern :warn : 2026-01-07T10:00:05,123456-06:00 pci 0000:00:01.0: BAR 0: failed to assign [mem size 0x01000000]
kern :info : 2026-01-07T10:00:06,234567-06:00 PCI: Using ACPI for IRQ routing
kern :info : 2026-01-07T10:00:07,345678-06:00 NetLabel: Initializing
kern :info : 2026-01-07T10:00:08,456789-06:00 DMA: preallocated 4096 KiB GFP_KERNEL pool for atomic allocations
kern :err : 2026-01-07T10:00:10,567890-06:00 WARNING: CPU: 0 PID: 1 at drivers/gpu/drm/amd/amdgpu/amdgpu_device.c:123 amdgpu_device_init+0x456/0x789
kern :info : 2026-01-07T10:00:11,678901-06:00 Modules linked in: amdgpu drm_ttm_helper ttm drm_kms_helper
kern :info : 2026-01-07T10:00:12,789012-06:00 CPU: 0 PID: 1 Comm: swapper/0 Not tainted 5.15.0-91-generic #101-Ubuntu
kern :err : 2026-01-07T10:00:15,890123-06:00 AMD-Vi: Event logged [IO_PAGE_FAULT device=00:14.0 domain=0x0000 address=0xfffffffffffffef0 flags=0x0010]
kern :info : 2026-01-07T10:00:16,123456-06:00 SCSI subsystem initialized
kern :info : 2026-01-07T10:00:17,234567-06:00 libata version 3.00 loaded
kern :info : 2026-01-07T10:00:18,345678-06:00 ACPI: Added _OSI(Module Device)
kern :info : 2026-01-07T10:00:19,456789-06:00 ACPI: Added _OSI(Processor Device)
kern :err : 2026-01-07T10:00:20,567890-06:00 ACPI Error: Method parse/execution failed \_SB.PCI0.GPP0.SWUS.SWDS.VGA.LCD._BCM, AE_NOT_FOUND
kern :info : 2026-01-07T10:00:22,678901-06:00 [drm] amdgpu kernel modesetting enabled
kern :info : 2026-01-07T10:00:23,789012-06:00 [drm] initializing kernel modesetting (NAVI21 0x1002:0x73BF)
kern :info : 2026-01-07T10:00:25,890123-06:00 amdgpu 0000:03:00.0: amdgpu: Fetched VBIOS from VFCT
kern :info : 2026-01-07T10:00:26,123456-06:00 amdgpu 0000:03:00.0: amdgpu: ATOM BIOS: 113-D4120100-O04
kern :info : 2026-01-07T10:00:28,234567-06:00 [drm] GPU posting now...
kern :warn : 2026-01-07T10:00:30,345678-06:00 [drm] *ERROR* Timeout waiting for DMCUB auto-load
kern :info : 2026-01-07T10:00:32,456789-06:00 [drm] Display Core initialized with v3.2.149!
kern :info : 2026-01-07T10:00:35,567890-06:00 [drm] VCN decode and encode initialized successfully
kern :info : 2026-01-07T10:00:38,678901-06:00 [drm] fb0: amdgpudrmfb frame buffer device
kern :info : 2026-01-07T10:00:40,789012-06:00 amdgpu 0000:03:00.0: amdgpu: ring gfx_0.0.0 uses VM inv eng 0 on hub 0
kern :info : 2026-01-07T10:00:42,890123-06:00 [drm] Initialized amdgpu 3.42.0 20150101 for 0000:03:00.0 on minor 0
kern :info : 2026-01-07T10:00:45,123456-06:00 EXT4-fs (nvme0n1p2): mounted filesystem with ordered data mode
kern :info : 2026-01-07T10:00:48,234567-06:00 systemd[1]: systemd 249.11-0ubuntu3.6 running in system mode
kern :info : 2026-01-07T10:00:50,345678-06:00 systemd[1]: Detected architecture x86-64
kern :info : 2026-01-07T10:00:55,456789-06:00 audit: type=1400 audit(1704636055.456:2): apparmor="STATUS" operation="profile_load"
kern :info : 2026-01-07T10:01:00,567890-06:00 Adding 33554428k swap on /swapfile
kern :info : 2026-01-07T10:01:05,678901-06:00 IPv6: ADDRCONF(NETDEV_CHANGE): enp5s0: link becomes ready
kern :info : 2026-01-07T10:01:10,789012-06:00 NFSD: Using UMH upcall client tracking operations
kern :info : 2026-01-07T10:01:15,890123-06:00 NFSD: starting 90-second grace period (net f0000098)
kern :info : 2026-01-07T10:01:20,123456-06:00 Bluetooth: BNEP (Ethernet Emulation) ver 1.3
kern :info : 2026-01-07T10:01:25,234567-06:00 Bluetooth: BNEP filters: protocol multicast
kern :info : 2026-01-07T10:01:30,345678-06:00 System operational - all services started successfully
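
As a quick sanity check on this fixture, here is a small stand-alone snippet (not part of the change set, and not what DmesgAnalyzer itself does) that tallies the severity field; the three err-level lines are what let the functional test below expect a non-zero exit code:

from collections import Counter
from pathlib import Path

levels = Counter()
for line in Path("test/functional/fixtures/dmesg_sample.log").read_text().splitlines():
    # Lines look like "kern :err : <timestamp> <message>"; the second field is the level
    _facility, level, _rest = line.split(":", 2)
    levels[level.strip()] += 1

print(dict(levels))  # expected: {'info': 38, 'warn': 2, 'err': 3}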
58 changes: 58 additions & 0 deletions test/functional/test_run_plugins.py
@@ -25,6 +25,9 @@
###############################################################################
"""Functional tests for running individual plugins."""

import csv
from pathlib import Path

import pytest

from nodescraper.pluginregistry import PluginRegistry
@@ -114,3 +117,58 @@ def test_run_comma_separated_plugins_with_invalid(run_cli_command):
    assert "Running plugin AmdSmiPlugin" in output
    # Verify it didn't crash
    assert "Data written to csv file" in output


def test_run_plugin_with_data_file_no_collection(run_cli_command, tmp_path):
    """Test running plugin with --data argument and --collection False."""
    fixtures_dir = Path(__file__).parent / "fixtures"
    dmesg_fixture = fixtures_dir / "dmesg_sample.log"

    assert dmesg_fixture.exists(), f"Fixture file not found: {dmesg_fixture}"

    analyze_log_path = str(tmp_path / "analyze_logs")
    result = run_cli_command(
        [
            "--log-path",
            analyze_log_path,
            "run-plugins",
            "DmesgPlugin",
            "--data",
            str(dmesg_fixture),
            "--collection",
            "False",
        ],
        check=False,
    )

    output = result.stdout + result.stderr
    assert (
        result.returncode == 1
    ), f"Expected return code 1 (errors found), got: {result.returncode}"
    assert "Running data analyzer: DmesgAnalyzer" in output, "Analyzer should have run"
    assert "Data written to csv file" in output, "CSV file should be created"

    if "Plugin tasks not ran" in output:
        pytest.fail(
            "Bug regression: Plugin reported 'tasks not ran' with --data file. "
            "Analysis should load data from --data parameter before checking if data is None."
        )

    analyze_path = Path(analyze_log_path)
    csv_files = list(analyze_path.glob("*/nodescraper.csv"))
    assert len(csv_files) > 0, "CSV results file should exist"

    csv_file = csv_files[0]
    with open(csv_file, "r", encoding="utf-8") as f:
        reader = csv.DictReader(f)
        rows = list(reader)

    dmesg_rows = [row for row in rows if "DmesgPlugin" in row.get("plugin", "")]
    assert len(dmesg_rows) > 0, "DmesgPlugin should have results in CSV"

    dmesg_row = dmesg_rows[0]
    status = dmesg_row.get("status", "")
    assert status != "NOT_RAN", (
        f"Bug regression: DmesgPlugin status is NOT_RAN with --data file. "
        f"Analysis should have run on provided data. Status: {status}"
    )
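
For reference, the command this test drives looks roughly like the following (the `nodescraper` entry-point name is an assumption here; the subcommand and flags are taken verbatim from the test):

nodescraper --log-path ./analyze_logs run-plugins DmesgPlugin --data test/functional/fixtures/dmesg_sample.log --collection False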
74 changes: 74 additions & 0 deletions test/unit/framework/test_dataplugin.py
@@ -329,3 +329,77 @@ def test_collect_preserve_connection(self, plugin_with_conn):

        # Verify disconnect WAS called when preserve_connection=False
        mock_disconnect.assert_called_once()

    def test_run_with_data_file_no_collection(self, plugin_with_conn, tmp_path):
        """Test running plugin with data file and collection=False."""
        data_file = tmp_path / "test_data.json"
        data_file.write_text('{"value": "from_file"}')

        with (
            patch.object(CoreDataPlugin, "collect") as mock_collect,
            patch.object(StandardAnalyzer, "analyze_data") as mock_analyze,
        ):
            mock_analyze.return_value = TaskResult(status=ExecutionStatus.OK)

            result = plugin_with_conn.run(collection=False, analysis=True, data=str(data_file))

            mock_collect.assert_not_called()
            mock_analyze.assert_called_once()

            call_args = mock_analyze.call_args
            analyzed_data = call_args[0][0]
            assert isinstance(analyzed_data, StandardDataModel)
            assert analyzed_data.value == "from_file"
            assert result.status == ExecutionStatus.OK
            assert plugin_with_conn.analysis_result.status == ExecutionStatus.OK

    def test_run_with_data_dict_no_collection(self, plugin_with_conn):
        """Test running plugin with data dict and collection=False."""
        data_dict = {"value": "from_dict"}

        with (
            patch.object(CoreDataPlugin, "collect") as mock_collect,
            patch.object(StandardAnalyzer, "analyze_data") as mock_analyze,
        ):
            mock_analyze.return_value = TaskResult(status=ExecutionStatus.OK)

            result = plugin_with_conn.run(collection=False, analysis=True, data=data_dict)

            mock_collect.assert_not_called()
            mock_analyze.assert_called_once()

            call_args = mock_analyze.call_args
            analyzed_data = call_args[0][0]
            assert isinstance(analyzed_data, StandardDataModel)
            assert analyzed_data.value == "from_dict"
            assert result.status == ExecutionStatus.OK

    def test_run_with_data_model_no_collection(self, plugin_with_conn):
        """Test running plugin with data model instance and collection=False."""
        data_model = StandardDataModel(value="from_model")

        with (
            patch.object(CoreDataPlugin, "collect") as mock_collect,
            patch.object(StandardAnalyzer, "analyze_data") as mock_analyze,
        ):
            mock_analyze.return_value = TaskResult(status=ExecutionStatus.OK)

            result = plugin_with_conn.run(collection=False, analysis=True, data=data_model)

            mock_collect.assert_not_called()
            mock_analyze.assert_called_once()

            call_args = mock_analyze.call_args
            analyzed_data = call_args[0][0]
            assert analyzed_data is data_model
            assert analyzed_data.value == "from_model"
            assert result.status == ExecutionStatus.OK

    def test_analyze_no_data_available(self, plugin_with_conn):
        """Test analyze returns NOT_RAN when no data is available."""
        plugin_with_conn._data = None

        result = plugin_with_conn.analyze()

        assert result.status == ExecutionStatus.NOT_RAN
        assert "No data available" in result.message