Skip to content

Commit 756b592

Browse files
authored
Add logging into file. Update results with compliance results fixture (#1214)
Update logging in tests with correct format. Fix some RC checking problems. Update fixture for results.csv preparation. Add template fixture for compliance results update.
1 parent 090032a commit 756b592

20 files changed

+475
-330
lines changed

tests/validation/conftest.py

Lines changed: 112 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,34 @@
11
# # SPDX-License-Identifier: BSD-3-Clause
22
# # Copyright 2024-2025 Intel Corporation
33
# # Media Communications Mesh
4+
import datetime
45
import logging
56
import os
7+
import shutil
68
import time
79
from typing import Dict
810

911
import pytest
1012
from common.nicctl import Nicctl
1113
from create_pcap_file.ramdisk import RamdiskPreparer
12-
from mtl_engine.stash import clear_result_media, remove_result_media
14+
from mfd_common_libs.custom_logger import add_logging_level
15+
from mfd_common_libs.log_levels import TEST_FAIL, TEST_INFO, TEST_PASS
16+
from mtl_engine.const import LOG_FOLDER, TESTCMD_LVL
17+
from mtl_engine.csv_report import (
18+
csv_add_test,
19+
csv_write_report,
20+
update_compliance_result,
21+
)
22+
from mtl_engine.stash import (
23+
clear_issue,
24+
clear_result_log,
25+
clear_result_media,
26+
clear_result_note,
27+
get_issue,
28+
get_result_note,
29+
remove_result_media,
30+
)
31+
from pytest_mfd_logging.amber_log_formatter import AmberLogFormatter
1332

1433
logger = logging.getLogger(__name__)
1534
phase_report_key = pytest.StashKey[Dict[str, pytest.CollectReport]]()
@@ -88,32 +107,14 @@ def dma_port_list(request):
88107

89108

90109
@pytest.fixture(scope="session")
def nic_port_list(hosts: dict, mtl_path) -> None:
    """
    Ensure VFs exist on each host and cache them on the host object.

    For every host, create VFs on the first network interface if none
    exist yet, then store the current VFIO device list on ``host.vfs``
    so later fixtures and tests can use it.

    :param hosts: mapping of host name to host object under test
    :param mtl_path: path to the MTL installation used by Nicctl
    """
    for host in hosts.values():
        nicctl = Nicctl(mtl_path, host)
        if int(host.network_interfaces[0].virtualization.get_current_vfs()) == 0:
            # The return value of create_vfs() was previously bound and then
            # immediately overwritten by vfio_list(); call it for its side
            # effect only.
            nicctl.create_vfs(host.network_interfaces[0].pci_address)
        vfs = nicctl.vfio_list()
        # Store VFs on the host object for later use
        host.vfs = vfs
117118

118119

119120
@pytest.fixture(scope="session")
@@ -164,3 +165,94 @@ def pytest_addoption(parser):
164165
parser.addoption("--nic", help="list of PCI IDs of network devices")
165166
parser.addoption("--dma", help="list of PCI IDs of DMA devices")
166167
parser.addoption("--time", help="seconds to run every test (default=15)")
168+
169+
170+
@pytest.fixture(scope="session", autouse=True)
def log_session():
    """
    Session-wide logging setup and report teardown.

    Registers the custom TESTCMD log level, creates a timestamped log
    directory under LOG_FOLDER and repoints the ``latest`` symlink at it.
    On teardown, copies pytest.log into the latest log directory and
    writes the CSV report.
    """
    add_logging_level("TESTCMD", TESTCMD_LVL)

    today = datetime.datetime.today()
    folder = today.strftime("%Y-%m-%dT%H:%M:%S")
    path = os.path.join(LOG_FOLDER, folder)
    path_symlink = os.path.join(LOG_FOLDER, "latest")
    try:
        os.remove(path_symlink)
    except FileNotFoundError:
        # First run: no previous "latest" symlink to remove.
        pass
    os.makedirs(path, exist_ok=True)
    os.symlink(folder, path_symlink)
    yield
    try:
        # pytest.log may be absent (e.g. log-file logging not configured);
        # the report must still be written in that case.
        shutil.copy("pytest.log", f"{LOG_FOLDER}/latest/pytest.log")
    except FileNotFoundError:
        logger.warning("pytest.log not found; skipping copy to log folder")
    csv_write_report(f"{LOG_FOLDER}/latest/report.csv")
188+
189+
@pytest.fixture(scope="session", autouse=True)
def compliance_report(request, log_session, test_config):
    """
    This function is used for compliance check and report.

    Runs after the whole session; when the ``compliance`` flag is set in
    the test config, records a compliance result for every collected test.
    """
    # TODO: Implement compliance check logic. When tcpdump pcap is enabled, at the end of the test session all pcaps
    # shall be send into EBU list.
    # Pcaps shall be stored in the ramdisk, and then moved to the compliance
    # folder or send into EBU list after each test finished and remove it from the ramdisk.
    # Compliance report generation logic goes here after yield. Or in another class / function but triggered here.
    # AFAIK names of pcaps contains test name so it can be matched with result of each test like in code below.
    yield
    if test_config.get("compliance", False):
        # Use the module-level logger for consistent formatting with the
        # rest of this file (was logging.info on the root logger).
        logger.info("Compliance mode enabled, updating compliance results")
        for item in request.session.items:
            test_case = item.nodeid
            # NOTE(review): result is hard-coded to "Fail" until real
            # compliance checking is implemented — confirm intent.
            update_compliance_result(test_case, "Fail")
207+
208+
@pytest.fixture(scope="function", autouse=True)
def log_case(request, caplog: pytest.LogCaptureFixture):
    """
    Per-test logging and CSV bookkeeping.

    Creates a per-test log file under LOG_FOLDER/latest and attaches a
    file handler for the duration of the test. After the test, records
    the outcome (Pass/Fail/Skip), the TESTCMD-level commands, the tracked
    issue and the result note into the CSV report.
    """
    case_id = request.node.nodeid
    case_folder = os.path.dirname(case_id)
    os.makedirs(os.path.join(LOG_FOLDER, "latest", case_folder), exist_ok=True)
    logfile = os.path.join(LOG_FOLDER, "latest", f"{case_id}.log")
    fh = logging.FileHandler(logfile)
    plugin_formatter = request.session.config.pluginmanager.get_plugin(
        "logging-plugin"
    ).formatter
    # Renamed from "format"/"logger" to avoid shadowing the builtin and
    # the module-level logger.
    amber_formatter = AmberLogFormatter(plugin_formatter)
    fh.setFormatter(amber_formatter)
    fh.setLevel(logging.DEBUG)
    root_logger = logging.getLogger()
    root_logger.addHandler(fh)
    clear_result_log()
    clear_issue()
    yield
    report = request.node.stash[phase_report_key]
    if report["setup"].failed:
        logging.log(level=TEST_FAIL, msg=f"Setup failed for {case_id}")
        # NOTE(review): 0o4755 sets the setuid bit on a plain log file —
        # confirm this is intentional (0o755 may have been meant).
        os.chmod(logfile, 0o4755)
        result = "Fail"
    elif report["setup"].skipped:
        # Tests skipped during setup (e.g. pytest.skip in a fixture) have
        # no "call" phase and previously fell through to the Fail branch.
        logging.log(level=TEST_INFO, msg=f"Test skipped for {case_id}")
        result = "Skip"
    elif ("call" not in report) or report["call"].failed:
        logging.log(level=TEST_FAIL, msg=f"Test failed for {case_id}")
        os.chmod(logfile, 0o4755)
        result = "Fail"
    elif report["call"].passed:
        logging.log(level=TEST_PASS, msg=f"Test passed for {case_id}")
        os.chmod(logfile, 0o755)
        result = "Pass"
    else:
        logging.log(level=TEST_INFO, msg=f"Test skipped for {case_id}")
        result = "Skip"

    # Detach AND close the handler; the original only removed it, leaking
    # one open file descriptor per test.
    root_logger.removeHandler(fh)
    fh.close()

    commands = []
    for record in caplog.get_records("call"):
        if record.levelno == TESTCMD_LVL:
            commands.append(record.message)

    csv_add_test(
        test_case=case_id,
        commands="\n".join(commands),
        result=result,
        issue=get_issue(),
        result_note=get_result_note(),
    )

    clear_result_note()

tests/validation/mtl_engine/GstreamerApp.py

Lines changed: 22 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -2,12 +2,15 @@
22
# Copyright(c) 2024-2025 Intel Corporation
33

44
import hashlib
5+
import logging
56
import os
67
import time
78

89
from mtl_engine.RxTxApp import prepare_tcpdump
910

10-
from .execute import log_fail, log_info, run
11+
from .execute import log_fail, run
12+
13+
logger = logging.getLogger(__name__)
1114

1215

1316
def create_connection_params(
@@ -322,8 +325,8 @@ def execute_test(
322325

323326
remote_host = host
324327

325-
log_info(f"TX Command: {' '.join(tx_command)}")
326-
log_info(f"RX Command: {' '.join(rx_command)}")
328+
logger.info(f"TX Command: {' '.join(tx_command)}")
329+
logger.info(f"RX Command: {' '.join(rx_command)}")
327330

328331
tx_process = None
329332
rx_process = None
@@ -332,7 +335,7 @@ def execute_test(
332335
try:
333336
if tx_first:
334337
# Start TX pipeline first
335-
log_info("Starting TX pipeline...")
338+
logger.info("Starting TX pipeline...")
336339
tx_process = run(
337340
" ".join(tx_command),
338341
cwd=build,
@@ -345,7 +348,7 @@ def execute_test(
345348
time.sleep(sleep_interval)
346349

347350
# Start RX pipeline
348-
log_info("Starting RX pipeline...")
351+
logger.info("Starting RX pipeline...")
349352
rx_process = run(
350353
" ".join(rx_command),
351354
cwd=build,
@@ -357,7 +360,7 @@ def execute_test(
357360
)
358361
else:
359362
# Start RX pipeline first
360-
log_info("Starting RX pipeline...")
363+
logger.info("Starting RX pipeline...")
361364
rx_process = run(
362365
" ".join(rx_command),
363366
cwd=build,
@@ -370,7 +373,7 @@ def execute_test(
370373
time.sleep(sleep_interval)
371374

372375
# Start TX pipeline
373-
log_info("Starting TX pipeline...")
376+
logger.info("Starting TX pipeline...")
374377
tx_process = run(
375378
" ".join(tx_command),
376379
cwd=build,
@@ -382,15 +385,15 @@ def execute_test(
382385
)
383386
# --- Start tcpdump after pipelines are running ---
384387
if tcpdump:
385-
log_info("Starting tcpdump capture...")
388+
logger.info("Starting tcpdump capture...")
386389
tcpdump.capture(capture_time=capture_cfg.get("capture_time", test_time))
387390

388391
# Let the test run for the specified duration
389-
log_info(f"Running test for {test_time} seconds...")
392+
logger.info(f"Running test for {test_time} seconds...")
390393
time.sleep(test_time)
391394

392395
# Terminate processes gracefully
393-
log_info("Terminating processes...")
396+
logger.info("Terminating processes...")
394397
if tx_process:
395398
try:
396399
tx_process.terminate()
@@ -410,17 +413,17 @@ def execute_test(
410413
if rx_process and hasattr(rx_process, "stdout_text"):
411414
output_rx = rx_process.stdout_text.splitlines()
412415
for line in output_rx:
413-
log_info(f"RX Output: {line}")
416+
logger.info(f"RX Output: {line}")
414417
except Exception:
415-
log_info("Could not retrieve RX output")
418+
logger.info("Could not retrieve RX output")
416419

417420
try:
418421
if tx_process and hasattr(tx_process, "stdout_text"):
419422
output_tx = tx_process.stdout_text.splitlines()
420423
for line in output_tx:
421-
log_info(f"TX Output: {line}")
424+
logger.info(f"TX Output: {line}")
422425
except Exception:
423-
log_info("Could not retrieve TX output")
426+
logger.info("Could not retrieve TX output")
424427

425428
except Exception as e:
426429
log_fail(f"Error during test execution: {e}")
@@ -444,7 +447,7 @@ def execute_test(
444447

445448
# Compare files for validation
446449
file_compare = compare_files(input_file, output_file)
447-
log_info(f"File comparison: {file_compare}")
450+
logger.info(f"File comparison: {file_compare}")
448451

449452
return file_compare
450453

@@ -473,17 +476,17 @@ def compare_files(input_file, output_file):
473476
if os.path.exists(input_file) and os.path.exists(output_file):
474477
input_file_size = os.path.getsize(input_file)
475478
output_file_size = os.path.getsize(output_file)
476-
log_info(f"Input file size: {input_file_size}")
477-
log_info(f"Output file size: {output_file_size}")
479+
logger.info(f"Input file size: {input_file_size}")
480+
logger.info(f"Output file size: {output_file_size}")
478481
if input_file_size != output_file_size:
479482
log_fail("File size is different")
480483
return False
481484

482485
with open(input_file, "rb") as i_file, open(output_file, "rb") as o_file:
483486
i_hash = hashlib.md5(i_file.read()).hexdigest()
484487
o_hash = hashlib.md5(o_file.read()).hexdigest()
485-
log_info(f"Input file hash: {i_hash}")
486-
log_info(f"Output file hash: {o_hash}")
488+
logger.info(f"Input file hash: {i_hash}")
489+
logger.info(f"Output file hash: {o_hash}")
487490
if i_hash == o_hash:
488491
return True
489492

0 commit comments

Comments
 (0)