Skip to content

Commit 7662a11

Browse files
Include statistics metrics
1 parent 8ba351a commit 7662a11

File tree

3 files changed

+54
-0
lines changed

3 files changed

+54
-0
lines changed

nebula/frontend/app.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -757,6 +757,9 @@ def stop_scenario(scenario_name):
757757
ScenarioManagement.stop_participants()
758758
ScenarioManagement.stop_blockchain()
759759
scenario_set_status_to_finished(scenario_name)
760+
# Generate statistics for the scenario
761+
path = Utils.check_path(settings.log_dir, scenario_name)
762+
ScenarioManagement.generate_statistics(path)
760763

761764

762765
def stop_all_scenarios():

nebula/scenarios.py

Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
from datetime import datetime
1313

1414
import docker
15+
import tensorboard_reducer as tbr
1516

1617
from nebula.addons.blockchain.blockchain_deployer import BlockchainDeployer
1718
from nebula.addons.topologymanager import TopologyManager
@@ -1034,3 +1035,51 @@ def scenario_finished(self, timeout_seconds):
10341035
return False
10351036

10361037
time.sleep(5)
1038+
1039+
@classmethod
def generate_statistics(cls, path):
    """Reduce a finished scenario's TensorBoard runs into aggregate statistics.

    Loads every event directory under ``<path>/metrics/*`` with
    tensorboard-reducer, reduces the recorded scalars with
    mean/min/max/median/std/var, and writes the results back both as
    reduced TensorBoard event files and as a single CSV summary.

    Args:
        path: Scenario log directory expected to contain a ``metrics``
            subfolder with one TensorBoard event directory per run.

    Returns:
        bool: ``True`` when the reduction completed, ``False`` on any
        failure (errors are logged, never raised to the caller).
    """
    try:
        logging.info(f"Generating statistics for scenario {path}")

        # One TensorBoard event directory per participant run.
        input_event_dirs = sorted(glob.glob(os.path.join(path, "metrics/*")))
        if not input_event_dirs:
            # Nothing to reduce — bail out with a clear message instead of
            # letting the IndexError below surface as a generic exception.
            logging.warning(f"No TensorBoard event directories found under {path}/metrics")
            return False

        # Where to write reduced TB events and the CSV summary.
        tb_events_output_dir = os.path.join(path, "metrics", "reduced-data")
        csv_out_path = os.path.join(path, "metrics", "reduced-data-as.csv")
        # Abort rather than overwrite when the outputs already exist.
        overwrite = False
        reduce_ops = ("mean", "min", "max", "median", "std", "var")

        # keep-first: if a run logged the same step twice, keep the first value.
        handle_dup_steps = "keep-first"
        # Do not require every run to share identical step counts.
        strict_steps = False

        events_dict = tbr.load_tb_events(
            input_event_dirs, handle_dup_steps=handle_dup_steps, strict_steps=strict_steps
        )

        # Number of recorded tags, e.g. 3 if loss, MAE and R^2 were recorded.
        n_scalars = len(events_dict)
        n_steps, n_events = list(events_dict.values())[0].shape

        logging.info(f"Loaded {n_events} TensorBoard runs with {n_scalars} scalars and {n_steps} steps each")
        logging.info(f"Events dict keys: {events_dict.keys()}")

        reduced_events = tbr.reduce_events(events_dict, reduce_ops)

        # write_tb_events emits one output dir per op; log them up front.
        for op in reduce_ops:
            logging.info(f"Writing '{op}' reduction to '{tb_events_output_dir}-{op}'")

        tbr.write_tb_events(reduced_events, tb_events_output_dir, overwrite)

        logging.info(f"Writing results to '{csv_out_path}'")

        tbr.write_data_file(reduced_events, csv_out_path, overwrite)

        logging.info("Reduction complete")
        # Explicit success value so callers can distinguish outcomes
        # (previously success fell through returning None, which is falsy).
        return True
    except Exception as e:
        logging.exception(f"Error generating statistics: {e}")
        return False

pyproject.toml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -124,6 +124,8 @@ frontend = [
124124
"setuptools==74.1.2",
125125
"tensorboard==2.17.1",
126126
"tensorboardx==2.6.2.2",
127+
"tensorboard-reducer==0.3.1",
128+
"torch==2.4.1",
127129
"uvicorn==0.30.6",
128130
"web3==6.20.0",
129131
"wheel==0.44.0",

0 commit comments

Comments
 (0)