Commit 5fb3dc1

remove duplicates and comments
1 parent 26b2f7d commit 5fb3dc1

3 files changed, 0 insertions(+), 36 deletions(-)

nebula/config/config.py

Lines changed: 0 additions & 21 deletions

@@ -77,27 +77,6 @@ def shutdown_logging(self):
 
         logging.shutdown()
 
-    def shutdown_logging(self):
-        """
-        Properly shuts down all loggers and their handlers in the system.
-        This ensures all buffered logs are written to their respective files.
-        """
-        for handler in logging.getLogger().handlers:
-            handler.flush()
-            handler.close()
-
-        training_logger = logging.getLogger(TRAINING_LOGGER)
-        for handler in training_logger.handlers:
-            handler.flush()
-            handler.close()
-
-        pl_logger = logging.getLogger("lightning.pytorch")
-        for handler in pl_logger.handlers:
-            handler.flush()
-            handler.close()
-
-        logging.shutdown()
-
     def __default_config(self):
         self.participant["device_args"]["name"] = (
             f"participant_{self.participant['device_args']['idx']}_{self.participant['network_args']['ip']}_{self.participant['network_args']['port']}"

nebula/core/engine.py

Lines changed: 0 additions & 1 deletion

@@ -698,7 +698,6 @@ async def _start_learning(self):
         mpe = ModelPropagationEvent(await self.cm.get_addrs_current_connections(only_direct=True, myself=False), "initialization")
         await EventManager.get_instance().publish_node_event(mpe)
 
-        #await self.cm.propagator.propagate("initialization")
         await self.get_federation_ready_lock().release_async()
 
         self.trainer.set_epochs(epochs)
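The single deleted line was a commented-out remnant of the older direct call to the propagator; the surrounding context shows its replacement, in which a ModelPropagationEvent is published through the EventManager. A rough, self-contained sketch of that publish/subscribe shape follows; the class and method names mirror the diff, but the implementation here is an assumption, not NEBULA's actual code:

import asyncio

class EventManager:
    """Minimal singleton event bus, sketched after the calls visible in the diff."""
    _instance = None

    def __init__(self):
        self._subscribers = []

    @classmethod
    def get_instance(cls):
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance

    def subscribe(self, callback):
        self._subscribers.append(callback)

    async def publish_node_event(self, event):
        for callback in self._subscribers:
            await callback(event)

class ModelPropagationEvent:
    def __init__(self, addrs, strategy):
        self.addrs = addrs        # destination addresses for propagation
        self.strategy = strategy  # e.g. "initialization"

async def demo():
    async def on_propagation(event):
        print(f"propagating ({event.strategy}) to {event.addrs}")

    EventManager.get_instance().subscribe(on_propagation)
    await EventManager.get_instance().publish_node_event(
        ModelPropagationEvent(["192.168.0.2:45000"], "initialization")
    )

asyncio.run(demo())

Decoupling the trigger (the publish call) from the action (the subscriber that actually propagates the model) is what makes the commented-out direct call safe to drop.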

nebula/core/node.py

Lines changed: 0 additions & 14 deletions

@@ -176,20 +176,6 @@ async def main(config: Config):
     else:
         raise ValueError(f"Trainer {trainer_str} not supported")
 
-    # if config.participant["device_args"]["malicious"]:
-    #     node_cls = MaliciousNode
-    # else:
-    #     if config.participant["device_args"]["role"] == Role.AGGREGATOR.value:
-    #         node_cls = AggregatorNode
-    #     elif config.participant["device_args"]["role"] == Role.TRAINER.value:
-    #         node_cls = TrainerNode
-    #     elif config.participant["device_args"]["role"] == Role.SERVER.value:
-    #         node_cls = ServerNode
-    #     elif config.participant["device_args"]["role"] == Role.IDLE.value:
-    #         node_cls = IdleNode
-    #     else:
-    #         raise ValueError(f"Role {config.participant['device_args']['role']} not supported")
-
     VARIABILITY = 0.5
 
     def randomize_value(value, variability):
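The deleted lines were a commented-out copy of the role-to-node-class selection performed elsewhere in main(). For illustration, a compact dictionary-dispatch sketch of that logic; the class names and Role members come from the removed comments, but the stub classes and enum string values below are placeholders:

from enum import Enum

class Role(Enum):
    # String values are assumed for this sketch; the real enum lives in the nebula codebase.
    AGGREGATOR = "aggregator"
    TRAINER = "trainer"
    SERVER = "server"
    IDLE = "idle"

# Stand-in stubs; the real node classes are defined in nebula.
class MaliciousNode: ...
class AggregatorNode: ...
class TrainerNode: ...
class ServerNode: ...
class IdleNode: ...

ROLE_TO_NODE = {
    Role.AGGREGATOR.value: AggregatorNode,
    Role.TRAINER.value: TrainerNode,
    Role.SERVER.value: ServerNode,
    Role.IDLE.value: IdleNode,
}

def select_node_cls(device_args: dict):
    """Pick the node class from the participant's device_args."""
    if device_args.get("malicious"):
        return MaliciousNode
    role = device_args["role"]
    try:
        return ROLE_TO_NODE[role]
    except KeyError:
        raise ValueError(f"Role {role} not supported") from None

# Example: a benign trainer participant
print(select_node_cls({"malicious": False, "role": Role.TRAINER.value}))

A mapping keeps the dispatch table in one place and turns an unknown role into the same ValueError the original if/elif chain raised.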
