From 8be9d95628e4c1099eb7a54a78c21cdfd0ac4676 Mon Sep 17 00:00:00 2001
From: Rishi Sharma <rishi.sharma@epfl.ch>
Date: Tue, 23 Nov 2021 15:46:36 +0100
Subject: [PATCH] DEBUG: Divide by zero

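Temporary debugging changes for the divide-by-zero investigation:

* config.ini: drop epochs_per_round from 4 to 1 to iterate faster.
* datasets/Femnist.py: promote the test-set prediction and evaluation
  messages from DEBUG to INFO so they show up in the default logs.
* node/Node.py: reformat the per-round optimizer reset for readability.
* training/Training.py: drop a trailing-whitespace line in
  reset_optimizer.
* testing.py: spawn nodes at log level DEBUG instead of INFO.
* sharing/PartialModel.py: add an empty placeholder module.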
---
 config.ini                                 | 2 +-
 src/decentralizepy/datasets/Femnist.py     | 4 ++--
 src/decentralizepy/node/Node.py            | 4 +++-
 src/decentralizepy/sharing/PartialModel.py | 0
 src/decentralizepy/training/Training.py    | 1 -
 testing.py                                 | 2 +-
 6 files changed, 7 insertions(+), 6 deletions(-)
 create mode 100644 src/decentralizepy/sharing/PartialModel.py
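
Note (below the "---" cut, not part of the commit message): the prime
suspect for the divide-by-zero is the accuracy computation in
Femnist.py, the only division visible in this patch's hunks: it divides
by total_predicted, which is zero when no test samples are seen. A
minimal guard, sketched here as a hypothetical helper safe_accuracy
rather than the project's actual fix:

    import logging

    def safe_accuracy(total_correct, total_predicted):
        # An empty test set would otherwise raise ZeroDivisionError.
        if total_predicted == 0:
            logging.warning("No test samples; accuracy undefined")
            return 0.0
        return 100 * float(total_correct) / total_predicted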

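Note: the Node.py hunk below rebuilds the optimizer after every sharing
step, presumably so that stale per-parameter state (e.g. momentum
buffers for SGD with momentum) does not carry over to the freshly
merged model. A self-contained sketch of that pattern, assuming plain
torch.optim.SGD in place of whatever optimizer the project configures:

    import torch

    model = torch.nn.Linear(10, 2)
    optimizer_class = torch.optim.SGD
    optimizer_params = {"lr": 0.01, "momentum": 0.9}

    # Constructing a fresh optimizer discards all internal state,
    # which is what the per-round reset relies on.
    optimizer = optimizer_class(model.parameters(), **optimizer_params)
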
diff --git a/config.ini b/config.ini
index c97844e..492a4c4 100644
--- a/config.ini
+++ b/config.ini
@@ -16,7 +16,7 @@ lr = 0.01
 [TRAIN_PARAMS]
 training_package = decentralizepy.training.Training
 training_class = Training
-epochs_per_round = 4
+epochs_per_round = 1
 batch_size = 1024
 shuffle = True
 loss_package = torch.nn
diff --git a/src/decentralizepy/datasets/Femnist.py b/src/decentralizepy/datasets/Femnist.py
index 8cc09f9..ccafb16 100644
--- a/src/decentralizepy/datasets/Femnist.py
+++ b/src/decentralizepy/datasets/Femnist.py
@@ -272,7 +272,7 @@ class Femnist(Dataset):
                     total_pred[label] += 1
                     total_predicted += 1
 
-        logging.debug("Predicted on the test set")
+        logging.info("Predicted on the test set")
 
         for key, value in enumerate(correct_pred):
             if total_pred[key] != 0:
@@ -283,7 +283,7 @@ class Femnist(Dataset):
 
         accuracy = 100 * float(total_correct) / total_predicted
         logging.info("Overall accuracy is: {:.1f} %".format(accuracy))
-        logging.debug("Evaluating complete.")
+        logging.info("Evaluating complete.")
 
 
 class LogisticRegression(nn.Module):
diff --git a/src/decentralizepy/node/Node.py b/src/decentralizepy/node/Node.py
index 9643496..5e15bba 100644
--- a/src/decentralizepy/node/Node.py
+++ b/src/decentralizepy/node/Node.py
@@ -159,7 +159,9 @@ class Node:
             self.trainer.train(self.dataset)
 
             self.sharing.step()
-            self.optimizer = optimizer_class(self.model.parameters(), **optimizer_params) # Reset optimizer state
+            self.optimizer = optimizer_class(
+                self.model.parameters(), **optimizer_params
+            )  # Reset optimizer state
             self.trainer.reset_optimizer(self.optimizer)
 
             rounds_to_test -= 1
diff --git a/src/decentralizepy/sharing/PartialModel.py b/src/decentralizepy/sharing/PartialModel.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/decentralizepy/training/Training.py b/src/decentralizepy/training/Training.py
index 31daa65..4f4095c 100644
--- a/src/decentralizepy/training/Training.py
+++ b/src/decentralizepy/training/Training.py
@@ -45,7 +45,6 @@ class Training:
 
     def reset_optimizer(self, optimizer):
         self.optimizer = optimizer
-        
 
     def train(self, dataset):
         """
diff --git a/testing.py b/testing.py
index 0d27c9b..5a50a3b 100644
--- a/testing.py
+++ b/testing.py
@@ -42,5 +42,5 @@ if __name__ == "__main__":
     mp.spawn(
         fn=Node,
         nprocs=procs_per_machine,
-        args=[m_id, l, g, my_config, 20, "results", logging.INFO],
+        args=[m_id, l, g, my_config, 20, "results", logging.DEBUG],
     )
-- 
GitLab