From 40fbe6f107e3671a9fa69a781ee546c304cad9f0 Mon Sep 17 00:00:00 2001
From: Rishi Sharma <rishi.sharma@epfl.ch>
Date: Fri, 1 Apr 2022 20:49:40 +0200
Subject: [PATCH] Reformat

---
 eval/plot.py                               |  4 ++--
 eval/plot_percentile.py                    | 23 +++++++++++-----------
 src/decentralizepy/datasets/Shakespeare.py |  1 -
 src/decentralizepy/node/Node.py            |  5 ++++-
 src/decentralizepy/sharing/PartialModel.py |  4 +++-
 5 files changed, 21 insertions(+), 16 deletions(-)

diff --git a/eval/plot.py b/eval/plot.py
index a7c8dd9..552934f 100644
--- a/eval/plot.py
+++ b/eval/plot.py
@@ -4,8 +4,8 @@ import sys
 
 import numpy as np
 import pandas as pd
-from matplotlib import pyplot as plt
 import torch
+from matplotlib import pyplot as plt
 
 
 def get_stats(l):
@@ -238,4 +238,4 @@ def plot_parameters(path):
 if __name__ == "__main__":
     assert len(sys.argv) == 2
     plot_results(sys.argv[1])
-    # plot_parameters(sys.argv[1])
\ No newline at end of file
+    # plot_parameters(sys.argv[1])
diff --git a/eval/plot_percentile.py b/eval/plot_percentile.py
index e8f0da2..11aec4b 100644
--- a/eval/plot_percentile.py
+++ b/eval/plot_percentile.py
@@ -4,8 +4,8 @@ import sys
 
 import numpy as np
 import pandas as pd
-from matplotlib import pyplot as plt
 import torch
+from matplotlib import pyplot as plt
 
 
 def get_stats(l):
@@ -70,7 +70,7 @@ def plot_results(path):
         for sp in shared_params:
             filepath = os.path.join(mf_path, sp)
             with open(filepath, "r") as spf:
-                all_shared_params.append(np.array(json.load(spf), dtype = np.int32))
+                all_shared_params.append(np.array(json.load(spf), dtype=np.int32))
 
         # Plot Training loss
         plt.figure(1)
@@ -78,19 +78,19 @@
        mean = np.mean(all_shared_params, axis=0)
        std = np.std(all_shared_params, axis=0)
        with open(
-            os.path.join(path, "shared_params_avg_"+folder+".json"), "w"
+            os.path.join(path, "shared_params_avg_" + folder + ".json"), "w"
        ) as mf:
            json.dump(mean.tolist(), mf)
 
        with open(
-            os.path.join(path, "shared_params_std_"+folder+".json"), "w"
+            os.path.join(path, "shared_params_std_" + folder + ".json"), "w"
        ) as sf:
            json.dump(std.tolist(), sf)
 
        # copy jupyter notebook code
        percentile = np.percentile(mean, np.arange(0, 100, 1))
        plt.plot(np.arange(0, 100, 1), percentile, label=folder)
-    plt.title('Shared parameters Percentiles')
+    plt.title("Shared parameters Percentiles")
     # plt.ylabel("Absolute frequency value")
     plt.xlabel("Percentiles")
     plt.xticks(np.arange(0, 110, 10))
@@ -101,14 +101,15 @@
        print(sort)
        length = sort.shape[0]
        length = int(length / 20)
-        bins = [torch.sum(sort[length * i: length * (i + 1)]).item() for i in range(20)]
+        bins = [
+            torch.sum(sort[length * i : length * (i + 1)]).item() for i in range(20)
+        ]
        total = np.sum(bins)
-        perc = bins / total #np.divide(bins, total)
+        perc = bins / total  # np.divide(bins, total)
        print(perc)
-        plt.bar(np.arange(0, 97.5, 5), perc, width=5, align='edge',
-                label=folder)
+        plt.bar(np.arange(0, 97.5, 5), perc, width=5, align="edge", label=folder)
 
-    plt.title('Shared parameters Percentiles')
+    plt.title("Shared parameters Percentiles")
     # plt.ylabel("Absolute frequency value")
     plt.xlabel("Percentiles")
     plt.legend(loc="lower right")
@@ -122,4 +123,4 @@
 
 if __name__ == "__main__":
     assert len(sys.argv) == 2
-    plot_results(sys.argv[1])
\ No newline at end of file
+    plot_results(sys.argv[1])
diff --git a/src/decentralizepy/datasets/Shakespeare.py b/src/decentralizepy/datasets/Shakespeare.py
index a84b1a0..cc6f357 100644
--- a/src/decentralizepy/datasets/Shakespeare.py
+++ b/src/decentralizepy/datasets/Shakespeare.py
@@ -8,7 +8,6 @@ import numpy as np
 import torch
 import torch.nn.functional as F
 from torch import nn
-from torch.autograd import Variable
 from torch.utils.data import DataLoader
 
 from decentralizepy.datasets.Data import Data
diff --git a/src/decentralizepy/node/Node.py b/src/decentralizepy/node/Node.py
index 57fa8d5..1aa0f04 100644
--- a/src/decentralizepy/node/Node.py
+++ b/src/decentralizepy/node/Node.py
@@ -422,7 +422,10 @@ class Node:
         if self.model.shared_parameters_counter is not None:
             logging.info("Saving the shared parameter counts")
             with open(
-                os.path.join(self.log_dir, "{}_shared_parameters.json".format(self.rank)), "w"
+                os.path.join(
+                    self.log_dir, "{}_shared_parameters.json".format(self.rank)
+                ),
+                "w",
             ) as of:
                 json.dump(self.model.shared_parameters_counter.numpy().tolist(), of)
         self.communication.disconnect_neighbors()
diff --git a/src/decentralizepy/sharing/PartialModel.py b/src/decentralizepy/sharing/PartialModel.py
index e898bf9..7d8e7fc 100644
--- a/src/decentralizepy/sharing/PartialModel.py
+++ b/src/decentralizepy/sharing/PartialModel.py
@@ -126,7 +126,9 @@ class PartialModel(Sharing):
         )
 
         Path(self.folder_path).mkdir(parents=True, exist_ok=True)
-        self.model.shared_parameters_counter = torch.zeros(self.change_transformer(self.init_model).shape[0], dtype = torch.int32)
+        self.model.shared_parameters_counter = torch.zeros(
+            self.change_transformer(self.init_model).shape[0], dtype=torch.int32
+        )
 
     def extract_top_gradients(self):
         """
-- 
GitLab